-rw-r--r--Android.mk1
-rw-r--r--Changes.md347
-rw-r--r--CleanSpec.mk151
-rw-r--r--OWNERS14
-rw-r--r--Usage.txt17
-rw-r--r--common/core.mk56
-rw-r--r--common/json.mk35
-rw-r--r--common/math.mk270
-rw-r--r--common/strings.mk117
-rw-r--r--core/Makefile3070
-rw-r--r--core/OWNERS3
-rw-r--r--core/aapt2.mk6
-rw-r--r--core/android_manifest.mk120
-rw-r--r--core/android_vts_host_config.mk (renamed from target/board/treble_common_64.mk)15
-rw-r--r--core/apicheck_msg_current.txt17
-rw-r--r--core/apicheck_msg_last.txt7
-rw-r--r--core/apidiff.mk181
-rw-r--r--core/app_certificate_validate.mk10
-rw-r--r--core/app_prebuilt_internal.mk279
-rw-r--r--core/autogen_test_config.mk35
-rw-r--r--core/aux_config.mk4
-rw-r--r--core/aux_executable.mk1
-rw-r--r--core/aux_toolchain.mk1
-rw-r--r--core/base_rules.mk382
-rw-r--r--core/binary.mk753
-rw-r--r--core/board_config.mk526
-rw-r--r--core/build-system.html19
-rw-r--r--core/build_id.mk2
-rw-r--r--core/build_rro_package.mk18
-rw-r--r--core/ccache.mk41
-rw-r--r--core/check_elf_file.mk49
-rw-r--r--core/clang/HOST_CROSS_x86.mk1
-rw-r--r--core/clang/HOST_CROSS_x86_64.mk1
-rw-r--r--core/clang/HOST_x86.mk1
-rw-r--r--core/clang/HOST_x86_64.mk1
-rw-r--r--core/clang/TARGET_arm.mk2
-rw-r--r--core/clang/TARGET_arm64.mk2
-rw-r--r--core/clang/TARGET_mips.mk1
-rw-r--r--core/clang/TARGET_mips64.mk1
-rw-r--r--core/clang/TARGET_x86.mk2
-rw-r--r--core/clang/TARGET_x86_64.mk5
-rw-r--r--core/clang/config.mk42
-rw-r--r--core/clear_vars.mk86
-rw-r--r--core/combo/HOST_darwin-x86.mk69
-rw-r--r--core/combo/HOST_darwin-x86_64.mk45
-rw-r--r--core/combo/HOST_linux-x86.mk2
-rw-r--r--core/combo/TARGET_linux-arm.mk37
-rw-r--r--core/combo/arch/arm/armv7-a.mk5
-rw-r--r--core/combo/arch/arm/armv8-2a.mk9
-rw-r--r--core/combo/arch/x86/amberlake.mk13
-rw-r--r--core/combo/arch/x86/broadwell.mk13
-rw-r--r--core/combo/arch/x86/icelake.mk14
-rw-r--r--core/combo/arch/x86/kabylake.mk13
-rw-r--r--core/combo/arch/x86/skylake.mk15
-rw-r--r--core/combo/arch/x86/stoneyridge.mk12
-rw-r--r--core/combo/arch/x86/tigerlake.mk14
-rw-r--r--core/combo/arch/x86/whiskeylake.mk13
-rw-r--r--core/combo/arch/x86/x86.mk3
-rw-r--r--core/combo/arch/x86_64/amberlake.mk13
-rw-r--r--core/combo/arch/x86_64/broadwell.mk13
-rw-r--r--core/combo/arch/x86_64/icelake.mk14
-rw-r--r--core/combo/arch/x86_64/kabylake.mk13
-rw-r--r--core/combo/arch/x86_64/skylake.mk14
-rw-r--r--core/combo/arch/x86_64/stoneyridge.mk12
-rw-r--r--core/combo/arch/x86_64/tigerlake.mk14
-rw-r--r--core/combo/arch/x86_64/whiskeylake.mk13
-rwxr-xr-xcore/combo/arch/x86_64/x86_64.mk3
-rw-r--r--core/config.mk603
-rw-r--r--core/config_sanitizers.mk138
-rw-r--r--core/configure_module_stem.mk6
-rwxr-xr-xcore/construct_context.sh65
-rw-r--r--core/cxx_stl_setup.mk23
-rw-r--r--core/definitions.mk818
-rw-r--r--core/dex_preopt.mk123
-rw-r--r--core/dex_preopt_config.mk188
-rw-r--r--core/dex_preopt_libart.mk220
-rw-r--r--core/dex_preopt_libart_boot.mk142
-rw-r--r--core/dex_preopt_odex_install.mk581
-rw-r--r--core/distdir.mk67
-rw-r--r--core/dpi_specific_apk.mk24
-rw-r--r--core/droiddoc.mk367
-rw-r--r--core/dynamic_binary.mk113
-rw-r--r--core/envsetup.mk525
-rw-r--r--core/executable.mk3
-rw-r--r--core/executable_internal.mk29
-rw-r--r--core/force_aapt2.mk63
-rw-r--r--core/fuzz_test.mk6
-rw-r--r--core/generate_enforce_rro.mk44
-rw-r--r--core/goma.mk26
-rw-r--r--core/header_library.mk36
-rw-r--r--core/host_dalvik_java_library.mk54
-rw-r--r--core/host_executable.mk57
-rw-r--r--core/host_executable_internal.mk2
-rw-r--r--core/host_java_library.mk1
-rw-r--r--core/host_java_library_common.mk13
-rw-r--r--core/host_shared_library.mk40
-rw-r--r--core/host_shared_library_internal.mk7
-rw-r--r--core/host_static_library.mk32
-rw-r--r--core/host_test_internal.mk1
-rw-r--r--core/install_jni_libs.mk16
-rw-r--r--core/install_jni_libs_internal.mk19
-rw-r--r--core/instrumentation_test_config_template.xml2
-rw-r--r--core/jacoco.mk2
-rw-r--r--core/java.mk148
-rw-r--r--core/java_common.mk123
-rw-r--r--core/java_host_test_config_template.xml26
-rw-r--r--core/java_library.mk38
-rw-r--r--core/java_renderscript.mk15
-rw-r--r--core/java_test_config_template.xml33
-rw-r--r--core/jetifier.mk2
-rw-r--r--core/local_systemsdk.mk5
-rw-r--r--core/local_vndk.mk2
-rw-r--r--core/main.mk939
-rw-r--r--core/math.mk152
-rw-r--r--core/module_arch_supported.mk6
-rw-r--r--core/multi_prebuilt.mk4
-rw-r--r--core/native_benchmark.mk3
-rw-r--r--core/native_benchmark_test_config_template.xml31
-rw-r--r--core/native_host_test_config_template.xml26
-rw-r--r--core/native_test_config_template.xml5
-rw-r--r--core/ninja_config.mk4
-rw-r--r--core/notice_files.mk24
-rw-r--r--core/pack_dyn_relocs_setup.mk34
-rw-r--r--core/package.mk4
-rw-r--r--core/package_internal.mk348
-rw-r--r--core/pdk_config.mk18
-rw-r--r--core/pdk_fusion_modules.mk2
-rw-r--r--core/prebuilt.mk36
-rw-r--r--core/prebuilt_internal.mk420
-rw-r--r--core/product-graph.mk26
-rw-r--r--core/product.mk515
-rw-r--r--core/product_config.mk297
-rw-r--r--core/proguard.flags31
-rw-r--r--core/proguard_basic_keeps.flags4
-rw-r--r--core/python_binary_host_test_config_template.xml21
-rw-r--r--core/rbe.mk81
-rw-r--r--core/setup_one_odex.mk144
-rw-r--r--core/shared_library.mk1
-rw-r--r--core/shared_library_internal.mk28
-rw-r--r--core/soong_android_app_set.mk37
-rw-r--r--core/soong_app_prebuilt.mk130
-rw-r--r--core/soong_cc_prebuilt.mk234
-rw-r--r--core/soong_config.mk105
-rw-r--r--core/soong_droiddoc_prebuilt.mk44
-rw-r--r--core/soong_java_prebuilt.mk130
-rw-r--r--core/static_java_library.mk50
-rw-r--r--core/static_library.mk1
-rw-r--r--core/static_library_internal.mk8
-rw-r--r--core/target_test_internal.mk7
-rw-r--r--core/tasks/apicheck.mk165
-rw-r--r--core/tasks/apidiff.mk2
-rwxr-xr-xcore/tasks/check_boot_jars/check_boot_jars.py12
-rw-r--r--core/tasks/check_boot_jars/package_whitelist.txt1
-rw-r--r--core/tasks/check_emu_boot.mk23
-rw-r--r--core/tasks/collect_gpl_sources.mk16
-rw-r--r--core/tasks/cts.mk1
-rw-r--r--core/tasks/deps_licenses.mk2
-rw-r--r--core/tasks/device-tests.mk8
-rw-r--r--core/tasks/find-shareduid-violation.mk32
-rwxr-xr-xcore/tasks/find-shareduid-violation.py78
-rw-r--r--core/tasks/general-tests.mk68
-rw-r--r--core/tasks/module-info.mk7
-rw-r--r--core/tasks/mts.mk (renamed from target/Android.mk)18
-rw-r--r--core/tasks/oem_image.mk2
-rw-r--r--core/tasks/owners.mk33
-rw-r--r--core/tasks/sdk-addon.mk28
-rw-r--r--core/tasks/sts.mk2
-rw-r--r--core/tasks/test_mapping.mk14
-rw-r--r--core/tasks/tools/build_custom_image.mk2
-rw-r--r--core/tasks/tools/compatibility.mk2
-rw-r--r--core/tasks/tools/package-modules.mk23
-rw-r--r--core/tasks/vendor_module_check.mk10
-rw-r--r--core/tasks/vndk.mk152
-rw-r--r--core/use_lld_setup.mk20
-rwxr-xr-xcore/verify_uses_libraries.sh4
-rw-r--r--core/version_defaults.mk69
-rw-r--r--envsetup.sh466
-rwxr-xr-xhelp.sh4
-rw-r--r--packaging/distdir.mk46
-rw-r--r--packaging/main.mk (renamed from tools/Android.mk)26
-rw-r--r--rbesetup.sh75
-rw-r--r--target/OWNERS1
-rw-r--r--target/board/Android.mk56
-rw-r--r--target/board/BoardConfigEmuCommon.mk57
-rw-r--r--target/board/BoardConfigGsiCommon.mk61
-rw-r--r--target/board/BoardConfigMainlineCommon.mk46
-rw-r--r--target/board/generic/BoardConfig.mk103
-rw-r--r--target/board/generic/device.mk8
-rw-r--r--target/board/generic_arm64/BoardConfig.mk78
-rw-r--r--target/board/generic_arm64/device.mk17
-rw-r--r--target/board/generic_arm64_ab/BoardConfig.mk15
-rw-r--r--target/board/generic_arm64_ab/sepolicy/OWNERS8
-rw-r--r--target/board/generic_arm_ab/BoardConfig.mk19
-rw-r--r--target/board/generic_mips/BoardConfig.mk76
-rw-r--r--target/board/generic_mips/README.txt9
-rw-r--r--target/board/generic_mips/device.mk36
-rw-r--r--target/board/generic_mips64/BoardConfig.mk93
-rw-r--r--target/board/generic_mips64/README.txt8
-rw-r--r--target/board/generic_mips64/device.mk35
-rw-r--r--target/board/generic_mips64/system.prop6
-rw-r--r--target/board/generic_x86/BoardConfig.mk97
-rw-r--r--target/board/generic_x86/device.mk22
-rwxr-xr-xtarget/board/generic_x86_64/BoardConfig.mk93
-rwxr-xr-xtarget/board/generic_x86_64/device.mk22
-rw-r--r--target/board/generic_x86_64_ab/BoardConfig.mk11
-rw-r--r--target/board/generic_x86_ab/BoardConfig.mk12
-rw-r--r--target/board/generic_x86_arm/BoardConfig.mk54
-rw-r--r--target/board/generic_x86_arm/README.txt10
-rw-r--r--target/board/generic_x86_arm/device.mk (renamed from target/product/languages_small.mk)16
-rw-r--r--target/board/generic_x86_arm/system.prop (renamed from target/board/generic_mips/system.prop)1
-rw-r--r--target/board/go_defaults.prop (renamed from tools/droiddoc/Android.mk)8
-rw-r--r--target/board/go_defaults_512.prop (renamed from target/product/sdk_mips.mk)10
-rw-r--r--target/board/go_defaults_common.prop42
-rw-r--r--target/board/gsi_arm64/BoardConfig.mk37
-rw-r--r--target/board/gsi_system.prop11
-rw-r--r--target/board/gsi_system_user.prop16
-rw-r--r--target/board/mainline_arm64/BoardConfig.mk (renamed from target/board/generic_arm64_a/BoardConfig.mk)17
-rw-r--r--target/board/mainline_arm64/bluetooth/bdroid_buildcfg.h28
-rw-r--r--target/board/treble_common.mk61
-rw-r--r--target/board/treble_system.prop8
-rw-r--r--target/product/AndroidProducts.mk60
-rw-r--r--target/product/OWNERS1
-rw-r--r--target/product/aosp_arm.mk28
-rw-r--r--target/product/aosp_arm64.mk41
-rw-r--r--target/product/aosp_arm64_a.mk27
-rw-r--r--target/product/aosp_arm64_ab.mk27
-rw-r--r--target/product/aosp_arm_a.mk27
-rw-r--r--target/product/aosp_arm_ab.mk14
-rw-r--r--target/product/aosp_mips.mk31
-rw-r--r--target/product/aosp_mips64.mk44
-rw-r--r--target/product/aosp_x86.mk28
-rw-r--r--target/product/aosp_x86_64.mk51
-rw-r--r--target/product/aosp_x86_64_a.mk27
-rw-r--r--target/product/aosp_x86_64_ab.mk27
-rw-r--r--target/product/aosp_x86_a.mk27
-rw-r--r--target/product/aosp_x86_ab.mk14
-rw-r--r--target/product/aosp_x86_arm.mk37
-rw-r--r--target/product/base.mk179
-rw-r--r--target/product/base_product.mk22
-rw-r--r--target/product/base_system.mk396
-rw-r--r--target/product/base_vendor.mk71
-rw-r--r--target/product/core.mk77
-rw-r--r--target/product/core_base.mk65
-rw-r--r--target/product/core_minimal.mk174
-rw-r--r--target/product/core_tiny.mk150
-rw-r--r--target/product/embedded.mk122
-rw-r--r--target/product/emulator_vendor.mk70
-rw-r--r--target/product/full.mk3
-rw-r--r--target/product/full_base.mk6
-rw-r--r--target/product/full_mips.mk31
-rw-r--r--target/product/full_x86.mk5
-rw-r--r--target/product/generic.mk2
-rw-r--r--target/product/generic_mips.mk26
-rw-r--r--target/product/generic_no_telephony.mk66
-rw-r--r--target/product/go_defaults.mk3
-rw-r--r--target/product/go_defaults_512.mk12
-rw-r--r--target/product/go_defaults_common.mk48
-rw-r--r--target/product/gsi/28.txt (renamed from target/product/vndk/28.txt)0
-rw-r--r--target/product/gsi/29.txt273
-rw-r--r--target/product/gsi/Android.mk (renamed from target/product/vndk/Android.mk)29
-rw-r--r--target/product/gsi/OWNERS3
-rw-r--r--target/product/gsi/current.txt (renamed from target/product/vndk/current.txt)91
-rw-r--r--target/product/gsi/init.gsi.rc3
-rw-r--r--target/product/gsi/init.legacy-gsi.rc (renamed from target/product/vndk/init.gsi.rc)1
-rw-r--r--target/product/gsi/init.vndk-27.rc (renamed from target/product/vndk/init.vndk-27.rc)0
-rw-r--r--target/product/gsi/skip_mount.cfg2
-rw-r--r--target/product/gsi_arm64.mk (renamed from target/product/treble_common_64.mk)26
-rw-r--r--target/product/gsi_common.mk96
-rw-r--r--target/product/gsi_keys.mk22
-rw-r--r--target/product/handheld_product.mk46
-rw-r--r--target/product/handheld_system.mk90
-rw-r--r--target/product/handheld_vendor.mk29
-rw-r--r--target/product/languages_default.mk105
-rw-r--r--target/product/languages_full.mk97
-rw-r--r--target/product/legacy_gsi_common.mk37
-rw-r--r--target/product/mainline.mk37
-rw-r--r--target/product/mainline_arm64.mk67
-rw-r--r--target/product/mainline_system.mk123
-rw-r--r--target/product/mainline_system_arm64.mk36
-rw-r--r--target/product/media_product.mk (renamed from target/board/generic_arm_a/BoardConfig.mk)19
-rw-r--r--target/product/media_system.mk82
-rw-r--r--target/product/media_vendor.mk26
-rw-r--r--target/product/profile_boot_common.mk3
-rw-r--r--target/product/runtime_libart.mk38
-rw-r--r--target/product/sdk_base.mk174
-rw-r--r--target/product/sdk_phone_arm64.mk4
-rw-r--r--target/product/sdk_phone_armv7.mk4
-rw-r--r--target/product/sdk_phone_mips.mk34
-rw-r--r--target/product/sdk_phone_mips64.mk35
-rw-r--r--target/product/security/networkstack.pk8bin0 -> 2375 bytes
-rw-r--r--target/product/security/networkstack.x509.pem34
-rw-r--r--target/product/telephony.mk19
-rw-r--r--target/product/telephony_product.mk24
-rw-r--r--target/product/telephony_system.mk (renamed from target/board/treble_common_32.mk)15
-rw-r--r--target/product/telephony_vendor.mk (renamed from target/board/generic_x86_64_a/BoardConfig.mk)15
-rw-r--r--target/product/treble_common.mk68
-rw-r--r--target/product/treble_common_32.mk22
-rw-r--r--target/product/updatable_apex.mk21
-rw-r--r--tools/OWNERS3
-rw-r--r--tools/apicheck/Android.bp22
-rw-r--r--tools/apicheck/Android.mk27
-rw-r--r--tools/atree/files.cpp2
-rw-r--r--tools/auto_gen_test_config_test.py4
-rwxr-xr-xtools/buildinfo.sh14
-rwxr-xr-xtools/buildinfo_common.sh29
-rwxr-xr-xtools/check_elf_file.py545
-rwxr-xr-xtools/check_identical_lib.sh30
-rwxr-xr-xtools/checkowners.py51
-rw-r--r--tools/docker/Dockerfile3
-rw-r--r--tools/droiddoc/Android.bp18
-rw-r--r--tools/droiddoc/test/generics/Android.mk28
-rw-r--r--tools/droiddoc/test/stubs/Android.mk29
-rwxr-xr-xtools/extract_kernel.py196
-rwxr-xr-xtools/findleaves.py2
-rw-r--r--tools/fs_config/Android.bp108
-rw-r--r--tools/fs_config/Android.mk370
-rw-r--r--tools/fs_config/OWNERS2
-rw-r--r--tools/fs_config/README22
-rw-r--r--tools/fs_config/android_filesystem_config_test_data.h56
-rw-r--r--tools/fs_config/default/android_filesystem_config.h37
-rw-r--r--tools/fs_config/end_to_end_test/config.fs108
-rw-r--r--tools/fs_config/end_to_end_test/product_fs_config_dirsbin0 -> 72 bytes
-rw-r--r--tools/fs_config/end_to_end_test/product_fs_config_filesbin0 -> 128 bytes
-rwxr-xr-xtools/fs_config/end_to_end_test/run_test.sh76
-rw-r--r--tools/fs_config/end_to_end_test/system_fs_config_dirsbin0 -> 64 bytes
-rw-r--r--tools/fs_config/end_to_end_test/system_fs_config_filesbin0 -> 72 bytes
-rw-r--r--tools/fs_config/end_to_end_test/vendor_fs_config_dirsbin0 -> 72 bytes
-rw-r--r--tools/fs_config/end_to_end_test/vendor_fs_config_filesbin0 -> 128 bytes
-rw-r--r--tools/fs_config/fs_config.go60
-rw-r--r--tools/fs_config/fs_config_generate.c224
-rwxr-xr-xtools/fs_config/fs_config_generator.py393
-rw-r--r--tools/fs_config/fs_config_test.cpp223
-rwxr-xr-xtools/fs_config/test_fs_config_generator.py40
-rwxr-xr-xtools/generate-enforce-rro-android-manifest.py19
-rwxr-xr-xtools/generate-notice-files.py6
-rw-r--r--tools/releasetools/OWNERS1
-rwxr-xr-xtools/releasetools/add_img_to_target_files.py525
-rw-r--r--tools/releasetools/apex_utils.py221
-rw-r--r--tools/releasetools/blockimgdiff.py730
-rwxr-xr-xtools/releasetools/build_image.py854
-rwxr-xr-xtools/releasetools/build_super_image.py222
-rwxr-xr-xtools/releasetools/check_ota_package_signature.py13
-rwxr-xr-xtools/releasetools/check_target_files_signatures.py14
-rw-r--r--tools/releasetools/common.py1081
-rw-r--r--tools/releasetools/edify_generator.py109
-rwxr-xr-xtools/releasetools/img_from_target_files.py146
-rwxr-xr-xtools/releasetools/make_recovery_patch.py13
-rwxr-xr-xtools/releasetools/merge_target_files.py814
-rwxr-xr-xtools/releasetools/ota_from_target_files.py810
-rw-r--r--tools/releasetools/pylintrc2
-rwxr-xr-xtools/releasetools/sign_apex.py103
-rwxr-xr-xtools/releasetools/sign_target_files_apks.py568
-rwxr-xr-x[-rw-r--r--]tools/releasetools/sparse_img.py74
-rwxr-xr-xtools/releasetools/target_files_diff.py1
-rw-r--r--tools/releasetools/test_add_img_to_target_files.py341
-rw-r--r--tools/releasetools/test_apex_utils.py87
-rw-r--r--tools/releasetools/test_blockimgdiff.py79
-rw-r--r--tools/releasetools/test_build_image.py128
-rw-r--r--tools/releasetools/test_common.py643
-rw-r--r--tools/releasetools/test_merge_target_files.py77
-rw-r--r--tools/releasetools/test_ota_from_target_files.py167
-rw-r--r--tools/releasetools/test_rangelib.py10
-rw-r--r--tools/releasetools/test_sign_target_files_apks.py345
-rw-r--r--tools/releasetools/test_utils.py13
-rw-r--r--tools/releasetools/test_validate_target_files.py216
-rw-r--r--tools/releasetools/test_verity_utils.py380
-rw-r--r--tools/releasetools/testdata/TestApp.apkbin0 -> 4817 bytes
-rw-r--r--tools/releasetools/testdata/merge_config_system_item_list9
-rwxr-xr-x[-rw-r--r--]tools/releasetools/testdata/signing_helper.sh (renamed from target/board/generic_x86_a/BoardConfig.mk)12
-rw-r--r--tools/releasetools/testdata/testkey.key28
-rw-r--r--tools/releasetools/testdata/testkey_RSA4096.key52
-rw-r--r--tools/releasetools/testdata/testkey_mincryptbin0 -> 524 bytes
-rw-r--r--tools/releasetools/testdata/testkey_with_passwd.key28
-rw-r--r--tools/releasetools/testdata/verity_mincryptbin0 -> 524 bytes
-rwxr-xr-xtools/releasetools/validate_target_files.py278
-rw-r--r--tools/releasetools/verity_utils.py693
-rw-r--r--tools/signapk/Android.bp12
-rw-r--r--tools/signapk/Android.mk26
-rw-r--r--tools/signapk/src/com/android/signapk/CountingOutputStream.java59
-rw-r--r--tools/signapk/src/com/android/signapk/SignApk.java77
-rw-r--r--tools/signtos/Android.bp4
-rw-r--r--tools/test_extract_kernel.py30
-rwxr-xr-xtools/vendor_buildinfo.sh15
-rwxr-xr-xtools/warn.py1133
-rw-r--r--tools/zipalign/ZipAlign.cpp26
-rw-r--r--tools/zipalign/ZipEntry.cpp26
-rw-r--r--tools/zipalign/ZipEntry.h4
-rw-r--r--tools/zipalign/ZipFile.cpp66
-rw-r--r--tools/zipalign/ZipFile.h2
389 files changed, 24001 insertions, 12597 deletions
diff --git a/Android.mk b/Android.mk
deleted file mode 100644
index 5053e7d643..0000000000
--- a/Android.mk
+++ /dev/null
@@ -1 +0,0 @@
-include $(call all-subdir-makefiles)
diff --git a/Changes.md b/Changes.md
index 37bbad09f4..1fadcefdca 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,352 @@
# Build System Changes for Android.mk Writers
+## `PRODUCT_HOST_PACKAGES` split from `PRODUCT_PACKAGES` {#PRODUCT_HOST_PACKAGES}
+
+Previously, adding a module to `PRODUCT_PACKAGES` that supported both the host
+and the target (`host_supported` in Android.bp; two modules with the same name
+in Android.mk) would cause both to be built and installed. In many cases you
+only want either the host or target versions to be built/installed by default,
+and would be over-building with both. So `PRODUCT_PACKAGES` will be changing to
+just affect target modules, while `PRODUCT_HOST_PACKAGES` is being added for
+host modules.
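+
+For example, a product makefile would now list device and host modules
+separately (the module names here are hypothetical):
+
+``` make
+# Installed on the device image:
+PRODUCT_PACKAGES += my_daemon
+
+# Built and installed for the host (under out/host/):
+PRODUCT_HOST_PACKAGES += my_host_tool
+```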
+
+Functional differences between `PRODUCT_PACKAGES` and `PRODUCT_HOST_PACKAGES`:
+
+* `PRODUCT_HOST_PACKAGES` does not have `_ENG`/`_DEBUG` variants, as that's a
+ property of the target, not the host.
+* `PRODUCT_HOST_PACKAGES` does not support `LOCAL_MODULE_OVERRIDES`.
+* `PRODUCT_HOST_PACKAGES` requires listed modules to exist, and be host
+ modules. (Unless `ALLOW_MISSING_DEPENDENCIES` is set)
+
+This is still an active migration, so the build currently still uses
+`PRODUCT_PACKAGES` to make installation decisions, but verifies that
+`PRODUCT_HOST_PACKAGES` would have triggered installation of all of the same
+host packages. This check ignores shared libraries, as those are not normally
+necessary in `PRODUCT_*PACKAGES`, and tended to be over-built (especially the
+32-bit variants).
+
+Future changes will switch installation decisions to `PRODUCT_HOST_PACKAGES`
+for host modules, error when there's a host-only module in `PRODUCT_PACKAGES`,
+and do some further cleanup where `LOCAL_REQUIRED_MODULES` are still merged
+between host and target modules with the same name.
+
+## `*.c.arm` / `*.cpp.arm` deprecation {#file_arm}
+
+In Android.mk files, you used to be able to change LOCAL_ARM_MODE for each
+source file by appending `.arm` to the end of the filename in
+`LOCAL_SRC_FILES`.
+
+Soong does not support this uncommonly used behavior, instead expecting those
+files to be split out into a separate static library that chooses `arm` over
+`thumb` for the entire library. This must now also be done in Android.mk files.
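+
+A minimal Android.mk sketch of that split (the module and file names are
+hypothetical):
+
+``` make
+# Previously: LOCAL_SRC_FILES := fast_math.c.arm other.c
+include $(CLEAR_VARS)
+LOCAL_MODULE := libfast_math_arm
+LOCAL_SRC_FILES := fast_math.c
+LOCAL_ARM_MODE := arm
+include $(BUILD_STATIC_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libmylib
+LOCAL_SRC_FILES := other.c
+LOCAL_WHOLE_STATIC_LIBRARIES := libfast_math_arm
+include $(BUILD_SHARED_LIBRARY)
+```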
+
+## Windows cross-compiles no longer supported in Android.mk
+
+Modules that build for Windows (our only `HOST_CROSS` OS currently) must now be
+defined in `Android.bp` files.
+
+## `LOCAL_MODULE_TAGS := eng debug` deprecation {#LOCAL_MODULE_TAGS}
+
+`LOCAL_MODULE_TAGS` values `eng` and `debug` are being deprecated. They allowed
+modules to specify that they should always be installed on `-eng`, or `-eng`
+and `-userdebug` builds. This conflicted with the ability for products to
+specify which modules should be installed, effectively making it impossible to
+build a stripped down product configuration that did not include those modules.
+
+For the equivalent functionality, specify the modules in `PRODUCT_PACKAGES_ENG`
+or `PRODUCT_PACKAGES_DEBUG` in the appropriate product makefiles.
+
+Core Android packages like `su` were added to the list in
+`build/make/target/product/base_system.mk`, but for device-specific modules
+there are often better base product makefiles to use instead.
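+
+For example (the product makefile path and module names are hypothetical):
+
+``` make
+# In the module's Android.mk, remove:
+#   LOCAL_MODULE_TAGS := eng debug
+
+# In device/<vendor>/<device>/device.mk or another product makefile:
+PRODUCT_PACKAGES_ENG += my_eng_tool
+PRODUCT_PACKAGES_DEBUG += my_debug_tool
+```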
+
+## `USER` deprecation {#USER}
+
+`USER` will soon be `nobody` in many cases due to the addition of a sandbox
+around the Android build. Most of the time you shouldn't need to know the
+identity of the user running the build, but if you do, it's available in the
+make variable `BUILD_USERNAME` for now.
+
+Similarly, the `hostname` tool will also be returning a more consistent value
+of `android-build`. The real value is available as `BUILD_HOSTNAME`.
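+
+If a rule genuinely needs one of these values, a sketch of capturing it at
+rule-definition time (the tool here is hypothetical):
+
+``` make
+$(LOCAL_BUILT_MODULE): PRIVATE_BUILD_USER := $(BUILD_USERNAME)
+$(LOCAL_BUILT_MODULE):
+ mytool --built-by "$(PRIVATE_BUILD_USER)" -o $@
+```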
+
+## `BUILD_NUMBER` removal from Android.mk {#BUILD_NUMBER}
+
+`BUILD_NUMBER` should not be used directly in Android.mk files, as it would
+trigger them to be re-read every time the `BUILD_NUMBER` changes (which it does
+on every build server build). If possible, just remove the use so that your
+builds are more reproducible. If you do need it, use `BUILD_NUMBER_FROM_FILE`:
+
+``` make
+$(LOCAL_BUILT_MODULE):
+ mytool --build_number $(BUILD_NUMBER_FROM_FILE) -o $@
+```
+
+That will expand out to a subshell that will read the current `BUILD_NUMBER`
+whenever it's run. It will not re-run your command if the build number has
+changed, so incremental builds will have the build number from the last time
+the particular output was rebuilt.
+
+## `DIST_DIR`, `dist_goal`, and `dist-for-goals` {#dist}
+
+`DIST_DIR` and `dist_goal` are no longer available when reading Android.mk
+files (or other build tasks). Always use `dist-for-goals` instead, which takes
+a PHONY goal, and a list of files to copy to `$DIST_DIR`. Whenever `dist` is
+specified, and the goal would be built (either explicitly on the command line,
+or as a dependency of something on the command line), that file will be copied
+into `$DIST_DIR`. For example,
+
+``` make
+$(call dist-for-goals,foo,bar/baz)
+```
+
+will copy `bar/baz` into `$DIST_DIR/baz` when `m foo dist` is run.
+
+#### Renames during copy
+
+Instead of specifying just a file, a destination name can be specified,
+including subdirectories:
+
+``` make
+$(call dist-for-goals,foo,bar/baz:logs/foo.log)
+```
+
+will copy `bar/baz` into `$DIST_DIR/logs/foo.log` when `m foo dist` is run.
+
+## `.PHONY` rule enforcement {#phony_targets}
+
+There are several new warnings/errors meant to ensure the proper use of
+`.PHONY` targets in order to improve the speed and reliability of incremental
+builds.
+
+`.PHONY`-marked targets are often used as shortcuts to provide "friendly" names
+for real files to be built, but any target marked with `.PHONY` is also always
+considered dirty, needing to be rebuilt every build. This isn't a problem for
+aliases or one-off user-requested operations, but if real build steps depend
+on a `.PHONY` target, it can get quite expensive for what should be a tiny
+build.
+
+``` make
+...mk:42: warning: PHONY target "out/.../foo" looks like a real file (contains a "/")
+```
+
+Between this warning and the next, we're requiring that `.PHONY` targets do not
+have "/" in them, and real file targets do have a "/". This makes it more
+obvious when reading makefiles what is happening, and will help the build
+system differentiate these in the future too.
+
+``` make
+...mk:42: warning: writing to readonly directory: "kernel-modules"
+```
+
+This warning will show up for one of two reasons:
+
+1. The target isn't intended to be a real file, and should be marked with
+ `.PHONY`. This would be the case for this example.
+2. The target is a real file, but it's outside the output directories. All
+ outputs from the build system should be within the output directory,
+ otherwise `m clean` is unable to clean the build, and future builds may not
+ work properly.
+
+``` make
+...mk:42: warning: real file "out/.../foo" depends on PHONY target "buildbins"
+```
+
+If the first target isn't intended to be a real file, then it should be marked
+with `.PHONY`, which will satisfy this warning. This isn't the case for this
+example, as we require `.PHONY` targets not to have '/' in them.
+
+If the second (PHONY) target is a real file, it may unnecessarily be marked
+with `.PHONY`.
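+
+A sketch of the intended pattern (the firmware names and tool below are
+hypothetical): declare the "friendly" alias as `.PHONY`, keep it slash-free,
+and have it depend on the real output file:
+
+``` make
+fw_img := $(PRODUCT_OUT)/firmware/firmware.img
+
+$(fw_img): $(fw_srcs)
+ build-firmware -o $@ $(fw_srcs)
+
+.PHONY: firmware
+firmware: $(fw_img)
+```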
+
+### `.PHONY` and calling other build systems
+
+One common pattern (mostly outside AOSP) that we've seen hit these warnings is
+when building with external build systems (firmware, bootloader, kernel, etc).
+Those are often marked as `.PHONY` because the Android build system doesn't
+have enough dependencies to know when to run the other build system again
+during an incremental build.
+
+We recommend building these outside of Android and delivering prebuilts into the
+Android tree instead of decreasing the speed and reliability of the incremental
+Android build.
+
+In cases where that's not desired, to preserve the speed of Android
+incrementals, over-specifying dependencies is likely a better option than
+marking it with `.PHONY`:
+
+``` make
+out/target/.../zImage: $(sort $(shell find -L $(KERNEL_SRCDIR)))
+ ...
+```
+
+As for reliability, many of these other build systems do not provide the same
+level of incremental build guarantees that the Android build is attempting to
+provide -- without custom checks, Make doesn't rebuild objects when CFLAGS
+change, etc.
+In order to fix this, our recommendation is to do clean builds for each of
+these external build systems every time anything they rely on changes. For
+relatively small builds (like the kernel), this may be reasonable as long as
+you're not trying to actively debug the kernel.
+
+## `export` and `unexport` deprecation {#export_keyword}
+
+The `export` and `unexport` keywords have been deprecated, and will throw
+warnings or errors depending on where they are used.
+
+Early in the make system, during product configuration and BoardConfig.mk
+reading: these will throw warnings, and will become errors in the future.
+Device specific configuration should not be able to affect common core build
+steps -- we're looking at triggering build steps to be invalidated if the set
+of environment variables they can access changes. If device specific
+configuration is allowed to change those, switching devices with the same
+output directory could become significantly more expensive than it already can
+be.
+
+Later, while reading Android.mk files and during later tasks: these will throw
+errors, since it is increasingly likely that they are being used incorrectly,
+attempting to change the environment for a single build step but instead
+setting it for hundreds of thousands of build steps.
+
+It is not recommended to just move the environment variable setting outside of
+the build (in vendorsetup.sh, or some other configuration script or wrapper).
+We expect to limit the environment variables that the build respects in the
+future; others will be cleared. (There will be methods to get custom variables
+into the build, just not to every build step.)
+
+Instead, write the export commands into the rule command lines themselves:
+
+``` make
+$(intermediates)/generated_output.img:
+ rm -rf $@
+ export MY_ENV_A="$(MY_A)"; make ...
+```
+
+If you want to set many environment variables, and/or use them many times,
+write them out to a script and source the script:
+
+``` make
+envsh := $(intermediates)/env.sh
+$(envsh):
+ rm -rf $@
+ echo 'export MY_ENV_A="$(MY_A)"' >$@
+ echo 'export MY_ENV_B="$(MY_B)"' >>$@
+
+$(intermediates)/generated_output.img: PRIVATE_ENV := $(envsh)
+$(intermediates)/generated_output.img: $(envsh) a/b/c/package.sh
+ rm -rf $@
+ source $(PRIVATE_ENV); make ...
+ source $(PRIVATE_ENV); a/b/c/package.sh ...
+```
+
+## Implicit make rules are obsolete {#implicit_rules}
+
+Implicit rules look something like the following:
+
+``` make
+$(TARGET_OUT_SHARED_LIBRARIES)/%_vendor.so: $(TARGET_OUT_SHARED_LIBRARIES)/%.so
+ ...
+
+%.o : %.foo
+ ...
+```
+
+These can have wide-ranging effects across unrelated modules, so they're now obsolete. Instead, use static pattern rules, which are similar but explicitly match the specified outputs:
+
+``` make
+libs := $(foreach lib,libfoo libbar,$(TARGET_OUT_SHARED_LIBRARIES)/$(lib)_vendor.so)
+$(libs): %_vendor.so: %.so
+ ...
+
+files := $(wildcard $(LOCAL_PATH)/*.foo)
+gen := $(patsubst $(LOCAL_PATH)/%.foo,$(intermediates)/%.o,$(files))
+$(gen): %.o : %.foo
+ ...
+```
+
+## Removing '/' from Valid Module Names {#name_slash}
+
+The build system uses module names in path names in many places. An extra
+'/' or '../' inserted into a module name can cause problems -- not just build
+breaks, but stranger invalid behavior.
+
+In every case we've seen, the fix is relatively simple: move the directory into
+`LOCAL_MODULE_RELATIVE_PATH` (or `LOCAL_MODULE_PATH` if you're still using it).
+If this causes multiple modules to be named the same, use unique module names
+and `LOCAL_MODULE_STEM` to change the installed file name:
+
+``` make
+include $(CLEAR_VARS)
+LOCAL_MODULE := ver1/code.bin
+LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/firmware
+...
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := ver2/code.bin
+LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/firmware
+...
+include $(BUILD_PREBUILT)
+```
+
+Can be rewritten as:
+
+``` make
+include $(CLEAR_VARS)
+LOCAL_MODULE := ver1_code.bin
+LOCAL_MODULE_STEM := code.bin
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/firmware/ver1
+...
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := ver2_code.bin
+LOCAL_MODULE_STEM := code.bin
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/firmware/ver2
+...
+include $(BUILD_PREBUILT)
+```
+
+You just need to make sure that any other references (`PRODUCT_PACKAGES`,
+`LOCAL_REQUIRED_MODULES`, etc) are converted to the new names.
+
+## Valid Module Names {#name}
+
+We've adopted lexical requirements very similar to [Bazel's
+requirements](https://docs.bazel.build/versions/master/build-ref.html#name) for
+target names. Valid characters are `a-z`, `A-Z`, `0-9`, and the special
+characters `_.+-=,@~`. This currently applies to `LOCAL_PACKAGE_NAME`,
+`LOCAL_MODULE`, `LOCAL_MODULE_SUFFIX`, and `LOCAL_MODULE_STEM*`.
+
+Many other characters already caused problems if you used them, so we don't
+expect this to have a large effect.
+
+## PATH Tools {#PATH_Tools}
+
+The build has started restricting the external host tools usable inside the
+build. This will help ensure that build results are reproducible across
+different machines, and catch mistakes before they become larger issues.
+
+To start with, this includes replacing the $PATH with our own directory of
+tools, mirroring that of the host PATH. The only difference so far is the
+removal of the host GCC tools. Anything that is not explicitly allowed in the
+configuration will continue functioning, but will generate a log
+message. This is expected to become more restrictive over time.
+
+The configuration is located in build/soong/ui/build/paths/config.go, and
+contains all the common tools in use in many builds. Anything not in that list
+will currently print a warning in the `$OUT_DIR/soong.log` file, including the
+command and arguments used, and the process tree in order to help locate the
+usage.
+
+The best way to fix any issues brought up by these checks is to use tools
+checked into the tree -- either as prebuilts, or by building them as host
+tools during the build.
+
+As a temporary measure, you can set `TEMPORARY_DISABLE_PATH_RESTRICTIONS=true`
+in your environment to temporarily turn off the error checks and allow any tool
+to be used (with logging). Beware that GCC didn't work well with the interposer
+used for logging, so this may not help in all cases.
+
## Deprecating / obsoleting envsetup.sh variables in Makefiles
It is not required to source envsetup.sh before running a build. Many scripts,
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 767726108d..7c9c6da6f9 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -488,6 +488,157 @@ $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/Launcher3)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/Launcher3)
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/Launcher3_intermediates)
+# Remove old merged AndroidManifest.xml location
+$(call add-clean-step, rm -rf $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS/*_intermediates/AndroidManifest.xml)
+
+$(call add-clean-step, find $(PRODUCT_OUT) -type f -name "vr_hwc*" -print0 | xargs -0 rm -f)
+
+$(call add-clean-step, rm -rf $(SOONG_OUT_DIR)/.intermediates/system/vold)
+
+# Remove product-services related files / images
+$(call add-clean-step, find $(PRODUCT_OUT) -type f -name "*product-services*" -print0 | xargs -0 rm -rf)
+$(call add-clean-step, find $(PRODUCT_OUT) -type d -name "*product-services*" -print0 | xargs -0 rm -rf)
+$(call add-clean-step, find $(PRODUCT_OUT) -type l -name "*product-services*" -print0 | xargs -0 rm -rf)
+
+# Remove obsolete recovery etc files
+$(call add-clean-step, rm -rf $(TARGET_RECOVERY_ROOT_OUT)/etc)
+
+# Remove *_OUT_INTERMEDIATE_LIBRARIES
+$(call add-clean-step, rm -rf $(addsuffix /lib,\
+ $(HOST_OUT_INTERMEDIATES) $(2ND_HOST_OUT_INTERMEDIATES) \
+ $(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
+ $(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
+
+# Remove strip.sh intermediates to save space
+$(call add-clean-step, find $(OUT_DIR) \( -name "*.so.debug" -o -name "*.so.dynsyms" -o -name "*.so.funcsyms" -o -name "*.so.keep_symbols" -o -name "*.so.mini_debuginfo.xz" \) -print0 | xargs -0 rm -f)
+
+# Clean up old ninja files
+$(call add-clean-step, rm -f $(OUT_DIR)/build-*-dist*.ninja)
+
+$(call add-clean-step, rm -f $(HOST_OUT)/*ts/host-libprotobuf-java-*.jar)
+
+$(call add-clean-step, find $(OUT_DIR)/target/product/mainline_arm64/system -type f -name "*.*dex" -print0 | xargs -0 rm -f)
+
+# Clean up aidegen
+$(call add-clean-step, rm -f $(HOST_OUT)/bin/aidegen)
+
+# Remove perfprofd
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/perfprofd)
+
+# Remove incorrectly created directories in the source tree
+$(call add-clean-step, find system/app system/priv-app system/framework system_other -depth -type d -print0 | xargs -0 rmdir)
+$(call add-clean-step, rm -f .d)
+
+# Remove obsolete apps
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+
+# Remove corrupt generated rule due to using toybox's sed
+$(call add-clean-step, rm -rf $(SOONG_OUT_DIR)/.intermediates/system/core/init/generated_stub_builtin_function_map)
+
+# Clean up core JNI libraries moved to runtime apex
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib*/libjavacore.so)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib*/libopenjdk.so)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib*/libexpat.so)
+
+# Merge product_services into product
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/product_services)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/product_services)
+
+# Clean up old location of hiddenapi files
+$(call add-clean-step, rm -f $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi*)
+
+# Clean up previous default location of RROs
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/overlay)
+
+# Remove ART artifacts installed only by modules `art-runtime` and
+# `art-tools` in /system on target.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dalvikvm)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dalvikvm32)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dalvikvm64)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dex2oat)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dex2oatd)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dexdiag)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dexdump)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dexlist)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dexoptanalyzer)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/dexoptanalyzerd)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/oatdump)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/profman)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/profmand)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libadbconnection.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libadbconnectiond.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libart-compiler.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libartd-compiler.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libart-dexlayout.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libartd-dexlayout.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libart-disassembler.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libart.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libartd.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libartbase.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libartbased.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdexfile.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdexfiled.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdexfile_external.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdexfile_support.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdt_fd_forward.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libdt_socket.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libjdwp.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libnpt.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libopenjdkd.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libopenjdkjvm.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libopenjdkjvmd.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libopenjdkjvmti.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libopenjdkjvmtid.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libprofile.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libprofiled.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libtombstoned_client.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libvixl.so)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libvixld.so)
+
+# Clean up old location of dexpreopted boot jars
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/dex_bootjars)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/dex_bootjars_input)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libnpt.so)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*)
+
+# Clean up old testcase files
+$(call add-clean-step, rm -rf $(TARGET_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(HOST_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(HOST_CROSS_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(TARGET_OUT_DATA)/*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/vts/*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/framework/vts-tradefed.jar)
+
+# Clean up old location of system_other.avbpubkey
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/security/avb/)
+
+# Clean up bufferhub files
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/hw/android.frameworks.bufferhub@1.0-service)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/init/android.frameworks.bufferhub@1.0-service.rc)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/super.img)
+
+$(call add-clean-step, find $(PRODUCT_OUT) -type f -name "generated_*_image_info.txt" -print0 | xargs -0 rm -f)
+
+# Clean up libicuuc.so and libicui18n.so
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libicu*)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/target/common/obj/framework.aidl)
+
+# Clean up adb_debug.prop
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/adb_debug.prop)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libjavacrypto.so)
+
+# Clean up old location of soft OMX plugins
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libstagefright_soft*)
+
+# Move odm build.prop to /odm/etc/.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/odm/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/odm/build.prop)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/OWNERS b/OWNERS
index c630fe138f..320b40af01 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,13 +1,9 @@
+# Core build team (MTV)
ccross@android.com
dwillemsen@google.com
-nanzhang@google.com
-per-file * = ccross@android.com
-per-file * = dwillemsen@google.com
-per-file * = nanzhang@google.com
+# To expedite LON reviews
+hansson@google.com
-# for version updates
-per-file version_defaults.mk = aseaton@google.com
-per-file version_defaults.mk = elisapascual@google.com
-per-file version_defaults.mk = lubomir@google.com
-per-file version_defaults.mk = pscovanner@google.com
\ No newline at end of file
+# For version updates
+per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
diff --git a/Usage.txt b/Usage.txt
index 004210b478..558329bdac 100644
--- a/Usage.txt
+++ b/Usage.txt
@@ -29,7 +29,7 @@ Ways to specify what to build:
An alternative to setting $TARGET_PRODUCT and $TARGET_BUILD_VARIANT,
which you may see in build servers, is to execute:
- make PRODUCT-<product>-<variant>
+ m PRODUCT-<product>-<variant>
A target may be a file path. For example, out/host/linux-x86/bin/adb .
@@ -46,6 +46,17 @@ Ways to specify what to build:
files named Android.bp
these files are defined in Blueprint syntax
+ During a build, a few log files are generated in ${OUT} (or ${DIST_DIR}/logs
+ for dist builds):
+
+ verbose.log.gz
+ every command run, along with its outputs. This is similar to the
+ previous `m showcommands` option.
+ error.log
+ list of actions that failed during the build, and their outputs.
+ soong.log
+ verbose debug information from soong_ui
+
For now, the full (extremely large) compiled list of targets can be found
(after running the build once), split among these two files:
@@ -57,8 +68,6 @@ Ways to specify what to build:
tool here.
Targets that adjust an existing build:
- showcommands Display the individual commands run to implement
- the build
dist Copy into ${DIST_DIR} the portion of the build
that must be distributed
@@ -71,7 +80,7 @@ Variables
Variables can either be set in the surrounding shell environment or can be
passed as command-line arguments. For example:
export I_AM_A_SHELL_VAR=1
- I_AM_ANOTHER_SHELL_VAR=2 make droid I_AM_A_MAKE_VAR=3
+ I_AM_ANOTHER_SHELL_VAR=2 m droid I_AM_A_MAKE_VAR=3
Here are some common variables and their meanings:
TARGET_PRODUCT The <product> to build # as described above
TARGET_BUILD_VARIANT The <variant> to build # as described above
diff --git a/common/core.mk b/common/core.mk
new file mode 100644
index 0000000000..e5264b072d
--- /dev/null
+++ b/common/core.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Only use ANDROID_BUILD_SHELL to wrap around bash.
+# DO NOT use other shells such as zsh.
+ifdef ANDROID_BUILD_SHELL
+SHELL := $(ANDROID_BUILD_SHELL)
+else
+# Use bash, not whatever shell somebody has installed as /bin/sh
+# This is repeated from main.mk, since envsetup.sh runs this file
+# directly.
+SHELL := /bin/bash
+endif
+
+# Utility variables.
+empty :=
+space := $(empty) $(empty)
+comma := ,
+# Note that make will eat the newline just before endef.
+define newline
+
+
+endef
+# The pound character "#"
+define pound
+#
+endef
+# Unfortunately you can't simply define backslash as \ or \\.
+backslash := \a
+backslash := $(patsubst %a,%,$(backslash))
+
+# Prevent accidentally changing these variables
+.KATI_READONLY := SHELL empty space comma newline pound backslash
+
+# Basic warning/error wrappers. These will be redefined to include the local
+# module information when reading Android.mk files.
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
diff --git a/common/json.mk b/common/json.mk
new file mode 100644
index 0000000000..ba8ffa73ec
--- /dev/null
+++ b/common/json.mk
@@ -0,0 +1,35 @@
+4space :=$= $(space)$(space)$(space)$(space)
+invert_bool =$= $(if $(strip $(1)),,true)
+
+# Converts a list to a JSON list.
+# $1: List separator.
+# $2: List.
+_json_list =$= [$(if $(2),"$(subst $(1),"$(comma)",$(2))")]
+
+# Converts a space-separated list to a JSON list.
+json_list =$= $(call _json_list,$(space),$(1))
+
+# Converts a comma-separated list to a JSON list.
+csv_to_json_list =$= $(call _json_list,$(comma),$(1))
+
+# Adds or removes 4 spaces from _json_indent
+json_increase_indent =$= $(eval _json_indent := $$(_json_indent)$$(4space))
+json_decrease_indent =$= $(eval _json_indent := $$(subst _,$$(space),$$(patsubst %____,%,$$(subst $$(space),_,$$(_json_indent)))))
+
+# 1: Key name
+# 2: Value
+add_json_val =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": $$(strip $$(2))$$(comma)$$(newline))
+add_json_str =$= $(call add_json_val,$(1),"$(strip $(2))")
+add_json_list =$= $(call add_json_val,$(1),$(call json_list,$(patsubst %,%,$(2))))
+add_json_csv =$= $(call add_json_val,$(1),$(call csv_to_json_list,$(strip $(2))))
+add_json_bool =$= $(call add_json_val,$(1),$(if $(strip $(2)),true,false))
+add_json_map =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": {$$(newline))$(json_increase_indent)
+end_json_map =$= $(json_decrease_indent)$(eval _json_contents := $$(_json_contents)$$(if $$(filter %$$(comma),$$(lastword $$(_json_contents))),__SV_END)$$(_json_indent)},$$(newline))
+
+# Clears _json_contents to start a new json file
+json_start =$= $(eval _json_contents := {$$(newline))$(eval _json_indent := $$(4space))
+
+# Adds the trailing close brace to _json_contents, and removes any trailing commas if necessary
+json_end =$= $(eval _json_contents := $$(subst $$(comma)$$(newline)__SV_END,$$(newline),$$(_json_contents)__SV_END}$$(newline)))
+
+json_contents =$= $(_json_contents)
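+
+# Illustrative usage only (not part of the original file); this mirrors how
+# these macros are typically invoked, with the result written out via $(file ...):
+#
+#   $(call json_start)
+#   $(call add_json_str,  DeviceName, example_device)
+#   $(call add_json_bool, Eng,        $(filter eng,$(TARGET_BUILD_VARIANT)))
+#   $(call add_json_list, Locales,    en-US fr-FR)
+#   $(call json_end)
+#   $(file >$(OUT_DIR)/example_config.json,$(json_contents))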
diff --git a/common/math.mk b/common/math.mk
new file mode 100644
index 0000000000..ac3151e3c5
--- /dev/null
+++ b/common/math.mk
@@ -0,0 +1,270 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+###########################################################
+# Basic math functions for non-negative integers <= 100
+#
+# (SDK versions for example)
+###########################################################
+__MATH_POS_NUMBERS := 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 \
+ 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 \
+ 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 \
+ 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 \
+ 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100
+__MATH_NUMBERS := 0 $(__MATH_POS_NUMBERS)
+
+math-error = $(call pretty-error,$(1))
+math-expect :=
+math-expect-true :=
+math-expect-false :=
+math-expect-error :=
+
+# Run the math tests with:
+# make -f ${ANDROID_BUILD_TOP}/build/make/core/math.mk RUN_MATH_TESTS=true
+# $(get_build_var CKATI) -f ${ANDROID_BUILD_TOP}//build/make/core/math.mk RUN_MATH_TESTS=true
+ifdef RUN_MATH_TESTS
+ MATH_TEST_FAILURE :=
+ MATH_TEST_ERROR :=
+ math-error = $(if $(MATH_TEST_ERROR),,$(eval MATH_TEST_ERROR:=$(1)))
+ define math-expect
+ $(eval got:=$$$1) \
+ $(if $(subst $(got),,$(2))$(subst $(2),,$(got))$(MATH_TEST_ERROR), \
+ $(if $(MATH_TEST_ERROR),$(warning $(MATH_TEST_ERROR)),$(warning $$$1 '$(got)' != '$(2)')) \
+ $(eval MATH_TEST_FAILURE := true)) \
+ $(eval MATH_TEST_ERROR :=) \
+ $(eval got:=)
+ endef
+ math-expect-true = $(call math-expect,$(1),true)
+ math-expect-false = $(call math-expect,$(1),)
+
+ define math-expect-error
+ $(eval got:=$$$1) \
+ $(if $(subst $(MATH_TEST_ERROR),,$(2))$(subst $(2),,$(MATH_TEST_ERROR)), \
+ $(warning '$(MATH_TEST_ERROR)' != '$(2)') \
+ $(eval MATH_TEST_FAILURE := true)) \
+ $(eval MATH_TEST_ERROR :=) \
+ $(eval got:=)
+ endef
+endif
+
+# Returns true if $(1) is a non-negative integer <= 100, otherwise returns nothing.
+define math_is_number
+$(strip \
+ $(if $(1),,$(call math-error,Argument missing)) \
+ $(if $(word 2,$(1)),$(call math-error,Multiple words in a single argument: $(1))) \
+ $(if $(filter $(1),$(__MATH_NUMBERS)),true))
+endef
+
+define math_is_zero
+$(strip \
+ $(if $(word 2,$(1)),$(call math-error,Multiple words in a single argument: $(1))) \
+ $(if $(filter 0,$(1)),true))
+endef
+
+$(call math-expect-true,(call math_is_number,0))
+$(call math-expect-true,(call math_is_number,2))
+$(call math-expect-false,(call math_is_number,foo))
+$(call math-expect-false,(call math_is_number,-1))
+$(call math-expect-error,(call math_is_number,1 2),Multiple words in a single argument: 1 2)
+$(call math-expect-error,(call math_is_number,no 2),Multiple words in a single argument: no 2)
+
+$(call math-expect-true,(call math_is_zero,0))
+$(call math-expect-false,(call math_is_zero,1))
+$(call math-expect-false,(call math_is_zero,foo))
+$(call math-expect-error,(call math_is_zero,1 2),Multiple words in a single argument: 1 2)
+$(call math-expect-error,(call math_is_zero,no 2),Multiple words in a single argument: no 2)
+
+define _math_check_valid
+$(if $(call math_is_number,$(1)),,$(call math-error,Only non-negative integers <= 100 are supported (not $(1))))
+endef
+
+$(call math-expect,(call _math_check_valid,0))
+$(call math-expect,(call _math_check_valid,1))
+$(call math-expect,(call _math_check_valid,100))
+$(call math-expect-error,(call _math_check_valid,-1),Only non-negative integers <= 100 are supported (not -1))
+$(call math-expect-error,(call _math_check_valid,101),Only non-negative integers <= 100 are supported (not 101))
+$(call math-expect-error,(call _math_check_valid,),Argument missing)
+$(call math-expect-error,(call _math_check_valid,1 2),Multiple words in a single argument: 1 2)
+
+# return a list containing integers ranging from [$(1),$(2)]
+define int_range_list
+$(strip \
+ $(call _math_check_valid,$(1))$(call _math_check_valid,$(2)) \
+ $(if $(call math_is_zero,$(1)),0)\
+ $(wordlist $(if $(call math_is_zero,$(1)),1,$(1)),$(2),$(__MATH_POS_NUMBERS)))
+endef
+
+$(call math-expect,(call int_range_list,0,1),0 1)
+$(call math-expect,(call int_range_list,1,1),1)
+$(call math-expect,(call int_range_list,1,2),1 2)
+$(call math-expect,(call int_range_list,2,1),)
+$(call math-expect-error,(call int_range_list,1,101),Only non-negative integers <= 100 are supported (not 101))
+
+
+# Returns the greater of $1 or $2.
+# If $1 or $2 is not a non-negative integer <= 100, then an error is generated.
+define math_max
+$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
+ $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))))
+endef
+
+$(call math-expect-error,(call math_max),Argument missing)
+$(call math-expect-error,(call math_max,1),Argument missing)
+$(call math-expect-error,(call math_max,1 2,3),Multiple words in a single argument: 1 2)
+$(call math-expect,(call math_max,0,1),1)
+$(call math-expect,(call math_max,1,0),1)
+$(call math-expect,(call math_max,1,1),1)
+$(call math-expect,(call math_max,5,42),42)
+$(call math-expect,(call math_max,42,5),42)
+
+define math_gt_or_eq
+$(if $(filter $(1),$(call math_max,$(1),$(2))),true)
+endef
+
+define math_lt
+$(if $(call math_gt_or_eq,$(1),$(2)),,true)
+endef
+
+$(call math-expect-true,(call math_gt_or_eq, 2, 1))
+$(call math-expect-true,(call math_gt_or_eq, 1, 1))
+$(call math-expect-false,(call math_gt_or_eq, 1, 2))
+
+# $1 is the variable name to increment
+define inc_and_print
+$(strip $(eval $(1) := $($(1)) .)$(words $($(1))))
+endef
+
+ifdef RUN_MATH_TESTS
+a :=
+$(call math-expect,(call inc_and_print,a),1)
+$(call math-expect,(call inc_and_print,a),2)
+$(call math-expect,(call inc_and_print,a),3)
+$(call math-expect,(call inc_and_print,a),4)
+endif
+
+# Returns the words in $2 that are numbers and are less than $1
+define numbers_less_than
+$(strip \
+ $(foreach n,$2, \
+ $(if $(call math_is_number,$(n)), \
+ $(if $(call math_lt,$(n),$(1)), \
+ $(n)))))
+endef
+
+$(call math-expect,(call numbers_less_than,0,0 1 2 3),)
+$(call math-expect,(call numbers_less_than,1,0 2 1 3),0)
+$(call math-expect,(call numbers_less_than,2,0 2 1 3),0 1)
+$(call math-expect,(call numbers_less_than,3,0 2 1 3),0 2 1)
+$(call math-expect,(call numbers_less_than,4,0 2 1 3),0 2 1 3)
+$(call math-expect,(call numbers_less_than,3,0 2 1 3 2),0 2 1 2)
+
+_INT_LIMIT_WORDS := $(foreach a,x x,$(foreach b,x x x x x x x x x x x x x x x x,\
+ $(foreach c,x x x x x x x x x x x x x x x x,x x x x x x x x x x x x x x x x)))
+
+define _int_encode
+$(if $(filter $(words x $(_INT_LIMIT_WORDS)),$(words $(wordlist 1,$(1),x $(_INT_LIMIT_WORDS)))),\
+ $(call math-error,integer greater than $(words $(_INT_LIMIT_WORDS)) is not supported!),\
+ $(wordlist 1,$(1),$(_INT_LIMIT_WORDS)))
+endef
+
+# _int_max returns the maximum of the two arguments
+# input: two (x) lists; output: one (x) list
+# integer cannot be passed in directly. It has to be converted using _int_encode.
+define _int_max
+$(subst xx,x,$(join $(1),$(2)))
+endef
+
+# first argument is greater than second argument
+# output: non-empty if true
+# integer cannot be passed in directly. It has to be converted using _int_encode.
+define _int_greater-than
+$(filter-out $(words $(2)),$(words $(call _int_max,$(1),$(2))))
+endef
+
+# first argument equals to second argument
+# output: non-empty if true
+# integer cannot be passed in directly. It has to be converted using _int_encode.
+define _int_equal
+$(filter $(words $(1)),$(words $(2)))
+endef
+
+# first argument is greater than or equal to second argument
+# output: non-empty if true
+# integer cannot be passed in directly. It has to be converted using _int_encode.
+define _int_greater-or-equal
+$(call _int_greater-than,$(1),$(2))$(call _int_equal,$(1),$(2))
+endef
+
+define int_plus
+$(words $(call _int_encode,$(1)) $(call _int_encode,$(2)))
+endef
+
+$(call math-expect,(call int_plus,0,0),0)
+$(call math-expect,(call int_plus,0,1),1)
+$(call math-expect,(call int_plus,1,0),1)
+$(call math-expect,(call int_plus,1,100),101)
+$(call math-expect,(call int_plus,100,100),200)
+
+define int_subtract
+$(strip \
+ $(if $(call _int_greater-or-equal,$(call _int_encode,$(1)),$(call _int_encode,$(2))),\
+ $(words $(filter-out xx,$(join $(call _int_encode,$(1)),$(call _int_encode,$(2))))),\
+ $(call math-error,subtract underflow $(1) - $(2))))
+endef
+
+$(call math-expect,(call int_subtract,0,0),0)
+$(call math-expect,(call int_subtract,1,0),1)
+$(call math-expect,(call int_subtract,1,1),0)
+$(call math-expect,(call int_subtract,100,1),99)
+$(call math-expect,(call int_subtract,200,100),100)
+$(call math-expect-error,(call int_subtract,0,1),subtract underflow 0 - 1)
+
+define int_multiply
+$(words $(foreach a,$(call _int_encode,$(1)),$(call _int_encode,$(2))))
+endef
+
+$(call math-expect,(call int_multiply,0,0),0)
+$(call math-expect,(call int_multiply,1,0),0)
+$(call math-expect,(call int_multiply,1,1),1)
+$(call math-expect,(call int_multiply,100,1),100)
+$(call math-expect,(call int_multiply,1,100),100)
+$(call math-expect,(call int_multiply,4,100),400)
+$(call math-expect,(call int_multiply,100,4),400)
+
+define int_divide
+$(if $(filter 0,$(2)),$(call math-error,division by zero is not allowed!),$(strip \
+ $(if $(call _int_greater-or-equal,$(call _int_encode,$(1)),$(call _int_encode,$(2))), \
+ $(call int_plus,$(call int_divide,$(call int_subtract,$(1),$(2)),$(2)),1),0)))
+endef
+
+$(call math-expect,(call int_divide,1,1),1)
+$(call math-expect,(call int_divide,200,1),200)
+$(call math-expect,(call int_divide,200,3),66)
+$(call math-expect,(call int_divide,1,2),0)
+$(call math-expect-error,(call int_divide,0,0),division by zero is not allowed!)
+$(call math-expect-error,(call int_divide,1,0),division by zero is not allowed!)
+
+ifdef RUN_MATH_TESTS
+ ifdef MATH_TEST_FAILURE
+ math-tests:
+ @echo FAIL
+ @false
+ else
+ math-tests:
+ @echo PASS
+ endif
+ .PHONY: math-tests
+endif
diff --git a/common/strings.mk b/common/strings.mk
new file mode 100644
index 0000000000..ce6d6fbe9f
--- /dev/null
+++ b/common/strings.mk
@@ -0,0 +1,117 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+###########################################################
+## Convert to lower case without requiring a shell, since shell invocations aren't cacheable.
+##
+## $(1): string
+###########################################################
+to-lower=$(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
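+# Illustrative example (not part of the build): $(call to-lower,FooBar) expands to "foobar".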
+
+###########################################################
+## Convert to upper case without requiring a shell, since shell invocations aren't cacheable.
+##
+## $(1): string
+###########################################################
+to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
+
+# Sanity-check to-lower and to-upper
+lower := abcdefghijklmnopqrstuvwxyz-_
+upper := ABCDEFGHIJKLMNOPQRSTUVWXYZ-_
+
+ifneq ($(lower),$(call to-lower,$(upper)))
+ $(error to-lower sanity check failure)
+endif
+
+ifneq ($(upper),$(call to-upper,$(lower)))
+ $(error to-upper sanity check failure)
+endif
+
+lower :=
+upper :=
+
+###########################################################
+## Returns true if $(1) and $(2) are equal. Returns
+## the empty string if they are not equal.
+###########################################################
+define streq
+$(strip $(if $(strip $(1)),\
+ $(if $(strip $(2)),\
+ $(if $(filter-out __,_$(subst $(strip $(1)),,$(strip $(2)))$(subst $(strip $(2)),,$(strip $(1)))_),,true), \
+ ),\
+ $(if $(strip $(2)),\
+ ,\
+ true)\
+ ))
+endef
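+# Illustrative examples (not part of the build): $(call streq,foo,foo) expands to
+# "true", while $(call streq,foo,bar) expands to the empty string.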
+
+###########################################################
+## Convert "a b c" into "a:b:c"
+###########################################################
+define normalize-path-list
+$(subst $(space),:,$(strip $(1)))
+endef
+
+###########################################################
+## Convert "a b c" into "a,b,c"
+###########################################################
+define normalize-comma-list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+###########################################################
+## Read the word out of a colon-separated list of words.
+## This has the same behavior as the built-in function
+## $(word n,str).
+##
+## The individual words may not contain spaces.
+##
+## $(1): 1 based index
+## $(2): value of the form a:b:c...
+###########################################################
+
+define word-colon
+$(word $(1),$(subst :,$(space),$(2)))
+endef
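+# Illustrative example (not part of the build): $(call word-colon,2,a:b:c) expands to "b".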
+
+###########################################################
+## Convert "a=b c= d e = f" into "a=b c=d e=f"
+##
+## $(1): list to collapse
+## $(2): if set, separator word; usually "=", ":", or ":="
+## Defaults to "=" if not set.
+###########################################################
+
+define collapse-pairs
+$(eval _cpSEP := $(strip $(if $(2),$(2),=)))\
+$(strip $(subst $(space)$(_cpSEP)$(space),$(_cpSEP),$(strip \
+ $(subst $(_cpSEP), $(_cpSEP) ,$(1)))$(space)))
+endef
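+# Illustrative example (not part of the build): with an explicit separator word,
+# $(call collapse-pairs,a:=b c:= d,:=) expands to "a:=b c:=d".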
+
+###########################################################
+## Given a list of pairs, if multiple pairs have the same
+## first components, keep only the first pair.
+##
+## $(1): list of pairs
+## $(2): the separator word, such as ":", "=", etc.
+define uniq-pairs-by-first-component
+$(eval _upbfc_fc_set :=)\
+$(strip $(foreach w,$(1), $(eval _first := $(word 1,$(subst $(2),$(space),$(w))))\
+ $(if $(filter $(_upbfc_fc_set),$(_first)),,$(w)\
+ $(eval _upbfc_fc_set += $(_first)))))\
+$(eval _upbfc_fc_set :=)\
+$(eval _first:=)
+endef
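+# Illustrative example (not part of the build):
+# $(call uniq-pairs-by-first-component,a=1 b=2 a=3,=) expands to "a=1 b=2".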
diff --git a/core/Makefile b/core/Makefile
index 0a2f6e39d0..589f6ac536 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -36,16 +36,20 @@ $(foreach cf,$(unique_product_copy_files_pairs), \
$(eval $(call copy-xml-file-checked,$(_src),$(_fulldest))),\
$(if $(and $(filter %.jar,$(_dest)),$(filter $(basename $(notdir $(_dest))),$(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))),\
$(eval $(call copy-and-uncompress-dexs,$(_src),$(_fulldest))), \
- $(eval $(call copy-one-file,$(_src),$(_fulldest))))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(_fulldest)) \
+ $(if $(filter init%rc,$(notdir $(_dest)))$(filter %/etc/init,$(dir $(_dest))),\
+ $(eval $(call copy-init-script-file-checked,$(_src),$(_fulldest))),\
+ $(eval $(call copy-one-file,$(_src),$(_fulldest)))))) \
$(eval unique_product_copy_files_destinations += $(_dest))))
# Dump a list of overridden (and ignored) PRODUCT_COPY_FILES entries
-$(file >$(PRODUCT_OUT)/product_copy_files_ignored.txt,$(subst $(space),$(newline),$(strip $(product_copy_files_ignored))))
-ifdef dist_goal
-$(file >$(DIST_DIR)/logs/product_copy_files_ignored.txt,$(subst $(space),$(newline),$(strip $(product_copy_files_ignored))))
-endif
+pcf_ignored_file := $(PRODUCT_OUT)/product_copy_files_ignored.txt
+$(pcf_ignored_file): PRIVATE_IGNORED := $(sort $(product_copy_files_ignored))
+$(pcf_ignored_file):
+ echo "$(PRIVATE_IGNORED)" | tr " " "\n" >$@
+
+$(call dist-for-goals,droidcore,$(pcf_ignored_file):logs/$(notdir $(pcf_ignored_file)))
+pcf_ignored_file :=
product_copy_files_ignored :=
unique_product_copy_files_pairs :=
unique_product_copy_files_destinations :=
@@ -68,15 +72,30 @@ endef
# If more than one makefile declared a header, print a warning,
# then copy the last one defined. This matches the previous make
# behavior.
+has_dup_copy_headers :=
$(foreach dest,$(ALL_COPIED_HEADERS), \
$(eval _srcs := $(ALL_COPIED_HEADERS.$(dest).SRC)) \
- $(eval _src := $(word $(words $(_srcs)),$(_srcs))) \
+ $(eval _src := $(lastword $(_srcs))) \
$(if $(call streq,$(_src),$(_srcs)),, \
$(warning Duplicate header copy: $(dest)) \
- $(warning Defined in: $(ALL_COPIED_HEADERS.$(dest).MAKEFILE))) \
+ $(warning _ Using $(_src)) \
+ $(warning __ from $(lastword $(ALL_COPIED_HEADERS.$(dest).MAKEFILE))) \
+ $(eval _makefiles := $$(wordlist 1,$(call int_subtract,$(words $(ALL_COPIED_HEADERS.$(dest).MAKEFILE)),1),$$(ALL_COPIED_HEADERS.$$(dest).MAKEFILE))) \
+ $(foreach src,$(wordlist 1,$(call int_subtract,$(words $(_srcs)),1),$(_srcs)), \
+ $(warning _ Ignoring $(src)) \
+ $(warning __ from $(firstword $(_makefiles))) \
+ $(eval _makefiles := $$(wordlist 2,9999,$$(_makefiles)))) \
+ $(eval has_dup_copy_headers := true)) \
$(eval $(call copy-one-header,$(_src),$(dest))))
all_copied_headers: $(ALL_COPIED_HEADERS)
+ifdef has_dup_copy_headers
+ has_dup_copy_headers :=
+ ifneq ($(BUILD_BROKEN_DUP_COPY_HEADERS),true)
+ $(error duplicate header copies are no longer allowed. For more information about headers, see: https://android.googlesource.com/platform/build/soong/+/master/docs/best_practices.md#headers)
+ endif
+endif
+
# -----------------------------------------------------------------
# docs/index.html
ifeq (,$(TARGET_BUILD_APPS))
@@ -108,6 +127,8 @@ ndk-docs: $(ndk_doxygen_out)/index.html
.PHONY: ndk-docs
endif
+$(call dist-for-goals,sdk,$(API_FINGERPRINT))
+
# -----------------------------------------------------------------
# property_overrides_split_enabled
property_overrides_split_enabled :=
@@ -134,11 +155,75 @@ else
endif
FINAL_VENDOR_DEFAULT_PROPERTIES += \
$(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
+
+# Add cpu properties for bionic and ART.
+FINAL_VENDOR_DEFAULT_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
+FINAL_VENDOR_DEFAULT_PROPERTIES += ro.bionic.cpu_variant=$(TARGET_CPU_VARIANT_RUNTIME)
+FINAL_VENDOR_DEFAULT_PROPERTIES += ro.bionic.2nd_arch=$(TARGET_2ND_ARCH)
+FINAL_VENDOR_DEFAULT_PROPERTIES += ro.bionic.2nd_cpu_variant=$(TARGET_2ND_CPU_VARIANT_RUNTIME)
+
+FINAL_VENDOR_DEFAULT_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
+FINAL_VENDOR_DEFAULT_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
+ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
+ FINAL_VENDOR_DEFAULT_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+endif
+
+ifdef TARGET_2ND_ARCH
+ FINAL_VENDOR_DEFAULT_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).variant=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
+ ifneq ($($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
+ FINAL_VENDOR_DEFAULT_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).features=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+ endif
+endif
+
+# Although these variables are prefixed with TARGET_RECOVERY_, they are also needed in charger
+# mode (via libminui).
+ifdef TARGET_RECOVERY_DEFAULT_ROTATION
+FINAL_VENDOR_DEFAULT_PROPERTIES += \
+ ro.minui.default_rotation=$(TARGET_RECOVERY_DEFAULT_ROTATION)
+endif
+ifdef TARGET_RECOVERY_OVERSCAN_PERCENT
+FINAL_VENDOR_DEFAULT_PROPERTIES += \
+ ro.minui.overscan_percent=$(TARGET_RECOVERY_OVERSCAN_PERCENT)
+endif
+ifdef TARGET_RECOVERY_PIXEL_FORMAT
+FINAL_VENDOR_DEFAULT_PROPERTIES += \
+ ro.minui.pixel_format=$(TARGET_RECOVERY_PIXEL_FORMAT)
+endif
FINAL_VENDOR_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
$(FINAL_VENDOR_DEFAULT_PROPERTIES),=)
# -----------------------------------------------------------------
# prop.default
+
+BUILDINFO_SH := build/make/tools/buildinfo.sh
+BUILDINFO_COMMON_SH := build/make/tools/buildinfo_common.sh
+
+# Generates a set of sysprops common to all partitions and appends them to a file.
+# $(1): Partition name
+# $(2): Output file name
+define generate-common-build-props
+ PRODUCT_BRAND="$(PRODUCT_BRAND)" \
+ PRODUCT_DEVICE="$(TARGET_DEVICE)" \
+ PRODUCT_MANUFACTURER="$(PRODUCT_MANUFACTURER)" \
+ PRODUCT_MODEL="$(PRODUCT_MODEL)" \
+ PRODUCT_NAME="$(TARGET_PRODUCT)" \
+ $(call generate-common-build-props-with-product-vars-set,$(1),$(2))
+endef
+
+# Like the above macro, but requiring the relevant PRODUCT_ environment
+# variables to be set when called.
+define generate-common-build-props-with-product-vars-set
+ BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
+ BUILD_ID="$(BUILD_ID)" \
+ BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
+ BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
+ DATE="$(DATE_FROM_FILE)" \
+ PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
+ PLATFORM_VERSION="$(PLATFORM_VERSION)" \
+ TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
+ bash $(BUILDINFO_COMMON_SH) "$(1)" >> $(2)
+endef
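+# Illustrative usage (mirrors the build.prop rules below): a partition's recipe
+# appends the common props with e.g. $(call generate-common-build-props,vendor,$@).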
+
ifdef property_overrides_split_enabled
INSTALLED_DEFAULT_PROP_TARGET := $(TARGET_OUT)/etc/prop.default
INSTALLED_DEFAULT_PROP_OLD_TARGET := $(TARGET_ROOT_OUT)/default.prop
@@ -161,7 +246,7 @@ FINAL_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
intermediate_system_build_prop := $(call intermediates-dir-for,ETC,system_build_prop)/build.prop
-$(INSTALLED_DEFAULT_PROP_TARGET): $(intermediate_system_build_prop)
+$(INSTALLED_DEFAULT_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(intermediate_system_build_prop)
@echo Target buildinfo: $@
@mkdir -p $(dir $@)
@rm -f $@
@@ -169,13 +254,7 @@ $(INSTALLED_DEFAULT_PROP_TARGET): $(intermediate_system_build_prop)
echo "# ADDITIONAL_DEFAULT_PROPERTIES" >> $@; \
echo "#" >> $@;
$(hide) $(foreach line,$(FINAL_DEFAULT_PROPERTIES), \
- echo "$(line)" >> $@;)
- $(hide) echo "#" >> $@; \
- echo "# BOOTIMAGE_BUILD_PROPERTIES" >> $@; \
- echo "#" >> $@;
- $(hide) echo ro.bootimage.build.date=`$(DATE_FROM_FILE)`>>$@
- $(hide) echo ro.bootimage.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
- $(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+ echo "$(line)" >> $@;)
$(hide) build/make/tools/post_process_props.py $@
ifdef property_overrides_split_enabled
$(hide) mkdir -p $(TARGET_ROOT_OUT)
@@ -196,7 +275,7 @@ $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET)
echo "# ADDITIONAL VENDOR DEFAULT PROPERTIES" >> $@; \
echo "#" >> $@;
$(hide) $(foreach line,$(FINAL_VENDOR_DEFAULT_PROPERTIES), \
- echo "$(line)" >> $@;)
+ echo "$(line)" >> $@;)
$(hide) build/make/tools/post_process_props.py $@
endif # property_overrides_split_enabled
@@ -236,7 +315,7 @@ $(intermediate_system_build_prop): PRIVATE_BUILD_DESC := $(build_desc)
# The string used to uniquely identify the combined build and product; used by the OTA server.
ifeq (,$(strip $(BUILD_FINGERPRINT)))
ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
- BF_BUILD_NUMBER := $(USER)$$($(DATE_FROM_FILE) +%m%d%H%M)
+ BF_BUILD_NUMBER := $(BUILD_USERNAME)$$($(DATE_FROM_FILE) +%m%d%H%M)
else
BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
endif
@@ -272,7 +351,7 @@ KNOWN_OEM_THUMBPRINT_PROPERTIES := \
ro.product.name \
ro.product.device
OEM_THUMBPRINT_PROPERTIES := $(filter $(KNOWN_OEM_THUMBPRINT_PROPERTIES),\
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES))
+ $(PRODUCT_OEM_PROPERTIES))
# Display parameters shown under Settings -> About Phone
ifeq ($(TARGET_BUILD_VARIANT),user)
@@ -298,9 +377,6 @@ define get-default-product-locale
$(strip $(subst _,-, $(firstword $(1))))
endef
-BUILDINFO_SH := build/make/tools/buildinfo.sh
-VENDOR_BUILDINFO_SH := build/make/tools/vendor_buildinfo.sh
-
# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test
# harness to distinguish builds. Only add _asan for a sanitized build
# if it isn't already a part of the flavor (via a dedicated lunch
@@ -317,68 +393,72 @@ system_prop_file := $(TARGET_SYSTEM_PROP)
else
system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
endif
-$(intermediate_system_build_prop): $(BUILDINFO_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file) $(INSTALLED_ANDROID_INFO_TXT_TARGET)
+$(intermediate_system_build_prop): $(BUILDINFO_SH) $(BUILDINFO_COMMON_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(API_FINGERPRINT)
@echo Target buildinfo: $@
@mkdir -p $(dir $@)
$(hide) echo > $@
-ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES),)
+ifneq ($(PRODUCT_OEM_PROPERTIES),)
$(hide) echo "#" >> $@; \
echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
echo "#" >> $@;
- $(hide) $(foreach prop,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES), \
- echo "import /oem/oem.prop $(prop)" >> $@;)
-endif
+ $(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
+ echo "import /oem/oem.prop $(prop)" >> $@;)
+endif
+ $(hide) PRODUCT_BRAND="$(PRODUCT_SYSTEM_BRAND)" \
+ PRODUCT_MANUFACTURER="$(PRODUCT_SYSTEM_MANUFACTURER)" \
+ PRODUCT_MODEL="$(PRODUCT_SYSTEM_MODEL)" \
+ PRODUCT_NAME="$(PRODUCT_SYSTEM_NAME)" \
+ PRODUCT_DEVICE="$(PRODUCT_SYSTEM_DEVICE)" \
+ $(call generate-common-build-props-with-product-vars-set,system,$@)
$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
- TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \
- TARGET_DEVICE="$(TARGET_DEVICE)" \
- PRODUCT_NAME="$(TARGET_PRODUCT)" \
- PRODUCT_BRAND="$(PRODUCT_BRAND)" \
- PRODUCT_DEFAULT_LOCALE="$(call get-default-product-locale,$(PRODUCT_LOCALES))" \
- PRODUCT_DEFAULT_WIFI_CHANNELS="$(PRODUCT_DEFAULT_WIFI_CHANNELS)" \
- PRODUCT_MODEL="$(PRODUCT_MODEL)" \
- PRODUCT_MANUFACTURER="$(PRODUCT_MANUFACTURER)" \
- PRIVATE_BUILD_DESC="$(PRIVATE_BUILD_DESC)" \
- BUILD_ID="$(BUILD_ID)" \
- BUILD_DISPLAY_ID="$(BUILD_DISPLAY_ID)" \
- DATE="$(DATE_FROM_FILE)" \
- BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
- BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
- AB_OTA_UPDATER="$(AB_OTA_UPDATER)" \
- PLATFORM_VERSION="$(PLATFORM_VERSION)" \
- PLATFORM_SECURITY_PATCH="$(PLATFORM_SECURITY_PATCH)" \
- PLATFORM_BASE_OS="$(PLATFORM_BASE_OS)" \
- PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
- PLATFORM_PREVIEW_SDK_VERSION="$(PLATFORM_PREVIEW_SDK_VERSION)" \
- PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
- PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
- PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION="$(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION)" \
- BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
- BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
- $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
- TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
- TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
- TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
- TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
- TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
- TARGET_AAPT_CHARACTERISTICS="$(TARGET_AAPT_CHARACTERISTICS)" \
+ TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \
+ TARGET_DEVICE="$(TARGET_DEVICE)" \
+ PRODUCT_DEFAULT_LOCALE="$(call get-default-product-locale,$(PRODUCT_LOCALES))" \
+ PRODUCT_DEFAULT_WIFI_CHANNELS="$(PRODUCT_DEFAULT_WIFI_CHANNELS)" \
+ PRIVATE_BUILD_DESC="$(PRIVATE_BUILD_DESC)" \
+ BUILD_ID="$(BUILD_ID)" \
+ BUILD_DISPLAY_ID="$(BUILD_DISPLAY_ID)" \
+ DATE="$(DATE_FROM_FILE)" \
+ BUILD_USERNAME="$(BUILD_USERNAME)" \
+ BUILD_HOSTNAME="$(BUILD_HOSTNAME)" \
+ BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
+ BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
+ AB_OTA_UPDATER="$(AB_OTA_UPDATER)" \
+ PLATFORM_VERSION="$(PLATFORM_VERSION)" \
+ PLATFORM_SECURITY_PATCH="$(PLATFORM_SECURITY_PATCH)" \
+ PLATFORM_BASE_OS="$(PLATFORM_BASE_OS)" \
+ PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
+ PLATFORM_PREVIEW_SDK_VERSION="$(PLATFORM_PREVIEW_SDK_VERSION)" \
+ PLATFORM_PREVIEW_SDK_FINGERPRINT="$$(cat $(API_FINGERPRINT))" \
+ PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
+ PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
+ PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION="$(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION)" \
+ BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
+ $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
+ TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
+ TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
+ TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
+ TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
+ TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
bash $(BUILDINFO_SH) >> $@
$(hide) $(foreach file,$(system_prop_file), \
- if [ -f "$(file)" ]; then \
- echo "#" >> $@; \
- echo Target buildinfo from: "$(file)"; \
- echo "# from $(file)" >> $@; \
- echo "#" >> $@; \
- cat $(file) >> $@; \
- fi;)
+ if [ -f "$(file)" ]; then \
+ echo Target buildinfo from: "$(file)"; \
+ echo "" >> $@; \
+ echo "#" >> $@; \
+ echo "# from $(file)" >> $@; \
+ echo "#" >> $@; \
+ cat $(file) >> $@; \
+ echo "# end of $(file)" >> $@; \
+ fi;)
$(if $(FINAL_BUILD_PROPERTIES), \
- $(hide) echo >> $@; \
- echo "#" >> $@; \
- echo "# ADDITIONAL_BUILD_PROPERTIES" >> $@; \
- echo "#" >> $@; )
+ $(hide) echo >> $@; \
+ echo "#" >> $@; \
+ echo "# ADDITIONAL_BUILD_PROPERTIES" >> $@; \
+ echo "#" >> $@; )
$(hide) $(foreach line,$(FINAL_BUILD_PROPERTIES), \
- echo "$(line)" >> $@;)
- $(hide) cat $(INSTALLED_ANDROID_INFO_TXT_TARGET) | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' >> $@
- $(hide) build/make/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
+ echo "$(line)" >> $@;)
+ $(hide) build/make/tools/post_process_props.py $@ $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
build_desc :=
@@ -391,9 +471,6 @@ endif
$(INSTALLED_BUILD_PROP_TARGET): $(intermediate_system_build_prop) $(INSTALLED_RECOVERYIMAGE_TARGET)
@echo "Target build info: $@"
$(hide) grep -v 'ro.product.first_api_level' $(intermediate_system_build_prop) > $@
-ifdef INSTALLED_RECOVERYIMAGE_TARGET
- $(hide) echo ro.expect.recovery_id=`cat $(RECOVERYIMAGE_ID_FILE)` >> $@
-endif
# -----------------------------------------------------------------
# vendor build.prop
@@ -409,60 +486,138 @@ FINAL_VENDOR_BUILD_PROPERTIES := $(call uniq-pairs-by-first-component, \
$(FINAL_VENDOR_BUILD_PROPERTIES),=)
endif # property_overrides_split_enabled
-$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(VENDOR_BUILDINFO_SH) $(intermediate_system_build_prop)
+$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(intermediate_system_build_prop)
@echo Target vendor buildinfo: $@
@mkdir -p $(dir $@)
$(hide) echo > $@
+ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+ $(hide) echo ro.boot.dynamic_partitions=true >> $@
+endif
+ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
+ $(hide) echo ro.boot.dynamic_partitions_retrofit=true >> $@
+endif
$(hide) grep 'ro.product.first_api_level' $(intermediate_system_build_prop) >> $@ || true
- $(hide) echo ro.vendor.build.date=`$(DATE_FROM_FILE)`>>$@
- $(hide) echo ro.vendor.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
- $(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
$(hide) echo ro.vendor.build.security_patch="$(VENDOR_SECURITY_PATCH)">>$@
$(hide) echo ro.vendor.product.cpu.abilist="$(TARGET_CPU_ABI_LIST)">>$@
$(hide) echo ro.vendor.product.cpu.abilist32="$(TARGET_CPU_ABI_LIST_32_BIT)">>$@
$(hide) echo ro.vendor.product.cpu.abilist64="$(TARGET_CPU_ABI_LIST_64_BIT)">>$@
- $(hide) TARGET_DEVICE="$(TARGET_DEVICE)" \
- PRODUCT_NAME="$(TARGET_PRODUCT)" \
- PRODUCT_BRAND="$(PRODUCT_BRAND)" \
- PRODUCT_MODEL="$(PRODUCT_MODEL)" \
- PRODUCT_MANUFACTURER="$(PRODUCT_MANUFACTURER)" \
- TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
- TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
- bash $(VENDOR_BUILDINFO_SH) >> $@
-ifdef property_overrides_split_enabled
+ $(hide) echo ro.product.board="$(TARGET_BOOTLOADER_BOARD_NAME)">>$@
+ $(hide) echo ro.board.platform="$(TARGET_BOARD_PLATFORM)">>$@
+ $(hide) echo ro.hwui.use_vulkan="$(TARGET_USES_VULKAN)">>$@
+ifdef TARGET_SCREEN_DENSITY
+ $(hide) echo ro.sf.lcd_density="$(TARGET_SCREEN_DENSITY)">>$@
+endif
+ $(hide) $(call generate-common-build-props,vendor,$@)
+ $(hide) echo "#" >> $@; \
+ echo "# BOOTIMAGE_BUILD_PROPERTIES" >> $@; \
+ echo "#" >> $@;
+ $(hide) echo ro.bootimage.build.date=`$(DATE_FROM_FILE)`>>$@
+ $(hide) echo ro.bootimage.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+ $(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
$(hide) echo "#" >> $@; \
echo "# ADDITIONAL VENDOR BUILD PROPERTIES" >> $@; \
echo "#" >> $@;
+ $(hide) cat $(INSTALLED_ANDROID_INFO_TXT_TARGET) | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' >> $@
+ifdef property_overrides_split_enabled
$(hide) $(foreach line,$(FINAL_VENDOR_BUILD_PROPERTIES), \
- echo "$(line)" >> $@;)
+ echo "$(line)" >> $@;)
endif # property_overrides_split_enabled
- $(hide) build/make/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_PROPERTY_BLACKLIST)
+ $(hide) build/make/tools/post_process_props.py $@ $(PRODUCT_VENDOR_PROPERTY_BLACKLIST)
# -----------------------------------------------------------------
# product build.prop
INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/build.prop
ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+ifdef TARGET_PRODUCT_PROP
+product_prop_files := $(TARGET_PRODUCT_PROP)
+else
+product_prop_files := $(wildcard $(TARGET_DEVICE_DIR)/product.prop)
+endif
+
FINAL_PRODUCT_PROPERTIES += \
- $(call collapse-pairs, $(PRODUCT_PRODUCT_PROPERTIES))
+ $(call collapse-pairs, $(PRODUCT_PRODUCT_PROPERTIES) $(ADDITIONAL_PRODUCT_PROPERTIES))
FINAL_PRODUCT_PROPERTIES := $(call uniq-pairs-by-first-component, \
$(FINAL_PRODUCT_PROPERTIES),=)
-$(INSTALLED_PRODUCT_BUILD_PROP_TARGET):
+$(INSTALLED_PRODUCT_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(product_prop_files)
@echo Target product buildinfo: $@
@mkdir -p $(dir $@)
$(hide) echo > $@
ifdef BOARD_USES_PRODUCTIMAGE
- $(hide) echo ro.product.build.date=`$(DATE_FROM_FILE)`>>$@
- $(hide) echo ro.product.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
- $(hide) echo ro.product.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+ $(hide) $(call generate-common-build-props,product,$@)
endif # BOARD_USES_PRODUCTIMAGE
+ $(hide) $(foreach file,$(product_prop_files), \
+ if [ -f "$(file)" ]; then \
+ echo Target product properties from: "$(file)"; \
+ echo "" >> $@; \
+ echo "#" >> $@; \
+ echo "# from $(file)" >> $@; \
+ echo "#" >> $@; \
+ cat $(file) >> $@; \
+ echo "# end of $(file)" >> $@; \
+ fi;)
$(hide) echo "#" >> $@; \
echo "# ADDITIONAL PRODUCT PROPERTIES" >> $@; \
- echo "#" >> $@;
+ echo "#" >> $@; \
+ echo "ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)" >> $@;
$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
- echo "$(line)" >> $@;)
+ echo "$(line)" >> $@;)
+ $(hide) build/make/tools/post_process_props.py $@
+
+# ----------------------------------------------------------------
+# odm build.prop
+INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_ODM_BUILD_PROP_TARGET)
+
+FINAL_ODM_BUILD_PROPERTIES += \
+ $(call collapse-pairs, $(PRODUCT_ODM_PROPERTIES))
+FINAL_ODM_BUILD_PROPERTIES := $(call uniq-pairs-by-first-component, \
+ $(FINAL_ODM_BUILD_PROPERTIES),=)
+
+$(INSTALLED_ODM_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH)
+ @echo Target odm buildinfo: $@
+ @mkdir -p $(dir $@)
+ $(hide) echo > $@
+ $(hide) echo ro.odm.product.cpu.abilist="$(TARGET_CPU_ABI_LIST)">>$@
+ $(hide) echo ro.odm.product.cpu.abilist32="$(TARGET_CPU_ABI_LIST_32_BIT)">>$@
+ $(hide) echo ro.odm.product.cpu.abilist64="$(TARGET_CPU_ABI_LIST_64_BIT)">>$@
+ $(hide) $(call generate-common-build-props,odm,$@)
+ $(hide) echo "#" >> $@; \
+ echo "# ADDITIONAL ODM BUILD PROPERTIES" >> $@; \
+ echo "#" >> $@;
+ $(hide) $(foreach line,$(FINAL_ODM_BUILD_PROPERTIES), \
+ echo "$(line)" >> $@;)
+ $(hide) build/make/tools/post_process_props.py $@
+
+# -----------------------------------------------------------------
+# product_services build.prop (unless it's merged into /product)
+ifdef MERGE_PRODUCT_SERVICES_INTO_PRODUCT
+ ifneq (,$(PRODUCT_PRODUCT_SERVICES_PROPERTIES))
+ $(error PRODUCT_PRODUCT_SERVICES_PROPERTIES is not supported in this build.)
+ endif
+else
+INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT_SERVICES)/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET)
+
+FINAL_PRODUCT_SERVICES_PROPERTIES += \
+ $(call collapse-pairs, $(PRODUCT_PRODUCT_SERVICES_PROPERTIES))
+FINAL_PRODUCT_SERVICES_PROPERTIES := $(call uniq-pairs-by-first-component, \
+ $(FINAL_PRODUCT_SERVICES_PROPERTIES),=)
+$(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH)
+ @echo Target product_services buildinfo: $@
+ @mkdir -p $(dir $@)
+ $(hide) echo > $@
+ifdef BOARD_USES_PRODUCT_SERVICESIMAGE
+ $(hide) $(call generate-common-build-props,product_services,$@)
+endif # BOARD_USES_PRODUCT_SERVICESIMAGE
+ $(hide) echo "#" >> $@; \
+ echo "# ADDITIONAL PRODUCT_SERVICES PROPERTIES" >> $@; \
+ echo "#" >> $@;
+ $(hide) $(foreach line,$(FINAL_PRODUCT_SERVICES_PROPERTIES), \
+ echo "$(line)" >> $@;)
$(hide) build/make/tools/post_process_props.py $@
+endif # MERGE_PRODUCT_SERVICES_INTO_PRODUCT
# ----------------------------------------------------------------
@@ -487,9 +642,9 @@ $(INSTALLED_SDK_BUILD_PROP_TARGET): $(INSTALLED_BUILD_PROP_TARGET)
@echo SDK buildinfo: $@
@mkdir -p $(dir $@)
$(hide) grep -v "$(subst $(space),\|,$(strip \
- $(sdk_build_prop_remove)))" $< > $@.tmp
+ $(sdk_build_prop_remove)))" $< > $@.tmp
$(hide) for x in $(sdk_build_prop_remove); do \
- echo "$$x"generic >> $@.tmp; done
+ echo "$$x"generic >> $@.tmp; done
$(hide) mv $@.tmp $@
# -----------------------------------------------------------------
@@ -522,6 +677,16 @@ $(hide) echo '' >> $5
endef
+# -----------------------------------------------------------------
+# Merge an individual apkcerts output into the final apkcerts.txt output.
+# Use a macro to make it compatible with _apkcerts_write_line
+# $1 apkcerts file to be merged
+# $2 output file
+define _apkcerts_merge
+$(hide) cat $1 >> $2
+
+endef
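+# Illustrative usage (mirrors the recipe below): $(call _apkcerts_merge,$(PACKAGES.$(p).APKCERTS_FILE),$@)
+# appends that package's pre-generated apkcerts entries to the output file.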
+
name := $(TARGET_PRODUCT)
ifeq ($(TARGET_BUILD_TYPE),debug)
name := $(name)_debug
@@ -530,6 +695,8 @@ name := $(name)-apkcerts-$(FILE_NAME_TAG)
intermediates := \
$(call intermediates-dir-for,PACKAGING,apkcerts)
APKCERTS_FILE := $(intermediates)/$(name).txt
+all_apkcerts_files := $(sort $(foreach p,$(PACKAGES),$(PACKAGES.$(p).APKCERTS_FILE)))
+$(APKCERTS_FILE): $(all_apkcerts_files)
# We don't really need to build all the modules.
# TODO: rebuild APKCERTS_FILE if any app changes its cert.
$(APKCERTS_FILE):
@@ -537,9 +704,11 @@ $(APKCERTS_FILE):
@mkdir -p $(dir $@)
@rm -f $@
$(foreach p,$(PACKAGES),\
- $(if $(PACKAGES.$(p).EXTERNAL_KEY),\
- $(call _apkcerts_write_line,$(p),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$@),\
- $(call _apkcerts_write_line,$(p),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$@)))
+ $(if $(PACKAGES.$(p).APKCERTS_FILE),\
+ $(call _apkcerts_merge,$(PACKAGES.$(p).APKCERTS_FILE), $@),\
+ $(if $(PACKAGES.$(p).EXTERNAL_KEY),\
+ $(call _apkcerts_write_line,$(p),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$@),\
+ $(call _apkcerts_write_line,$(p),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$@))))
# In case value of PACKAGES is empty.
$(hide) touch $@
@@ -548,8 +717,10 @@ apkcerts-list: $(APKCERTS_FILE)
ifneq (,$(TARGET_BUILD_APPS))
$(call dist-for-goals, apps_only, $(APKCERTS_FILE):apkcerts.txt)
+ $(call dist-for-goals, apps_only, $(SOONG_APEX_KEYS_FILE):apexkeys.txt)
endif
+
# -----------------------------------------------------------------
# build system stats
BUILD_SYSTEM_STATS := $(PRODUCT_OUT)/build_system_stats.txt
@@ -560,6 +731,15 @@ $(BUILD_SYSTEM_STATS):
$(call dist-for-goals,droidcore,$(BUILD_SYSTEM_STATS))
# -----------------------------------------------------------------
+# build /product/etc/security/avb/system_other.avbpubkey if needed
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+ifeq ($(BOARD_AVB_ENABLE),true)
+INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET := $(TARGET_OUT_PRODUCT_ETC)/security/avb/system_other.avbpubkey
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)
+endif # BOARD_AVB_ENABLE
+endif # BUILDING_SYSTEM_OTHER_IMAGE
+
+# -----------------------------------------------------------------
# Modules ready to be converted to Soong, ordered by how many
# modules depend on them.
SOONG_CONV := $(sort $(SOONG_CONV))
@@ -602,7 +782,7 @@ $(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
# for future OTA packages installed by this system. Actual product
# deliverables will be re-signed by hand. We expect this file to
# exist with the suffixes ".x509.pem" and ".pk8".
-DEFAULT_KEY_CERT_PAIR := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
+DEFAULT_KEY_CERT_PAIR := $(strip $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
# Rules that need to be present for the all targets, even
@@ -640,7 +820,7 @@ $(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file
# directory).
event_log_tags_src := \
$(sort $(foreach m,\
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES) \
+ $(PRODUCT_PACKAGES) \
$(call module-names-for-tag-list,user), \
$(ALL_MODULES.$(m).EVENT_LOG_TAGS)) \
$(filter-out vendor/% device/% out/%,$(all_event_log_tags_src)))
@@ -677,23 +857,67 @@ else
endif
# -----------------------------------------------------------------
+# the root dir
+INTERNAL_ROOT_FILES := $(filter $(TARGET_ROOT_OUT)/%, \
+ $(ALL_GENERATED_SOURCES) \
+ $(ALL_DEFAULT_INSTALLED_MODULES))
+
+INSTALLED_FILES_FILE_ROOT := $(PRODUCT_OUT)/installed-files-root.txt
+INSTALLED_FILES_JSON_ROOT := $(INSTALLED_FILES_FILE_ROOT:.txt=.json)
+$(INSTALLED_FILES_FILE_ROOT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ROOT)
+$(INSTALLED_FILES_FILE_ROOT) : $(INTERNAL_ROOT_FILES) $(FILESLIST)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
+ $(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+$(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
+
+#------------------------------------------------------------------
+# dtb
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+INSTALLED_DTBIMAGE_TARGET := $(PRODUCT_OUT)/dtb.img
+ifdef BOARD_PREBUILT_DTBIMAGE_DIR
+$(INSTALLED_DTBIMAGE_TARGET) : $(sort $(wildcard $(BOARD_PREBUILT_DTBIMAGE_DIR)/*.dtb))
+ cat $^ > $@
+endif
+endif
+
+# -----------------------------------------------------------------
# the ramdisk
-INTERNAL_RAMDISK_FILES := $(filter $(TARGET_ROOT_OUT)/%, \
+ifdef BUILDING_RAMDISK_IMAGE
+INTERNAL_RAMDISK_FILES := $(filter $(TARGET_RAMDISK_OUT)/%, \
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES))
+INSTALLED_FILES_FILE_RAMDISK := $(PRODUCT_OUT)/installed-files-ramdisk.txt
+INSTALLED_FILES_JSON_RAMDISK := $(INSTALLED_FILES_FILE_RAMDISK:.txt=.json)
+$(INSTALLED_FILES_FILE_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RAMDISK)
+$(INSTALLED_FILES_FILE_RAMDISK) : $(INTERNAL_RAMDISK_FILES) $(FILESLIST)
+ @echo Installed file list: $@
+ @mkdir -p $(TARGET_RAMDISK_OUT)
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
+ $(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+$(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
# We just build this directly to the install location.
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
-$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) | $(MINIGZIP)
+$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(MINIGZIP)
$(call pretty,"Target ram disk: $@")
- $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(MINIGZIP) > $@
+ $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(MINIGZIP) > $@
.PHONY: ramdisk-nodeps
ramdisk-nodeps: $(MKBOOTFS) | $(MINIGZIP)
@echo "make $@: ignoring dependencies"
- $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(MINIGZIP) > $(INSTALLED_RAMDISK_TARGET)
+ $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(MINIGZIP) > $(INSTALLED_RAMDISK_TARGET)
+
+endif # BUILDING_RAMDISK_IMAGE
+
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
@@ -705,6 +929,10 @@ INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
--kernel $(INSTALLED_KERNEL_TARGET)
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+ INTERNAL_BOOTIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
+endif
+
ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
endif
@@ -719,14 +947,14 @@ ifdef BOARD_KERNEL_PAGESIZE
INTERNAL_BOOTIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
endif
-ifeq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),true)
+ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-VERITY_KEYID := veritykeyid=id:`openssl x509 -in $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem -text \
+VERITY_KEYID := veritykeyid=id:`openssl x509 -in $(PRODUCT_VERITY_SIGNING_KEY).x509.pem -text \
| grep keyid | sed 's/://g' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]' | sed 's/keyid//g'`
endif
endif
-INTERNAL_KERNEL_CMDLINE := $(strip $(BOARD_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
+INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
ifdef INTERNAL_KERNEL_CMDLINE
INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)"
endif
@@ -735,13 +963,6 @@ INTERNAL_MKBOOTIMG_VERSION_ARGS := \
--os_version $(PLATFORM_VERSION) \
--os_patch_level $(PLATFORM_SECURITY_PATCH)
-# BOARD_USES_RECOVERY_AS_BOOT = true must have BOARD_BUILD_SYSTEM_ROOT_IMAGE = true.
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE must be enabled for BOARD_USES_RECOVERY_AS_BOOT.)
-endif
-endif
-
# We build recovery as boot image if BOARD_USES_RECOVERY_AS_BOOT is true.
ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
@@ -770,34 +991,34 @@ bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
--partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
-else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
+else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
$(call pretty,"Target boot image: $@")
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
+ $(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
@echo "make $@: ignoring dependencies"
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
+ $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
-else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
+else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
$(call pretty,"Target boot image: $@")
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned
- $(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@
+ $(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@
$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
@echo "make $@: ignoring dependencies"
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned
- $(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET)
+ $(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET)
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
else # PRODUCT_SUPPORTS_VBOOT != true
@@ -816,23 +1037,17 @@ bootimage-nodeps: $(MKBOOTIMG)
endif # TARGET_BOOTIMAGE_USE_EXT2
endif # BOARD_USES_RECOVERY_AS_BOOT
-else # TARGET_NO_KERNEL
+else # TARGET_NO_KERNEL == "true"
ifdef BOARD_PREBUILT_BOOTIMAGE
ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
# Remove when b/63676296 is resolved.
$(error Prebuilt bootimage is only supported for AB targets)
endif
$(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTIMAGE),$(INSTALLED_BOOTIMAGE_TARGET)))
-else
-INTERNAL_KERNEL_CMDLINE := $(strip $(BOARD_KERNEL_CMDLINE))
-# HACK: The top-level targets depend on the bootimage. Not all targets
-# can produce a bootimage, though, and emulator targets need the ramdisk
-# instead. Fake it out by calling the ramdisk the bootimage.
-# TODO: make the emulator use bootimages, and make mkbootimg accept
-# kernel-less inputs.
-INSTALLED_BOOTIMAGE_TARGET := $(INSTALLED_RAMDISK_TARGET)
-endif
-endif
+else # BOARD_PREBUILT_BOOTIMAGE not defined
+INSTALLED_BOOTIMAGE_TARGET :=
+endif # BOARD_PREBUILT_BOOTIMAGE
+endif # TARGET_NO_KERNEL
# -----------------------------------------------------------------
# NOTICE files
@@ -849,7 +1064,7 @@ endif
.PHONY: notice_files
# Create the rule to combine the files into text and html/xml forms
-# $(1) - xml_excluded_vendor|xml_vendor|html
+# $(1) - xml_excluded_extra_partitions|xml_vendor|xml_product|xml_product_services|html
# $(2) - Plain text output file
# $(3) - HTML/XML output file
# $(4) - File title
@@ -874,96 +1089,143 @@ $(2) $(3): PRIVATE_DIR := $(5)
$(2) : $(3)
$(3) : $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
build/make/tools/generate-notice-files.py --text-output $(2) \
- $(if $(filter $(1),xml_excluded_vendor),-e vendor --xml-output, \
- $(if $(filter $(1),xml_vendor),-i vendor --xml-output, \
- --html-output)) $(3) \
- -t $$(PRIVATE_MESSAGE) -s $$(PRIVATE_DIR)/src
+ $(if $(filter $(1),xml_excluded_extra_partitions),-e vendor -e product -e product_services --xml-output, \
+ $(if $(filter $(1),xml_vendor),-i vendor --xml-output, \
+ $(if $(filter $(1),xml_product),-i product --xml-output, \
+ $(if $(filter $(1),xml_product_services),-i product_services --xml-output, \
+ --html-output)))) $(3) \
+ -t $$(PRIVATE_MESSAGE) -s $$(PRIVATE_DIR)/src
notice_files: $(2) $(3)
endef
+# Notice file logic isn't relevant for TARGET_BUILD_APPS
+ifndef TARGET_BUILD_APPS
+
# TODO These intermediate NOTICE.txt/NOTICE.html files should go into
# TARGET_OUT_NOTICE_FILES now that the notice files are gathered from
# the src subdirectory.
-
target_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt
-target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
-target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html.gz
-installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
+kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
+winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
+pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior.
-ifeq ($(PRODUCT_NOTICE_SPLIT),true)
-target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml
-target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml.gz
+ifneq ($(PRODUCT_NOTICE_SPLIT),true)
+target_notice_file_html := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
+target_notice_file_html_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html.gz
+installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
+$(eval $(call combine-notice-files, html, \
+ $(target_notice_file_txt), \
+ $(target_notice_file_html), \
+ "Notices for files contained in the filesystem images in this directory:", \
+ $(TARGET_OUT_NOTICE_FILES), \
+ $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+$(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
+ $(hide) $(MINIGZIP) -9 < $< > $@
+$(installed_notice_html_or_xml_gz): $(target_notice_file_html_gz)
+ $(copy-file-to-target)
+else
+target_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml
+target_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml.gz
installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.xml.gz
target_vendor_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.txt
target_vendor_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.xml
target_vendor_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.xml.gz
installed_vendor_notice_xml_gz := $(TARGET_OUT_VENDOR)/etc/NOTICE.xml.gz
-endif
-
-ifndef TARGET_BUILD_APPS
-kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
-winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
-pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
-ifdef target_vendor_notice_file_xml_gz
-$(eval $(call combine-notice-files, xml_excluded_vendor, \
- $(target_notice_file_txt), \
- $(target_notice_file_html_or_xml), \
- "Notices for files contained in the filesystem images in this directory:", \
- $(TARGET_OUT_NOTICE_FILES), \
- $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+target_product_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT.txt
+target_product_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT.xml
+target_product_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT.xml.gz
+installed_product_notice_xml_gz := $(TARGET_OUT_PRODUCT)/etc/NOTICE.xml.gz
+
+target_product_services_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT_SERVICES.txt
+target_product_services_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT_SERVICES.xml
+target_product_services_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_PRODUCT_SERVICES.xml.gz
+installed_product_services_notice_xml_gz := $(TARGET_OUT_PRODUCT_SERVICES)/etc/NOTICE.xml.gz
+
+# Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
+# being built. A notice xml file must depend on all modules that could potentially
+# install a license file relevant to it.
+license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
+# Phonys/fakes don't have notice files (though their deps might)
+license_modules := $(filter-out $(TARGET_OUT_FAKE)/%,$(license_modules))
+license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
+license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
+license_modules_product_services := $(filter $(TARGET_OUT_PRODUCT_SERVICES)/%,$(license_modules))
+license_modules_agg := $(license_modules_vendor) $(license_modules_product) $(license_modules_product_services)
+license_modules_rest := $(filter-out $(license_modules_agg),$(license_modules))
+
+$(eval $(call combine-notice-files, xml_excluded_extra_partitions, \
+ $(target_notice_file_txt), \
+ $(target_notice_file_xml), \
+ "Notices for files contained in the filesystem images in this directory:", \
+ $(TARGET_OUT_NOTICE_FILES), \
+ $(license_modules_rest)))
$(eval $(call combine-notice-files, xml_vendor, \
- $(target_vendor_notice_file_txt), \
- $(target_vendor_notice_file_xml), \
- "Notices for files contained in the vendor filesystem image in this directory:", \
- $(TARGET_OUT_NOTICE_FILES), \
- $(target_notice_file_html_or_xml)))
-else
-$(eval $(call combine-notice-files, html, \
- $(target_notice_file_txt), \
- $(target_notice_file_html_or_xml), \
- "Notices for files contained in the filesystem images in this directory:", \
- $(TARGET_OUT_NOTICE_FILES), \
- $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
-endif
-
-$(eval $(call combine-notice-files, html, \
- $(tools_notice_file_txt), \
- $(tools_notice_file_html), \
- "Notices for files contained in the tools directory:", \
- $(HOST_OUT_NOTICE_FILES), \
- $(ALL_DEFAULT_INSTALLED_MODULES) \
- $(winpthreads_notice_file)))
-
-# Install the html file at /system/etc/NOTICE.html.gz.
-# This is not ideal, but this is very late in the game, after a lot of
-# the module processing has already been done -- in fact, we used the
-# fact that all that has been done to get the list of modules that we
-# need notice files for.
-$(target_notice_file_html_or_xml_gz): $(target_notice_file_html_or_xml) | $(MINIGZIP)
+ $(target_vendor_notice_file_txt), \
+ $(target_vendor_notice_file_xml), \
+ "Notices for files contained in the vendor filesystem image in this directory:", \
+ $(TARGET_OUT_NOTICE_FILES), \
+ $(license_modules_vendor)))
+$(eval $(call combine-notice-files, xml_product, \
+ $(target_product_notice_file_txt), \
+ $(target_product_notice_file_xml), \
+ "Notices for files contained in the product filesystem image in this directory:", \
+ $(TARGET_OUT_NOTICE_FILES), \
+ $(license_modules_product)))
+$(eval $(call combine-notice-files, xml_product_services, \
+ $(target_product_services_notice_file_txt), \
+ $(target_product_services_notice_file_xml), \
+ "Notices for files contained in the product_services filesystem image in this directory:", \
+ $(TARGET_OUT_NOTICE_FILES), \
+ $(license_modules_product_services)))
+
+$(target_notice_file_xml_gz): $(target_notice_file_xml) | $(MINIGZIP)
$(hide) $(MINIGZIP) -9 < $< > $@
-$(installed_notice_html_or_xml_gz): $(target_notice_file_html_or_xml_gz)
- $(copy-file-to-target)
-
-ifdef target_vendor_notice_file_xml_gz
-# Install the vendor html file at /vendor/etc/NOTICE.xml.gz.
$(target_vendor_notice_file_xml_gz): $(target_vendor_notice_file_xml) | $(MINIGZIP)
$(hide) $(MINIGZIP) -9 < $< > $@
+$(target_product_notice_file_xml_gz): $(target_product_notice_file_xml) | $(MINIGZIP)
+ $(hide) $(MINIGZIP) -9 < $< > $@
+$(target_product_services_notice_file_xml_gz): $(target_product_services_notice_file_xml) | $(MINIGZIP)
+ $(hide) $(MINIGZIP) -9 < $< > $@
+$(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz)
+ $(copy-file-to-target)
$(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
$(copy-file-to-target)
+$(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
+ $(copy-file-to-target)
+
+# No notice file for product_services if its contents are merged into /product.
+# The notices will be part of the /product notice file.
+ifndef MERGE_PRODUCT_SERVICES_INTO_PRODUCT
+$(installed_product_services_notice_xml_gz): $(target_product_services_notice_file_xml_gz)
+ $(copy-file-to-target)
endif
# if we've been run by mm, mmm, etc., don't reinstall this every time
ifeq ($(ONE_SHOT_MAKEFILE),)
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
- ifdef target_vendor_notice_file_xml_gz
- ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
- endif
+ ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
+ ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
+ ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_services_notice_xml_gz)
+endif
+endif # PRODUCT_NOTICE_SPLIT
+
+ifeq ($(ONE_SHOT_MAKEFILE),)
+ ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
endif
+
+$(eval $(call combine-notice-files, html, \
+ $(tools_notice_file_txt), \
+ $(tools_notice_file_html), \
+ "Notices for files contained in the tools directory:", \
+ $(HOST_OUT_NOTICE_FILES), \
+ $(ALL_DEFAULT_INSTALLED_MODULES) \
+ $(winpthreads_notice_file)))
+
endif # TARGET_BUILD_APPS
# The kernel isn't really a module, so to get its module file in there, we
@@ -990,12 +1252,12 @@ $(winpthreads_notice_file): \
# This rule adds to ALL_DEFAULT_INSTALLED_MODULES, so it needs to come
# before the rules that use that variable to build the image.
ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/security/otacerts.zip
-$(TARGET_OUT_ETC)/security/otacerts.zip: KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-$(TARGET_OUT_ETC)/security/otacerts.zip: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR)) | $(ZIPTIME)
+$(TARGET_OUT_ETC)/security/otacerts.zip: PRIVATE_CERT := $(DEFAULT_KEY_CERT_PAIR).x509.pem
+$(TARGET_OUT_ETC)/security/otacerts.zip: $(SOONG_ZIP)
+$(TARGET_OUT_ETC)/security/otacerts.zip: $(DEFAULT_KEY_CERT_PAIR).x509.pem
$(hide) rm -f $@
$(hide) mkdir -p $(dir $@)
- $(hide) zip -qjX $@ $<
- $(remove-timestamps-from-package)
+ $(hide) $(SOONG_ZIP) -o $@ -C $(dir $(PRIVATE_CERT)) -f $(PRIVATE_CERT)
# Carry the public key for update_engine if it's a non-IoT target that
# uses the AB updater. We use the same key as otacerts but in RSA public key
@@ -1003,13 +1265,15 @@ $(TARGET_OUT_ETC)/security/otacerts.zip: $(addsuffix .x509.pem,$(DEFAULT_KEY_CER
ifeq ($(AB_OTA_UPDATER),true)
ifneq ($(PRODUCT_IOT),true)
ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
-$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
+$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(DEFAULT_KEY_CERT_PAIR).x509.pem
$(hide) rm -f $@
$(hide) mkdir -p $(dir $@)
$(hide) openssl x509 -pubkey -noout -in $< > $@
-ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_RECOVERY_ROOT_OUT)/etc/update_engine/update-payload-key.pub.pem
-$(TARGET_RECOVERY_ROOT_OUT)/etc/update_engine/update-payload-key.pub.pem: $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
+ALL_DEFAULT_INSTALLED_MODULES += \
+ $(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine/update-payload-key.pub.pem
+$(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine/update-payload-key.pub.pem: \
+ $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
$(hide) cp -f $< $@
endif
endif
@@ -1024,15 +1288,12 @@ otacerts: $(TARGET_OUT_ETC)/security/otacerts.zip
INTERNAL_USERIMAGES_EXT_VARIANT :=
ifeq ($(TARGET_USERIMAGES_USE_EXT2),true)
-INTERNAL_USERIMAGES_USE_EXT := true
INTERNAL_USERIMAGES_EXT_VARIANT := ext2
else
ifeq ($(TARGET_USERIMAGES_USE_EXT3),true)
-INTERNAL_USERIMAGES_USE_EXT := true
INTERNAL_USERIMAGES_EXT_VARIANT := ext3
else
ifeq ($(TARGET_USERIMAGES_USE_EXT4),true)
-INTERNAL_USERIMAGES_USE_EXT := true
INTERNAL_USERIMAGES_EXT_VARIANT := ext4
endif
endif
@@ -1049,13 +1310,11 @@ ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
endif
-ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true)
INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG)
-INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK)
+INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK) $(TUNE2FS)
ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS)
endif
-endif
ifeq ($(BOARD_AVB_ENABLE),true)
INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
@@ -1064,15 +1323,15 @@ endif
ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
endif
-ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE) $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
endif
INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS)))
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
-INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC))
+ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
+INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
+ifeq (true,$(PRODUCT_SUPPORTS_VERITY_FEC))
INTERNAL_USERIMAGES_DEPS += $(FEC)
endif
endif
@@ -1082,74 +1341,127 @@ INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
INTERNAL_USERIMAGES_DEPS += $(BLK_ALLOC_TO_BASE_FS)
-ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true)
INTERNAL_USERIMAGES_DEPS += $(MKE2FS_CONF)
+
+ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
+
+ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
+ $(error vboot 1.0 doesn't support logical partition)
endif
+# TODO(b/80195851): Should not define BOARD_AVB_SYSTEM_KEY_PATH without
+# BOARD_AVB_SYSTEM_DETACHED_VBMETA.
+
+endif # PRODUCT_USE_DYNAMIC_PARTITIONS
+
# $(1): the path of the output dictionary file
-# $(2): additional "key=value" pairs to append to the dictionary file.
-define generate-userimage-prop-dictionary
+# $(2): a subset of "system vendor cache userdata product product_services oem odm"
+# $(3): additional "key=value" pairs to append to the dictionary file.
+define generate-image-prop-dictionary
+$(if $(filter $(2),system),\
+ $(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(INTERNAL_SYSTEM_OTHER_PARTITION_SIZE),$(hide) echo "system_other_size=$(INTERNAL_SYSTEM_OTHER_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT),$(hide) echo "system_extfs_rsv_pct=$(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+ $(if $(PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
+ $(if $(PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCT_SYSTEM_HEADROOM)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "system_reserved_size=$(BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+)
+$(if $(filter $(2),userdata),\
+ $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
+)
+$(if $(filter $(2),cache),\
+ $(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1))
+)
+$(if $(filter $(2),vendor),\
+ $(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT),$(hide) echo "vendor_extfs_inode_count=$(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_EXTFS_RSV_PCT),$(hide) echo "vendor_extfs_rsv_pct=$(BOARD_VENDORIMAGE_EXTFS_RSV_PCT)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_JOURNAL_SIZE),$(hide) echo "vendor_journal_size=$(BOARD_VENDORIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "vendor_squashfs_compressor=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "vendor_squashfs_compressor_opt=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_squashfs_block_size=$(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+ $(if $(PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
+ $(if $(BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "vendor_reserved_size=$(BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+)
+$(if $(filter $(2),product),\
+ $(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_fs_type=$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_extfs_inode_count=$(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_EXTFS_RSV_PCT),$(hide) echo "product_extfs_rsv_pct=$(BOARD_PRODUCTIMAGE_EXTFS_RSV_PCT)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_PARTITION_SIZE),$(hide) echo "product_size=$(BOARD_PRODUCTIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_JOURNAL_SIZE),$(hide) echo "product_journal_size=$(BOARD_PRODUCTIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_squashfs_compressor=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_squashfs_compressor_opt=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_squashfs_block_size=$(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+ $(if $(PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
+ $(if $(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "product_reserved_size=$(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+)
+$(if $(filter $(2),product_services),\
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_services_fs_type=$(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_services_extfs_inode_count=$(BOARD_PRODUCT_SERVICESIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_EXTFS_RSV_PCT),$(hide) echo "product_services_extfs_rsv_pct=$(BOARD_PRODUCT_SERVICESIMAGE_EXTFS_RSV_PCT)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE),$(hide) echo "product_services_size=$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_JOURNAL_SIZE),$(hide) echo "product_services_journal_size=$(BOARD_PRODUCT_SERVICESIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_services_squashfs_compressor=$(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_services_squashfs_compressor_opt=$(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_services_squashfs_block_size=$(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_services_squashfs_disable_4k_align=$(BOARD_PRODUCT_SERVICESIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "product_services_reserved_size=$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+)
+$(if $(filter $(2),odm),\
+ $(if $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "odm_fs_type=$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "odm_extfs_inode_count=$(BOARD_ODMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_EXTFS_RSV_PCT),$(hide) echo "odm_extfs_rsv_pct=$(BOARD_ODMIMAGE_EXTFS_RSV_PCT)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_PARTITION_SIZE),$(hide) echo "odm_size=$(BOARD_ODMIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_JOURNAL_SIZE),$(hide) echo "odm_journal_size=$(BOARD_ODMIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "odm_squashfs_compressor=$(BOARD_ODMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "odm_squashfs_compressor_opt=$(BOARD_ODMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "odm_squashfs_block_size=$(BOARD_ODMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "odm_squashfs_disable_4k_align=$(BOARD_ODMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+ $(if $(PRODUCT_ODM_BASE_FS_PATH),$(hide) echo "odm_base_fs_file=$(PRODUCT_ODM_BASE_FS_PATH)" >> $(1))
+ $(if $(BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "odm_reserved_size=$(BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+)
+$(if $(filter $(2),oem),\
+ $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
+ $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
+ $(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+ $(if $(BOARD_OEMIMAGE_EXTFS_RSV_PCT),$(hide) echo "oem_extfs_rsv_pct=$(BOARD_OEMIMAGE_EXTFS_RSV_PCT)" >> $(1))
+)
$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(1)
+
$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT),$(hide) echo "system_extfs_rsv_pct=$(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
+$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
-$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM)" >> $(1))
-$(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
-$(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
-$(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
-$(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT),$(hide) echo "vendor_extfs_inode_count=$(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_EXTFS_RSV_PCT),$(hide) echo "vendor_extfs_rsv_pct=$(BOARD_VENDORIMAGE_EXTFS_RSV_PCT)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_JOURNAL_SIZE),$(hide) echo "vendor_journal_size=$(BOARD_VENDORIMAGE_JOURNAL_SIZE)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "vendor_squashfs_compressor=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "vendor_squashfs_compressor_opt=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_squashfs_block_size=$(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
-$(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_fs_type=$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_extfs_inode_count=$(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_EXTFS_RSV_PCT),$(hide) echo "product_extfs_rsv_pct=$(BOARD_PRODUCTIMAGE_EXTFS_RSV_PCT)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_PARTITION_SIZE),$(hide) echo "product_size=$(BOARD_PRODUCTIMAGE_PARTITION_SIZE)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_JOURNAL_SIZE),$(hide) echo "product_journal_size=$(BOARD_PRODUCTIMAGE_JOURNAL_SIZE)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_squashfs_compressor=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_squashfs_compressor_opt=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_squashfs_block_size=$(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
-$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
-$(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
-$(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
-$(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
-$(if $(BOARD_OEMIMAGE_EXTFS_RSV_PCT),$(hide) echo "oem_extfs_rsv_pct=$(BOARD_OEMIMAGE_EXTFS_RSV_PCT)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1)
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCT_SUPPORTS_VERITY)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION),$(hide) echo "product_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
+$(if $(PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCT_PRODUCT_VERITY_PARTITION),$(hide) echo "product_verity_block_device=$(PRODUCT_PRODUCT_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCT_PRODUCT_SERVICES_VERITY_PARTITION),$(hide) echo "product_services_verity_block_device=$(PRODUCT_PRODUCT_SERVICES_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCT_SUPPORTS_VBOOT)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
+$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_avbtool=$(notdir $(AVBTOOL))" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
@@ -1158,6 +1470,12 @@ $(if $(BOARD_AVB_ENABLE),\
$(hide) echo "avb_system_key_path=$(BOARD_AVB_SYSTEM_KEY_PATH)" >> $(1)
$(hide) echo "avb_system_algorithm=$(BOARD_AVB_SYSTEM_ALGORITHM)" >> $(1)
$(hide) echo "avb_system_rollback_index_location=$(BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_other_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_other_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+ $(if $(BOARD_AVB_SYSTEM_OTHER_KEY_PATH),\
+ $(hide) echo "avb_system_other_key_path=$(BOARD_AVB_SYSTEM_OTHER_KEY_PATH)" >> $(1)
+ $(hide) echo "avb_system_other_algorithm=$(BOARD_AVB_SYSTEM_OTHER_ALGORITHM)" >> $(1)))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_vendor_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_vendor_add_hashtree_footer_args=$(BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
$(if $(BOARD_AVB_ENABLE),\
@@ -1172,12 +1490,40 @@ $(if $(BOARD_AVB_ENABLE),\
$(hide) echo "avb_product_key_path=$(BOARD_AVB_PRODUCT_KEY_PATH)" >> $(1)
$(hide) echo "avb_product_algorithm=$(BOARD_AVB_PRODUCT_ALGORITHM)" >> $(1)
$(hide) echo "avb_product_rollback_index_location=$(BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_services_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+ $(hide) echo "avb_product_services_add_hashtree_footer_args=$(BOARD_AVB_PRODUCT_SERVICES_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+ $(if $(BOARD_AVB_PRODUCT_SERVICES_KEY_PATH),\
+ $(hide) echo "avb_product_services_key_path=$(BOARD_AVB_PRODUCT_SERVICES_KEY_PATH)" >> $(1)
+ $(hide) echo "avb_product_services_algorithm=$(BOARD_AVB_PRODUCT_SERVICES_ALGORITHM)" >> $(1)
+ $(hide) echo "avb_product_services_rollback_index_location=$(BOARD_AVB_PRODUCT_SERVICES_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_odm_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_odm_add_hashtree_footer_args=$(BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+ $(if $(BOARD_AVB_ODM_KEY_PATH),\
+ $(hide) echo "avb_odm_key_path=$(BOARD_AVB_ODM_KEY_PATH)" >> $(1)
+ $(hide) echo "avb_odm_algorithm=$(BOARD_AVB_ODM_ALGORITHM)" >> $(1)
+ $(hide) echo "avb_odm_rollback_index_location=$(BOARD_AVB_ODM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
$(hide) echo "recovery_as_boot=true" >> $(1))
$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
- $(hide) echo "system_root_image=true" >> $(1);\
- echo "ramdisk_dir=$(TARGET_ROOT_OUT)" >> $(1))
-$(if $(2),$(hide) $(foreach kv,$(2),echo "$(kv)" >> $(1);))
+ $(hide) echo "system_root_image=true" >> $(1))
+$(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1)
+$(if $(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),$(hide) echo "use_dynamic_partition_size=true" >> $(1))
+$(if $(3),$(hide) $(foreach kv,$(3),echo "$(kv)" >> $(1);))
+endef
+
+# $(1): the path of the output dictionary file
+# $(2): additional "key=value" pairs to append to the dictionary file.
+define generate-userimage-prop-dictionary
+$(call generate-image-prop-dictionary,$(1),system vendor cache userdata product product_services oem odm,$(2))
+endef
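
# An illustrative, hypothetical sketch (not part of this change) of how the helpers above are
# meant to be invoked from an image rule: "example_image_info" is a made-up name, and the
# "skip_fsck=true" pair is just an example of an additional key=value entry. Recipe lines are
# tab-indented in a real makefile.
example_image_info := $(call intermediates-dir-for,PACKAGING,example_image)/image_info.txt
$(example_image_info):
	mkdir -p $(dir $@) && rm -f $@
	$(call generate-image-prop-dictionary,$@,vendor,skip_fsck=true)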
+
+# $(1): the path of the input dictionary file, where each line has the format key=value
+# $(2): the key to look up
+define read-image-prop-dictionary
+$$(grep '$(2)=' $(1) | cut -f2- -d'=')
endef
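
# An illustrative sketch (not part of this change): read-image-prop-dictionary expands to a
# shell command substitution, so it is only meaningful inside a recipe line. Assuming the
# example dictionary rule sketched above, a lookup could be embedded like this (the recipe
# line is tab-indented in a real makefile):
.PHONY: print-example-vendor-fs-type
print-example-vendor-fs-type: $(example_image_info)
	echo "vendor_fs_type is $(call read-image-prop-dictionary,$<,vendor_fs_type)"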
# $(1): modules list
@@ -1206,19 +1552,38 @@ ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_
INTERNAL_RECOVERYIMAGE_FILES := $(filter $(TARGET_RECOVERY_OUT)/%, \
$(ALL_DEFAULT_INSTALLED_MODULES))
+INSTALLED_FILES_FILE_RECOVERY := $(PRODUCT_OUT)/installed-files-recovery.txt
+INSTALLED_FILES_JSON_RECOVERY := $(INSTALLED_FILES_FILE_RECOVERY:.txt=.json)
+
+# TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like the other
+# INSTALLED_FILES_FILE_* rules, because build-recoveryimage-target currently contains
+# cp/rsync/rm commands that touch the files under TARGET_RECOVERY_OUT and race with
+# the call to FILESLIST.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_BOOTIMAGE_TARGET)
+else
+$(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_RECOVERYIMAGE_TARGET)
+endif
+
+$(INSTALLED_FILES_FILE_RECOVERY): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RECOVERY)
+$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
+ $(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
recovery_initrc := $(call include-path-for, recovery)/etc/init.rc
recovery_sepolicy := \
$(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
$(TARGET_RECOVERY_ROOT_OUT)/plat_file_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/vendor_file_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/plat_property_contexts \
- $(TARGET_RECOVERY_ROOT_OUT)/vendor_property_contexts
-
-ifdef BOARD_ODM_SEPOLICY_DIRS
-recovery_sepolicy += \
+ $(TARGET_RECOVERY_ROOT_OUT)/vendor_property_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/odm_file_contexts \
- $(TARGET_RECOVERY_ROOT_OUT)/odm_property_contexts
-endif
+ $(TARGET_RECOVERY_ROOT_OUT)/odm_property_contexts \
+ $(TARGET_RECOVERY_ROOT_OUT)/product_file_contexts \
+ $(TARGET_RECOVERY_ROOT_OUT)/product_property_contexts
# Passed into rsync from non-recovery root to recovery root, to avoid overwriting recovery-specific
# SELinux files
@@ -1226,23 +1591,22 @@ IGNORE_RECOVERY_SEPOLICY := $(patsubst $(TARGET_RECOVERY_OUT)/%,--exclude=/%,$(r
recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system
recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img
-recovery_build_props := $(intermediate_system_build_prop)
-ifdef property_overrides_split_enabled
-recovery_build_props += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
-endif
-ifdef BOARD_USES_PRODUCTIMAGE
-recovery_build_props += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
-endif
recovery_resources_common := $(call include-path-for, recovery)/res
-# Set recovery_density to the density bucket of the device.
-recovery_density := unknown
-ifneq (,$(PRODUCT_AAPT_PREF_CONFIG))
-# If PRODUCT_AAPT_PREF_CONFIG includes a dpi bucket, then use that value.
-recovery_density := $(filter %dpi,$(PRODUCT_AAPT_PREF_CONFIG))
-else
-# Otherwise, use the default medium density.
-recovery_densities := mdpi
+# Set recovery_density to a density bucket based on TARGET_SCREEN_DENSITY, PRODUCT_AAPT_PREF_CONFIG,
+# or mdpi, in order of preference. We support both specific buckets (e.g. xhdpi) and numbers,
+# which get remapped to a bucket.
+recovery_density := $(or $(TARGET_SCREEN_DENSITY),$(PRODUCT_AAPT_PREF_CONFIG),mdpi)
+ifeq (,$(filter xxxhdpi xxhdpi xhdpi hdpi mdpi,$(recovery_density)))
+recovery_density_value := $(patsubst %dpi,%,$(recovery_density))
+# We roughly use the midpoint between the primary densities to split buckets.
+# ------160------240------320----------480------------640------
+# mdpi hdpi xhdpi xxhdpi xxxhdpi
+recovery_density := $(strip \
+ $(or $(if $(filter $(shell echo $$(($(recovery_density_value) >= 560))),1),xxxhdpi),\
+ $(if $(filter $(shell echo $$(($(recovery_density_value) >= 400))),1),xxhdpi),\
+ $(if $(filter $(shell echo $$(($(recovery_density_value) >= 280))),1),xhdpi),\
+ $(if $(filter $(shell echo $$(($(recovery_density_value) >= 200))),1),hdpi,mdpi)))
endif
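
# A standalone, illustrative sketch (not part of this change) of the bucket arithmetic above,
# using an assumed numeric density of 440:
example_density_value := 440
example_density_bucket := $(strip \
  $(or $(if $(filter $(shell echo $$(($(example_density_value) >= 560))),1),xxxhdpi),\
    $(if $(filter $(shell echo $$(($(example_density_value) >= 400))),1),xxhdpi),\
    $(if $(filter $(shell echo $$(($(example_density_value) >= 280))),1),xhdpi),\
    $(if $(filter $(shell echo $$(($(example_density_value) >= 200))),1),hdpi,mdpi)))
# 440 is >= 400 but < 560, so example_density_bucket evaluates to "xxhdpi".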
ifneq (,$(wildcard $(recovery_resources_common)-$(recovery_density)))
@@ -1251,22 +1615,120 @@ else
recovery_resources_common := $(recovery_resources_common)-xhdpi
endif
-# Select the 18x32 font on high-density devices (xhdpi and up); and
-# the 12x22 font on other devices. Note that the font selected here
-# can be overridden for a particular device by putting a font.png in
-# its private recovery resources.
-
-ifneq (,$(filter xxxhdpi 560dpi xxhdpi 400dpi xhdpi,$(recovery_density)))
+# Select the 18x32 font on high-density devices (xhdpi and up); and the 12x22 font on other devices.
+# Note that the font selected here can be overridden for a particular device by putting a font.png
+# in its private recovery resources.
+ifneq (,$(filter xxxhdpi xxhdpi xhdpi,$(recovery_density)))
recovery_font := $(call include-path-for, recovery)/fonts/18x32.png
else
recovery_font := $(call include-path-for, recovery)/fonts/12x22.png
endif
+
+# We will only generate the recovery background text images if the variable
+# TARGET_RECOVERY_UI_SCREEN_WIDTH is defined. For xxxhdpi and xxhdpi devices, we set the
+# variable to the commonly used values here, if it hasn't been initialized elsewhere.
+# Devices with lower densities must define TARGET_RECOVERY_UI_SCREEN_WIDTH in their
+# BoardConfig in order to use this feature.
+ifeq ($(recovery_density),xxxhdpi)
+TARGET_RECOVERY_UI_SCREEN_WIDTH ?= 1440
+else ifeq ($(recovery_density),xxhdpi)
+TARGET_RECOVERY_UI_SCREEN_WIDTH ?= 1080
+endif
+
+ifneq ($(TARGET_RECOVERY_UI_SCREEN_WIDTH),)
+# Subtracts the margin width and menu indent from the screen width; it's safe to be conservative.
+ifeq ($(TARGET_RECOVERY_UI_MARGIN_WIDTH),)
+ recovery_image_width := $$(($(TARGET_RECOVERY_UI_SCREEN_WIDTH) - 10))
+else
+ recovery_image_width := $$(($(TARGET_RECOVERY_UI_SCREEN_WIDTH) - $(TARGET_RECOVERY_UI_MARGIN_WIDTH) - 10))
+endif
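
# An illustrative note (not part of this change): because the "$$((...))" arithmetic is assigned
# with ":=", recovery_image_width holds the literal shell expression and is only evaluated when
# the image-generation recipe below runs. With an assumed screen width of 1080 and no margin:
example_recovery_image_width := $$((1080 - 10))
# example_recovery_image_width now holds the text "$((1080 - 10))"; a recipe line running
# "echo $(example_recovery_image_width)" would print 1070.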
+
+
+RECOVERY_INSTALLING_TEXT_FILE := $(call intermediates-dir-for,PACKAGING,recovery_text_res)/installing_text.png
+RECOVERY_INSTALLING_SECURITY_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/installing_security_text.png
+RECOVERY_ERASING_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/erasing_text.png
+RECOVERY_ERROR_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/error_text.png
+RECOVERY_NO_COMMAND_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/no_command_text.png
+
+RECOVERY_CANCEL_WIPE_DATA_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/cancel_wipe_data_text.png
+RECOVERY_FACTORY_DATA_RESET_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/factory_data_reset_text.png
+RECOVERY_TRY_AGAIN_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/try_again_text.png
+RECOVERY_WIPE_DATA_CONFIRMATION_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/wipe_data_confirmation_text.png
+RECOVERY_WIPE_DATA_MENU_HEADER_TEXT_FILE := $(dir $(RECOVERY_INSTALLING_TEXT_FILE))/wipe_data_menu_header_text.png
+
+generated_recovery_text_files := \
+ $(RECOVERY_INSTALLING_TEXT_FILE) \
+ $(RECOVERY_INSTALLING_SECURITY_TEXT_FILE) \
+ $(RECOVERY_ERASING_TEXT_FILE) \
+ $(RECOVERY_ERROR_TEXT_FILE) \
+ $(RECOVERY_NO_COMMAND_TEXT_FILE) \
+ $(RECOVERY_CANCEL_WIPE_DATA_TEXT_FILE) \
+ $(RECOVERY_FACTORY_DATA_RESET_TEXT_FILE) \
+ $(RECOVERY_TRY_AGAIN_TEXT_FILE) \
+ $(RECOVERY_WIPE_DATA_CONFIRMATION_TEXT_FILE) \
+ $(RECOVERY_WIPE_DATA_MENU_HEADER_TEXT_FILE)
+
+resource_dir := $(call include-path-for, recovery)/tools/recovery_l10n/res/
+image_generator_jar := $(HOST_OUT_JAVA_LIBRARIES)/RecoveryImageGenerator.jar
+zopflipng := $(HOST_OUT_EXECUTABLES)/zopflipng
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_SOURCE_FONTS := $(recovery_noto-fonts_dep) $(recovery_roboto-fonts_dep)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_RECOVERY_FONT_FILES_DIR := $(call intermediates-dir-for,PACKAGING,recovery_font_files)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_RESOURCE_DIR := $(resource_dir)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_IMAGE_GENERATOR_JAR := $(image_generator_jar)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_ZOPFLIPNG := $(zopflipng)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_RECOVERY_IMAGE_WIDTH := $(recovery_image_width)
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_RECOVERY_BACKGROUND_TEXT_LIST := \
+ recovery_installing \
+ recovery_installing_security \
+ recovery_erasing \
+ recovery_error \
+ recovery_no_command
+$(RECOVERY_INSTALLING_TEXT_FILE): PRIVATE_RECOVERY_WIPE_DATA_TEXT_LIST := \
+ recovery_cancel_wipe_data \
+ recovery_factory_data_reset \
+ recovery_try_again \
+ recovery_wipe_data_menu_header \
+ recovery_wipe_data_confirmation
+$(RECOVERY_INSTALLING_TEXT_FILE): .KATI_IMPLICIT_OUTPUTS := $(filter-out $(RECOVERY_INSTALLING_TEXT_FILE),$(generated_recovery_text_files))
+$(RECOVERY_INSTALLING_TEXT_FILE): $(image_generator_jar) $(resource_dir) $(recovery_noto-fonts_dep) $(recovery_roboto-fonts_dep) $(zopflipng)
+ # Prepares the font directory.
+ @rm -rf $(PRIVATE_RECOVERY_FONT_FILES_DIR)
+ @mkdir -p $(PRIVATE_RECOVERY_FONT_FILES_DIR)
+ $(foreach filename,$(PRIVATE_SOURCE_FONTS), cp $(filename) $(PRIVATE_RECOVERY_FONT_FILES_DIR) &&) true
+ @rm -rf $(dir $@)
+ @mkdir -p $(dir $@)
+ $(foreach text_name,$(PRIVATE_RECOVERY_BACKGROUND_TEXT_LIST) $(PRIVATE_RECOVERY_WIPE_DATA_TEXT_LIST), \
+ $(eval output_file := $(dir $@)/$(patsubst recovery_%,%_text.png,$(text_name))) \
+ $(eval center_alignment := $(if $(filter $(text_name),$(PRIVATE_RECOVERY_BACKGROUND_TEXT_LIST)), --center_alignment)) \
+ java -jar $(PRIVATE_IMAGE_GENERATOR_JAR) \
+ --image_width $(PRIVATE_RECOVERY_IMAGE_WIDTH) \
+ --text_name $(text_name) \
+ --font_dir $(PRIVATE_RECOVERY_FONT_FILES_DIR) \
+ --resource_dir $(PRIVATE_RESOURCE_DIR) \
+ --output_file $(output_file) $(center_alignment) && \
+ $(PRIVATE_ZOPFLIPNG) -y --iterations=1 --filters=0 $(output_file) $(output_file) > /dev/null &&) true
+else
+RECOVERY_INSTALLING_TEXT_FILE :=
+RECOVERY_INSTALLING_SECURITY_TEXT_FILE :=
+RECOVERY_ERASING_TEXT_FILE :=
+RECOVERY_ERROR_TEXT_FILE :=
+RECOVERY_NO_COMMAND_TEXT_FILE :=
+RECOVERY_CANCEL_WIPE_DATA_TEXT_FILE :=
+RECOVERY_FACTORY_DATA_RESET_TEXT_FILE :=
+RECOVERY_TRY_AGAIN_TEXT_FILE :=
+RECOVERY_WIPE_DATA_CONFIRMATION_TEXT_FILE :=
+RECOVERY_WIPE_DATA_MENU_HEADER_TEXT_FILE :=
+endif # TARGET_RECOVERY_UI_SCREEN_WIDTH
+
ifndef TARGET_PRIVATE_RES_DIRS
TARGET_PRIVATE_RES_DIRS := $(wildcard $(TARGET_DEVICE_DIR)/recovery/res)
endif
recovery_resource_deps := $(shell find $(recovery_resources_common) \
$(TARGET_PRIVATE_RES_DIRS) -type f)
+recovery_resource_deps += $(generated_recovery_text_files)
+
+
ifdef TARGET_RECOVERY_FSTAB
recovery_fstab := $(TARGET_RECOVERY_FSTAB)
else
@@ -1288,23 +1750,72 @@ endif
# (BOARD_USES_FULL_RECOVERY_IMAGE = true);
# b) We build a single image that contains boot and recovery both - no recovery image to install
# (BOARD_USES_RECOVERY_AS_BOOT = true);
-# c) We build the root into system image - not needing the resource file as we do bsdiff
+# c) We mount the system image as / and therefore do not have a ramdisk in boot.img
# (BOARD_BUILD_SYSTEM_ROOT_IMAGE = true).
# d) We include the recovery DTBO image within recovery - not needing the resource file as we
# do bsdiff, because boot and recovery will contain a different number of entries
# (BOARD_INCLUDE_RECOVERY_DTBO = true).
-# Note that condition b) implies condition c), because of the earlier check in this file:
-# "BOARD_USES_RECOVERY_AS_BOOT = true must have BOARD_BUILD_SYSTEM_ROOT_IMAGE = true" (not vice
-# versa though).
+# e) We include the recovery ACPIO image within recovery - not needing the resource file as we
+# do bsdiff, because boot and recovery will contain a different number of entries
+# (BOARD_INCLUDE_RECOVERY_ACPIO = true).
-ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) \
- $(BOARD_INCLUDE_RECOVERY_DTBO)))
+ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) \
+ $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
# Named '.dat' so we don't attempt to use imgdiff for patching it.
RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat
else
RECOVERY_RESOURCE_ZIP :=
endif
+INSTALLED_RECOVERY_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/prop.default
+
+$(INSTALLED_RECOVERY_BUILD_PROP_TARGET): PRIVATE_RECOVERY_UI_PROPERTIES := \
+ TARGET_RECOVERY_UI_ANIMATION_FPS:animation_fps \
+ TARGET_RECOVERY_UI_MARGIN_HEIGHT:margin_height \
+ TARGET_RECOVERY_UI_MARGIN_WIDTH:margin_width \
+ TARGET_RECOVERY_UI_MENU_UNUSABLE_ROWS:menu_unusable_rows \
+ TARGET_RECOVERY_UI_PROGRESS_BAR_BASELINE:progress_bar_baseline \
+ TARGET_RECOVERY_UI_TOUCH_LOW_THRESHOLD:touch_low_threshold \
+ TARGET_RECOVERY_UI_TOUCH_HIGH_THRESHOLD:touch_high_threshold \
+ TARGET_RECOVERY_UI_VR_STEREO_OFFSET:vr_stereo_offset
+
+# Parses the given list of build variables and writes their values as build properties if defined.
+# For example, if a target defines `TARGET_RECOVERY_UI_MARGIN_HEIGHT := 100`,
+# `ro.recovery.ui.margin_height=100` will be appended to the given output file.
+# $(1): Map from the build variable names to property names
+# $(2): Output file
+define append-recovery-ui-properties
+echo "#" >> $(2)
+echo "# RECOVERY UI BUILD PROPERTIES" >> $(2)
+echo "#" >> $(2)
+$(foreach prop,$(1), \
+ $(eval _varname := $(call word-colon,1,$(prop))) \
+ $(eval _propname := $(call word-colon,2,$(prop))) \
+ $(eval _value := $($(_varname))) \
+ $(if $(_value), \
+ echo ro.recovery.ui.$(_propname)=$(_value) >> $(2) &&)) true
+endef
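
# A hypothetical sketch (not part of this change) of calling the helper above with a made-up
# variable name and output file; the recipe lines are tab-indented in a real makefile.
EXAMPLE_UI_MARGIN_HEIGHT := 100
$(PRODUCT_OUT)/example-recovery-ui.prop:
	rm -f $@
	$(call append-recovery-ui-properties,EXAMPLE_UI_MARGIN_HEIGHT:margin_height,$@)
# The generated file would then contain the RECOVERY UI BUILD PROPERTIES header followed by:
#   ro.recovery.ui.margin_height=100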
+
+$(INSTALLED_RECOVERY_BUILD_PROP_TARGET): \
+ $(INSTALLED_DEFAULT_PROP_TARGET) \
+ $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET) \
+ $(intermediate_system_build_prop) \
+ $(INSTALLED_VENDOR_BUILD_PROP_TARGET) \
+ $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+ $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET)
+ @echo "Target recovery buildinfo: $@"
+ $(hide) mkdir -p $(dir $@)
+ $(hide) rm -f $@
+ $(hide) cat $(INSTALLED_DEFAULT_PROP_TARGET) > $@
+ $(hide) cat $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET) >> $@
+ $(hide) cat $(intermediate_system_build_prop) >> $@
+ $(hide) cat $(INSTALLED_VENDOR_BUILD_PROP_TARGET) >> $@
+ $(hide) cat $(INSTALLED_ODM_BUILD_PROP_TARGET) >> $@
+ $(hide) cat $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) >> $@
+ $(hide) cat $(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET) >> $@
+ $(call append-recovery-ui-properties,$(PRIVATE_RECOVERY_UI_PROPERTIES),$@)
+
INTERNAL_RECOVERYIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
--kernel $(recovery_kernel) \
@@ -1321,8 +1832,18 @@ ifdef BOARD_KERNEL_PAGESIZE
INTERNAL_RECOVERYIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
endif
ifdef BOARD_INCLUDE_RECOVERY_DTBO
+ifdef BOARD_PREBUILT_RECOVERY_DTBOIMAGE
+ INTERNAL_RECOVERYIMAGE_ARGS += --recovery_dtbo $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE)
+else
INTERNAL_RECOVERYIMAGE_ARGS += --recovery_dtbo $(BOARD_PREBUILT_DTBOIMAGE)
endif
+endif
+ifdef BOARD_INCLUDE_RECOVERY_ACPIO
+ INTERNAL_RECOVERYIMAGE_ARGS += --recovery_acpio $(BOARD_RECOVERY_ACPIO)
+endif
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+ INTERNAL_RECOVERYIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
+endif
# Keys authorized to sign OTA packages this build will accept. The
# build always uses dev-keys for this; release packaging tools will
@@ -1332,15 +1853,13 @@ OTA_PUBLIC_KEYS := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
# Generate a file containing the keys that will be read by the
# recovery binary.
RECOVERY_INSTALL_OTA_KEYS := \
- $(call intermediates-dir-for,PACKAGING,ota_keys)/keys
-DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
+ $(call intermediates-dir-for,PACKAGING,ota_keys)/otacerts.zip
$(RECOVERY_INSTALL_OTA_KEYS): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
$(RECOVERY_INSTALL_OTA_KEYS): extra_keys := $(patsubst %,%.x509.pem,$(PRODUCT_EXTRA_RECOVERY_KEYS))
-$(RECOVERY_INSTALL_OTA_KEYS): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR) $(extra_keys)
- @echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys)"
- @rm -rf $@
- @mkdir -p $(dir $@)
- $(JAVA) -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys) > $@
+$(RECOVERY_INSTALL_OTA_KEYS): $(SOONG_ZIP) $(OTA_PUBLIC_KEYS) $(extra_keys)
+ $(hide) rm -f $@
+ $(hide) mkdir -p $(dir $@)
+ $(hide) $(SOONG_ZIP) -o $@ $(foreach key_file, $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys), -C $(dir $(key_file)) -f $(key_file))
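
# For illustration only (not part of this change): with a hypothetical default key
# build/target/product/security/testkey and one extra key device/acme/security/extra,
# the foreach above would expand to roughly
#   $(SOONG_ZIP) -o $@ \
#     -C build/target/product/security -f build/target/product/security/testkey.x509.pem \
#     -C device/acme/security -f device/acme/security/extra.x509.pem
# i.e. each certificate is stored at the root of the zip, regardless of its source directory.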
RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id
@@ -1348,55 +1867,47 @@ RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id
define build-recoveryimage-target
# Making recovery image
$(hide) mkdir -p $(TARGET_RECOVERY_OUT)
- $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
+ $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
# Copying baseline ramdisk...
# Use rsync because "cp -Rf" fails to overwrite broken symlinks on Mac.
- $(hide) rsync -a --exclude=etc --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
- # Copy adbd from system/bin to recovery/root/sbin
- $(hide) cp -f $(TARGET_OUT_EXECUTABLES)/adbd $(TARGET_RECOVERY_ROOT_OUT)/sbin/adbd
+ $(hide) rsync -a --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
# Modifying ramdisk contents...
+ $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),, \
+ $(hide) ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init)
$(if $(BOARD_RECOVERY_KERNEL_MODULES), \
$(call build-image-kernel-modules,$(BOARD_RECOVERY_KERNEL_MODULES),$(TARGET_RECOVERY_ROOT_OUT),,$(call intermediates-dir-for,PACKAGING,depmod_recovery)))
# Removes $(TARGET_RECOVERY_ROOT_OUT)/init*.rc EXCEPT init.recovery*.rc.
$(hide) find $(TARGET_RECOVERY_ROOT_OUT) -maxdepth 1 -name 'init*.rc' -type f -not -name "init.recovery.*.rc" | xargs rm -f
$(hide) cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/
- $(hide) cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ || true # Ignore error when the src file doesn't exist.
+ $(hide) cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ 2> /dev/null || true # Ignore error when the src file doesn't exist.
$(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res
$(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
$(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
+ $(hide) $(foreach recovery_text_file,$(generated_recovery_text_files), \
+ cp -rf $(recovery_text_file) $(TARGET_RECOVERY_ROOT_OUT)/res/images/ &&) true
$(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
$(hide) $(foreach item,$(TARGET_PRIVATE_RES_DIRS), \
cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline))
$(hide) $(foreach item,$(recovery_fstab), \
- cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/etc/recovery.fstab)
+ cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab)
$(if $(strip $(recovery_wipe)), \
- $(hide) cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/etc/recovery.wipe)
- $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/res/keys
- $(hide) cat $(INSTALLED_DEFAULT_PROP_TARGET) \
- > $(TARGET_RECOVERY_ROOT_OUT)/prop.default
- $(if $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET), \
- $(hide) cat $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET) \
- >> $(TARGET_RECOVERY_ROOT_OUT)/prop.default)
- $(hide) cat $(recovery_build_props) \
- >> $(TARGET_RECOVERY_ROOT_OUT)/prop.default
+ $(hide) cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
+ $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
+ $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security/otacerts.zip
$(hide) ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
$(BOARD_RECOVERY_IMAGE_PREPARE)
- $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)), \
- $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system_root; \
- rm -rf $(TARGET_RECOVERY_ROOT_OUT)/system; \
- ln -sf /system_root/system $(TARGET_RECOVERY_ROOT_OUT)/system) # Mount the system_root_image to /system_root and symlink /system.
$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
- $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \
+ $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned, \
$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1) --id > $(RECOVERYIMAGE_ID_FILE))
- $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)),\
+ $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
- $(BOOT_SIGNER) /boot $(1) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
- $(BOOT_SIGNER) /recovery $(1) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)\
+ $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
+ $(BOOT_SIGNER) /recovery $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)\
)\
)
- $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \
- $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
+ $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
+ $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
$(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))), \
$(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
@@ -1406,46 +1917,74 @@ define build-recoveryimage-target
$(hide) $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_RECOVERYIMAGE_PARTITION_SIZE) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
endef
-ADBD := $(TARGET_OUT_EXECUTABLES)/adbd
-
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER))
+ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER))
$(INSTALLED_BOOTIMAGE_TARGET) : $(BOOT_SIGNER)
endif
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
$(INSTALLED_BOOTIMAGE_TARGET) : $(VBOOT_SIGNER)
endif
ifeq (true,$(BOARD_AVB_ENABLE))
$(INSTALLED_BOOTIMAGE_TARGET) : $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
endif
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) $(ADBD) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INTERNAL_RECOVERYIMAGE_FILES) \
- $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
- $(INSTALLED_2NDBOOTLOADER_TARGET) \
- $(recovery_build_props) $(recovery_resource_deps) \
- $(recovery_fstab) \
- $(RECOVERY_INSTALL_OTA_KEYS) \
- $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET) \
- $(BOARD_RECOVERY_KERNEL_MODULES) \
- $(DEPMOD)
- $(call pretty,"Target boot image from recovery: $@")
- $(call build-recoveryimage-target, $@)
-endif
-
-$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) $(ADBD) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INTERNAL_RECOVERYIMAGE_FILES) \
- $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
- $(INSTALLED_2NDBOOTLOADER_TARGET) \
- $(recovery_build_props) $(recovery_resource_deps) \
- $(recovery_fstab) \
- $(RECOVERY_INSTALL_OTA_KEYS) \
- $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET) \
- $(BOARD_RECOVERY_KERNEL_MODULES) \
- $(DEPMOD)
- $(call build-recoveryimage-target, $@)
+ifdef BOARD_INCLUDE_RECOVERY_DTBO
+ifdef BOARD_PREBUILT_RECOVERY_DTBOIMAGE
+$(INSTALLED_BOOTIMAGE_TARGET): $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE)
+else
+$(INSTALLED_BOOTIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+endif
+endif
+ifdef BOARD_INCLUDE_RECOVERY_ACPIO
+$(INSTALLED_BOOTIMAGE_TARGET): $(BOARD_RECOVERY_ACPIO)
+endif
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+$(INSTALLED_BOOTIMAGE_TARGET): $(INSTALLED_DTBIMAGE_TARGET)
+endif
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
+ $(INTERNAL_ROOT_FILES) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INTERNAL_RECOVERYIMAGE_FILES) \
+ $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
+ $(INSTALLED_2NDBOOTLOADER_TARGET) \
+ $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
+ $(recovery_resource_deps) \
+ $(recovery_fstab) \
+ $(RECOVERY_INSTALL_OTA_KEYS) \
+ $(BOARD_RECOVERY_KERNEL_MODULES) \
+ $(DEPMOD)
+ $(call pretty,"Target boot image from recovery: $@")
+ $(call build-recoveryimage-target, $@)
+endif # BOARD_USES_RECOVERY_AS_BOOT
+
+ifdef BOARD_INCLUDE_RECOVERY_DTBO
+ifdef BOARD_PREBUILT_RECOVERY_DTBOIMAGE
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE)
+else
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+endif
+endif
+ifdef BOARD_INCLUDE_RECOVERY_ACPIO
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(BOARD_RECOVERY_ACPIO)
+endif
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(INSTALLED_DTBIMAGE_TARGET)
+endif
+
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
+ $(INTERNAL_ROOT_FILES) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INTERNAL_RECOVERYIMAGE_FILES) \
+ $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
+ $(INSTALLED_2NDBOOTLOADER_TARGET) \
+ $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
+ $(recovery_resource_deps) \
+ $(recovery_fstab) \
+ $(RECOVERY_INSTALL_OTA_KEYS) \
+ $(BOARD_RECOVERY_KERNEL_MODULES) \
+ $(DEPMOD)
+ $(call build-recoveryimage-target, $@)
ifdef RECOVERY_RESOURCE_ZIP
$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME)
@@ -1475,6 +2014,134 @@ $(error MTD device is no longer supported and thus BOARD_NAND_SPARE_SIZE is depr
endif
# -----------------------------------------------------------------
+# the debug ramdisk, which is the original ramdisk plus additional
+# files: force_debuggable, adb_debug.prop and userdebug sepolicy.
+# When /force_debuggable is present, /init will load userdebug sepolicy
+# and property files to allow adb root, if the device is unlocked.
+
+ifdef BUILDING_RAMDISK_IMAGE
+BUILT_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk-debug.img
+INSTALLED_DEBUG_RAMDISK_TARGET := $(BUILT_DEBUG_RAMDISK_TARGET)
+
+INTERNAL_DEBUG_RAMDISK_FILES := $(filter $(TARGET_DEBUG_RAMDISK_OUT)/%, \
+ $(ALL_GENERATED_SOURCES) \
+ $(ALL_DEFAULT_INSTALLED_MODULES))
+
+# Note: TARGET_DEBUG_RAMDISK_OUT will be $(PRODUCT_OUT)/debug_ramdisk/first_stage_ramdisk,
+# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/debug_ramdisk.
+# But the root dir of the ramdisk to build is always $(PRODUCT_OUT)/debug_ramdisk.
+my_debug_ramdisk_root_dir := $(PRODUCT_OUT)/debug_ramdisk
+
+INSTALLED_FILES_FILE_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-ramdisk-debug.txt
+INSTALLED_FILES_JSON_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_DEBUG_RAMDISK:.txt=.json)
+$(INSTALLED_FILES_FILE_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_DEBUG_RAMDISK)
+$(INSTALLED_FILES_FILE_DEBUG_RAMDISK): DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
+
+# Cannot just depend on INTERNAL_DEBUG_RAMDISK_FILES like the other INSTALLED_FILES_FILE_* rules,
+# because ramdisk-debug.img rsyncs its contents from either ramdisk.img or ramdisk-recovery.img.
+# We need to depend on the built ramdisk-debug.img to get a complete list of the installed files.
+$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST)
+ echo Installed file list: $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(DEBUG_RAMDISK_ROOT_DIR) > $(@:.txt=.json)
+ build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+# ramdisk-debug.img will rsync the content from either ramdisk.img or ramdisk-recovery.img,
+# depending on whether BOARD_USES_RECOVERY_AS_BOOT is set or not.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+my_debug_ramdisk_sync_dir := $(TARGET_RECOVERY_ROOT_OUT)
+else
+my_debug_ramdisk_sync_dir := $(TARGET_RAMDISK_OUT)
+endif # BOARD_USES_RECOVERY_AS_BOOT
+
+$(INSTALLED_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_SYNC_DIR := $(my_debug_ramdisk_sync_dir)
+$(INSTALLED_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
+
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+# ramdisk-recovery.img isn't a make target, so depend on boot.img when the ramdisk is built for recovery.
+$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_BOOTIMAGE_TARGET)
+else
+# Depends on ramdisk.img; note that some targets have ramdisk.img but no boot.img, e.g., the emulator.
+$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
+endif # BOARD_USES_RECOVERY_AS_BOOT
+$(INSTALLED_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_DEBUG_RAMDISK_FILES) | $(MINIGZIP)
+ $(call pretty,"Target debug ram disk: $@")
+ mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
+ touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
+ rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
+ $(MKBOOTFS) -d $(TARGET_OUT) $(DEBUG_RAMDISK_ROOT_DIR) | $(MINIGZIP) > $@
+
+.PHONY: ramdisk_debug-nodeps
+ramdisk_debug-nodeps: DEBUG_RAMDISK_SYNC_DIR := $(my_debug_ramdisk_sync_dir)
+ramdisk_debug-nodeps: DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
+ramdisk_debug-nodeps: $(MKBOOTFS) | $(MINIGZIP)
+ echo "make $@: ignoring dependencies"
+ mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
+ touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
+ rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
+ $(MKBOOTFS) -d $(TARGET_OUT) $(DEBUG_RAMDISK_ROOT_DIR) | $(MINIGZIP) > $(INSTALLED_DEBUG_RAMDISK_TARGET)
+
+my_debug_ramdisk_sync_dir :=
+my_debug_ramdisk_root_dir :=
+
+endif # BUILDING_RAMDISK_IMAGE
+
+# -----------------------------------------------------------------
+# the boot-debug.img, which is the kernel plus ramdisk-debug.img
+#
+# Note: it's intentional to skip signing for boot-debug.img, because it
+# can only be used when the device is unlocked, which allows verification errors.
+ifneq ($(strip $(TARGET_NO_KERNEL)),true)
+
+INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot-debug.img
+
+# Replace ramdisk.img in $(MKBOOTIMG) ARGS with ramdisk-debug.img to build boot-debug.img
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(recovery_ramdisk),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_RECOVERYIMAGE_ARGS))
+else
+INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(INSTALLED_RAMDISK_TARGET),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_BOOTIMAGE_ARGS))
+endif
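
# A minimal, standalone sketch (not part of this change) of the $(subst) swap above, with
# made-up paths:
example_boot_args := --kernel out/kernel --ramdisk out/ramdisk.img --cmdline "androidboot.x=1"
example_debug_boot_args := $(subst out/ramdisk.img,out/ramdisk-debug.img,$(example_boot_args))
# example_debug_boot_args now reads:
#   --kernel out/kernel --ramdisk out/ramdisk-debug.img --cmdline "androidboot.x=1"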
+
+# If boot.img is chained but boot-debug.img is not signed, libavb in the bootloader
+# will fail to find valid AVB metadata at the end of /boot and thus stop booting.
+# So we sign boot-debug.img with a test key; an unlocked device will continue
+# booting despite the mismatched public key.
+ifneq ($(BOARD_AVB_BOOT_KEY_PATH),)
+BOARD_AVB_DEBUG_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
+$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS := \
+ --algorithm SHA256_RSA2048 --key $(BOARD_AVB_DEBUG_BOOT_KEY_PATH)
+$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_DEBUG_BOOT_KEY_PATH)
+endif
+
+# Depends on original boot.img and ramdisk-debug.img, to build the new boot-debug.img
+$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+ $(call pretty,"Target boot debug image: $@")
+ $(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),\
+ $(call assert-max-image-size,$@,$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))); \
+ $(AVBTOOL) add_hash_footer \
+ --image $@ \
+ --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
+ --partition_name boot $(PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS), \
+ $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+
+.PHONY: bootimage_debug-nodeps
+bootimage_debug-nodeps: $(MKBOOTIMG)
+ echo "make $@: ignoring dependencies"
+ $(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),\
+ $(call assert-max-image-size,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))); \
+ $(AVBTOOL) add_hash_footer \
+ --image $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+ --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
+ --partition_name boot $(PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS), \
+ $(call assert-max-image-size,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+
+endif # TARGET_NO_KERNEL
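+# A minimal sketch of what the boot-debug.img recipe expands to when
+# BOARD_AVB_BOOT_KEY_PATH is set (angle-bracketed values are placeholders):
+#   mkbootimg <boot args, with ramdisk-debug.img substituted in> --output boot-debug.img
+#   avbtool add_hash_footer --image boot-debug.img \
+#     --partition_size <BOARD_BOOTIMAGE_PARTITION_SIZE> --partition_name boot \
+#     --algorithm SHA256_RSA2048 --key external/avb/test/data/testkey_rsa2048.pem
+# An unlocked bootloader tolerates the mismatched test-key signature; a locked one
+# will not.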
+
+# -----------------------------------------------------------------
# system image
#
# Remove overridden packages from $(ALL_PDK_FUSION_FILES)
@@ -1482,30 +2149,36 @@ PDK_FUSION_SYSIMG_FILES := \
$(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk), \
$(ALL_PDK_FUSION_FILES))
-INTERNAL_SYSTEMIMAGE_FILES := $(filter $(TARGET_OUT)/%, \
+INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES) \
$(PDK_FUSION_SYSIMG_FILES) \
$(RECOVERY_RESOURCE_ZIP)) \
- $(PDK_FUSION_SYMLINK_STAMP)
+ $(PDK_FUSION_SYMLINK_STAMP))
FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
# ASAN libraries in the system image - add dependency.
ASAN_IN_SYSTEM_INSTALLED := $(TARGET_OUT)/asan.tar.bz2
-ifneq (,$(SANITIZE_TARGET))
+ifneq (,$(filter address, $(SANITIZE_TARGET)))
ifeq (true,$(SANITIZE_TARGET_SYSTEM))
FULL_SYSTEMIMAGE_DEPS += $(ASAN_IN_SYSTEM_INSTALLED)
endif
endif
+FULL_SYSTEMIMAGE_DEPS += $(INTERNAL_ROOT_FILES) $(INSTALLED_FILES_FILE_ROOT)
+
# -----------------------------------------------------------------
+ifdef BUILDING_SYSTEM_IMAGE
+
# installed file list
# It depends on everything that $(BUILT_SYSTEMIMAGE) depends on.
# We put installed-files.txt ahead of the image itself in the dependency graph
# so that we can still get the size stats even if the build fails because the
# system image is too large.
INSTALLED_FILES_FILE := $(PRODUCT_OUT)/installed-files.txt
+INSTALLED_FILES_JSON := $(INSTALLED_FILES_FILE:.txt=.json)
+$(INSTALLED_FILES_FILE): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON)
$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST)
@echo Installed file list: $@
@mkdir -p $(dir $@)
@@ -1552,43 +2225,72 @@ define create-system-product-symlink
endef
endif
+# Create symlink /system/product_services to /product_services if necessary.
+ifdef BOARD_USES_PRODUCT_SERVICESIMAGE
+define create-system-product_services-symlink
+$(hide) if [ -d $(TARGET_OUT)/product_services ] && [ ! -h $(TARGET_OUT)/product_services ]; then \
+ echo 'Non-symlink $(TARGET_OUT)/product_services detected!' 1>&2; \
+ echo 'You cannot install files to $(TARGET_OUT)/product_services while building a separate product_services.img!' 1>&2; \
+ exit 1; \
+fi
+$(hide) ln -sf /product_services $(TARGET_OUT)/product_services
+endef
+else
+define create-system-product_services-symlink
+endef
+endif
+
+# Create symlink /vendor/odm to /odm if necessary.
+ifdef BOARD_USES_ODMIMAGE
+define create-vendor-odm-symlink
+$(hide) if [ -d $(TARGET_OUT_VENDOR)/odm ] && [ ! -h $(TARGET_OUT_VENDOR)/odm ]; then \
+ echo 'Non-symlink $(TARGET_OUT_VENDOR)/odm detected!' 1>&2; \
+ echo 'You cannot install files to $(TARGET_OUT_VENDOR)/odm while building a separate odm.img!' 1>&2; \
+ exit 1; \
+fi
+$(hide) ln -sf /odm $(TARGET_OUT_VENDOR)/odm
+endef
+else
+define create-vendor-odm-symlink
+endef
+endif
+
# $(1): output file
define build-systemimage-target
@echo "Target system fs image: $(1)"
$(call create-system-vendor-symlink)
$(call create-system-product-symlink)
+ $(call create-system-product_services-symlink)
+ $(call check-apex-libs-absence-on-disk)
@mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
- $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
+ $(call generate-image-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt,system, \
skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
- || ( echo "Out of space? the tree size of $(TARGET_OUT) is (MB): " 1>&2 ;\
- du -sm $(TARGET_OUT) 1>&2;\
- if [ "$(INTERNAL_USERIMAGES_EXT_VARIANT)" == "ext4" ]; then \
- maxsize=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE); \
- echo "The max is $$(( maxsize / 1048576 )) MB." 1>&2 ;\
- else \
- echo "The max is $$(( $(BOARD_SYSTEMIMAGE_PARTITION_SIZE) / 1048576 )) MB." 1>&2 ;\
- fi; \
- mkdir -p $(DIST_DIR); cp $(INSTALLED_FILES_FILE) $(DIST_DIR)/installed-files-rescued.txt; \
+ || ( mkdir -p $${DIST_DIR}; cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
exit 1 )
endef
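+# Sketch of the generated system_image_info.txt (the exact keys come from
+# generate-image-prop-dictionary and depend on the board configuration; these are
+# only illustrative):
+#   fs_type=ext4
+#   system_size=3221225472
+#   skip_fsck=true
+# build_image.py reads this dictionary to pick the filesystem builder and size.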
$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(BUILD_IMAGE_SRCS)
$(call build-systemimage-target,$@)
-INSTALLED_SYSTEMIMAGE := $(PRODUCT_OUT)/system.img
+INSTALLED_SYSTEMIMAGE_TARGET := $(PRODUCT_OUT)/system.img
SYSTEMIMAGE_SOURCE_DIR := $(TARGET_OUT)
+# INSTALLED_SYSTEMIMAGE_TARGET used to be named INSTALLED_SYSTEMIMAGE. Create an alias for backward
+# compatibility, in case device-specific Makefiles still refer to the old name.
+INSTALLED_SYSTEMIMAGE := $(INSTALLED_SYSTEMIMAGE_TARGET)
+
# The system partition needs room for the recovery image as well. We
# now store the recovery image as a binary patch using the boot image
# as the source (since they are very similar). Generate the patch so
# we can see how big it's going to be, and include that in the system
# image size check calculation.
+ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
-ifneq (,$(filter true, $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO)))
+ifneq (,$(filter true, $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
diff_tool := $(HOST_OUT_EXECUTABLES)/bsdiff
else
diff_tool := $(HOST_OUT_EXECUTABLES)/imgdiff
@@ -1597,31 +2299,31 @@ intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch)
RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p
$(RECOVERY_FROM_BOOT_PATCH): PRIVATE_DIFF_TOOL := $(diff_tool)
$(RECOVERY_FROM_BOOT_PATCH): \
- $(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(diff_tool)
+ $(INSTALLED_RECOVERYIMAGE_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(diff_tool)
@echo "Construct recovery from boot"
mkdir -p $(dir $@)
$(PRIVATE_DIFF_TOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true
RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET)
-endif
-endif
-
+endif # BOARD_USES_FULL_RECOVERY_IMAGE
+endif # INSTALLED_RECOVERYIMAGE_TARGET
+endif # INSTALLED_BOOTIMAGE_TARGET
-$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH)
+$(INSTALLED_SYSTEMIMAGE_TARGET): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH)
@echo "Install system fs image: $@"
$(copy-file-to-target)
$(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
-systemimage: $(INSTALLED_SYSTEMIMAGE)
+systemimage: $(INSTALLED_SYSTEMIMAGE_TARGET)
.PHONY: systemimage-nodeps snod
systemimage-nodeps snod: $(filter-out systemimage-nodeps snod,$(MAKECMDGOALS)) \
| $(INTERNAL_USERIMAGES_DEPS)
@echo "make $@: ignoring dependencies"
- $(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE))
- $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
+ $(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE_TARGET))
+ $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
ifneq (,$(filter systemimage-nodeps snod, $(MAKECMDGOALS)))
ifeq (true,$(WITH_DEXPREOPT))
@@ -1629,8 +2331,10 @@ $(warning Warning: with dexpreopt enabled, you may need a full rebuild.)
endif
endif
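+# Usage sketch: `m systemimage` builds $(PRODUCT_OUT)/system.img with full
+# dependency tracking, while `m snod` only repacks the current contents of
+# $(TARGET_OUT), which is why the dexpreopt warning above applies to snod.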
-.PHONY: sync
-sync: $(INTERNAL_SYSTEMIMAGE_FILES)
+endif # BUILDING_SYSTEM_IMAGE
+
+.PHONY: sync syncsys
+sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
#######
## system tarball
@@ -1638,6 +2342,7 @@ define build-systemtarball-target
$(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
$(call create-system-vendor-symlink)
$(call create-system-product-symlink)
+ $(call create-system-product_services-symlink)
$(MKTARBALL) $(FS_GET_STATS) \
$(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
$(INSTALLED_SYSTEMTARBALL_TARGET) $(TARGET_OUT)
@@ -1668,10 +2373,11 @@ stnod: systemtarball-nodeps
## PDK_PLATFORM_ZIP_PRODUCT_BINARIES lists additional files to store in platform.zip.
## The variable is typically set from BoardConfig.mk.
## Files under the out dir are rejected, to prevent possible conflicts with other rules.
+ifneq (,$(BUILD_PLATFORM_ZIP))
pdk_odex_javalibs := $(strip $(foreach m,$(DEXPREOPT.MODULES.JAVA_LIBRARIES),\
- $(if $(filter $(DEXPREOPT.$(m).INSTALLED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
+ $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
pdk_odex_apps := $(strip $(foreach m,$(DEXPREOPT.MODULES.APPS),\
- $(if $(filter $(DEXPREOPT.$(m).INSTALLED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
+ $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
pdk_classes_dex := $(strip \
$(foreach m,$(pdk_odex_javalibs),$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar) \
$(foreach m,$(pdk_odex_apps),$(call intermediates-dir-for,APPS,$(m))/package.dex.apk))
@@ -1707,31 +2413,46 @@ INSTALLED_PLATFORM_ZIP := $(PRODUCT_OUT)/platform.zip
$(INSTALLED_PLATFORM_ZIP): PRIVATE_DEX_FILES := $(pdk_classes_dex)
$(INSTALLED_PLATFORM_ZIP): PRIVATE_ODEX_CONFIG := $(pdk_odex_config_mk)
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES) $(pdk_odex_config_mk)
+$(INSTALLED_PLATFORM_ZIP) : $(SOONG_ZIP)
+# Dependencies for the other partitions are added below, after their file lists
+# are known.
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES) $(pdk_classes_dex) $(pdk_odex_config_mk) $(API_FINGERPRINT)
$(call pretty,"Platform zip package: $(INSTALLED_PLATFORM_ZIP)")
- $(hide) rm -f $@
- $(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
- $(TARGET_COPY_OUT_SYSTEM) \
- $(patsubst $(PRODUCT_OUT)/%, %, $(TARGET_OUT_NOTICE_FILES)) \
- $(addprefix symbols/,$(PDK_SYMBOL_FILES_LIST))
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
- $(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
- $(TARGET_COPY_OUT_VENDOR)
-endif
-ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
- $(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
- $(TARGET_COPY_OUT_PRODUCT)
+ rm -f $@ $@.lst
+ echo "-C $(PRODUCT_OUT)" >> $@.lst
+ echo "-D $(TARGET_OUT)" >> $@.lst
+ echo "-D $(TARGET_OUT_NOTICE_FILES)" >> $@.lst
+ echo "$(addprefix -f $(TARGET_OUT_UNSTRIPPED)/,$(PDK_SYMBOL_FILES_LIST))" >> $@.lst
+ifdef BUILDING_VENDOR_IMAGE
+ echo "-D $(TARGET_OUT_VENDOR)" >> $@.lst
+endif
+ifdef BUILDING_PRODUCT_IMAGE
+ echo "-D $(TARGET_OUT_PRODUCT)" >> $@.lst
+endif
+ifdef BUILDING_PRODUCT_SERVICES_IMAGE
+ echo "-D $(TARGET_OUT_PRODUCT_SERVICES)" >> $@.lst
+endif
+ifdef BUILDING_ODM_IMAGE
+ echo "-D $(TARGET_OUT_ODM)" >> $@.lst
endif
ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
- $(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
+ echo "-C $(OUT_DIR)" >> $@.lst
+ for f in $(filter-out $(PRIVATE_DEX_FILES),$(addprefix -f $(OUT_DIR)/,$(PDK_PLATFORM_JAVA_ZIP_CONTENTS))); do \
+ if [ -e $$f ]; then \
+ echo "-f $$f"; \
+ fi \
+ done >> $@.lst
endif
ifneq ($(PDK_PLATFORM_ZIP_PRODUCT_BINARIES),)
- $(hide) zip -qryX $@ $(PDK_PLATFORM_ZIP_PRODUCT_BINARIES)
+ echo "-C . $(addprefix -f ,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))" >> $@.lst
endif
@# Add dex-preopt files and config.
- $(if $(PRIVATE_DEX_FILES),$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@ $(PRIVATE_DEX_FILES)))
- $(hide) touch $(PRODUCT_OUT)/pdk.mk
- $(hide) zip -qryXj $@ $(PRIVATE_ODEX_CONFIG) $(PRODUCT_OUT)/pdk.mk
+ $(if $(PRIVATE_DEX_FILES),\
+ echo "-C $(OUT_DIR) $(addprefix -f ,$(PRIVATE_DEX_FILES))") >> $@.lst
+ echo "-C $(dir $(API_FINGERPRINT)) -f $(API_FINGERPRINT)" >> $@.lst
+ touch $(PRODUCT_OUT)/pdk.mk
+ echo "-C $(PRODUCT_OUT) -f $(PRIVATE_ODEX_CONFIG) -f $(PRODUCT_OUT)/pdk.mk" >> $@.lst
+ $(SOONG_ZIP) --ignore_missing_files -o $@ @$@.lst
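+# Sketch of the generated $@.lst @-file consumed by soong_zip (entries are
+# illustrative):
+#   -C out/target/product/<device>
+#   -D out/target/product/<device>/system
+#   -D out/target/product/<device>/obj/NOTICE_FILES
+#   -f out/target/product/<device>/pdk.mk
+# soong_zip reads the whole list in one pass instead of invoking zip repeatedly.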
.PHONY: platform
platform: $(INSTALLED_PLATFORM_ZIP)
@@ -1744,6 +2465,8 @@ ifneq (,$(filter platform platform-java, $(MAKECMDGOALS)))
$(call dist-for-goals, platform platform-java, $(INSTALLED_PLATFORM_ZIP))
endif
+endif # BUILD_PLATFORM_ZIP
+
# -----------------------------------------------------------------
## boot tarball
define build-boottarball-target
@@ -1777,15 +2500,7 @@ boottarball-nodeps btnod: $(FS_GET_STATS) \
INTERNAL_USERDATAIMAGE_FILES := \
$(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
-# Don't build userdata.img if it's extfs but no partition size
-skip_userdata.img :=
-ifdef INTERNAL_USERIMAGES_EXT_VARIANT
-ifndef BOARD_USERDATAIMAGE_PARTITION_SIZE
-skip_userdata.img := true
-endif
-endif
-
-ifneq ($(skip_userdata.img),true)
+ifdef BUILDING_USERDATA_IMAGE
userdataimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,userdata)
BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img
@@ -1794,7 +2509,7 @@ define build-userdataimage-target
$(call pretty,"Target userdata fs image: $(INSTALLED_USERDATAIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_DATA)
@mkdir -p $(userdataimage_intermediates) && rm -rf $(userdataimage_intermediates)/userdata_image_info.txt
- $(call generate-userimage-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt,userdata,skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET) $(TARGET_OUT)
@@ -1814,8 +2529,7 @@ $(INSTALLED_USERDATAIMAGE_TARGET): $(INSTALLED_USERDATAIMAGE_TARGET_DEPS)
userdataimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
$(build-userdataimage-target)
-endif # not skip_userdata.img
-skip_userdata.img :=
+endif # BUILDING_USERDATA_IMAGE
# ASAN libraries in the system image - build rule.
ASAN_OUT_DIRS_FOR_SYSTEM_INSTALL := $(sort $(patsubst $(PRODUCT_OUT)/%,%,\
@@ -1836,8 +2550,8 @@ define build-userdatatarball-target
$(call pretty,"Target userdata fs tarball: " \
"$(INSTALLED_USERDATATARBALL_TARGET)")
$(MKTARBALL) $(FS_GET_STATS) \
- $(PRODUCT_OUT) data $(PRIVATE_USERDATA_TAR) \
- $(INSTALLED_USERDATATARBALL_TARGET) $(TARGET_OUT)
+ $(PRODUCT_OUT) data $(PRIVATE_USERDATA_TAR) \
+ $(INSTALLED_USERDATATARBALL_TARGET) $(TARGET_OUT)
endef
userdata_tar := $(PRODUCT_OUT)/userdata.tar
@@ -1889,7 +2603,7 @@ endif # BOARD_BPT_INPUT_FILES
# -----------------------------------------------------------------
# cache partition image
-ifdef BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+ifdef BUILDING_CACHE_IMAGE
INTERNAL_CACHEIMAGE_FILES := \
$(filter $(TARGET_OUT_CACHE)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
@@ -1901,7 +2615,7 @@ define build-cacheimage-target
$(call pretty,"Target cache fs image: $(INSTALLED_CACHEIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_CACHE)
@mkdir -p $(cacheimage_intermediates) && rm -rf $(cacheimage_intermediates)/cache_image_info.txt
- $(call generate-userimage-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt,cache,skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET) $(TARGET_OUT)
@@ -1917,16 +2631,15 @@ $(INSTALLED_CACHEIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAG
cacheimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
$(build-cacheimage-target)
-else # BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+else # BUILDING_CACHE_IMAGE
# we need to ignore the broken cache link when doing the rsync
IGNORE_CACHE_LINK := --exclude=cache
-endif # BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+endif # BUILDING_CACHE_IMAGE
# -----------------------------------------------------------------
# system_other partition image
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
-BOARD_USES_SYSTEM_OTHER := true
-
# Marker file to identify that odex files are installed
INSTALLED_SYSTEM_OTHER_ODEX_MARKER := $(TARGET_OUT_SYSTEM_OTHER)/system-other-odex-marker
ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)
@@ -1934,14 +2647,18 @@ $(INSTALLED_SYSTEM_OTHER_ODEX_MARKER):
$(hide) touch $@
endif
-ifdef BOARD_USES_SYSTEM_OTHER
INTERNAL_SYSTEMOTHERIMAGE_FILES := \
$(filter $(TARGET_OUT_SYSTEM_OTHER)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES)\
$(ALL_PDK_FUSION_FILES)) \
$(PDK_FUSION_SYMLINK_STAMP)
+# system_other dex files are installed as a side-effect of installing system image files
+INTERNAL_SYSTEMOTHERIMAGE_FILES += $(INTERNAL_SYSTEMIMAGE_FILES)
+
INSTALLED_FILES_FILE_SYSTEMOTHER := $(PRODUCT_OUT)/installed-files-system-other.txt
+INSTALLED_FILES_JSON_SYSTEMOTHER := $(INSTALLED_FILES_FILE_SYSTEMOTHER:.txt=.json)
+$(INSTALLED_FILES_FILE_SYSTEMOTHER): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEMOTHER)
$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST)
@echo Installed file list: $@
@mkdir -p $(dir $@)
@@ -1949,6 +2666,17 @@ $(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILES
$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+# Determines partition size for system_other.img.
+ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
+ifneq ($(filter system,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)),)
+INTERNAL_SYSTEM_OTHER_PARTITION_SIZE := $(BOARD_SUPER_PARTITION_SYSTEM_DEVICE_SIZE)
+endif
+endif
+
+ifndef INTERNAL_SYSTEM_OTHER_PARTITION_SIZE
+INTERNAL_SYSTEM_OTHER_PARTITION_SIZE:= $(BOARD_SYSTEMIMAGE_PARTITION_SIZE)
+endif
+
systemotherimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,system_other)
BUILT_SYSTEMOTHERIMAGE_TARGET := $(PRODUCT_OUT)/system_other.img
@@ -1958,7 +2686,7 @@ define build-systemotherimage-target
$(call pretty,"Target system_other fs image: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_SYSTEM_OTHER)
@mkdir -p $(systemotherimage_intermediates) && rm -rf $(systemotherimage_intermediates)/system_other_image_info.txt
- $(call generate-userimage-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt,system,skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT_SYSTEM_OTHER) $(systemotherimage_intermediates)/system_other_image_info.txt $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) $(TARGET_OUT)
@@ -1977,22 +2705,110 @@ endif
systemotherimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
$(build-systemotherimage-target)
-endif # BOARD_USES_SYSTEM_OTHER
+endif # BUILDING_SYSTEM_OTHER_IMAGE
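+# Usage sketch: `m systemotherimage-nodeps` repacks $(TARGET_OUT_SYSTEM_OTHER) into
+# system_other.img without re-checking module dependencies, mirroring the other
+# *-nodeps image targets.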
# -----------------------------------------------------------------
# vendor partition image
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ifdef BUILDING_VENDOR_IMAGE
INTERNAL_VENDORIMAGE_FILES := \
$(filter $(TARGET_OUT_VENDOR)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES)\
$(ALL_PDK_FUSION_FILES)) \
$(PDK_FUSION_SYMLINK_STAMP)
+# Final vendor VINTF manifest, including fragments. This is not assembled
+# on the device because it depends on every module in a given device
+# image that defines a vintf_fragment.
+ifdef BUILT_VENDOR_MANIFEST
+BUILT_ASSEMBLED_VENDOR_MANIFEST := $(PRODUCT_OUT)/verified_assembled_vendor_manifest.xml
+ifeq (true,$(PRODUCT_ENFORCE_VINTF_MANIFEST))
+ifneq ($(strip $(DEVICE_FRAMEWORK_COMPATIBILITY_MATRIX_FILE) $(DEVICE_PRODUCT_COMPATIBILITY_MATRIX_FILE)),)
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): PRIVATE_SYSTEM_ASSEMBLE_VINTF_ENV_VARS := VINTF_ENFORCE_NO_UNUSED_HALS=true
+endif # DEVICE_FRAMEWORK_COMPATIBILITY_MATRIX_FILE or DEVICE_PRODUCT_COMPATIBILITY_MATRIX_FILE
+endif # PRODUCT_ENFORCE_VINTF_MANIFEST
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(HOST_OUT_EXECUTABLES)/assemble_vintf
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(BUILT_SYSTEM_MATRIX)
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(BUILT_VENDOR_MANIFEST)
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(INTERNAL_VENDORIMAGE_FILES)
+
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): PRIVATE_FLAGS :=
+
+# -- Kernel version and configurations.
+ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),true)
+
+# BOARD_KERNEL_CONFIG_FILE and BOARD_KERNEL_VERSION can be used to override the values extracted
+# from INSTALLED_KERNEL_TARGET.
+ifdef BOARD_KERNEL_CONFIG_FILE
+ifdef BOARD_KERNEL_VERSION
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(BOARD_KERNEL_CONFIG_FILE)
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): PRIVATE_FLAGS += --kernel $(BOARD_KERNEL_VERSION):$(BOARD_KERNEL_CONFIG_FILE)
+my_board_extracted_kernel := true
+endif # BOARD_KERNEL_VERSION
+endif # BOARD_KERNEL_CONFIG_FILE
+
+ifneq ($(my_board_extracted_kernel),true)
+ifndef INSTALLED_KERNEL_TARGET
+$(warning No INSTALLED_KERNEL_TARGET is defined when PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS \
+ is true. Information about the updated kernel cannot be built into the OTA update package. \
+ You can fix this by: (1) setting TARGET_NO_KERNEL to false and installing the built kernel \
+ to $(PRODUCT_OUT)/kernel, so that kernel information will be extracted from the built kernel; \
+ or (2) extracting kernel configuration and defining BOARD_KERNEL_CONFIG_FILE and \
+ BOARD_KERNEL_VERSION manually; or (3) unsetting PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS \
+ manually.)
+else
+intermediates := $(call intermediates-dir-for,ETC,$(notdir $(BUILT_ASSEMBLED_VENDOR_MANIFEST)))
+
+# Tools for decompression that are not in PATH.
+# Check $(EXTRACT_KERNEL) for decompression algorithms supported by the script.
+# Algorithms that are in the script but not in this list will be found in PATH.
+my_decompress_tools := \
+ lz4:$(HOST_OUT_EXECUTABLES)/lz4 \
+
+my_kernel_configs := $(intermediates)/kernel_configs.txt
+my_kernel_version := $(intermediates)/kernel_version.txt
+$(my_kernel_configs): .KATI_IMPLICIT_OUTPUTS := $(my_kernel_version)
+$(my_kernel_configs): PRIVATE_KERNEL_VERSION_FILE := $(my_kernel_version)
+$(my_kernel_configs): PRIVATE_DECOMPRESS_TOOLS := $(my_decompress_tools)
+$(my_kernel_configs): $(foreach pair,$(my_decompress_tools),$(call word-colon,2,$(pair)))
+$(my_kernel_configs): $(EXTRACT_KERNEL) $(INSTALLED_KERNEL_TARGET)
+ $< --tools $(PRIVATE_DECOMPRESS_TOOLS) --input $(INSTALLED_KERNEL_TARGET) \
+ --output-configs $@ \
+ --output-version $(PRIVATE_KERNEL_VERSION_FILE)
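+# Illustrative expansion of the recipe above (paths are placeholders; $(EXTRACT_KERNEL)
+# is assumed to point at the kernel extraction script configured elsewhere):
+#   <extract_kernel> --tools lz4:out/host/linux-x86/bin/lz4 \
+#     --input $(PRODUCT_OUT)/kernel \
+#     --output-configs <intermediates>/kernel_configs.txt \
+#     --output-version <intermediates>/kernel_version.txt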
+
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): $(my_kernel_configs) $(my_kernel_version)
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST): PRIVATE_FLAGS += --kernel $$(cat $(my_kernel_version)):$(my_kernel_configs)
+
+intermediates :=
+my_kernel_configs :=
+my_kernel_version :=
+my_decompress_tools :=
+
+endif # my_board_extracted_kernel
+my_board_extracted_kernel :=
+
+endif # INSTALLED_KERNEL_TARGET
+endif # PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
+
+$(BUILT_ASSEMBLED_VENDOR_MANIFEST):
+ @echo "Verifying vendor VINTF manifest."
+ PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+ $(PRIVATE_SYSTEM_ASSEMBLE_VINTF_ENV_VARS) \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+ $(PRIVATE_FLAGS) \
+ -c $(BUILT_SYSTEM_MATRIX) \
+ -i $(BUILT_VENDOR_MANIFEST) \
+ $$([ -d $(TARGET_OUT_VENDOR)/etc/vintf/manifest ] && \
+ find $(TARGET_OUT_VENDOR)/etc/vintf/manifest -type f -name "*.xml" | \
+ sed "s/^/-i /" | tr '\n' ' ') -o $@
+endif # BUILT_VENDOR_MANIFEST
+
# platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
+INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST)
@echo Installed file list: $@
@mkdir -p $(dir $@)
@@ -2006,8 +2822,9 @@ BUILT_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
define build-vendorimage-target
$(call pretty,"Target vendor fs image: $(INSTALLED_VENDORIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_VENDOR)
+ $(call create-vendor-odm-symlink)
@mkdir -p $(vendorimage_intermediates) && rm -rf $(vendorimage_intermediates)/vendor_image_info.txt
- $(call generate-userimage-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
$(if $(BOARD_VENDOR_KERNEL_MODULES), \
$(call build-image-kernel-modules,$(BOARD_VENDOR_KERNEL_MODULES),$(TARGET_OUT_VENDOR),vendor/,$(call intermediates-dir-for,PACKAGING,depmod_vendor)))
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
@@ -2018,6 +2835,9 @@ endef
# We just build this directly to the install location.
INSTALLED_VENDORIMAGE_TARGET := $(BUILT_VENDORIMAGE_TARGET)
+ifdef BUILT_VENDOR_MANIFEST
+$(INSTALLED_VENDORIMAGE_TARGET): $(BUILT_ASSEMBLED_VENDOR_MANIFEST)
+endif
$(INSTALLED_VENDORIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(INSTALLED_FILES_FILE_VENDOR) $(BUILD_IMAGE_SRCS) $(DEPMOD) $(BOARD_VENDOR_KERNEL_MODULES)
$(build-vendorimage-target)
@@ -2034,7 +2854,7 @@ endif
# -----------------------------------------------------------------
# product partition image
-ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ifdef BUILDING_PRODUCT_IMAGE
INTERNAL_PRODUCTIMAGE_FILES := \
$(filter $(TARGET_OUT_PRODUCT)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES)\
@@ -2045,6 +2865,8 @@ INTERNAL_PRODUCTIMAGE_FILES := \
$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
+INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
+$(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST)
@echo Installed file list: $@
@mkdir -p $(dir $@)
@@ -2059,7 +2881,7 @@ define build-productimage-target
$(call pretty,"Target product fs image: $(INSTALLED_PRODUCTIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_PRODUCT)
@mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
- $(call generate-userimage-prop-dictionary, $(productimage_intermediates)/product_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(productimage_intermediates)/product_image_info.txt,product,skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
./build/tools/releasetools/build_image.py \
$(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
@@ -2083,6 +2905,136 @@ $(eval $(call copy-one-file,$(BOARD_PREBUILT_PRODUCTIMAGE),$(INSTALLED_PRODUCTIM
endif
# -----------------------------------------------------------------
+# Final framework VINTF manifest, including fragments. This is not assembled
+# on the device because it depends on every module in a given device
+# image that defines a vintf_fragment.
+
+BUILT_ASSEMBLED_FRAMEWORK_MANIFEST := $(PRODUCT_OUT)/verified_assembled_framework_manifest.xml
+$(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST): $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+ $(BUILT_VENDOR_MATRIX) \
+ $(BUILT_SYSTEM_MANIFEST) \
+ $(FULL_SYSTEMIMAGE_DEPS) \
+ $(BUILT_PRODUCT_MANIFEST) \
+ $(BUILT_PRODUCTIMAGE_TARGET)
+ @echo "Verifying framework VINTF manifest."
+ PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+ -o $@ \
+ -c $(BUILT_VENDOR_MATRIX) \
+ -i $(BUILT_SYSTEM_MANIFEST) \
+ $(addprefix -i ,\
+ $(filter $(TARGET_OUT)/etc/vintf/manifest/%.xml,$(FULL_SYSTEMIMAGE_DEPS)) \
+ $(BUILT_PRODUCT_MANIFEST) \
+ $(filter $(TARGET_OUT_PRODUCT)/etc/vintf/manifest/%.xml,$(INTERNAL_PRODUCTIMAGE_FILES)))
+
+droidcore: $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST)
+
+# -----------------------------------------------------------------
+# product_services partition image
+ifdef BUILDING_PRODUCT_SERVICES_IMAGE
+INTERNAL_PRODUCT_SERVICESIMAGE_FILES := \
+ $(filter $(TARGET_OUT_PRODUCT_SERVICES)/%,\
+ $(ALL_DEFAULT_INSTALLED_MODULES)\
+ $(ALL_PDK_FUSION_FILES)) \
+ $(PDK_FUSION_SYMLINK_STAMP)
+
+# platform.zip depends on $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES)
+
+INSTALLED_FILES_FILE_PRODUCT_SERVICES := $(PRODUCT_OUT)/installed-files-product_services.txt
+INSTALLED_FILES_JSON_PRODUCT_SERVICES := $(INSTALLED_FILES_FILE_PRODUCT_SERVICES:.txt=.json)
+$(INSTALLED_FILES_FILE_PRODUCT_SERVICES): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT_SERVICES)
+$(INSTALLED_FILES_FILE_PRODUCT_SERVICES) : $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES) $(FILESLIST)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT_SERVICES) > $(@:.txt=.json)
+ $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+product_servicesimage_intermediates := \
+ $(call intermediates-dir-for,PACKAGING,product_services)
+BUILT_PRODUCT_SERVICESIMAGE_TARGET := $(PRODUCT_OUT)/product_services.img
+define build-product_servicesimage-target
+ $(call pretty,"Target product_services fs image: $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET)")
+ @mkdir -p $(TARGET_OUT_PRODUCT_SERVICES)
+ @mkdir -p $(product_servicesimage_intermediates) && rm -rf $(product_servicesimage_intermediates)/product_services_image_info.txt
+ $(call generate-image-prop-dictionary, $(product_servicesimage_intermediates)/product_services_image_info.txt,product_services, skip_fsck=true)
+ $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+ ./build/tools/releasetools/build_image.py \
+ $(TARGET_OUT_PRODUCT_SERVICES) $(product_servicesimage_intermediates)/product_services_image_info.txt $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) $(TARGET_OUT)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET),$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_PRODUCT_SERVICESIMAGE_TARGET := $(BUILT_PRODUCT_SERVICESIMAGE_TARGET)
+$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES) $(INSTALLED_FILES_FILE_PRODUCT_SERVICES) $(BUILD_IMAGE_SRCS)
+ $(build-product_servicesimage-target)
+
+.PHONY: productservicesimage-nodeps psnod
+productservicesimage-nodeps psnod: | $(INTERNAL_USERIMAGES_DEPS)
+ $(build-product_servicesimage-target)
+
+sync: $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_PRODUCT_SERVICESIMAGE
+INSTALLED_PRODUCT_SERVICESIMAGE_TARGET := $(PRODUCT_OUT)/product_services.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_PRODUCT_SERVICESIMAGE),$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET)))
+endif
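+# Usage sketch: `m psnod` (productservicesimage-nodeps) repacks
+# $(TARGET_OUT_PRODUCT_SERVICES) into product_services.img without re-checking
+# module dependencies, mirroring snod for system.img.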
+
+# -----------------------------------------------------------------
+# odm partition image
+ifdef BUILDING_ODM_IMAGE
+INTERNAL_ODMIMAGE_FILES := \
+ $(filter $(TARGET_OUT_ODM)/%,\
+ $(ALL_DEFAULT_INSTALLED_MODULES)\
+ $(ALL_PDK_FUSION_FILES)) \
+ $(PDK_FUSION_SYMLINK_STAMP)
+# platform.zip depends on $(INTERNAL_ODMIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_ODMIMAGE_FILES)
+
+INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
+INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
+$(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
+$(INSTALLED_FILES_FILE_ODM) : $(INTERNAL_ODMIMAGE_FILES) $(FILESLIST)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
+ $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+odmimage_intermediates := \
+ $(call intermediates-dir-for,PACKAGING,odm)
+BUILT_ODMIMAGE_TARGET := $(PRODUCT_OUT)/odm.img
+define build-odmimage-target
+ $(call pretty,"Target odm fs image: $(INSTALLED_ODMIMAGE_TARGET)")
+ @mkdir -p $(TARGET_OUT_ODM)
+ @mkdir -p $(odmimage_intermediates) && rm -rf $(odmimage_intermediates)/odm_image_info.txt
+ $(call generate-image-prop-dictionary, $(odmimage_intermediates)/odm_image_info.txt,odm,skip_fsck=true)
+ $(if $(BOARD_ODM_KERNEL_MODULES), \
+ $(call build-image-kernel-modules,$(BOARD_ODM_KERNEL_MODULES),$(TARGET_OUT_ODM),odm/,$(call intermediates-dir-for,PACKAGING,depmod_odm)))
+ $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+ ./build/tools/releasetools/build_image.py \
+ $(TARGET_OUT_ODM) $(odmimage_intermediates)/odm_image_info.txt $(INSTALLED_ODMIMAGE_TARGET) $(TARGET_OUT)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_ODMIMAGE_TARGET),$(BOARD_ODMIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_ODMIMAGE_TARGET := $(BUILT_ODMIMAGE_TARGET)
+$(INSTALLED_ODMIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_ODMIMAGE_FILES) $(INSTALLED_FILES_FILE_ODM) $(BUILD_IMAGE_SRCS) $(DEPMOD) $(BOARD_ODM_KERNEL_MODULES)
+ $(build-odmimage-target)
+
+.PHONY: odmimage-nodeps onod
+odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS) $(DEPMOD)
+ $(build-odmimage-target)
+
+sync: $(INTERNAL_ODMIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_ODMIMAGE
+INSTALLED_ODMIMAGE_TARGET := $(PRODUCT_OUT)/odm.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_ODMIMAGE),$(INSTALLED_ODMIMAGE_TARGET)))
+endif
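+# Usage sketch: `m onod` (odmimage-nodeps) repacks $(TARGET_OUT_ODM) into odm.img;
+# if BOARD_ODM_KERNEL_MODULES is set, the listed modules are staged into odm/ and
+# depmod is run over them first, as in the define above.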
+
+# -----------------------------------------------------------------
# dtbo image
ifdef BOARD_PREBUILT_DTBOIMAGE
INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
@@ -2091,16 +3043,24 @@ ifeq ($(BOARD_AVB_ENABLE),true)
$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
cp $(BOARD_PREBUILT_DTBOIMAGE) $@
$(AVBTOOL) add_hash_footer \
- --image $@ \
- --partition_size $(BOARD_DTBOIMG_PARTITION_SIZE) \
- --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
- $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+ --image $@ \
+ --partition_size $(BOARD_DTBOIMG_PARTITION_SIZE) \
+ --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+ $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
else
$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
cp $(BOARD_PREBUILT_DTBOIMAGE) $@
endif
-endif
+endif # BOARD_PREBUILT_DTBOIMAGE
+
+# Returns a list of image targets corresponding to the given list of partitions. For example, it
+# returns "$(INSTALLED_PRODUCTIMAGE_TARGET)" for "product", or "$(INSTALLED_SYSTEMIMAGE_TARGET)
+# $(INSTALLED_VENDORIMAGE_TARGET)" for "system vendor".
+# $(1): list of partitions like "system", "vendor" or "system product product_services".
+define images-for-partitions
+$(strip $(foreach item,$(1),$(INSTALLED_$(call to-upper,$(item))IMAGE_TARGET)))
+endef
# -----------------------------------------------------------------
# vbmeta image
@@ -2117,8 +3077,76 @@ BOARD_AVB_ALGORITHM := SHA256_RSA4096
BOARD_AVB_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
endif
-INTERNAL_AVB_SIGNING_ARGS := \
- --algorithm $(BOARD_AVB_ALGORITHM) --key $(BOARD_AVB_KEY_PATH)
+# AVB signing for system_other.img.
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+ifdef BOARD_AVB_SYSTEM_OTHER_KEY_PATH
+$(if $(BOARD_AVB_SYSTEM_OTHER_ALGORITHM),,$(error BOARD_AVB_SYSTEM_OTHER_ALGORITHM is not defined))
+else
+# If key path isn't specified, use the same key as BOARD_AVB_KEY_PATH.
+BOARD_AVB_SYSTEM_OTHER_KEY_PATH := $(BOARD_AVB_KEY_PATH)
+BOARD_AVB_SYSTEM_OTHER_ALGORITHM := $(BOARD_AVB_ALGORITHM)
+endif
+
+$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET): $(AVBTOOL) $(BOARD_AVB_SYSTEM_OTHER_KEY_PATH)
+ @echo Extracting system_other avb key: $@
+ @rm -f $@
+ @mkdir -p $(dir $@)
+ $(AVBTOOL) extract_public_key --key $(BOARD_AVB_SYSTEM_OTHER_KEY_PATH) --output $@
+
+ifndef BOARD_AVB_SYSTEM_OTHER_ROLLBACK_INDEX
+BOARD_AVB_SYSTEM_OTHER_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+endif
+
+BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS += --rollback_index $(BOARD_AVB_SYSTEM_OTHER_ROLLBACK_INDEX)
+endif # end of AVB for BUILDING_SYSTEM_OTHER_IMAGE
+
+INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES := \
+ $(BOARD_AVB_VBMETA_SYSTEM) \
+ $(BOARD_AVB_VBMETA_VENDOR)
+
+# The same partition must not appear in more than one chained VBMeta image.
+ifneq ($(words $(sort $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES))),$(words $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES)))
+ $(error BOARD_AVB_VBMETA_SYSTEM and BOARD_AVB_VBMETA_VENDOR cannot have duplicates)
+endif
+
+# Append the OS version and security patch level as AVB property descriptors.
+
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.system.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.system.security_patch:$(PLATFORM_SECURITY_PATCH)
+
+BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.product.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.product.security_patch:$(PLATFORM_SECURITY_PATCH)
+
+BOARD_AVB_PRODUCT_SERVICES_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.product_services.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.product_services.security_patch:$(PLATFORM_SECURITY_PATCH)
+
+BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.boot.os_version:$(PLATFORM_VERSION)
+
+BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.vendor.os_version:$(PLATFORM_VERSION)
+
+BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.odm.os_version:$(PLATFORM_VERSION)
+
+# The following boot-, vendor- and odm-specific images need an explicit SPL
+# (security patch level) set per board.
+ifdef BOOT_SECURITY_PATCH
+BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.boot.security_patch:$(BOOT_SECURITY_PATCH)
+endif
+
+ifdef VENDOR_SECURITY_PATCH
+BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.vendor.security_patch:$(VENDOR_SECURITY_PATCH)
+endif
+
+ifdef ODM_SECURITY_PATCH
+BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.odm.security_patch:$(ODM_SECURITY_PATCH)
+endif
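+# Example with assumed values: with PLATFORM_VERSION := 9 and
+# VENDOR_SECURITY_PATCH := 2019-03-05, vendor.img's hashtree footer carries
+#   --prop com.android.build.vendor.os_version:9
+#   --prop com.android.build.vendor.security_patch:2019-03-05
+# which can later be read back from the image with `avbtool info_image`.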
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
DTBO_FOOTER_ARGS := BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS
@@ -2126,12 +3154,14 @@ SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
RECOVERY_FOOTER_ARGS := BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS
PRODUCT_FOOTER_ARGS := BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS
+PRODUCT_SERVICES_FOOTER_ARGS := BOARD_AVB_PRODUCT_SERVICES_ADD_HASHTREE_FOOTER_ARGS
+ODM_FOOTER_ARGS := BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS
-# Check and set required build variables for a chain partition.
-# $(1): the partition to enable AVB chain, e.g., BOOT or SYSTEM.
-define check-and-set-avb-chain-args
-$(eval PART := $(1))
-$(eval part=$(call to-lower,$(PART)))
+# Helper function that checks and sets required build variables for an AVB chained partition.
+# $(1): the partition to enable AVB chain, e.g., boot or system or vbmeta_system.
+define _check-and-set-avb-chain-args
+$(eval part := $(1))
+$(eval PART=$(call to-upper,$(part)))
$(eval _key_path := BOARD_AVB_$(PART)_KEY_PATH)
$(eval _signing_algorithm := BOARD_AVB_$(PART)_ALGORITHM)
@@ -2150,62 +3180,67 @@ $(eval $(_signing_args) := \
$(eval INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
--chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey)
-# Set rollback_index via footer args
-$(eval _footer_args := $(PART)_FOOTER_ARGS)
-$(eval $($(_footer_args)) += --rollback_index $($(_rollback_index)))
+# Set rollback_index via footer args for non-chained vbmeta image. Chained vbmeta image will pick up
+# the index via a separate flag (e.g. BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX).
+$(if $(filter $(part),$(part:vbmeta_%=%)),\
+ $(eval _footer_args := $(PART)_FOOTER_ARGS) \
+ $(eval $($(_footer_args)) += --rollback_index $($(_rollback_index))))
endef
-ifdef BOARD_AVB_BOOT_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,BOOT))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_BOOTIMAGE_TARGET)
-endif
+# Checks and sets the required build variables for an AVB partition. The partition will be
+# configured as a chained partition if BOARD_AVB_<partition>_KEY_PATH is defined. Otherwise the
+# image descriptor will be included in vbmeta.img, unless it has already been added to a chained
+# VBMeta image.
+# $(1): Partition name, e.g. boot or system.
+define check-and-set-avb-args
+$(eval _in_chained_vbmeta := $(filter $(1),$(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES)))
+$(if $(BOARD_AVB_$(call to-upper,$(1))_KEY_PATH),\
+ $(if $(_in_chained_vbmeta),\
+ $(error Chaining partition "$(1)" in chained VBMeta image is not supported)) \
+ $(call _check-and-set-avb-chain-args,$(1)),\
+ $(if $(_in_chained_vbmeta),,\
+ $(eval INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+ --include_descriptors_from_image $(call images-for-partitions,$(1)))))
+endef
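+# Worked example (a sketch under assumed board settings): if BOARD_AVB_SYSTEM_KEY_PATH
+# is set, $(call check-and-set-avb-args,system) chains system.img by adding
+#   --chain_partition system:<rollback index location>:$(AVB_CHAIN_KEY_DIR)/system.avbpubkey
+# to INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS. If the key path is not set and "system" is
+# not listed in a chained VBMeta image, it instead adds
+#   --include_descriptors_from_image $(PRODUCT_OUT)/system.img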
-ifdef BOARD_AVB_SYSTEM_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,SYSTEM))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_SYSTEMIMAGE)
+ifdef INSTALLED_BOOTIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,boot))
endif
+$(eval $(call check-and-set-avb-args,system))
+
ifdef INSTALLED_VENDORIMAGE_TARGET
-ifdef BOARD_AVB_VENDOR_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,VENDOR))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_VENDORIMAGE_TARGET)
-endif
+$(eval $(call check-and-set-avb-args,vendor))
endif
ifdef INSTALLED_PRODUCTIMAGE_TARGET
-ifdef BOARD_AVB_PRODUCT_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,PRODUCT))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_PRODUCTIMAGE_TARGET)
+$(eval $(call check-and-set-avb-args,product))
endif
+
+ifdef INSTALLED_PRODUCT_SERVICESIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,product_services))
endif
-ifdef INSTALLED_DTBOIMAGE_TARGET
-ifdef BOARD_AVB_DTBO_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,DTBO))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_DTBOIMAGE_TARGET)
+ifdef INSTALLED_ODMIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,odm))
endif
+
+ifdef INSTALLED_DTBOIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,dtbo))
endif
ifdef INSTALLED_RECOVERYIMAGE_TARGET
-ifdef BOARD_AVB_RECOVERY_KEY_PATH
-$(eval $(call check-and-set-avb-chain-args,RECOVERY))
-else
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_RECOVERYIMAGE_TARGET)
+$(eval $(call check-and-set-avb-args,recovery))
endif
+
+# Not using INSTALLED_VBMETA_SYSTEMIMAGE_TARGET as it won't be set yet.
+ifdef BOARD_AVB_VBMETA_SYSTEM
+$(eval $(call check-and-set-avb-args,vbmeta_system))
endif
-BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
+ifdef BOARD_AVB_VBMETA_VENDOR
+$(eval $(call check-and-set-avb-args,vbmeta_vendor))
+endif
# Add kernel cmdline descriptor for kernel to mount system.img as root with
# dm-verity. This works when system.img is either chained or not-chained:
@@ -2214,19 +3249,32 @@ BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
# - not-chained: The --include_descriptors_from_image option for make_vbmeta_image
# will include the kernel cmdline descriptor from system.img into vbmeta.img
ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ifeq ($(filter system, $(BOARD_SUPER_PARTITION_PARTITION_LIST)),)
BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --setup_as_rootfs_from_kernel
endif
+endif
+
+BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
+BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += --padding_size 4096
+BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += --padding_size 4096
+
+ifeq (eng,$(filter eng, $(TARGET_BUILD_VARIANT)))
+# We only need the flag in top-level vbmeta.img.
+BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --set_hashtree_disabled_flag
+endif
ifdef BOARD_AVB_ROLLBACK_INDEX
BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --rollback_index $(BOARD_AVB_ROLLBACK_INDEX)
endif
-ifndef BOARD_BOOTIMAGE_PARTITION_SIZE
- $(error BOARD_BOOTIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
+ifdef BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX
+BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += \
+ --rollback_index $(BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX)
endif
-ifndef BOARD_SYSTEMIMAGE_PARTITION_SIZE
- $(error BOARD_SYSTEMIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
+ifdef BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX
+BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += \
+ --rollback_index $(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX)
endif
# $(1): the directory to extract public keys to
@@ -2243,51 +3291,212 @@ define extract-avb-chain-public-keys
$(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PRODUCT_KEY_PATH) \
--output $(1)/product.avbpubkey)
+ $(if $(BOARD_AVB_PRODUCT_SERVICES_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PRODUCT_SERVICES_KEY_PATH) \
+ --output $(1)/product_services.avbpubkey)
+ $(if $(BOARD_AVB_ODM_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_ODM_KEY_PATH) \
+ --output $(1)/odm.avbpubkey)
$(if $(BOARD_AVB_DTBO_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
--output $(1)/dtbo.avbpubkey)
$(if $(BOARD_AVB_RECOVERY_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_RECOVERY_KEY_PATH) \
--output $(1)/recovery.avbpubkey)
+ $(if $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH) \
+ --output $(1)/vbmeta_system.avbpubkey)
+ $(if $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
+ --output $(1)/vbmeta_vendor.avbpubkey)
+endef
+
+# Builds a chained VBMeta image. This VBMeta image will contain the descriptors for the partitions
+# specified in BOARD_AVB_VBMETA_<NAME>. The built VBMeta image will be included into the top-level
+# vbmeta image as a chained partition. For example, if a target defines `BOARD_AVB_VBMETA_SYSTEM
+# := system product_services`, `vbmeta_system.img` will be created that includes the descriptors
+# for `system.img` and `product_services.img`. `vbmeta_system.img` itself will be included into
+# `vbmeta.img` as a chained partition.
+# $(1): VBMeta image name, such as "vbmeta_system", "vbmeta_vendor" etc.
+# $(2): Output filename.
+define build-chained-vbmeta-image
+ $(call pretty,"Target chained vbmeta image: $@")
+ $(hide) $(AVBTOOL) make_vbmeta_image \
+ $(INTERNAL_AVB_$(call to-upper,$(1))_SIGNING_ARGS) \
+ $(BOARD_AVB_MAKE_$(call to-upper,$(1))_IMAGE_ARGS) \
+ $(foreach image,$(BOARD_AVB_$(call to-upper,$(1))), \
+ --include_descriptors_from_image $(call images-for-partitions,$(image))) \
+ --output $@
endef
+ifdef BOARD_AVB_VBMETA_SYSTEM
+INSTALLED_VBMETA_SYSTEMIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_system.img
+$(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET): \
+ $(AVBTOOL) \
+ $(call images-for-partitions,$(BOARD_AVB_VBMETA_SYSTEM)) \
+ $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH)
+ $(call build-chained-vbmeta-image,vbmeta_system)
+endif
+
+ifdef BOARD_AVB_VBMETA_VENDOR
+INSTALLED_VBMETA_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_vendor.img
+$(INSTALLED_VBMETA_VENDORIMAGE_TARGET): \
+ $(AVBTOOL) \
+ $(call images-for-partitions,$(BOARD_AVB_VBMETA_VENDOR)) \
+ $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)
+ $(call build-chained-vbmeta-image,vbmeta_vendor)
+endif
+
define build-vbmetaimage-target
$(call pretty,"Target vbmeta image: $(INSTALLED_VBMETAIMAGE_TARGET)")
$(hide) mkdir -p $(AVB_CHAIN_KEY_DIR)
$(call extract-avb-chain-public-keys, $(AVB_CHAIN_KEY_DIR))
$(hide) $(AVBTOOL) make_vbmeta_image \
$(INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS) \
- $(INTERNAL_AVB_SIGNING_ARGS) \
+ $(PRIVATE_AVB_VBMETA_SIGNING_ARGS) \
$(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS) \
--output $@
$(hide) rm -rf $(AVB_CHAIN_KEY_DIR)
endef
INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_VBMETAIMAGE_TARGET)
+$(INSTALLED_VBMETAIMAGE_TARGET): PRIVATE_AVB_VBMETA_SIGNING_ARGS := \
+ --algorithm $(BOARD_AVB_ALGORITHM) --key $(BOARD_AVB_KEY_PATH)
+
$(INSTALLED_VBMETAIMAGE_TARGET): \
- $(AVBTOOL) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_SYSTEMIMAGE) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_DTBOIMAGE_TARGET) \
- $(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(BOARD_AVB_KEY_PATH)
+ $(AVBTOOL) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(INSTALLED_DTBOIMAGE_TARGET) \
+ $(INSTALLED_RECOVERYIMAGE_TARGET) \
+ $(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_VBMETA_VENDORIMAGE_TARGET) \
+ $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH) \
+ $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
+ $(BOARD_AVB_KEY_PATH)
$(build-vbmetaimage-target)
.PHONY: vbmetaimage-nodeps
vbmetaimage-nodeps:
$(build-vbmetaimage-target)
-else ifeq (true,$(BOARD_BUILD_DISABLED_VBMETAIMAGE))
-BUILT_DISABLED_VBMETAIMAGE := $(PRODUCT_OUT)/vbmeta.img
+endif # BOARD_AVB_ENABLE
-INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_DISABLED_VBMETAIMAGE)
-$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL)
- $(hide) $(AVBTOOL) make_vbmeta_image \
- --flag 2 --padding_size 4096 --output $@
+# -----------------------------------------------------------------
+# Check image sizes <= size of super partition
-endif # BOARD_AVB_ENABLE
+ifeq (,$(TARGET_BUILD_APPS))
+# Do not check for apps-only build
+
+ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
+
+# $(1): list of items like "system", "vendor", "product", "product_services"
+# Returns: for each item, a command (wrapped in $$()) that reads its partition size
+define read-size-of-partitions
+$(foreach image,$(call images-for-partitions,$(1)),$$( \
+ build/make/tools/releasetools/sparse_img.py --get_partition_size $(image)))
+endef
+
+# Round the result up to BOARD_SUPER_PARTITION_ALIGNMENT.
+# $(1): the calculated size
+ifeq (,$(BOARD_SUPER_PARTITION_ALIGNMENT))
+define round-partition-size
+$(1)
+endef
+else
+define round-partition-size
+$$((($(1)+$(BOARD_SUPER_PARTITION_ALIGNMENT)-1)/$(BOARD_SUPER_PARTITION_ALIGNMENT)*$(BOARD_SUPER_PARTITION_ALIGNMENT)))
+endef
+endif
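+# Worked example (illustrative numbers): with BOARD_SUPER_PARTITION_ALIGNMENT := 4096,
+# $(call round-partition-size,1000000) expands to the shell arithmetic
+#   $(((1000000+4096-1)/4096*4096))
+# which evaluates to 1003520 when the enclosing recipe runs.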
+
+define super-slot-suffix
+$(if $(filter true,$(AB_OTA_UPDATER)),$(if $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)),,_a))
+endef
+
+droid_targets: check-all-partition-sizes
+
+.PHONY: check-all-partition-sizes check-all-partition-sizes-nodeps
+
+# Add image dependencies so that generated_*_image_info.txt are written before checking.
+check-all-partition-sizes: \
+ build/make/tools/releasetools/sparse_img.py \
+ $(call images-for-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))
+
+ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
+# Check sum(super partition block devices) == super partition
+# Non-retrofit devices already define BOARD_SUPER_PARTITION_SUPER_DEVICE_SIZE = BOARD_SUPER_PARTITION_SIZE
+define check-super-partition-size
+ size_list="$(foreach device,$(call to-upper,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)),$(BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE))"; \
+ sum_sizes_expr=$$(sed -e 's/ /+/g' <<< "$${size_list}"); \
+ max_size_expr="$(BOARD_SUPER_PARTITION_SIZE)"; \
+ if [ $$(( $${sum_sizes_expr} )) -ne $$(( $${max_size_expr} )) ]; then \
+ echo "The sum of super partition block device sizes is not equal to BOARD_SUPER_PARTITION_SIZE:"; \
+ echo $${sum_sizes_expr} '!=' $${max_size_expr}; \
+ exit 1; \
+ else \
+ echo "The sum of super partition block device sizes is equal to BOARD_SUPER_PARTITION_SIZE:"; \
+ echo $${sum_sizes_expr} '==' $${max_size_expr}; \
+ fi
+endef
+endif
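+# Worked example (illustrative sizes): with BOARD_SUPER_PARTITION_BLOCK_DEVICES :=
+# system vendor, block device sizes of 3221225472 and 1073741824, and
+# BOARD_SUPER_PARTITION_SIZE := 4294967296, the retrofit check above evaluates
+#   3221225472+1073741824 == 4294967296
+# and passes; any mismatch aborts the build.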
+
+# $(1): human-readable max size string
+# $(2): max size expression
+# $(3): list of partition names
+define check-sum-of-partition-sizes
+ partition_size_list="$$(for i in $(call read-size-of-partitions,$(3)); do \
+ echo $(call round-partition-size,$${i}); \
+ done)"; \
+ sum_sizes_expr=$$(tr '\n' '+' <<< "$${partition_size_list}" | sed 's/+$$//'); \
+ if [ $$(( $${sum_sizes_expr} )) -gt $$(( $(2) )) ]; then \
+ echo "The sum of sizes of [$(strip $(3))] is larger than $(strip $(1)):"; \
+ echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' "$(2)" '==' $$(( $(2) )); \
+ exit 1; \
+ else \
+ echo "The sum of sizes of [$(strip $(3))] is within $(strip $(1)):"; \
+ echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' "$(2)" '==' $$(( $(2) )); \
+ fi;
+endef
+
+define check-all-partition-sizes-target
+ # Check sum(all partitions) <= super partition (/ 2 for A/B devices launched with dynamic partitions)
+ $(if $(BOARD_SUPER_PARTITION_SIZE),$(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ $(call check-sum-of-partition-sizes,BOARD_SUPER_PARTITION_SIZE$(if $(call super-slot-suffix), / 2), \
+ $(BOARD_SUPER_PARTITION_SIZE)$(if $(call super-slot-suffix), / 2),$(BOARD_SUPER_PARTITION_PARTITION_LIST))))
+
+ # For each group, check sum(partitions in group) <= group size
+ $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+ $(if $(BOARD_$(group)_SIZE),$(if $(BOARD_$(group)_PARTITION_LIST), \
+ $(call check-sum-of-partition-sizes,BOARD_$(group)_SIZE,$(BOARD_$(group)_SIZE),$(BOARD_$(group)_PARTITION_LIST)))))
+
+ # Check sum(all group sizes) <= super partition (/ 2 for A/B devices launched with dynamic partitions)
+ if [[ ! -z $(BOARD_SUPER_PARTITION_SIZE) ]]; then \
+ group_size_list="$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)),$(BOARD_$(group)_SIZE))"; \
+ sum_sizes_expr=$$(sed -e 's/ /+/g' <<< "$${group_size_list}"); \
+ max_size_tail=$(if $(call super-slot-suffix)," / 2"); \
+ max_size_expr="$(BOARD_SUPER_PARTITION_SIZE)$${max_size_tail}"; \
+ if [ $$(( $${sum_sizes_expr} )) -gt $$(( $${max_size_expr} )) ]; then \
+ echo "The sum of sizes of [$(strip $(BOARD_SUPER_PARTITION_GROUPS))] is larger than BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
+ echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
+ exit 1; \
+ else \
+ echo "The sum of sizes of [$(strip $(BOARD_SUPER_PARTITION_GROUPS))] is within BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
+ echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
+ fi \
+ fi
+endef
+
+check-all-partition-sizes check-all-partition-sizes-nodeps:
+ $(call check-all-partition-sizes-target)
+ $(call check-super-partition-size)
+
+endif # PRODUCT_BUILD_SUPER_PARTITION
+
+endif # TARGET_BUILD_APPS
# -----------------------------------------------------------------
# bring in the installer image generation defines if necessary
@@ -2298,33 +3507,42 @@ endif
# -----------------------------------------------------------------
# host tools needed to build dist and OTA packages
-build_ota_package := true
-ifeq ($(TARGET_SKIP_OTA_PACKAGE),true)
-build_ota_package := false
-endif
ifeq ($(BUILD_OS),darwin)
-build_ota_package := false
-endif
-ifneq ($(strip $(SANITIZE_TARGET)),)
-build_ota_package := false
-endif
-ifeq ($(TARGET_PRODUCT),sdk)
-build_ota_package := false
-endif
-ifneq ($(filter generic%,$(TARGET_DEVICE)),)
-build_ota_package := false
-endif
-ifeq ($(TARGET_NO_KERNEL),true)
-build_ota_package := false
-endif
-ifeq ($(recovery_fstab),)
-build_ota_package := false
-endif
-ifeq ($(TARGET_BUILD_PDK),true)
-build_ota_package := false
+ build_ota_package := false
+ build_otatools_package := false
+else
+ # set build_ota_package, and allow opt-out below
+ build_ota_package := true
+ ifeq ($(TARGET_SKIP_OTA_PACKAGE),true)
+ build_ota_package := false
+ endif
+ ifneq (,$(filter address, $(SANITIZE_TARGET)))
+ build_ota_package := false
+ endif
+ ifeq ($(TARGET_PRODUCT),sdk)
+ build_ota_package := false
+ endif
+ ifneq ($(filter generic%,$(TARGET_DEVICE)),)
+ build_ota_package := false
+ endif
+ ifeq ($(TARGET_NO_KERNEL),true)
+ build_ota_package := false
+ endif
+ ifeq ($(recovery_fstab),)
+ build_ota_package := false
+ endif
+ ifeq ($(TARGET_BUILD_PDK),true)
+ build_ota_package := false
+ endif
+
+ # set build_otatools_package, and allow opt-out below
+ build_otatools_package := true
+ ifeq ($(TARGET_SKIP_OTATOOLS_PACKAGE),true)
+ build_otatools_package := false
+ endif
endif
-ifeq ($(build_ota_package),true)
+ifeq ($(build_otatools_package),true)
OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
$(HOST_OUT_EXECUTABLES)/aapt \
$(HOST_OUT_EXECUTABLES)/checkvintf \
@@ -2334,13 +3552,13 @@ OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
$(HOST_OUT_EXECUTABLES)/zipalign \
$(HOST_OUT_EXECUTABLES)/bsdiff \
$(HOST_OUT_EXECUTABLES)/imgdiff \
- $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar \
$(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
$(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
$(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar \
$(HOST_OUT_EXECUTABLES)/mke2fs \
- $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs.sh \
+ $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs \
$(HOST_OUT_EXECUTABLES)/e2fsdroid \
+ $(HOST_OUT_EXECUTABLES)/tune2fs \
$(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh \
$(HOST_OUT_EXECUTABLES)/mksquashfs \
$(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh \
@@ -2348,7 +3566,6 @@ OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
$(HOST_OUT_EXECUTABLES)/sload_f2fs \
$(HOST_OUT_EXECUTABLES)/simg2img \
$(HOST_OUT_EXECUTABLES)/e2fsck \
- $(HOST_OUT_EXECUTABLES)/build_verity_tree \
$(HOST_OUT_EXECUTABLES)/generate_verity_key \
$(HOST_OUT_EXECUTABLES)/verity_signer \
$(HOST_OUT_EXECUTABLES)/verity_verifier \
@@ -2359,11 +3576,17 @@ OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
$(HOST_OUT_EXECUTABLES)/brillo_update_payload \
$(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
$(HOST_OUT_EXECUTABLES)/delta_generator \
+ $(HOST_OUT_EXECUTABLES)/care_map_generator \
+ $(HOST_OUT_EXECUTABLES)/fc_sort \
+ $(HOST_OUT_EXECUTABLES)/sefcontext_compile \
+ $(LPMAKE) \
$(AVBTOOL) \
$(BLK_ALLOC_TO_BASE_FS) \
- $(BROTLI)
+ $(BROTLI) \
+ $(BUILD_VERITY_METADATA) \
+ $(BUILD_VERITY_TREE)
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
OTATOOLS += \
$(FUTILITY) \
$(VBOOT_SIGNER)
@@ -2397,7 +3620,11 @@ OTATOOLS += \
$(HOST_LIBRARY_PATH)/libsparse-host$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libbase$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libpcre2$(HOST_SHLIB_SUFFIX) \
- $(HOST_LIBRARY_PATH)/libbrotli$(HOST_SHLIB_SUFFIX)
+ $(HOST_LIBRARY_PATH)/libbrotli$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/liblp$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libext4_utils$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libfec$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libsquashfs_utils$(HOST_SHLIB_SUFFIX)
.PHONY: otatools
@@ -2407,38 +3634,48 @@ BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
$(BUILT_OTATOOLS_PACKAGE): zip_root := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
OTATOOLS_DEPS := \
- system/extras/verity/build_verity_metadata.py \
system/extras/ext4_utils/mke2fs.conf \
+ $(sort $(shell find build/target/product/security -type f -name "*.x509.pem" -o -name "*.pk8" -o \
+ -name verity_key))
+
+ifneq (,$(wildcard device))
+OTATOOLS_DEPS += \
+ $(sort $(shell find device $(wildcard vendor) -type f -name "*.pk8" -o -name "verifiedboot*" -o \
+ -name "*.pem" -o -name "oem*.prop" -o -name "*.avbpubkey"))
+endif
+ifneq (,$(wildcard external/avb))
+OTATOOLS_DEPS += \
$(sort $(shell find external/avb/test/data -type f -name "testkey_*.pem" -o \
- -name "atx_metadata.bin")) \
- $(sort $(shell find system/update_engine/scripts -name \*.pyc -prune -o -type f -print)) \
- $(sort $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
- -name verity_key)) \
- $(sort $(shell find device $(wildcard vendor) -type f -name \*.pk8 -o -name verifiedboot\* -o \
- -name \*.x509.pem -o -name oem\*.prop))
+ -name "atx_metadata.bin"))
+endif
+ifneq (,$(wildcard system/update_engine))
+OTATOOLS_DEPS += \
+ $(sort $(shell find system/update_engine/scripts -name "*.pyc" -prune -o -type f -print))
+endif
OTATOOLS_RELEASETOOLS := \
- $(sort $(shell find build/make/tools/releasetools -name \*.pyc -prune -o -type f))
+ $(sort $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o -type f))
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
OTATOOLS_DEPS += \
$(sort $(shell find external/vboot_reference/tests/devkeys -type f))
endif
-$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) | $(ACP)
+$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) $(SOONG_ZIP)
@echo "Package OTA tools: $@"
$(hide) rm -rf $@ $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
- $(hide) $(ACP) -r -d -p build/make/tools/releasetools/* $(zip_root)/releasetools
+ $(hide) cp $(SOONG_ZIP) $(zip_root)/bin/
+ $(hide) cp -r -d -p build/make/tools/releasetools/* $(zip_root)/releasetools
$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
- $(hide) (cd $(zip_root) && zip -qryX $(abspath $@) *)
- $(hide) echo $(OTATOOLS_DEPS) | xargs zip -qryX $(abspath $@)>/dev/null || true
+ $(hide) $(SOONG_ZIP) -o $@ -C $(zip_root) -D $(zip_root) \
+ -C . $(addprefix -f ,$(OTATOOLS_DEPS))
.PHONY: otatools-package
otatools-package: $(BUILT_OTATOOLS_PACKAGE)
-endif # build_ota_package
+endif # build_otatools_package
# -----------------------------------------------------------------
# A zip of the directories that map to the target filesystem.
@@ -2455,7 +3692,7 @@ intermediates := $(call intermediates-dir-for,PACKAGING,target_files)
BUILT_TARGET_FILES_PACKAGE := $(intermediates)/$(name).zip
$(BUILT_TARGET_FILES_PACKAGE): intermediates := $(intermediates)
$(BUILT_TARGET_FILES_PACKAGE): \
- zip_root := $(intermediates)/$(name)
+ zip_root := $(intermediates)/$(name)
# $(1): Directory to copy
# $(2): Location to copy it to
@@ -2470,7 +3707,7 @@ endef
built_ota_tools :=
# We can't build static executables when SANITIZE_TARGET=address
-ifeq ($(strip $(SANITIZE_TARGET)),)
+ifeq (,$(filter address, $(SANITIZE_TARGET)))
built_ota_tools += \
$(call intermediates-dir-for,EXECUTABLES,updater,,,$(TARGET_PREFER_32_BIT))/updater
endif
@@ -2506,10 +3743,6 @@ $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
endif
ifeq ($(AB_OTA_UPDATER),true)
- ifdef BRILLO_VENDOR_PARTITIONS
- $(BUILT_TARGET_FILES_PACKAGE): $(foreach p,$(BRILLO_VENDOR_PARTITIONS),\
- $(call word-colon,1,$(p))/$(call word-colon,2,$(p)))
- endif
ifdef OSRELEASED_DIRECTORY
$(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
$(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
@@ -2524,46 +3757,92 @@ define fs_config
(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) -R "$(2)"
endef
+# $(1): file
+define dump-dynamic-partitions-info
+ $(if $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITIONS)), \
+ echo "use_dynamic_partitions=true" >> $(1))
+ $(if $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)), \
+ echo "dynamic_partition_retrofit=true" >> $(1))
+ echo "lpmake=$(notdir $(LPMAKE))" >> $(1)
+ $(if $(filter true,$(PRODUCT_BUILD_SUPER_PARTITION)), $(if $(BOARD_SUPER_PARTITION_SIZE), \
+ echo "build_super_partition=true" >> $(1)))
+ $(if $(filter true,$(BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE)), \
+ echo "build_retrofit_dynamic_partitions_ota_package=true" >> $(1))
+ echo "super_metadata_device=$(BOARD_SUPER_PARTITION_METADATA_DEVICE)" >> $(1)
+ $(if $(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
+ echo "super_block_devices=$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)" >> $(1))
+ $(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
+ echo "super_$(device)_device_size=$(BOARD_SUPER_PARTITION_$(call to-upper,$(device))_DEVICE_SIZE)" >> $(1);)
+ $(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ echo "dynamic_partition_list=$(BOARD_SUPER_PARTITION_PARTITION_LIST)" >> $(1))
+ $(if $(BOARD_SUPER_PARTITION_GROUPS),
+ echo "super_partition_groups=$(BOARD_SUPER_PARTITION_GROUPS)" >> $(1))
+ $(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
+ echo "super_$(group)_group_size=$(BOARD_$(call to-upper,$(group))_SIZE)" >> $(1); \
+ $(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
+ echo "super_$(group)_partition_list=$(BOARD_$(call to-upper,$(group))_PARTITION_LIST)" >> $(1);))
+ $(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)), \
+ echo "build_non_sparse_super_partition=true" >> $(1))
+ $(if $(filter true,$(BOARD_SUPER_IMAGE_IN_UPDATE_PACKAGE)), \
+ echo "super_image_in_update_package=true" >> $(1))
+endef
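For orientation, a sketch of the lines this macro appends for a hypothetical non-retrofit A/B device with a single group; every name and size below is illustrative:

    use_dynamic_partitions=true
    lpmake=lpmake
    build_super_partition=true
    super_metadata_device=super
    super_block_devices=super
    super_super_device_size=4294967296
    dynamic_partition_list=system vendor product
    super_partition_groups=google_dynamic_partitions
    super_google_dynamic_partitions_group_size=4292870144
    super_google_dynamic_partitions_partition_list=system vendor product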
+
# Depending on the various images guarantees that the underlying
# directories are up-to-date.
$(BUILT_TARGET_FILES_PACKAGE): \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_RADIOIMAGE_TARGET) \
- $(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(FULL_SYSTEMIMAGE_DEPS) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_CACHEIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_VBMETAIMAGE_TARGET) \
- $(INSTALLED_DTBOIMAGE_TARGET) \
- $(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
- $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
- $(INSTALLED_KERNEL_TARGET) \
- $(INSTALLED_2NDBOOTLOADER_TARGET) \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
- $(SELINUX_FC) \
- $(APKCERTS_FILE) \
- $(SOONG_ZIP) \
- $(HOST_OUT_EXECUTABLES)/fs_config \
- $(HOST_OUT_EXECUTABLES)/imgdiff \
- $(HOST_OUT_EXECUTABLES)/bsdiff \
- $(BUILD_IMAGE_SRCS) \
- $(BUILT_VENDOR_MANIFEST) \
- $(BUILT_VENDOR_MATRIX) \
- | $(ACP)
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_RADIOIMAGE_TARGET) \
+ $(INSTALLED_RECOVERYIMAGE_TARGET) \
+ $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_CACHEIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) \
+ $(INSTALLED_VBMETAIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(INSTALLED_DTBOIMAGE_TARGET) \
+ $(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
+ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
+ $(INSTALLED_KERNEL_TARGET) \
+ $(INSTALLED_DTBIMAGE_TARGET) \
+ $(INSTALLED_2NDBOOTLOADER_TARGET) \
+ $(BOARD_PREBUILT_DTBOIMAGE) \
+ $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
+ $(BOARD_RECOVERY_ACPIO) \
+ $(PRODUCT_SYSTEM_BASE_FS_PATH) \
+ $(PRODUCT_VENDOR_BASE_FS_PATH) \
+ $(PRODUCT_PRODUCT_BASE_FS_PATH) \
+ $(PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH) \
+ $(PRODUCT_ODM_BASE_FS_PATH) \
+ $(LPMAKE) \
+ $(SELINUX_FC) \
+ $(APKCERTS_FILE) \
+ $(SOONG_APEX_KEYS_FILE) \
+ $(SOONG_ZIP) \
+ $(HOST_OUT_EXECUTABLES)/fs_config \
+ $(HOST_OUT_EXECUTABLES)/imgdiff \
+ $(HOST_OUT_EXECUTABLES)/bsdiff \
+ $(HOST_OUT_EXECUTABLES)/care_map_generator \
+ $(BUILD_IMAGE_SRCS) \
+ $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST) \
+ $(BUILT_ASSEMBLED_VENDOR_MANIFEST) \
+ $(BUILT_SYSTEM_MATRIX) \
+ $(BUILT_VENDOR_MATRIX) \
+ | $(ACP)
@echo "Package target files: $@"
$(call create-system-vendor-symlink)
$(call create-system-product-symlink)
+ $(call create-system-product_services-symlink)
+ $(call create-vendor-odm-symlink)
$(hide) rm -rf $@ $@.list $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)
ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@# Components of the recovery image
$(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT)
$(hide) $(call package_files-copy-root, \
- $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
+ $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
ifdef INSTALLED_KERNEL_TARGET
$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
endif
@@ -2571,7 +3850,17 @@ ifdef INSTALLED_2NDBOOTLOADER_TARGET
$(hide) cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
endif
ifdef BOARD_INCLUDE_RECOVERY_DTBO
- $(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/recovery_dtbo
+ifdef BOARD_PREBUILT_RECOVERY_DTBOIMAGE
+ $(hide) cp $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/recovery_dtbo
+else
+ $(hide) cp $(BOARD_PREBUILT_DTBOIMAGE) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/recovery_dtbo
+endif
+endif
+ifdef BOARD_INCLUDE_RECOVERY_ACPIO
+ $(hide) cp $(BOARD_RECOVERY_ACPIO) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/recovery_acpio
+endif
+ifdef INSTALLED_DTBIMAGE_TARGET
+ $(hide) cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/dtb
endif
ifdef INTERNAL_KERNEL_CMDLINE
$(hide) echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
@@ -2585,22 +3874,24 @@ endif
endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
@# Components of the boot image
$(hide) mkdir -p $(zip_root)/BOOT
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
$(hide) mkdir -p $(zip_root)/ROOT
$(hide) $(call package_files-copy-root, \
- $(TARGET_ROOT_OUT),$(zip_root)/ROOT)
-else
- $(hide) $(call package_files-copy-root, \
- $(TARGET_ROOT_OUT),$(zip_root)/BOOT/RAMDISK)
-endif
+ $(TARGET_ROOT_OUT),$(zip_root)/ROOT)
@# If we are using recovery as boot, this is already done when processing recovery.
ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ $(hide) $(call package_files-copy-root, \
+ $(TARGET_RAMDISK_OUT),$(zip_root)/BOOT/RAMDISK)
+endif
ifdef INSTALLED_KERNEL_TARGET
$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
endif
ifdef INSTALLED_2NDBOOTLOADER_TARGET
$(hide) cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second
endif
+ifdef INSTALLED_DTBIMAGE_TARGET
+ $(hide) cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/BOOT/dtb
+endif
ifdef INTERNAL_KERNEL_CMDLINE
$(hide) echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
endif
@@ -2614,26 +3905,40 @@ endif # BOARD_USES_RECOVERY_AS_BOOT
$(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\
mkdir -p $(zip_root)/RADIO; \
cp $(t) $(zip_root)/RADIO/$(notdir $(t));)
+ifdef BUILDING_SYSTEM_IMAGE
@# Contents of the system image
$(hide) $(call package_files-copy-root, \
- $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
+ $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
+endif
+ifdef BUILDING_USERDATA_IMAGE
@# Contents of the data image
$(hide) $(call package_files-copy-root, \
- $(TARGET_OUT_DATA),$(zip_root)/DATA)
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ $(TARGET_OUT_DATA),$(zip_root)/DATA)
+endif
+ifdef BUILDING_VENDOR_IMAGE
@# Contents of the vendor image
$(hide) $(call package_files-copy-root, \
- $(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
+ $(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
endif
-ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ifdef BUILDING_PRODUCT_IMAGE
@# Contents of the product image
$(hide) $(call package_files-copy-root, \
- $(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+ $(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+endif
+ifdef BUILDING_PRODUCT_SERVICES_IMAGE
+ @# Contents of the product_services image
+ $(hide) $(call package_files-copy-root, \
+ $(TARGET_OUT_PRODUCT_SERVICES),$(zip_root)/PRODUCT_SERVICES)
endif
-ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
+ifdef BUILDING_ODM_IMAGE
+ @# Contents of the odm image
+ $(hide) $(call package_files-copy-root, \
+ $(TARGET_OUT_ODM),$(zip_root)/ODM)
+endif
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
@# Contents of the system_other image
$(hide) $(call package_files-copy-root, \
- $(TARGET_OUT_SYSTEM_OTHER),$(zip_root)/SYSTEM_OTHER)
+ $(TARGET_OUT_SYSTEM_OTHER),$(zip_root)/SYSTEM_OTHER)
endif
@# Extra contents of the OTA package
$(hide) mkdir -p $(zip_root)/OTA
@@ -2648,6 +3953,7 @@ endif
@# build them.
$(hide) mkdir -p $(zip_root)/META
$(hide) cp $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
+ $(hide) cp $(SOONG_APEX_KEYS_FILE) $(zip_root)/META/apexkeys.txt
ifneq ($(tool_extension),)
$(hide) cp $(PRIVATE_TOOL_EXTENSION) $(zip_root)/META/
endif
@@ -2667,6 +3973,9 @@ endif
ifdef BOARD_INCLUDE_RECOVERY_DTBO
$(hide) echo "include_recovery_dtbo=true" >> $(zip_root)/META/misc_info.txt
endif
+ifdef BOARD_INCLUDE_RECOVERY_ACPIO
+ $(hide) echo "include_recovery_acpio=true" >> $(zip_root)/META/misc_info.txt
+endif
ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
endif
@@ -2689,19 +3998,27 @@ ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
# OTA scripts are only interested in fingerprint related properties
$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
endif
-ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),)
- $(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
- $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH))
+ifneq ($(PRODUCT_SYSTEM_BASE_FS_PATH),)
+ $(hide) cp $(PRODUCT_SYSTEM_BASE_FS_PATH) \
+ $(zip_root)/META/$(notdir $(PRODUCT_SYSTEM_BASE_FS_PATH))
+endif
+ifneq ($(PRODUCT_VENDOR_BASE_FS_PATH),)
+ $(hide) cp $(PRODUCT_VENDOR_BASE_FS_PATH) \
+ $(zip_root)/META/$(notdir $(PRODUCT_VENDOR_BASE_FS_PATH))
endif
-ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),)
- $(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
- $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
+ifneq ($(PRODUCT_PRODUCT_BASE_FS_PATH),)
+ $(hide) cp $(PRODUCT_PRODUCT_BASE_FS_PATH) \
+ $(zip_root)/META/$(notdir $(PRODUCT_PRODUCT_BASE_FS_PATH))
endif
-ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),)
- $(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
- $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH))
+ifneq ($(PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH),)
+ $(hide) cp $(PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH) \
+ $(zip_root)/META/$(notdir $(PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH))
endif
-ifneq ($(strip $(SANITIZE_TARGET)),)
+ifneq ($(PRODUCT_ODM_BASE_FS_PATH),)
+ $(hide) cp $(PRODUCT_ODM_BASE_FS_PATH) \
+ $(zip_root)/META/$(notdir $(PRODUCT_ODM_BASE_FS_PATH))
+endif
+ifneq (,$(filter address, $(SANITIZE_TARGET)))
# We need to create userdata.img with real data because the instrumented libraries are in userdata.img.
$(hide) echo "userdata_img_with_data=true" >> $(zip_root)/META/misc_info.txt
endif
@@ -2725,6 +4042,20 @@ ifdef BOARD_AVB_RECOVERY_KEY_PATH
$(hide) echo "avb_recovery_algorithm=$(BOARD_AVB_RECOVERY_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "avb_recovery_rollback_index_location=$(BOARD_AVB_RECOVERY_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
endif # BOARD_AVB_RECOVERY_KEY_PATH
+ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
+ $(hide) echo "avb_vbmeta_system=$(BOARD_AVB_VBMETA_SYSTEM)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_key_path=$(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_algorithm=$(BOARD_AVB_VBMETA_SYSTEM_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_rollback_index_location=$(BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_VBMETA_SYSTEM
+ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
+ $(hide) echo "avb_vbmeta_vendor=$(BOARD_AVB_VBMETA_VENDOR)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt

+ $(hide) echo "avb_vbmeta_vendor_key_path=$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_VBMETA_VENDOR
endif # BOARD_AVB_ENABLE
ifdef BOARD_BPT_INPUT_FILES
$(hide) echo "board_bpt_enable=true" >> $(zip_root)/META/misc_info.txt
@@ -2736,8 +4067,10 @@ ifdef BOARD_BPT_DISK_SIZE
endif
$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+ifdef BUILDING_SYSTEM_IMAGE
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
build/make/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
+endif # BUILDING_SYSTEM_IMAGE
endif
ifeq ($(AB_OTA_UPDATER),true)
@# When using the A/B updater, include the updater config files in the zip.
@@ -2751,17 +4084,6 @@ ifeq ($(AB_OTA_UPDATER),true)
@# Include the build type in META/misc_info.txt so the server can easily differentiate production builds.
$(hide) echo "build_type=$(TARGET_BUILD_VARIANT)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "ab_update=true" >> $(zip_root)/META/misc_info.txt
-ifdef BRILLO_VENDOR_PARTITIONS
- $(hide) mkdir -p $(zip_root)/VENDOR_IMAGES
- $(hide) for f in $(BRILLO_VENDOR_PARTITIONS); do \
- pair1="$$(echo $$f | awk -F':' '{print $$1}')"; \
- pair2="$$(echo $$f | awk -F':' '{print $$2}')"; \
- src=$${pair1}/$${pair2}; \
- dest=$(zip_root)/VENDOR_IMAGES/$${pair2}; \
- mkdir -p $$(dirname "$${dest}"); \
- cp $${src} $${dest}; \
- done;
-endif
ifdef OSRELEASED_DIRECTORY
$(hide) cp $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
$(hide) cp $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
@@ -2772,14 +4094,6 @@ ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
@# If breakpad symbols have been generated, add them to the zip.
$(hide) $(ACP) -r $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
endif
-# BOARD_BUILD_DISABLED_VBMETAIMAGE is used to build a special vbmeta.img
-# that disables AVB verification. The content is fixed and we can just copy
-# it to $(zip_root)/IMAGES without passing some info into misc_info.txt for
-# regeneration.
-ifeq (true,$(BOARD_BUILD_DISABLED_VBMETAIMAGE))
- $(hide) mkdir -p $(zip_root)/IMAGES
- $(hide) cp $(INSTALLED_VBMETAIMAGE_TARGET) $(zip_root)/IMAGES/
-endif
ifdef BOARD_PREBUILT_VENDORIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -2788,10 +4102,18 @@ ifdef BOARD_PREBUILT_PRODUCTIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_PRODUCTIMAGE_TARGET) $(zip_root)/IMAGES/
endif
+ifdef BOARD_PREBUILT_PRODUCT_SERVICESIMAGE
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
ifdef BOARD_PREBUILT_BOOTIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
endif
+ifdef BOARD_PREBUILT_ODMIMAGE
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_ODMIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
ifdef BOARD_PREBUILT_DTBOIMAGE
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
@@ -2807,6 +4129,7 @@ ifdef BOARD_AVB_DTBO_KEY_PATH
endif # BOARD_AVB_DTBO_KEY_PATH
endif # BOARD_AVB_ENABLE
endif # BOARD_PREBUILT_DTBOIMAGE
+ $(call dump-dynamic-partitions-info,$(zip_root)/META/misc_info.txt)
@# The radio images in BOARD_PACK_RADIOIMAGES will be additionally copied from RADIO/ into
@# IMAGES/, which then will be added into <product>-img.zip. Such images must be listed in
@# INSTALLED_RADIOIMAGE_TARGET.
@@ -2814,41 +4137,61 @@ endif # BOARD_PREBUILT_DTBOIMAGE
echo $(part) >> $(zip_root)/META/pack_radioimages.txt;)
@# Run fs_config on all the system, vendor, boot ramdisk,
@# and recovery ramdisk files in the zip, and save the output
+ifdef BUILDING_SYSTEM_IMAGE
$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+endif
+ifdef BUILDING_VENDOR_IMAGE
$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
endif
-ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ifdef BUILDING_PRODUCT_IMAGE
$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
endif
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- @# When using BOARD_BUILD_SYSTEM_ROOT_IMAGE, ROOT always contains the files for the root under
- @# normal boot. BOOT/RAMDISK exists only if additionally using BOARD_USES_RECOVERY_AS_BOOT.
+ifdef BUILDING_PRODUCT_SERVICES_IMAGE
+ $(hide) $(call fs_config,$(zip_root)/PRODUCT_SERVICES,product_services/) > $(zip_root)/META/product_services_filesystem_config.txt
+endif
+ifdef BUILDING_ODM_IMAGE
+ $(hide) $(call fs_config,$(zip_root)/ODM,odm/) > $(zip_root)/META/odm_filesystem_config.txt
+endif
+ @# ROOT always contains the files for the root under normal boot.
$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ @# BOOT/RAMDISK exists and contains the ramdisk for recovery if using BOARD_USES_RECOVERY_AS_BOOT.
$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
endif
-else # BOARD_BUILD_SYSTEM_ROOT_IMAGE != true
+ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ @# BOOT/RAMDISK also exists and contains the first stage ramdisk if not using BOARD_BUILD_SYSTEM_ROOT_IMAGE.
$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
endif
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
endif
-ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
$(hide) $(call fs_config,$(zip_root)/SYSTEM_OTHER,system/) > $(zip_root)/META/system_other_filesystem_config.txt
endif
@# Metadata for compatibility verification.
- $(hide) cp $(BUILT_SYSTEM_MANIFEST) $(zip_root)/META/system_manifest.xml
- $(hide) cp $(BUILT_SYSTEM_COMPATIBILITY_MATRIX) $(zip_root)/META/system_matrix.xml
-ifdef BUILT_VENDOR_MANIFEST
- $(hide) cp $(BUILT_VENDOR_MANIFEST) $(zip_root)/META/vendor_manifest.xml
+ $(hide) cp $(BUILT_SYSTEM_MATRIX) $(zip_root)/META/system_matrix.xml
+ $(hide) cp $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST) $(zip_root)/META/system_manifest.xml
+ifdef BUILT_ASSEMBLED_VENDOR_MANIFEST
+ $(hide) cp $(BUILT_ASSEMBLED_VENDOR_MANIFEST) $(zip_root)/META/vendor_manifest.xml
endif
ifdef BUILT_VENDOR_MATRIX
$(hide) cp $(BUILT_VENDOR_MATRIX) $(zip_root)/META/vendor_matrix.xml
endif
-
+ifneq ($(BOARD_SUPER_PARTITION_GROUPS),)
+ $(hide) echo "super_partition_groups=$(BOARD_SUPER_PARTITION_GROUPS)" > $(zip_root)/META/dynamic_partitions_info.txt
+ @# Remove 'vendor' from the group partition list if the image is not available. This should only
+ @# happen to AOSP targets built without vendor.img. We can't remove the partition from the
+ @# BoardConfig file, as it's still needed elsewhere (e.g. when creating super_empty.img).
+ $(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
+ $(eval _group_partition_list := $(BOARD_$(call to-upper,$(group))_PARTITION_LIST)) \
+ $(if $(INSTALLED_VENDORIMAGE_TARGET),,$(eval _group_partition_list := $(filter-out vendor,$(_group_partition_list)))) \
+ echo "$(group)_size=$(BOARD_$(call to-upper,$(group))_SIZE)" >> $(zip_root)/META/dynamic_partitions_info.txt; \
+ $(if $(_group_partition_list), \
+ echo "$(group)_partition_list=$(_group_partition_list)" >> $(zip_root)/META/dynamic_partitions_info.txt;))
+endif # BOARD_SUPER_PARTITION_GROUPS
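A sketch of the resulting META/dynamic_partitions_info.txt for a hypothetical single-group target that does build vendor.img (values illustrative):

    super_partition_groups=google_dynamic_partitions
    google_dynamic_partitions_size=4292870144
    google_dynamic_partitions_partition_list=system vendor product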
+ @# TODO(b/134525174): Remove `-r` after addressing the issue with recovery patch generation.
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
- build/make/tools/releasetools/add_img_to_target_files -a -v -p $(HOST_OUT) $(zip_root)
+ build/make/tools/releasetools/add_img_to_target_files -a -r -v -p $(HOST_OUT) $(zip_root)
@# Zip everything up, preserving symlinks and placing META/ files first to
@# help early validation of the .zip file while uploading it.
$(hide) find $(zip_root)/META | sort >$@.list
@@ -2865,7 +4208,7 @@ endif
# -----------------------------------------------------------------
# NDK Sysroot Package
NDK_SYSROOT_TARGET := $(PRODUCT_OUT)/ndk_sysroot.tar.bz2
-$(NDK_SYSROOT_TARGET): ndk
+$(NDK_SYSROOT_TARGET): $(SOONG_OUT_DIR)/ndk.timestamp
@echo Package NDK sysroot...
$(hide) tar cjf $@ -C $(SOONG_OUT_DIR) ndk
@@ -2875,6 +4218,19 @@ ifeq ($(build_ota_package),true)
# -----------------------------------------------------------------
# OTA update package
+# $(1): output file
+# $(2): additional args
+define build-ota-package-target
+PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+ build/make/tools/releasetools/ota_from_target_files -v \
+ --block \
+ --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
+ -p $(HOST_OUT) \
+ $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
+ $(2) \
+ $(BUILT_TARGET_FILES_PACKAGE) $(1)
+endef
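For orientation, a call such as $(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR)) expands to roughly the command below; the angle-bracketed paths are placeholders, not real build outputs:

    PATH=<image tool dirs>:$PATH MKBOOTIMG=<mkbootimg> \
        build/make/tools/releasetools/ota_from_target_files -v \
        --block \
        --extracted_input_target_files <extracted target-files dir> \
        -p <host out> \
        -k <key cert pair> \
        <target-files>.zip <output OTA>.zip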
+
name := $(TARGET_PRODUCT)
ifeq ($(TARGET_BUILD_TYPE),debug)
name := $(name)_debug
@@ -2883,6 +4239,8 @@ name := $(name)-ota-$(FILE_NAME_TAG)
INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
+INTERNAL_OTA_METADATA := $(PRODUCT_OUT)/ota_metadata
+
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
ifeq ($(AB_OTA_UPDATER),true)
@@ -2891,41 +4249,66 @@ else
$(INTERNAL_OTA_PACKAGE_TARGET): $(BROTLI)
endif
+$(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
+
$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
- build/make/tools/releasetools/ota_from_target_files
+ build/make/tools/releasetools/ota_from_target_files
@echo "Package OTA: $@"
- $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
- build/make/tools/releasetools/ota_from_target_files -v \
- --block \
- --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
- -p $(HOST_OUT) \
- -k $(KEY_CERT_PAIR) \
- $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
- $(BUILT_TARGET_FILES_PACKAGE) $@
+ $(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
.PHONY: otapackage
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
-endif # build_ota_package
-
-# -----------------------------------------------------------------
-# The update package
-
+ifeq ($(BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE),true)
name := $(TARGET_PRODUCT)
ifeq ($(TARGET_BUILD_TYPE),debug)
name := $(name)_debug
endif
-name := $(name)-img-$(FILE_NAME_TAG)
+name := $(name)-ota-retrofit-$(FILE_NAME_TAG)
-INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
+INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
-$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(ZIP2ZIP)
- @echo "Package: $@"
- $(hide) $(ZIP2ZIP) -i $(BUILT_TARGET_FILES_PACKAGE) -o $@ \
- OTA/android-info.txt:android-info.txt "IMAGES/*.img:."
+$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-.PHONY: updatepackage
-updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
+ifeq ($(AB_OTA_UPDATER),true)
+$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): $(BRILLO_UPDATE_PAYLOAD)
+else
+$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): $(BROTLI)
+endif
+
+$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
+ build/make/tools/releasetools/ota_from_target_files
+ @echo "Package OTA (retrofit dynamic partitions): $@"
+ $(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --retrofit_dynamic_partitions)
+
+.PHONY: otardppackage
+
+otapackage otardppackage: $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET)
+
+endif # BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE
+
+endif # build_ota_package
+
+# -----------------------------------------------------------------
+# A zip of the appcompat directory containing logs
+APPCOMPAT_ZIP := $(PRODUCT_OUT)/appcompat.zip
+# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
+ifndef TARGET_BUILD_APPS
+$(APPCOMPAT_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET)
+endif
+$(APPCOMPAT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,appcompat)/filelist
+$(APPCOMPAT_ZIP): $(SOONG_ZIP)
+ @echo "appcompat logs: $@"
+ $(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
+ $(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/appcompat $(dir $(PRIVATE_LIST_FILE))
+ $(hide) find $(PRODUCT_OUT)/appcompat | sort >$(PRIVATE_LIST_FILE)
+ $(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/appcompat -l $(PRIVATE_LIST_FILE)
# -----------------------------------------------------------------
# A zip of the symbols directory. Keep the full paths to make it
@@ -2940,35 +4323,40 @@ name := $(name)-symbols-$(FILE_NAME_TAG)
SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifndef TARGET_BUILD_APPS
-$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(updater_dep)
+$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(updater_dep)
endif
$(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
$(SYMBOLS_ZIP): $(SOONG_ZIP)
@echo "Package symbols: $@"
$(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE))
- $(hide) find $(TARGET_OUT_UNSTRIPPED) | sort >$(PRIVATE_LIST_FILE)
+ $(hide) find -L $(TARGET_OUT_UNSTRIPPED) -type f | sort >$(PRIVATE_LIST_FILE)
$(hide) $(SOONG_ZIP) -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
# -----------------------------------------------------------------
# A zip of the coverage directory.
#
-name := $(TARGET_PRODUCT)
+name := gcov-report-files-all
ifeq ($(TARGET_BUILD_TYPE),debug)
name := $(name)_debug
endif
-name := $(name)-coverage-$(FILE_NAME_TAG)
COVERAGE_ZIP := $(PRODUCT_OUT)/$(name).zip
ifndef TARGET_BUILD_APPS
-$(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET)
+$(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET)
endif
$(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
$(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -2989,16 +4377,16 @@ endif
name := $(name)-apps-$(FILE_NAME_TAG)
APPS_ZIP := $(PRODUCT_OUT)/$(name).zip
-$(APPS_ZIP): $(INSTALLED_SYSTEMIMAGE)
+$(APPS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET)
@echo "Package apps: $@"
$(hide) rm -rf $@
$(hide) mkdir -p $(dir $@)
$(hide) apps_to_zip=`find $(TARGET_OUT_APPS) $(TARGET_OUT_APPS_PRIVILEGED) -mindepth 2 -maxdepth 3 -name "*.apk"`; \
if [ -z "$$apps_to_zip" ]; then \
- echo "No apps to zip up. Generating empty apps archive." ; \
- a=$$(mktemp /tmp/XXXXXXX) && touch $$a && zip $@ $$a && zip -d $@ $$a; \
+ echo "No apps to zip up. Generating empty apps archive." ; \
+ a=$$(mktemp /tmp/XXXXXXX) && touch $$a && zip $@ $$a && zip -d $@ $$a; \
else \
- zip -qjX $@ $$apps_to_zip; \
+ zip -qjX $@ $$apps_to_zip; \
fi
ifeq (true,$(EMMA_INSTRUMENT))
@@ -3012,7 +4400,7 @@ JACOCO_REPORT_CLASSES_ALL := $(PRODUCT_OUT)/jacoco-report-classes-all.jar
$(JACOCO_REPORT_CLASSES_ALL) :
@echo "Collecting uninstrumented classes"
$(hide) find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" | \
- zip -@ -0 -q -X $@
+ zip -@ -0 -q -X $@
# Meaning of these options:
# -@ scan stdin for file paths to add to the zip
# -0 don't do any compression
@@ -3032,51 +4420,247 @@ PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_
$(PROGUARD_DICT_ZIP) :
@echo "Packaging Proguard obfuscation dictionary files."
$(hide) dict_files=`find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary`; \
- if [ -n "$$dict_files" ]; then \
- unobfuscated_jars=$${dict_files//proguard_dictionary/classes.jar}; \
- zip -qX $@ $$dict_files $$unobfuscated_jars; \
- else \
- touch $(dir $@)/zipdummy; \
- (cd $(dir $@) && zip -q $(notdir $@) zipdummy); \
- zip -qd $@ zipdummy; \
- rm $(dir $@)/zipdummy; \
- fi
+ if [ -n "$$dict_files" ]; then \
+ unobfuscated_jars=$${dict_files//proguard_dictionary/classes.jar}; \
+ zip -qX $@ $$dict_files $$unobfuscated_jars; \
+ else \
+ touch $(dir $@)/zipdummy; \
+ (cd $(dir $@) && zip -q $(notdir $@) zipdummy); \
+ zip -qd $@ zipdummy; \
+ rm $(dir $@)/zipdummy; \
+ fi
endif # TARGET_BUILD_APPS
# -----------------------------------------------------------------
+# super partition image (dist)
+
+ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
+
+# BOARD_SUPER_PARTITION_SIZE must be defined to build super image.
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+
+# Dump variables used by build_super_image.py.
+define dump-super-image-info
+ $(call dump-dynamic-partitions-info,$(1))
+ $(if $(filter true,$(AB_OTA_UPDATER)), \
+ echo "ab_update=true" >> $(1))
+endef
+
+ifneq (true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS))
+
+# For real devices and for dist builds, build super image from target files to an intermediate directory.
+INTERNAL_SUPERIMAGE_DIST_TARGET := $(call intermediates-dir-for,PACKAGING,super.img)/super.img
+$(INTERNAL_SUPERIMAGE_DIST_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
+$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_PACKAGE) $(BUILD_SUPER_IMAGE)
+ $(call pretty,"Target super fs image from target files: $@")
+ PATH=$(dir $(LPMAKE)):$$PATH \
+ $(BUILD_SUPER_IMAGE) -v $(extracted_input_target_files) $@
+
+# Skip packing it in the dist package because it is already included in the update package.
+ifneq (true,$(BOARD_SUPER_IMAGE_IN_UPDATE_PACKAGE))
+$(call dist-for-goals,dist_files,$(INTERNAL_SUPERIMAGE_DIST_TARGET))
+endif
+
+.PHONY: superimage_dist
+superimage_dist: $(INTERNAL_SUPERIMAGE_DIST_TARGET)
+
+endif # PRODUCT_RETROFIT_DYNAMIC_PARTITIONS != "true"
+endif # BOARD_SUPER_PARTITION_SIZE != ""
+endif # PRODUCT_BUILD_SUPER_PARTITION == "true"
+
+# -----------------------------------------------------------------
+# super partition image for development
+
+ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+ifneq (true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS))
+
+# Build super.img from the $(INSTALLED_*IMAGE_TARGET) images into $(1)
+# $(1): built image path
+# $(2): misc_info.txt path; its contents should match expectation of build_super_image.py
+define build-superimage-target
+ mkdir -p $(dir $(2))
+ rm -rf $(2)
+ $(call dump-super-image-info,$(2))
+ $(foreach p,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ echo "$(p)_image=$(INSTALLED_$(call to-upper,$(p))IMAGE_TARGET)" >> $(2);)
+ mkdir -p $(dir $(1))
+ PATH=$(dir $(LPMAKE)):$$PATH \
+ $(BUILD_SUPER_IMAGE) -v $(2) $(1)
+endef
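The misc_info.txt written here is the dump-super-image-info output followed by one <partition>_image= line per member of the super partition; a sketch for a hypothetical device (<device> is a placeholder) whose super partition holds system and vendor:

    # ...dynamic-partition entries as sketched above, then:
    ab_update=true
    system_image=out/target/product/<device>/system.img
    vendor_image=out/target/product/<device>/vendor.img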
+
+INSTALLED_SUPERIMAGE_TARGET := $(PRODUCT_OUT)/super.img
+INSTALLED_SUPERIMAGE_DEPENDENCIES := $(LPMAKE) $(BUILD_SUPER_IMAGE) \
+ $(foreach p, $(BOARD_SUPER_PARTITION_PARTITION_LIST), $(INSTALLED_$(call to-upper,$(p))IMAGE_TARGET))
+
+# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
+# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
+# be built for non-dist builds. This is useful for devices that use super.img directly, e.g.
+# virtual devices.
+ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
+$(INSTALLED_SUPERIMAGE_TARGET): $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
+ $(call pretty,"Target super fs image for debug: $@")
+ $(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
+ $(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt)
+
+droidcore: $(INSTALLED_SUPERIMAGE_TARGET)
+
+# For devices that use the super image directly, the superimage target points to the file in $(PRODUCT_OUT).
+.PHONY: superimage
+superimage: $(INSTALLED_SUPERIMAGE_TARGET)
+endif # BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT
+
+# Build $(PRODUCT_OUT)/super.img without dependencies.
+.PHONY: superimage-nodeps supernod
+superimage-nodeps supernod: intermediates :=
+superimage-nodeps supernod: | $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
+ $(call pretty,"make $(INSTALLED_SUPERIMAGE_TARGET): ignoring dependencies")
+ $(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
+ $(call intermediates-dir-for,PACKAGING,superimage-nodeps)/misc_info.txt)
+
+endif # PRODUCT_RETROFIT_DYNAMIC_PARTITIONS != "true"
+endif # BOARD_SUPER_PARTITION_SIZE != ""
+endif # PRODUCT_BUILD_SUPER_PARTITION == "true"
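In practice these phony goals are driven from a configured build shell; an illustrative session (the m wrapper is the usual entry point, and supernod is the short alias defined above):

    $ m superimage           # only available when BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is true
    $ m superimage-nodeps    # repack $(PRODUCT_OUT)/super.img from already-built partition images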
+
+# -----------------------------------------------------------------
+# super empty image
+
+ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+
+INSTALLED_SUPERIMAGE_EMPTY_TARGET := $(PRODUCT_OUT)/super_empty.img
+$(INSTALLED_SUPERIMAGE_EMPTY_TARGET): intermediates := $(call intermediates-dir-for,PACKAGING,super_empty)
+$(INSTALLED_SUPERIMAGE_EMPTY_TARGET): $(LPMAKE) $(BUILD_SUPER_IMAGE)
+ $(call pretty,"Target empty super fs image: $@")
+ mkdir -p $(intermediates)
+ rm -rf $(intermediates)/misc_info.txt
+ $(call dump-super-image-info,$(intermediates)/misc_info.txt)
+ PATH=$(dir $(LPMAKE)):$$PATH \
+ $(BUILD_SUPER_IMAGE) -v $(intermediates)/misc_info.txt $@
+
+$(call dist-for-goals,dist_files,$(INSTALLED_SUPERIMAGE_EMPTY_TARGET))
+
+endif # BOARD_SUPER_PARTITION_SIZE != ""
+endif # PRODUCT_BUILD_SUPER_PARTITION == "true"
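super_empty.img carries only the dynamic-partition metadata, with no partition contents. One common use, outside the scope of this makefile, is re-initializing the super partition on a development device, e.g.:

    $ fastboot wipe-super out/target/product/<device>/super_empty.img    # <device> is a placeholder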
+
+
+# -----------------------------------------------------------------
+# The update package
+
+name := $(TARGET_PRODUCT)
+ifeq ($(TARGET_BUILD_TYPE),debug)
+ name := $(name)_debug
+endif
+name := $(name)-img-$(FILE_NAME_TAG)
+
+INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
+
+$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(ZIP2ZIP)
+
+ifeq (true,$(BOARD_SUPER_IMAGE_IN_UPDATE_PACKAGE))
+$(INTERNAL_UPDATE_PACKAGE_TARGET): $(INTERNAL_SUPERIMAGE_DIST_TARGET)
+ @echo "Package: $@"
+ # Filter out super_empty and images in BOARD_SUPER_PARTITION_PARTITION_LIST.
+	# Filter out system_other for devices launching with dynamic partitions, since it is included in the super image.
+ # Include OTA/super_*.img for retrofit devices and super.img for non-retrofit
+ # devices.
+ $(hide) $(ZIP2ZIP) -i $(BUILT_TARGET_FILES_PACKAGE) -o $@ \
+ -x IMAGES/super_empty.img \
+ $(foreach partition,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ -x IMAGES/$(partition).img) \
+ $(if $(filter system, $(BOARD_SUPER_PARTITION_PARTITION_LIST)), \
+ $(if $(filter true, $(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)),, \
+ -x IMAGES/system_other.img)) \
+ $(if $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)), \
+ $(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
+ OTA/super_$(device).img:super_$(device).img)) \
+ OTA/android-info.txt:android-info.txt "IMAGES/*.img:."
+ $(if $(INTERNAL_SUPERIMAGE_DIST_TARGET), zip -q -j -u $@ $(INTERNAL_SUPERIMAGE_DIST_TARGET))
+else
+$(INTERNAL_UPDATE_PACKAGE_TARGET):
+ @echo "Package: $@"
+ $(hide) $(ZIP2ZIP) -i $(BUILT_TARGET_FILES_PACKAGE) -o $@ \
+ OTA/android-info.txt:android-info.txt "IMAGES/*.img:."
+endif # BOARD_SUPER_IMAGE_IN_UPDATE_PACKAGE
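For a hypothetical non-retrofit launch device whose super partition holds system, vendor and product, the filtered invocation above works out to roughly (angle-bracketed names are placeholders):

    zip2zip -i <target-files>.zip -o <product>-img-<tag>.zip \
        -x IMAGES/super_empty.img \
        -x IMAGES/system.img -x IMAGES/vendor.img -x IMAGES/product.img \
        -x IMAGES/system_other.img \
        OTA/android-info.txt:android-info.txt "IMAGES/*.img:."
    zip -q -j -u <product>-img-<tag>.zip <intermediates>/super.img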
+
+.PHONY: updatepackage
+updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
+
+
+# -----------------------------------------------------------------
# dalvik something
.PHONY: dalvikfiles
dalvikfiles: $(INTERNAL_DALVIK_MODULES)
ifeq ($(BUILD_QEMU_IMAGES),true)
-INSTALLED_QEMU_SYSTEMIMAGE := $(PRODUCT_OUT)/system-qemu.img
MK_QEMU_IMAGE_SH := device/generic/goldfish/tools/mk_qemu_image.sh
+MK_COMBINE_QEMU_IMAGE_SH := device/generic/goldfish/tools/mk_combined_img.py
SGDISK_HOST := $(HOST_OUT_EXECUTABLES)/sgdisk
-$(INSTALLED_QEMU_SYSTEMIMAGE): $(INSTALLED_SYSTEMIMAGE) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
- @echo Create system-qemu.img
- (export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) ${PRODUCT_OUT}/system.img)
+
+ifdef INSTALLED_SYSTEMIMAGE_TARGET
+INSTALLED_QEMU_SYSTEMIMAGE := $(PRODUCT_OUT)/system-qemu.img
+INSTALLED_SYSTEM_QEMU_CONFIG := $(PRODUCT_OUT)/system-qemu-config.txt
+$(INSTALLED_SYSTEM_QEMU_CONFIG): $(INSTALLED_SUPERIMAGE_TARGET) $(INSTALLED_VBMETAIMAGE_TARGET)
+ @echo "$(PRODUCT_OUT)/vbmeta.img vbmeta 1" > $@
+ @echo "$(INSTALLED_SUPERIMAGE_TARGET) super 2" >> $@
+$(INSTALLED_QEMU_SYSTEMIMAGE): $(INSTALLED_VBMETAIMAGE_TARGET) $(MK_COMBINE_QEMU_IMAGE_SH) $(SGDISK_HOST) $(SIMG2IMG) \
+ $(INSTALLED_SUPERIMAGE_TARGET) $(INSTALLED_SYSTEM_QEMU_CONFIG)
+ @echo Create system-qemu.img now
+ (export SGDISK=$(SGDISK_HOST) SIMG2IMG=$(SIMG2IMG); \
+ $(MK_COMBINE_QEMU_IMAGE_SH) -i $(INSTALLED_SYSTEM_QEMU_CONFIG) -o $@)
systemimage: $(INSTALLED_QEMU_SYSTEMIMAGE)
droidcore: $(INSTALLED_QEMU_SYSTEMIMAGE)
-ifeq ($(BOARD_USES_VENDORIMAGE),true)
+endif
+ifdef INSTALLED_VENDORIMAGE_TARGET
INSTALLED_QEMU_VENDORIMAGE := $(PRODUCT_OUT)/vendor-qemu.img
-$(INSTALLED_QEMU_VENDORIMAGE): $(INSTALLED_VENDORIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+$(INSTALLED_QEMU_VENDORIMAGE): $(INSTALLED_VENDORIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST) $(SIMG2IMG)
@echo Create vendor-qemu.img
- (export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) ${PRODUCT_OUT}/vendor.img)
+ (export SGDISK=$(SGDISK_HOST) SIMG2IMG=$(SIMG2IMG); $(MK_QEMU_IMAGE_SH) $(INSTALLED_VENDORIMAGE_TARGET))
vendorimage: $(INSTALLED_QEMU_VENDORIMAGE)
droidcore: $(INSTALLED_QEMU_VENDORIMAGE)
endif
-ifeq ($(BOARD_USES_PRODUCTIMAGE),true)
+ifdef INSTALLED_PRODUCTIMAGE_TARGET
INSTALLED_QEMU_PRODUCTIMAGE := $(PRODUCT_OUT)/product-qemu.img
-$(INSTALLED_QEMU_PRODUCTIMAGE): $(INSTALLED_PRODUCTIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+$(INSTALLED_QEMU_PRODUCTIMAGE): $(INSTALLED_PRODUCTIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST) $(SIMG2IMG)
@echo Create product-qemu.img
- (export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) ${PRODUCT_OUT}/product.img)
+ (export SGDISK=$(SGDISK_HOST) SIMG2IMG=$(SIMG2IMG); $(MK_QEMU_IMAGE_SH) $(INSTALLED_PRODUCTIMAGE_TARGET))
productimage: $(INSTALLED_QEMU_PRODUCTIMAGE)
droidcore: $(INSTALLED_QEMU_PRODUCTIMAGE)
endif
+ifdef INSTALLED_PRODUCT_SERVICESIMAGE_TARGET
+INSTALLED_QEMU_PRODUCT_SERVICESIMAGE := $(PRODUCT_OUT)/product_services-qemu.img
+$(INSTALLED_QEMU_PRODUCT_SERVICESIMAGE): $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST) $(SIMG2IMG)
+ @echo Create product_services-qemu.img
+ (export SGDISK=$(SGDISK_HOST) SIMG2IMG=$(SIMG2IMG); $(MK_QEMU_IMAGE_SH) $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET))
+
+productservicesimage: $(INSTALLED_QEMU_PRODUCT_SERVICESIMAGE)
+droidcore: $(INSTALLED_QEMU_PRODUCT_SERVICESIMAGE)
+endif
+ifdef INSTALLED_ODMIMAGE_TARGET
+INSTALLED_QEMU_ODMIMAGE := $(PRODUCT_OUT)/odm-qemu.img
+$(INSTALLED_QEMU_ODMIMAGE): $(INSTALLED_ODMIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+ @echo Create odm-qemu.img
+ (export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) $(INSTALLED_ODMIMAGE_TARGET))
+
+odmimage: $(INSTALLED_QEMU_ODMIMAGE)
+droidcore: $(INSTALLED_QEMU_ODMIMAGE)
+endif
+
+QEMU_VERIFIED_BOOT_PARAMS := $(PRODUCT_OUT)/VerifiedBootParams.textproto
+MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
+$(QEMU_VERIFIED_BOOT_PARAMS): $(INSTALLED_VBMETAIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH) $(AVBTOOL)
+ @echo Creating $@
+ (export AVBTOOL=$(AVBTOOL); $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_VBMETAIMAGE_TARGET) \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) $(QEMU_VERIFIED_BOOT_PARAMS))
+
+systemimage: $(QEMU_VERIFIED_BOOT_PARAMS)
+droidcore: $(QEMU_VERIFIED_BOOT_PARAMS)
+
endif
# -----------------------------------------------------------------
# The emulator package
@@ -3085,8 +4669,8 @@ INTERNAL_EMULATOR_PACKAGE_FILES += \
$(HOST_OUT_EXECUTABLES)/emulator$(HOST_EXECUTABLE_SUFFIX) \
prebuilts/qemu-kernel/$(TARGET_ARCH)/kernel-qemu \
$(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_SYSTEMIMAGE) \
- $(INSTALLED_USERDATAIMAGE_TARGET)
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET)
name := $(TARGET_PRODUCT)-emulator-$(FILE_NAME_TAG)
@@ -3140,6 +4724,7 @@ ATREE_FILES := \
$(ALL_DEFAULT_INSTALLED_MODULES) \
$(INSTALLED_RAMDISK_TARGET) \
$(ALL_DOCS) \
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/api-stubs-docs_annotations.zip \
$(ALL_SDK_FILES)
endif
@@ -3158,8 +4743,8 @@ ifneq (,$(strip $(wildcard $(atree_dir)/sdk-android-$(TARGET_CPU_ABI).atree)))
sdk_atree_files += $(atree_dir)/sdk-android-$(TARGET_CPU_ABI).atree
endif
-ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ATREE_FILES),)
-sdk_atree_files += $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ATREE_FILES)
+ifneq ($(PRODUCT_SDK_ATREE_FILES),)
+sdk_atree_files += $(PRODUCT_SDK_ATREE_FILES)
else
sdk_atree_files += $(atree_dir)/sdk.atree
endif
@@ -3170,11 +4755,14 @@ deps := \
$(target_notice_file_txt) \
$(tools_notice_file_txt) \
$(OUT_DOCS)/offline-sdk-timestamp \
+ $(SDK_METADATA_FILES) \
$(SYMBOLS_ZIP) \
$(COVERAGE_ZIP) \
- $(INSTALLED_SYSTEMIMAGE) \
+ $(APPCOMPAT_ZIP) \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
$(INSTALLED_QEMU_SYSTEMIMAGE) \
$(INSTALLED_QEMU_VENDORIMAGE) \
+ $(QEMU_VERIFIED_BOOT_PARAMS) \
$(INSTALLED_USERDATAIMAGE_TARGET) \
$(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_SDK_BUILD_PROP_TARGET) \
@@ -3208,29 +4796,29 @@ $(INTERNAL_SDK_TARGET): $(deps)
if [ $$FAIL ]; then exit 1; fi
$(hide) echo $(notdir $(SDK_FONT_DEPS)) | tr " " "\n" > $(SDK_FONT_TEMP)/fontsInSdk.txt
$(hide) ( \
- ATREE_STRIP="strip -x" \
- $(HOST_OUT_EXECUTABLES)/atree \
- $(addprefix -f ,$(PRIVATE_INPUT_FILES)) \
- -m $(PRIVATE_DEP_FILE) \
- -I . \
- -I $(PRODUCT_OUT) \
- -I $(HOST_OUT) \
- -I $(TARGET_COMMON_OUT_ROOT) \
- -v "PLATFORM_NAME=android-$(PLATFORM_VERSION)" \
- -v "OUT_DIR=$(OUT_DIR)" \
- -v "HOST_OUT=$(HOST_OUT)" \
- -v "TARGET_ARCH=$(TARGET_ARCH)" \
- -v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
- -v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
- -v "FONT_OUT=$(SDK_FONT_TEMP)" \
- -o $(PRIVATE_DIR) && \
- cp -f $(target_notice_file_txt) \
- $(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
- cp -f $(tools_notice_file_txt) $(PRIVATE_DIR)/platform-tools/NOTICE.txt && \
- HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
- development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
- chmod -R ug+rwX $(PRIVATE_DIR) && \
- cd $(dir $@) && zip -rqX $(notdir $@) $(PRIVATE_NAME) \
+ ATREE_STRIP="$(HOST_STRIP) -x" \
+ $(HOST_OUT_EXECUTABLES)/atree \
+ $(addprefix -f ,$(PRIVATE_INPUT_FILES)) \
+ -m $(PRIVATE_DEP_FILE) \
+ -I . \
+ -I $(PRODUCT_OUT) \
+ -I $(HOST_OUT) \
+ -I $(TARGET_COMMON_OUT_ROOT) \
+ -v "PLATFORM_NAME=android-$(PLATFORM_VERSION)" \
+ -v "OUT_DIR=$(OUT_DIR)" \
+ -v "HOST_OUT=$(HOST_OUT)" \
+ -v "TARGET_ARCH=$(TARGET_ARCH)" \
+ -v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
+ -v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
+ -v "FONT_OUT=$(SDK_FONT_TEMP)" \
+ -o $(PRIVATE_DIR) && \
+ cp -f $(target_notice_file_txt) \
+ $(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
+ cp -f $(tools_notice_file_txt) $(PRIVATE_DIR)/platform-tools/NOTICE.txt && \
+ HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
+ development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
+ chmod -R ug+rwX $(PRIVATE_DIR) && \
+ cd $(dir $@) && zip -rqX $(notdir $@) $(PRIVATE_NAME) \
) || ( rm -rf $(PRIVATE_DIR) $@ && exit 44 )
@@ -3283,9 +4871,3 @@ include $(BUILD_SYSTEM)/product-graph.mk
ifneq ($(sdk_repo_goal),)
include $(TOPDIR)development/build/tools/sdk_repo.mk
endif
-
-#------------------------------------------------------------------
-# Find lsdump paths
-FIND_LSDUMPS_FILE := $(PRODUCT_OUT)/lsdump_paths.txt
-$(FIND_LSDUMPS_FILE) : $(LSDUMP_PATHS)
- $(hide) rm -rf $@ && echo "$^" > $@
diff --git a/core/OWNERS b/core/OWNERS
new file mode 100644
index 0000000000..750f1fa20d
--- /dev/null
+++ b/core/OWNERS
@@ -0,0 +1,3 @@
+per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,dbrazdil@google.com
+per-file construct_context.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
+per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
diff --git a/core/aapt2.mk b/core/aapt2.mk
index b3a7c935f8..fbbf3dd58f 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -64,9 +64,11 @@ endif
$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata): \
PRIVATE_AAPT2_CFLAGS := --pseudo-localize
-my_static_library_resources := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES)),\
+# TODO(b/78447299): Forbid LOCAL_STATIC_JAVA_AAR_LIBRARIES in aapt2 and remove
+# support for it.
+my_static_library_resources := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/package-res.apk)
-my_static_library_extra_packages := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES)),\
+my_static_library_extra_packages := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/extra_packages)
my_shared_library_resources := $(foreach l, $(LOCAL_SHARED_ANDROID_LIBRARIES),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/package-res.apk)
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 7d573d3fc3..1f7acf1a05 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -1,45 +1,115 @@
# Handle AndroidManifest.xmls
-# Input: LOCAL_MANIFEST_FILE, LOCAL_FULL_MANIFEST_FILE, LOCAL_FULL_LIBS_MANIFEST_FILES
+# Input: LOCAL_MANIFEST_FILE, LOCAL_FULL_MANIFEST_FILE, LOCAL_FULL_LIBS_MANIFEST_FILES,
+# LOCAL_USE_EMBEDDED_NATIVE_LIBS
# Output: full_android_manifest
ifeq ($(strip $(LOCAL_MANIFEST_FILE)),)
LOCAL_MANIFEST_FILE := AndroidManifest.xml
endif
ifdef LOCAL_FULL_MANIFEST_FILE
- full_android_manifest := $(LOCAL_FULL_MANIFEST_FILE)
+ main_android_manifest := $(LOCAL_FULL_MANIFEST_FILE)
else
- full_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
+ main_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
endif
-my_full_libs_manifest_files := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
-my_full_libs_manifest_deps := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
-
-# Set up dependency on aar libraries
LOCAL_STATIC_JAVA_AAR_LIBRARIES := $(strip $(LOCAL_STATIC_JAVA_AAR_LIBRARIES))
-ifdef LOCAL_STATIC_JAVA_AAR_LIBRARIES
-my_full_libs_manifest_deps += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
- $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/classes.jar)
-my_full_libs_manifest_files += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
- $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/AndroidManifest.xml)
+
+my_full_libs_manifest_files :=
+
+ifndef LOCAL_DONT_MERGE_MANIFESTS
+ my_full_libs_manifest_files += $(LOCAL_FULL_LIBS_MANIFEST_FILES)
+
+ my_full_libs_manifest_files += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES) $(LOCAL_STATIC_ANDROID_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/manifest/AndroidManifest.xml)
+endif
# With aapt2, we'll link in the built resource from the AAR.
-ifndef LOCAL_USE_AAPT2
-LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
- $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
-endif # LOCAL_USE_AAPT2
-endif # LOCAL_STATIC_JAVA_AAR_LIBRARIES
-
-# Set up rules to merge library manifest files
-ifdef my_full_libs_manifest_files
-main_android_manifest := $(full_android_manifest)
-full_android_manifest := $(intermediates.COMMON)/AndroidManifest.xml
-$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
-$(full_android_manifest): $(ANDROID_MANIFEST_MERGER_CLASSPATH)
-$(full_android_manifest) : $(main_android_manifest) $(my_full_libs_manifest_deps)
+ifneq ($(LOCAL_USE_AAPT2),true)
+ LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
+endif
+
+full_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
+
+ifneq (,$(strip $(my_full_libs_manifest_files)))
+ # Set up rules to merge library manifest files
+ fixed_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml.fixed
+
+ $(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
+ $(full_android_manifest): $(ANDROID_MANIFEST_MERGER)
+ $(full_android_manifest) : $(fixed_android_manifest) $(my_full_libs_manifest_files)
@echo "Merge android manifest files: $@ <-- $< $(PRIVATE_LIBS_MANIFESTS)"
@mkdir -p $(dir $@)
$(hide) $(ANDROID_MANIFEST_MERGER) --main $< \
--libs $(call normalize-path-list,$(PRIVATE_LIBS_MANIFESTS)) \
--out $@
+else
+ fixed_android_manifest := $(full_android_manifest)
+endif
+my_target_sdk_version := $(call module-target-sdk-version)
+
+ifdef TARGET_BUILD_APPS
+ ifndef TARGET_BUILD_APPS_USE_PREBUILT_SDK
+ ifeq ($(my_target_sdk_version),$(PLATFORM_VERSION_CODENAME))
+ ifdef UNBUNDLED_BUILD_TARGET_SDK_WITH_API_FINGERPRINT
+ my_target_sdk_version := $(my_target_sdk_version).$$(cat $(API_FINGERPRINT))
+ $(fixed_android_manifest): $(API_FINGERPRINT)
+ endif
+ endif
+ endif
+endif
+
+$(fixed_android_manifest): PRIVATE_MIN_SDK_VERSION := $(call module-min-sdk-version)
+$(fixed_android_manifest): PRIVATE_TARGET_SDK_VERSION := $(my_target_sdk_version)
+
+my_exported_sdk_libs_file := $(call local-intermediates-dir,COMMON)/exported-sdk-libs
+$(fixed_android_manifest): PRIVATE_EXPORTED_SDK_LIBS_FILE := $(my_exported_sdk_libs_file)
+$(fixed_android_manifest): $(my_exported_sdk_libs_file)
+
+my_manifest_fixer_flags :=
+ifneq ($(LOCAL_MODULE_CLASS),APPS)
+ my_manifest_fixer_flags += --library
+endif
+ifeq ($(LOCAL_PRIVATE_PLATFORM_APIS),true)
+ my_manifest_fixer_flags += --uses-non-sdk-api
endif
+
+ifeq (true,$(LOCAL_USE_EMBEDDED_DEX))
+ my_manifest_fixer_flags += --use-embedded-dex
+endif
+
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
+ ifeq (true,$(call math_gt_or_eq,$(patsubst $(PLATFORM_VERSION_CODENAME),100,$(call module-min-sdk-version)),23))
+ ifeq (true,$(LOCAL_USE_EMBEDDED_NATIVE_LIBS))
+ my_manifest_fixer_flags += --extract-native-libs=false
+ else
+ my_manifest_fixer_flags += --extract-native-libs=true
+ endif
+ else ifeq (true,$(LOCAL_USE_EMBEDDED_NATIVE_LIBS))
+ $(call pretty-error,LOCAL_USE_EMBEDDED_NATIVE_LIBS is set but minSdkVersion $(call module-min-sdk-version) does not support it)
+ endif
+endif
+
+$(fixed_android_manifest): PRIVATE_MANIFEST_FIXER_FLAGS := $(my_manifest_fixer_flags)
+# These two libs are added as optional dependencies (<uses-library> with
+# android:required set to false). This is because these libraries did not exist
+# as separate libraries on pre-P devices, but their classes were available via
+# bootclasspath jars, etc. Making them hard dependencies (android:required=true)
+# would therefore prevent apps from being installed on such legacy devices.
+$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := android.test.base android.test.mock
+$(fixed_android_manifest): $(MANIFEST_FIXER)
+$(fixed_android_manifest): $(main_android_manifest)
+ echo $(PRIVATE_OPTIONAL_SDK_LIB_NAMES) | tr ' ' '\n' > $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
+ @echo "Fix manifest: $@"
+ $(MANIFEST_FIXER) \
+ --minSdkVersion $(PRIVATE_MIN_SDK_VERSION) \
+ --targetSdkVersion $(PRIVATE_TARGET_SDK_VERSION) \
+ --raise-min-sdk-version \
+ $(PRIVATE_MANIFEST_FIXER_FLAGS) \
+	  $(if $(PRIVATE_EXPORTED_SDK_LIBS_FILE),\
+ $$(cat $(PRIVATE_EXPORTED_SDK_LIBS_FILE) | grep -v -f $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional | sort -u | sed -e 's/^/\ --uses-library\ /' | tr '\n' ' ') \
+ $$(cat $(PRIVATE_EXPORTED_SDK_LIBS_FILE) | grep -f $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional | sort -u | sed -e 's/^/\ --optional-uses-library\ /' | tr '\n' ' ') \
+ ) \
+ $< $@
+ rm $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
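As a usage sketch only (the package name and sources are placeholders; the variables are the inputs handled above), an Android.mk exercising the new manifest handling might look like:

  include $(CLEAR_VARS)
  LOCAL_PACKAGE_NAME := ExampleApp
  LOCAL_SRC_FILES := $(call all-java-files-under, src)
  LOCAL_SDK_VERSION := current
  # Skip merging manifests of static Android/AAR libraries into this app.
  LOCAL_DONT_MERGE_MANIFESTS := true
  # Accepted only when the module's minSdkVersion is 23 or higher (see the check above).
  LOCAL_USE_EMBEDDED_NATIVE_LIBS := true
  include $(BUILD_PACKAGE)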
diff --git a/target/board/treble_common_64.mk b/core/android_vts_host_config.mk
index 0a6eb172d4..38ba19de09 100644
--- a/target/board/treble_common_64.mk
+++ b/core/android_vts_host_config.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,13 @@
# limitations under the License.
#
-include build/make/target/board/treble_common.mk
+LOCAL_MODULE_CLASS := FAKE
+LOCAL_IS_HOST_MODULE := true
+LOCAL_COMPATIBILITY_SUITE := vts
-# Enable 64-bits binder
-TARGET_USES_64_BIT_BINDER := true
+include $(BUILD_SYSTEM)/base_rules.mk
+
+$(LOCAL_BUILT_MODULE):
+ @echo "VTS host-driven test target: $(PRIVATE_MODULE)"
+ $(hide) touch $@
-# Partition size is default 1.5GB (1536MB) for 64 bits projects
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
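Usage is roughly as follows for a host-driven VTS test module (the module name is a placeholder, and real tests typically reach this file through a VTS helper makefile rather than including it directly):

  include $(CLEAR_VARS)
  LOCAL_MODULE := VtsExampleHostTest
  include $(BUILD_SYSTEM)/android_vts_host_config.mk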
diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt
deleted file mode 100644
index 440e7f8862..0000000000
--- a/core/apicheck_msg_current.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-
-******************************
-You have tried to change the API from what has been previously approved.
-
-To make these errors go away, you have two choices:
- 1) You can add "@hide" javadoc comments to the methods, etc. listed in the
- errors above.
-
- 2) You can update current.txt by executing the following command:
- make update-api
-
- To submit the revised current.txt to the main Android repository,
- you will need approval.
-******************************
-
-
-
diff --git a/core/apicheck_msg_last.txt b/core/apicheck_msg_last.txt
deleted file mode 100644
index 2993157b16..0000000000
--- a/core/apicheck_msg_last.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
-******************************
-You have tried to change the API from what has been previously released in
-an SDK. Please fix the errors listed above.
-******************************
-
-
diff --git a/core/apidiff.mk b/core/apidiff.mk
deleted file mode 100644
index 36d75fe38b..0000000000
--- a/core/apidiff.mk
+++ /dev/null
@@ -1,181 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##
-##
-## Common to both jdiff and javadoc
-##
-##
-
-LOCAL_IS_HOST_MODULE := $(call true-or-empty,$(LOCAL_IS_HOST_MODULE))
-ifeq ($(LOCAL_IS_HOST_MODULE),true)
-my_prefix := HOST_
-LOCAL_HOST_PREFIX :=
-else
-my_prefix := TARGET_
-endif
-
-LOCAL_MODULE_CLASS := $(strip $(LOCAL_MODULE_CLASS))
-ifndef LOCAL_MODULE_CLASS
-$(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined)
-endif
-
-full_src_files := $(patsubst %,$(LOCAL_PATH)/%,$(LOCAL_SRC_FILES))
-out_dir := $(OUT_DOCS)/$(LOCAL_MODULE)/api_diff/current
-full_target := $(call doc-timestamp-for,$(LOCAL_MODULE)-diff)
-
-ifeq ($(LOCAL_IS_HOST_MODULE),true)
-$(full_target): PRIVATE_BOOTCLASSPATH :=
-full_java_libs := $(addprefix $(HOST_OUT_JAVA_LIBRARIES)/,\
- $(addsuffix $(COMMON_JAVA_PACKAGE_SUFFIX),$(LOCAL_JAVA_LIBRARIES)))
-full_java_lib_deps := $(full_java_libs)
-
-else
-
-ifneq ($(LOCAL_SDK_VERSION),)
- ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
- # Use android_stubs_current if LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
- LOCAL_JAVA_LIBRARIES := android_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
- LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_system_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
- LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_test_stubs_current)
- else
- # core_<ver> is subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
- # use the stub for <ver> when building for apps.
- _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
- LOCAL_JAVA_LIBRARIES := sdk_v$(_version) $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(_version))
- _version :=
- endif
-else
- LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
-endif # LOCAL_SDK_VERSION
-LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
-
-full_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES)) $(LOCAL_CLASSPATH)
-full_java_lib_deps := $(call java-lib-deps,$(LOCAL_JAVA_LIBRARIES)) $(LOCAL_CLASSPATH)
-endif # !LOCAL_IS_HOST_MODULE
-
-$(full_target): PRIVATE_CLASSPATH := $(subst $(space),:,$(full_java_libs))
-$(full_target): PRIVATE_DOCLAVA_CLASSPATH := $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX):$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-
-intermediates.COMMON := $(call local-intermediates-dir,COMMON)
-
-$(full_target): PRIVATE_SOURCE_PATH := $(call normalize-path-list,$(LOCAL_DROIDDOC_SOURCE_PATH))
-$(full_target): PRIVATE_JAVA_FILES := $(filter %.java,$(full_src_files))
-$(full_target): PRIVATE_JAVA_FILES += $(addprefix $($(my_prefix)OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
-$(full_target): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
-$(full_target): PRIVATE_SRC_LIST_FILE := $(intermediates.COMMON)/droiddoc-src-list
-
-ifneq ($(strip $(LOCAL_ADDITIONAL_JAVA_DIR)),)
-$(full_target): PRIVATE_ADDITIONAL_JAVA_DIR := $(LOCAL_ADDITIONAL_JAVA_DIR)
-endif
-
-# Lists the input files for the doc build into a text file
-# suitable for the @ syntax of javadoc.
-# $(1): the file to create
-# $(2): files to include
-# $(3): list of directories to search for java files in
-define prepare-doc-source-list
-$(hide) mkdir -p $(dir $(1))
-$(call dump-words-to-file, $(2), $(1))
-$(hide) for d in $(3) ; do find $$d -name '*.java' -and -not -name '.*' >> $(1) 2> /dev/null ; done ; true
-endef
-
-##
-##
-## jdiff only
-##
-##
-
-jdiff := \
- $(HOST_JDK_TOOLS_JAR) \
- $(HOST_OUT_JAVA_LIBRARIES)/jdiff$(COMMON_JAVA_PACKAGE_SUFFIX)
-
-doclava := \
- $(HOST_JDK_TOOLS_JAR) \
- $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-
-$(full_target): PRIVATE_NEWAPI := $(LOCAL_APIDIFF_NEWAPI)
-$(full_target): PRIVATE_OLDAPI := $(LOCAL_APIDIFF_OLDAPI)
-$(full_target): PRIVATE_OUT_DIR := $(out_dir)
-$(full_target): PRIVATE_OUT_NEWAPI := $(out_dir)/current.xml
-$(full_target): PRIVATE_OUT_OLDAPI := $(out_dir)/$(notdir $(basename $(LOCAL_APIDIFF_OLDAPI))).xml
-$(full_target): PRIVATE_DOCLETPATH := $(HOST_OUT_JAVA_LIBRARIES)/jdiff$(COMMON_JAVA_PACKAGE_SUFFIX)
-$(full_target): \
- $(full_src_files) \
- $(full_java_lib_deps) \
- $(jdiff) \
- $(doclava) \
- $(LOCAL_MODULE)-docs \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
- @echo Generating API diff: $(PRIVATE_OUT_DIR)
- @echo Old API: $(PRIVATE_OLDAPI)
- @echo New API: $(PRIVATE_NEWAPI)
- @echo Old XML: $(PRIVATE_OUT_OLDAPI)
- @echo New XML: $(PRIVATE_OUT_NEWAPI)
- $(hide) mkdir -p $(dir $@)
- @echo Converting API files to XML...
- $(hide) mkdir -p $(PRIVATE_OUT_DIR)
- $(hide) ( \
- $(JAVA) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH):$(PRIVATE_DOCLAVA_CLASSPATH):$(PRIVATE_BOOTCLASSPATH):$(HOST_JDK_TOOLS_JAR)) \
- com.google.doclava.apicheck.ApiCheck \
- -convert2xml \
- $(basename $(PRIVATE_NEWAPI)).txt \
- $(basename $(PRIVATE_OUT_NEWAPI)).xml \
- ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
- $(hide) ( \
- $(JAVA) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH):$(PRIVATE_DOCLAVA_CLASSPATH):$(PRIVATE_BOOTCLASSPATH):$(HOST_JDK_TOOLS_JAR)) \
- com.google.doclava.apicheck.ApiCheck \
- -convert2xml \
- $(basename $(PRIVATE_OLDAPI)).txt \
- $(basename $(PRIVATE_OUT_OLDAPI)).xml \
- ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
- @echo Running JDiff...
- $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
- $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
- $(hide) ( \
- $(JAVADOC) \
- -encoding UTF-8 \
- \@$(PRIVATE_SRC_LIST_FILE) \
- -J-Xmx1600m \
- -XDignore.symbol.file \
- -quiet \
- -doclet jdiff.JDiff \
- -docletpath $(PRIVATE_DOCLETPATH) \
- $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
- -sourcepath $(PRIVATE_SOURCE_PATH)$(addprefix :,$(PRIVATE_CLASSPATH)) \
- -d $(PRIVATE_OUT_DIR) \
- -newapi $(notdir $(basename $(PRIVATE_OUT_NEWAPI))) \
- -newapidir $(dir $(PRIVATE_OUT_NEWAPI)) \
- -oldapi $(notdir $(basename $(PRIVATE_OUT_OLDAPI))) \
- -oldapidir $(dir $(PRIVATE_OUT_OLDAPI)) \
- -javadocnew ../../../reference/ \
- && touch -f $@ \
- ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
-
-ALL_DOCS += $(full_target)
-
-.PHONY: $(LOCAL_MODULE)-diff
-$(LOCAL_MODULE)-diff : $(full_target)
diff --git a/core/app_certificate_validate.mk b/core/app_certificate_validate.mk
new file mode 100644
index 0000000000..c01526a689
--- /dev/null
+++ b/core/app_certificate_validate.mk
@@ -0,0 +1,10 @@
+
+ifeq (true,$(non_system_module))
+ ifneq (,$(filter $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))%,$(LOCAL_CERTIFICATE)))
+ CERTIFICATE_VIOLATION_MODULES += $(LOCAL_MODULE)
+ ifeq (true,$(PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT))
+ $(if $(filter $(LOCAL_MODULE),$(PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST)),,\
+        $(call pretty-error,A module outside the system partition cannot be signed with a certificate from the system partition.))
+ endif
+ endif
+endif
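On the product side, the knobs consumed above would be set roughly like this (the whitelisted module name is invented):

  # In a product makefile:
  PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT := true
  # Modules allowed to keep a system certificate while living outside /system.
  PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST := ExampleLegacyApp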
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
new file mode 100644
index 0000000000..dd263dd330
--- /dev/null
+++ b/core/app_prebuilt_internal.mk
@@ -0,0 +1,279 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+############################################################
+# Internal build rules for APPS prebuilt modules
+############################################################
+
+ifneq (APPS,$(LOCAL_MODULE_CLASS))
+$(call pretty-error,app_prebuilt_internal.mk is for APPS modules only)
+endif
+
+ifdef LOCAL_COMPRESSED_MODULE
+ ifneq (true,$(LOCAL_COMPRESSED_MODULE))
+ $(call pretty-error, Unknown value for LOCAL_COMPRESSED_MODULE $(LOCAL_COMPRESSED_MODULE))
+ endif
+ LOCAL_BUILT_MODULE_STEM := package.apk.gz
+ ifndef LOCAL_INSTALLED_MODULE_STEM
+ PACKAGES.$(LOCAL_MODULE).COMPRESSED := gz
+ LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk.gz
+ endif
+else # LOCAL_COMPRESSED_MODULE
+ LOCAL_BUILT_MODULE_STEM := package.apk
+ ifndef LOCAL_INSTALLED_MODULE_STEM
+ LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
+ endif
+endif # LOCAL_COMPRESSED_MODULE
+
+include $(BUILD_SYSTEM)/base_rules.mk
+built_module := $(LOCAL_BUILT_MODULE)
+
+# Run veridex on product, product_services and vendor modules.
+# We skip it for unbundled app builds where we cannot build veridex.
+module_run_appcompat :=
+ifeq (true,$(non_system_module))
+ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
+ module_run_appcompat := true
+endif
+endif
+endif
+
+PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
+
+my_extract_apk := $(strip $(LOCAL_EXTRACT_APK))
+
+# Select dpi-specific source
+ifdef LOCAL_DPI_VARIANTS
+my_dpi := $(firstword $(filter $(LOCAL_DPI_VARIANTS),$(PRODUCT_AAPT_PREF_CONFIG) $(PRODUCT_AAPT_PREBUILT_DPI)))
+ifdef my_dpi
+ifdef LOCAL_DPI_FILE_STEM
+my_prebuilt_dpi_file_stem := $(LOCAL_DPI_FILE_STEM)
+else
+my_prebuilt_dpi_file_stem := $(LOCAL_MODULE)_%.apk
+endif
+my_prebuilt_src_file := $(dir $(my_prebuilt_src_file))$(subst %,$(my_dpi),$(my_prebuilt_dpi_file_stem))
+
+ifneq ($(strip $(LOCAL_EXTRACT_DPI_APK)),)
+my_extract_apk := $(subst %,$(my_dpi),$(LOCAL_EXTRACT_DPI_APK))
+endif # LOCAL_EXTRACT_DPI_APK
+endif # my_dpi
+endif # LOCAL_DPI_VARIANTS
+
+ifdef my_extract_apk
+my_extracted_apk := $(intermediates)/extracted.apk
+
+$(my_extracted_apk): PRIVATE_EXTRACT := $(my_extract_apk)
+$(my_extracted_apk): $(my_prebuilt_src_file)
+ @echo Extract APK: $@
+ $(hide) mkdir -p $(dir $@) && rm -f $@
+ $(hide) unzip -p $< $(PRIVATE_EXTRACT) >$@
+
+my_prebuilt_src_file := $(my_extracted_apk)
+my_extracted_apk :=
+my_extract_apk :=
+ifeq ($(PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK),true)
+# If the product property is set, always preopt for extracted modules to prevent executing out of
+# the APK.
+my_preopt_for_extracted_apk := true
+endif
+endif
+
+dex_preopt_profile_src_file := $(my_prebuilt_src_file)
+
+rs_compatibility_jni_libs :=
+include $(BUILD_SYSTEM)/install_jni_libs.mk
+
+ifeq ($(LOCAL_CERTIFICATE),EXTERNAL)
+ # The magic string "EXTERNAL" means this package will be signed with
+ # the default dev key throughout the build process, but we expect
+ # the final package to be signed with a different key.
+ #
+ # This can be used for packages where we don't have access to the
+ # keys, but want the package to be predexopt'ed.
+ LOCAL_CERTIFICATE := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
+ PACKAGES.$(LOCAL_MODULE).EXTERNAL_KEY := 1
+
+ $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+ $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
+ $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
+endif
+ifeq ($(LOCAL_CERTIFICATE),)
+ # It is now a build error to add a prebuilt .apk without
+ # specifying a key for it.
+ $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
+else ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
+  # The magic string "PRESIGNED" means this package has already been
+  # signed with its release key.
+ #
+ # By setting .CERTIFICATE but not .PRIVATE_KEY, this package will be
+ # mentioned in apkcerts.txt (with certificate set to "PRESIGNED")
+ # but the dexpreopt process will not try to re-sign the app.
+ PACKAGES.$(LOCAL_MODULE).CERTIFICATE := PRESIGNED
+ PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
+else
+ # If this is not an absolute certificate, assign it to a generic one.
+ ifeq ($(dir $(strip $(LOCAL_CERTIFICATE))),./)
+ LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
+ endif
+
+ PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
+ PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
+ PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
+
+ $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+ $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
+ $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
+endif
+
+include $(BUILD_SYSTEM)/app_certificate_validate.mk
+
+# Disable dex-preopt of prebuilts to save space, if requested.
+ifndef LOCAL_DEX_PREOPT
+ifeq ($(DONT_DEXPREOPT_PREBUILTS),true)
+LOCAL_DEX_PREOPT := false
+endif
+endif
+
+# If the module is a compressed module, we don't pre-opt it because its final
+# installation location will be the data partition.
+ifdef LOCAL_COMPRESSED_MODULE
+LOCAL_DEX_PREOPT := false
+endif
+
+my_dex_jar := $(my_prebuilt_src_file)
+
+#######################################
+# defines built_odex along with rule to install odex
+include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+#######################################
+ifneq ($(LOCAL_REPLACE_PREBUILT_APK_INSTALLED),)
+# There is a replacement for the prebuilt .apk we can install without any processing.
+$(built_module) : $(LOCAL_REPLACE_PREBUILT_APK_INSTALLED)
+ $(transform-prebuilt-to-target)
+
+else # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
+# Sign and align non-presigned .apks.
+# Determine the embedded prebuilt JNI libs to uncompress (empty means all).
+ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
+# For PRESIGNED apks we must uncompress every .so file:
+# even if the .so file isn't for the current TARGET_ARCH,
+# we can't strip the file.
+embedded_prebuilt_jni_libs :=
+endif
+ifndef embedded_prebuilt_jni_libs
+# No LOCAL_PREBUILT_JNI_LIBS, uncompress all.
+embedded_prebuilt_jni_libs :=
+endif
+$(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs)
+
+ifdef LOCAL_COMPRESSED_MODULE
+$(built_module) : $(MINIGZIP)
+endif
+
+ifeq ($(module_run_appcompat),true)
+$(built_module) : $(appcompat-files)
+$(LOCAL_BUILT_MODULE): PRIVATE_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+endif
+
+ifneq ($(BUILD_PLATFORM_ZIP),)
+$(built_module) : .KATI_IMPLICIT_OUTPUTS := $(dir $(LOCAL_BUILT_MODULE))package.dex.apk
+endif
+ifneq ($(LOCAL_CERTIFICATE),PRESIGNED)
+ifdef LOCAL_DEX_PREOPT
+$(built_module) : PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
+$(built_module) : $(intermediates)/strip.sh
+$(built_module) : | $(DEXPREOPT_STRIP_DEPS)
+$(built_module) : .KATI_DEPFILE := $(built_module).d
+endif
+endif
+ifeq ($(module_run_appcompat),true)
+$(built_module) : $(AAPT2)
+endif
+$(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR)
+ $(transform-prebuilt-to-target)
+ $(uncompress-prebuilt-embedded-jni-libs)
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+ $(uncompress-dexs)
+endif # LOCAL_UNCOMPRESS_DEX
+ifdef LOCAL_DEX_PREOPT
+ifneq ($(BUILD_PLATFORM_ZIP),)
+ @# Keep a copy of apk with classes.dex unstripped
+ $(hide) cp -f $@ $(dir $@)package.dex.apk
+endif # BUILD_PLATFORM_ZIP
+endif # LOCAL_DEX_PREOPT
+ifneq ($(LOCAL_CERTIFICATE),PRESIGNED)
+ @# Only strip out files if we can re-sign the package.
+# Run appcompat before stripping the classes.dex file.
+ifeq ($(module_run_appcompat),true)
+ $(call appcompat-header, aapt2)
+ $(run-appcompat)
+endif # module_run_appcompat
+ifdef LOCAL_DEX_PREOPT
+ mv -f $@ $@.tmp
+ $(PRIVATE_STRIP_SCRIPT) $@.tmp $@
+endif # LOCAL_DEX_PREOPT
+ $(sign-package)
+ # No need for align-package because sign-package takes care of alignment
+else # LOCAL_CERTIFICATE == PRESIGNED
+ $(align-package)
+endif # LOCAL_CERTIFICATE
+ifdef LOCAL_COMPRESSED_MODULE
+ $(compress-package)
+endif # LOCAL_COMPRESSED_MODULE
+endif # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
+
+
+###############################
+## Install split apks.
+ifdef LOCAL_PACKAGE_SPLITS
+ifdef LOCAL_COMPRESSED_MODULE
+$(error $(LOCAL_MODULE): LOCAL_COMPRESSED_MODULE is not currently supported for split installs)
+endif # LOCAL_COMPRESSED_MODULE
+
+# LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
+built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+
+# Rules to sign the split apks.
+my_src_dir := $(sort $(dir $(LOCAL_PACKAGE_SPLITS)))
+ifneq (1,$(words $(my_src_dir)))
+$(error You must put all the split source apks in the same folder: $(LOCAL_PACKAGE_SPLITS))
+endif
+my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
+
+$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
+$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
+ $(copy-file-to-new-target)
+ $(sign-package)
+
+# Rules to install the split apks.
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
+ @echo "Install: $@"
+ $(copy-file-to-new-target)
+
+# Register the additional built and installed files.
+ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
+ $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+
+# Make sure to install the splits when you run "make <module_name>".
+$(my_all_targets): $(installed_apk_splits)
+
+endif # LOCAL_PACKAGE_SPLITS
+
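For context, a minimal prebuilt-app definition that would run through the rules above might look like this (module name, APK path, and certificate choice are placeholders):

  include $(CLEAR_VARS)
  LOCAL_MODULE := ExamplePrebuilt
  LOCAL_MODULE_CLASS := APPS
  LOCAL_SRC_FILES := prebuilt/ExamplePrebuilt.apk
  # PRESIGNED keeps the APK's existing signature; EXTERNAL signs with the
  # default dev key during the build and expects re-signing later.
  LOCAL_CERTIFICATE := PRESIGNED
  LOCAL_DEX_PREOPT := false
  include $(BUILD_PREBUILT)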
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index c359bacdfe..d4ca56f1b8 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -17,48 +17,49 @@
# This build rule allows TradeFed test config file to be created based on
# following inputs:
# is_native: If the test is a native test.
-# LOCAL_MANIFEST_FILE: Name of the AndroidManifest file for the test. If it's
-# not set, default value `AndroidManifest.xml` will be used.
+# full_android_manifest: Name of the AndroidManifest file for the test.
# Output:
# autogen_test_config_file: Path to the test config file generated.
autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
ifeq (true,$(is_native))
+ifeq ($(LOCAL_NATIVE_BENCHMARK),true)
+autogen_test_config_template := $(NATIVE_BENCHMARK_TEST_CONFIG_TEMPLATE)
+else
+ ifeq ($(LOCAL_IS_HOST_MODULE),true)
+ autogen_test_config_template := $(NATIVE_HOST_TEST_CONFIG_TEMPLATE)
+ else
+ autogen_test_config_template := $(NATIVE_TEST_CONFIG_TEMPLATE)
+ endif
+endif
# Auto generating test config file for native test
-$(autogen_test_config_file) : $(NATIVE_TEST_CONFIG_TEMPLATE)
+$(autogen_test_config_file): PRIVATE_MODULE_NAME := $(LOCAL_MODULE)
+$(autogen_test_config_file) : $(autogen_test_config_template)
@echo "Auto generating test config $(notdir $@)"
- $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE)&g' $^ > $@
+ $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{EXTRA_CONFIGS}&&g' $< > $@
my_auto_generate_config := true
else
# Auto generating test config file for instrumentation test
-ifeq ($(strip $(LOCAL_MANIFEST_FILE)),)
- LOCAL_MANIFEST_FILE := AndroidManifest.xml
-endif
-ifdef LOCAL_FULL_MANIFEST_FILE
- my_android_manifest := $(LOCAL_FULL_MANIFEST_FILE)
-else
- my_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
-endif
-ifneq (,$(wildcard $(my_android_manifest)))
+ifneq (,$(full_android_manifest))
$(autogen_test_config_file): PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT := $(AUTOGEN_TEST_CONFIG_SCRIPT)
-$(autogen_test_config_file): PRIVATE_TEST_CONFIG_ANDROID_MANIFEST := $(my_android_manifest)
+$(autogen_test_config_file): PRIVATE_TEST_CONFIG_ANDROID_MANIFEST := $(full_android_manifest)
$(autogen_test_config_file): PRIVATE_EMPTY_TEST_CONFIG := $(EMPTY_TEST_CONFIG)
$(autogen_test_config_file): PRIVATE_TEMPLATE := $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE)
-$(autogen_test_config_file) : $(my_android_manifest) $(EMPTY_TEST_CONFIG) $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE) $(AUTOGEN_TEST_CONFIG_SCRIPT)
+$(autogen_test_config_file) : $(full_android_manifest) $(EMPTY_TEST_CONFIG) $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE) $(AUTOGEN_TEST_CONFIG_SCRIPT)
@echo "Auto generating test config $(notdir $@)"
@rm -f $@
$(hide) $(PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT) $@ $(PRIVATE_TEST_CONFIG_ANDROID_MANIFEST) $(PRIVATE_EMPTY_TEST_CONFIG) $(PRIVATE_TEMPLATE)
my_auto_generate_config := true
-endif # ifeq (,$(wildcard $(my_android_manifest)))
+endif # ifneq (,$(full_android_manifest))
endif # ifneq (true,$(is_native))
ifeq (true,$(my_auto_generate_config))
LOCAL_INTERMEDIATE_TARGETS += $(autogen_test_config_file)
$(LOCAL_BUILT_MODULE): $(autogen_test_config_file)
ALL_MODULES.$(my_register_name).auto_test_config := true
+ $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_autogen := true
else
autogen_test_config_file :=
endif
-my_android_manifest :=
my_auto_generate_config :=
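As a sketch of the consumer side (names are invented), a native test that ships no AndroidTest.xml gets its config generated from the template selected above:

  include $(CLEAR_VARS)
  LOCAL_MODULE := example_native_test
  LOCAL_SRC_FILES := example_native_test.cpp
  LOCAL_COMPATIBILITY_SUITE := device-tests
  # Setting LOCAL_NATIVE_BENCHMARK := true would switch the rule above to
  # NATIVE_BENCHMARK_TEST_CONFIG_TEMPLATE instead.
  include $(BUILD_NATIVE_TEST)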
diff --git a/core/aux_config.mk b/core/aux_config.mk
index 6a5cd63b7f..a508a2dd15 100644
--- a/core/aux_config.mk
+++ b/core/aux_config.mk
@@ -47,7 +47,6 @@ $(eval AUX_OUT_$(1) := $(AUX_OUT_ROOT_$(1))/$(AUX_OS_$(1))-$(AUX_ARCH_$(1))-$(AU
$(eval AUX_OUT_INTERMEDIATES_$(1) := $(AUX_OUT_$(1))/obj) \
$(eval AUX_OUT_COMMON_INTERMEDIATES_$(1) := $(AUX_COMMON_OUT_ROOT_$(1))/obj) \
$(eval AUX_OUT_HEADERS_$(1) := $(AUX_OUT_INTERMEDIATES_$(1))/include) \
-$(eval AUX_OUT_INTERMEDIATE_LIBRARIES_$(1) := $(AUX_OUT_INTERMEDIATES_$(1))/lib) \
$(eval AUX_OUT_NOTICE_FILES_$(1) := $(AUX_OUT_INTERMEDIATES_$(1))/NOTICE_FILES) \
$(eval AUX_OUT_FAKE_$(1) := $(AUX_OUT_$(1))/fake_packages) \
$(eval AUX_OUT_GEN_$(1) := $(AUX_OUT_$(1))/gen) \
@@ -78,7 +77,6 @@ $(eval AUX_OUT := $(AUX_OUT_$(1))) \
$(eval AUX_OUT_INTERMEDIATES := $(AUX_OUT_INTERMEDIATES_$(1))) \
$(eval AUX_OUT_COMMON_INTERMEDIATES := $(AUX_OUT_COMMON_INTERMEDIATES_$(1))) \
$(eval AUX_OUT_HEADERS := $(AUX_OUT_HEADERS_$(1))) \
-$(eval AUX_OUT_INTERMEDIATE_LIBRARIES := $(AUX_OUT_INTERMEDIATE_LIBRARIES_$(1))) \
$(eval AUX_OUT_NOTICE_FILES := $(AUX_OUT_NOTICE_FILES_$(1))) \
$(eval AUX_OUT_FAKE := $(AUX_OUT_FAKE_$(1))) \
$(eval AUX_OUT_GEN := $(AUX_OUT_GEN_$(1))) \
@@ -183,5 +181,3 @@ $(foreach v,$(AUX_ALL_VARIANTS),\
endif
INSTALLED_AUX_TARGETS :=
-
-droidcore: auxiliary
diff --git a/core/aux_executable.mk b/core/aux_executable.mk
index daf30e705c..5395e61964 100644
--- a/core/aux_executable.mk
+++ b/core/aux_executable.mk
@@ -80,7 +80,6 @@ $(cleantarget): PRIVATE_CLEAN_FILES += \
$(linked_module) \
# Define PRIVATE_ variables from global vars
-$(linked_module): PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES := $(AUX_OUT_INTERMEDIATE_LIBRARIES)
$(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
diff --git a/core/aux_toolchain.mk b/core/aux_toolchain.mk
index de0b139d30..c7102288d0 100644
--- a/core/aux_toolchain.mk
+++ b/core/aux_toolchain.mk
@@ -50,4 +50,3 @@ LOCAL_NO_DEFAULT_COMPILER_FLAGS := true
LOCAL_SYSTEM_SHARED_LIBRARIES :=
LOCAL_CXX_STL := none
LOCAL_NO_PIC := true
-LOCAL_NO_LIBCOMPILER_RT := true
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 22e7aef465..94aa1e4a92 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -31,6 +31,7 @@ LOCAL_MODULE := $(strip $(LOCAL_MODULE))
ifeq ($(LOCAL_MODULE),)
$(error $(LOCAL_PATH): LOCAL_MODULE is not defined)
endif
+$(call verify-module-name)
LOCAL_IS_HOST_MODULE := $(strip $(LOCAL_IS_HOST_MODULE))
LOCAL_IS_AUX_MODULE := $(strip $(LOCAL_IS_AUX_MODULE))
@@ -65,6 +66,12 @@ else
my_host_cross :=
endif
+ifeq (true, $(LOCAL_PRODUCT_MODULE))
+ifneq (,$(filter $(LOCAL_MODULE),$(PRODUCT_FORCE_PRODUCT_MODULES_TO_SYSTEM_PARTITION)))
+ LOCAL_PRODUCT_MODULE :=
+endif
+endif
+
_path := $(LOCAL_MODULE_PATH) $(LOCAL_MODULE_PATH_32) $(LOCAL_MODULE_PATH_64)
ifneq ($(filter $(TARGET_OUT_VENDOR)%,$(_path)),)
LOCAL_VENDOR_MODULE := true
@@ -74,6 +81,8 @@ else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(_path)),)
LOCAL_ODM_MODULE := true
else ifneq ($(filter $(TARGET_OUT_PRODUCT)/%,$(_path)),)
LOCAL_PRODUCT_MODULE := true
+else ifneq ($(filter $(TARGET_OUT_PRODUCT_SERVICES)/%,$(_path)),)
+LOCAL_PRODUCT_SERVICES_MODULE := true
endif
_path :=
@@ -87,6 +96,12 @@ ifneq ($(filter-out $(LOCAL_PROPRIETARY_MODULE),$(LOCAL_VENDOR_MODULE))$(filter-
$(call pretty-error,Only one of LOCAL_PROPRIETARY_MODULE[$(LOCAL_PROPRIETARY_MODULE)] and LOCAL_VENDOR_MODULE[$(LOCAL_VENDOR_MODULE)] may be set, or they must be equal)
endif
+non_system_module := $(filter true, \
+ $(LOCAL_PRODUCT_MODULE) \
+ $(LOCAL_PRODUCT_SERVICES_MODULE) \
+ $(LOCAL_VENDOR_MODULE) \
+ $(LOCAL_PROPRIETARY_MODULE))
+
include $(BUILD_SYSTEM)/local_vndk.mk
include $(BUILD_SYSTEM)/local_systemsdk.mk
@@ -147,6 +162,19 @@ ifneq ($(filter $(my_module_tags),user),)
$(error user tag detected on module.)
endif
+my_bad_module_tags := $(filter eng debug,$(my_module_tags))
+ifdef my_bad_module_tags
+ ifeq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+ $(call pretty-warning,LOCAL_MODULE_TAGS := $(my_bad_module_tags) does not do anything for uninstallable modules)
+ endif
+ ifneq ($(BUILD_BROKEN_ENG_DEBUG_TAGS),true)
+ $(call pretty-error,LOCAL_MODULE_TAGS := $(my_bad_module_tags) is obsolete. See $(CHANGES_URL)#LOCAL_MODULE_TAGS)
+ else
+ $(call pretty-warning,LOCAL_MODULE_TAGS := $(my_bad_module_tags) is deprecated. See $(CHANGES_URL)#LOCAL_MODULE_TAGS)
+ endif
+ my_bad_module_tags :=
+endif
+
# Only the tags mentioned in this test are expected to be set by module
# makefiles. Anything else is either a typo or a source of unexpected
# behaviors.
@@ -196,14 +224,16 @@ my_module_relative_path := $(strip $(LOCAL_MODULE_RELATIVE_PATH))
ifdef LOCAL_IS_HOST_MODULE
partition_tag :=
else
-ifeq (true,$(LOCAL_VENDOR_MODULE))
+ifeq (true,$(strip $(LOCAL_VENDOR_MODULE)))
partition_tag := _VENDOR
-else ifeq (true,$(LOCAL_OEM_MODULE))
+else ifeq (true,$(strip $(LOCAL_OEM_MODULE)))
partition_tag := _OEM
-else ifeq (true,$(LOCAL_ODM_MODULE))
+else ifeq (true,$(strip $(LOCAL_ODM_MODULE)))
partition_tag := _ODM
-else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+else ifeq (true,$(strip $(LOCAL_PRODUCT_MODULE)))
partition_tag := _PRODUCT
+else ifeq (true,$(strip $(LOCAL_PRODUCT_SERVICES_MODULE)))
+ partition_tag := _PRODUCT_SERVICES
else ifeq (NATIVE_TESTS,$(LOCAL_MODULE_CLASS))
partition_tag := _DATA
else
@@ -212,6 +242,31 @@ else
partition_tag := $(if $(call should-install-to-system,$(my_module_tags)),,_DATA)
endif
endif
+# For test modules that lack a suite tag, set null-suite as the default.
+# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
+# This is because they are the only tests we currently auto-generate test configs for.
+ifndef LOCAL_COMPATIBILITY_SUITE
+ ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK, $(LOCAL_MODULE_CLASS)),)
+ LOCAL_COMPATIBILITY_SUITE := null-suite
+ endif
+ ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter $(my_module_tags),tests),)
+ LOCAL_COMPATIBILITY_SUITE := null-suite
+ endif
+ endif
+endif
+
+use_testcase_folder :=
+ifeq ($(my_module_path),)
+ ifneq ($(LOCAL_MODULE),$(filter $(LOCAL_MODULE),$(DEFAULT_DATA_OUT_MODULES)))
+ ifdef LOCAL_COMPATIBILITY_SUITE
+ ifneq (true, $(LOCAL_IS_HOST_MODULE))
+ use_testcase_folder := true
+ endif
+ endif
+ endif
+endif
+
ifeq ($(my_module_path),)
install_path_var := $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT$(partition_tag)_$(LOCAL_MODULE_CLASS)
ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
@@ -219,6 +274,16 @@ ifeq ($(my_module_path),)
endif
my_module_path := $($(install_path_var))
+
+  # If use_testcase_folder is set and LOCAL_MODULE_PATH is not set,
+  # override the default install path with the per-module testcases directory.
+ ifeq ($(use_testcase_folder),true)
+ arch_dir := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+ testcase_folder := $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)/$(arch_dir)
+ my_module_path := $(testcase_folder)
+ arch_dir :=
+ endif
+
ifeq ($(strip $(my_module_path)),)
$(error $(LOCAL_PATH): unhandled install path "$(install_path_var) for $(LOCAL_MODULE)")
endif
@@ -266,14 +331,16 @@ intermediates.COMMON := $(call local-intermediates-dir,COMMON)
generated_sources_dir := $(call local-generated-sources-dir)
ifneq ($(LOCAL_OVERRIDES_MODULES),)
- ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
- ifndef LOCAL_IS_HOST_MODULE
+ ifndef LOCAL_IS_HOST_MODULE
+ ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
EXECUTABLES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_MODULES))
+ else ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
+ SHARED_LIBRARIES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_MODULES))
else
- $(call pretty-error,host modules cannot use LOCAL_OVERRIDES_MODULES)
+ $(call pretty-error,LOCAL_MODULE_CLASS := $(LOCAL_MODULE_CLASS) cannot use LOCAL_OVERRIDES_MODULES)
endif
else
- $(call pretty-error,LOCAL_MODULE_CLASS := $(LOCAL_MODULE_CLASS) cannot use LOCAL_OVERRIDES_MODULES)
+ $(call pretty-error,host modules cannot use LOCAL_OVERRIDES_MODULES)
endif
endif
@@ -284,16 +351,6 @@ include $(BUILD_SYSTEM)/configure_module_stem.mk
LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem)
-# OVERRIDE_BUILT_MODULE_PATH is only allowed to be used by the
-# internal SHARED_LIBRARIES build files.
-OVERRIDE_BUILT_MODULE_PATH := $(strip $(OVERRIDE_BUILT_MODULE_PATH))
-ifdef OVERRIDE_BUILT_MODULE_PATH
- ifneq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
- $(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
- endif
- $(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE),$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)))
-endif
-
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
# Apk and its attachments reside in its own subdir.
ifeq ($(LOCAL_MODULE_CLASS),APPS)
@@ -302,7 +359,9 @@ ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
# Neither do Runtime Resource Overlay apks, which contain just the overlaid resources.
else ifeq ($(LOCAL_IS_RUNTIME_RESOURCE_OVERLAY),true)
else
- my_module_path := $(my_module_path)/$(LOCAL_MODULE)
+ ifneq ($(use_testcase_folder),true)
+ my_module_path := $(my_module_path)/$(LOCAL_MODULE)
+ endif
endif
endif
LOCAL_INSTALLED_MODULE := $(my_module_path)/$(my_installed_module_stem)
@@ -317,7 +376,11 @@ LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_BUILT_MODULE)
# As .KATI_RESTAT is specified to .toc files and commit-change-for-toc is used,
# dependent binaries of a .toc file will be rebuilt only when the content of
# the .toc file is changed.
+#
+# Don't create .toc files for Soong shared libraries; that is handled in
+# Soong and in soong_cc_prebuilt.mk.
###########################################################
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_BUILT_MODULE).toc
$(LOCAL_BUILT_MODULE).toc: $(LOCAL_BUILT_MODULE)
@@ -328,10 +391,6 @@ $(LOCAL_BUILT_MODULE).toc: $(LOCAL_BUILT_MODULE)
.KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
# Build .toc file when using mm, mma, or make $(my_register_name)
$(my_all_targets): $(LOCAL_BUILT_MODULE).toc
-
-ifdef OVERRIDE_BUILT_MODULE_PATH
-$(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE).toc,$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc))
-$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc: $(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)
endif
endif
@@ -351,6 +410,7 @@ endif
## make clean- targets
###########################################################
cleantarget := clean-$(my_register_name)
+.PHONY: $(cleantarget)
$(cleantarget) : PRIVATE_MODULE := $(my_register_name)
$(cleantarget) : PRIVATE_CLEAN_FILES := \
$(LOCAL_BUILT_MODULE) \
@@ -379,7 +439,7 @@ $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_MODULE:= $(my_register_name)
# We name both BUILT and INSTALLED in case
# LOCAL_UNINSTALLABLE_MODULE is set.
.PHONY: $(my_all_targets)
-$(my_all_targets): $(LOCAL_BUILT_MODULE) $(LOCAL_INSTALLED_MODULE)
+$(my_all_targets): $(LOCAL_BUILT_MODULE) $(LOCAL_INSTALLED_MODULE) $(LOCAL_ADDITIONAL_CHECKED_MODULE)
.PHONY: $(my_register_name)
$(my_register_name): $(my_all_targets)
@@ -406,12 +466,21 @@ $(foreach c, $(my_path_components),\
my_init_rc_installed :=
my_init_rc_pairs :=
my_installed_symlinks :=
+my_default_test_module :=
+ifeq ($(use_testcase_folder),true)
+arch_dir := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+my_default_test_module := $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)/$(arch_dir)/$(my_installed_module_stem)
+arch_dir :=
+endif
+
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+ifneq ($(LOCAL_INSTALLED_MODULE),$(my_default_test_module))
$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
@echo "Install: $@"
$(copy-file-to-new-target)
$(PRIVATE_POST_INSTALL_CMD)
+endif
ifndef LOCAL_IS_HOST_MODULE
# Rule to install the module's companion init.rc.
@@ -423,7 +492,7 @@ my_init_rc_installed := $(foreach rc,$(my_init_rc_pairs),$(call word-colon,2,$(r
# Make sure we only set up the copy rules once, even if another arch variant
# shares a common LOCAL_INIT_RC.
my_init_rc_new_pairs := $(filter-out $(ALL_INIT_RC_INSTALLED_PAIRS),$(my_init_rc_pairs))
-my_init_rc_new_installed := $(call copy-many-files,$(my_init_rc_new_pairs))
+my_init_rc_new_installed := $(call copy-many-init-script-files-checked,$(my_init_rc_new_pairs))
ALL_INIT_RC_INSTALLED_PAIRS += $(my_init_rc_new_pairs)
$(my_all_targets) : $(my_init_rc_installed)
@@ -440,6 +509,30 @@ $(my_all_targets) : | $(my_installed_symlinks)
endif # !LOCAL_UNINSTALLABLE_MODULE
###########################################################
+## VINTF manifest fragment goals
+###########################################################
+
+my_vintf_installed:=
+my_vintf_pairs:=
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+ifndef LOCAL_IS_HOST_MODULE
+ifneq ($(strip $(LOCAL_VINTF_FRAGMENTS)),)
+
+my_vintf_pairs := $(foreach xml,$(LOCAL_VINTF_FRAGMENTS),$(LOCAL_PATH)/$(xml):$(TARGET_OUT$(partition_tag)_ETC)/vintf/manifest/$(notdir $(xml)))
+my_vintf_installed := $(foreach xml,$(my_vintf_pairs),$(call word-colon,2,$(xml)))
+
+# Only set up copy rules once, even if another arch variant shares it
+my_vintf_new_pairs := $(filter-out $(ALL_VINTF_MANIFEST_FRAGMENTS_LIST),$(my_vintf_pairs))
+my_vintf_new_installed := $(call copy-many-vintf-manifest-files-checked,$(my_vintf_pairs))
+
+ALL_VINTF_MANIFEST_FRAGMENTS_LIST += $(my_vintf_new_pairs)
+
+$(my_all_targets) : $(my_vintf_installed)
+endif # LOCAL_VINTF_FRAGMENTS
+endif # !LOCAL_IS_HOST_MODULE
+endif # !LOCAL_UNINSTALLABLE_MODULE
+
+###########################################################
## CHECK_BUILD goals
###########################################################
my_checked_module :=
@@ -451,6 +544,8 @@ else
my_checked_module := $(LOCAL_BUILT_MODULE)
endif
+my_checked_module += $(LOCAL_ADDITIONAL_CHECKED_MODULE)
+
# If they request that this module not be checked, then don't.
# PLEASE DON'T SET THIS. ANY PLACES THAT SET THIS WITHOUT
# GOOD REASON WILL HAVE IT REMOVED.
@@ -482,8 +577,9 @@ my_test_data_pairs := $(strip $(foreach td,$(LOCAL_TEST_DATA), \
$(eval _src_base := $(call word-colon,1,$(td))), \
$(eval _src_base := $(LOCAL_PATH)) \
$(eval _file := $(call word-colon,1,$(td)))) \
- $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
- $(if $(filter /%,$(_src_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_src_base) $(_file))) \
+ $(if $(call streq,$(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)),, \
+ $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
+ $(if $(filter /%,$(_src_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_src_base) $(_file)))) \
$(eval my_test_data_file_pairs := $(my_test_data_file_pairs) $(call append-path,$(_src_base),$(_file)):$(_file)) \
$(call append-path,$(_src_base),$(_file)):$(call append-path,$(my_module_path),$(_file))))
@@ -494,20 +590,6 @@ endif
endif
endif
-# For test modules that lack a suite tag, set null-suite as the default.
-# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
-# This is because they are the only tests we currently auto-generate test configs for.
-ifndef LOCAL_COMPATIBILITY_SUITE
-ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK, $(LOCAL_MODULE_CLASS)),)
-LOCAL_COMPATIBILITY_SUITE := null-suite
-endif
-ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
-ifneq ($(filter $(my_module_tags),tests),)
-LOCAL_COMPATIBILITY_SUITE := null-suite
-endif
-endif
-endif
-
###########################################################
## Compatibility suite files.
###########################################################
@@ -522,37 +604,42 @@ ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
is_native := true
multi_arch := true
endif
-ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
- is_native := true
- multi_arch := true
-endif
ifdef LOCAL_MULTILIB
multi_arch := true
endif
+
ifdef multi_arch
+arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+else
+ifeq ($(use_testcase_folder),true)
arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
endif
+endif
+
multi_arch :=
+my_default_test_module :=
+my_default_test_module := $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)$(arch_dir)/$(my_installed_module_stem)
+ifneq ($(LOCAL_INSTALLED_MODULE),$(my_default_test_module))
+# Install into the testcase folder
+$(LOCAL_INSTALLED_MODULE) : $(my_default_test_module)
+endif
+
# The module itself.
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
- $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem))))
+ $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem))) \
+ $(eval my_compat_dist_config_$(suite) := ))
-# Make sure we only add the files once for multilib modules.
-ifndef $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
-$(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files := true
-# LOCAL_COMPATIBILITY_SUPPORT_FILES is a list of <src>[:<dest>].
-$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_$(suite) += $(foreach f, $(LOCAL_COMPATIBILITY_SUPPORT_FILES), \
- $(eval p := $(subst :,$(space),$(f))) \
- $(eval s := $(word 1,$(p))) \
- $(eval n := $(or $(word 2,$(p)),$(notdir $(word 1, $(p))))) \
- $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
- $(s):$(dir)/$(n)))))
-
-test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
+# Auto-generate build config.
+ifneq (,$(LOCAL_FULL_TEST_CONFIG))
+ test_config := $(LOCAL_FULL_TEST_CONFIG)
+else ifneq (,$(LOCAL_TEST_CONFIG))
+ test_config := $(LOCAL_PATH)/$(LOCAL_TEST_CONFIG)
+else
+ test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
+endif
ifeq (,$(test_config))
ifneq (true,$(is_native))
is_instrumentation_test := true
@@ -579,31 +666,67 @@ ifeq (,$(test_config))
endif
endif
endif
-
is_instrumentation_test :=
-ifneq (,$(test_config))
-$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
- $(test_config):$(dir)/$(LOCAL_MODULE).config)))
-endif
+# Make sure we only add the files once for multilib modules.
+ifdef $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
+ # Sync the auto_test_config value for multilib modules.
+ ifdef $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_autogen
+ ALL_MODULES.$(my_register_name).auto_test_config := true
+ endif
+else
+ $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files := true
+ # LOCAL_COMPATIBILITY_SUPPORT_FILES is a list of <src>[:<dest>].
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_$(suite) += $(foreach f, $(LOCAL_COMPATIBILITY_SUPPORT_FILES), \
+ $(eval p := $(subst :,$(space),$(f))) \
+ $(eval s := $(word 1,$(p))) \
+ $(eval n := $(or $(word 2,$(p)),$(notdir $(word 1, $(p))))) \
+ $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(s):$(dir)/$(n)))))
+
+ ifneq (,$(test_config))
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(test_config):$(dir)/$(LOCAL_MODULE).config)))
+ endif
-test_config :=
+ ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(LOCAL_PATH)/DynamicConfig.xml:$(dir)/$(LOCAL_MODULE).dynamic)))
+ endif
-ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
-$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
- $(LOCAL_PATH)/DynamicConfig.xml:$(dir)/$(LOCAL_MODULE).dynamic)))
+ ifneq (,$(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config))
+ $(foreach extra_config, $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config), \
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(extra_config):$(dir)/$(notdir $(extra_config))))))
+ endif
+endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
+
+# HACK: pretend a soong LOCAL_FULL_TEST_CONFIG is autogenerated by setting the flag in
+# module-info.json
+# TODO: (b/113029686) Add explicit flag from Soong to determine if a test was
+# autogenerated.
+ifneq (,$(filter $(SOONG_OUT_DIR)%,$(LOCAL_FULL_TEST_CONFIG)))
+ ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ ALL_MODULES.$(my_register_name).auto_test_config := true
+ endif
endif
-ifneq (,$(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config))
-$(foreach extra_config, $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config), \
+
+ifeq ($(use_testcase_folder),true)
+ifneq ($(my_test_data_file_pairs),)
+$(foreach pair, $(my_test_data_file_pairs), \
+ $(eval parts := $(subst :,$(space),$(pair))) \
+ $(eval src_path := $(word 1,$(parts))) \
+ $(eval file := $(word 2,$(parts))) \
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
- $(extra_config):$(dir)/$(notdir $(extra_config))))))
+ $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
+ $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data))))))
endif
-endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
-
+else
ifneq ($(my_test_data_file_pairs),)
$(foreach pair, $(my_test_data_file_pairs), \
$(eval parts := $(subst :,$(space),$(pair))) \
@@ -613,18 +736,42 @@ $(foreach pair, $(my_test_data_file_pairs), \
$(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
$(src_path):$(call append-path,$(dir),$(file))))))
endif
+endif
+
+
arch_dir :=
is_native :=
$(call create-suite-dependencies)
+$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_config_$(suite) := ))
endif # LOCAL_COMPATIBILITY_SUITE
###########################################################
+## Add test module to ALL_DISABLED_PRESUBMIT_TESTS if LOCAL_PRESUBMIT_DISABLED is set to true.
+###########################################################
+ifeq ($(LOCAL_PRESUBMIT_DISABLED),true)
+ ALL_DISABLED_PRESUBMIT_TESTS += $(LOCAL_MODULE)
+endif # LOCAL_PRESUBMIT_DISABLED
+
+###########################################################
## Register with ALL_MODULES
###########################################################
+ifeq ($(filter $(my_register_name),$(ALL_MODULES)),)
+  # These keys are no longer used; they've been replaced by keys that specify
+ # target/host/host_cross (REQUIRED_FROM_TARGET / REQUIRED_FROM_HOST) and similar.
+ #
+ # Marking them obsolete to ensure that anyone using these internal variables looks for
+ # alternates.
+ $(KATI_obsolete_var ALL_MODULES.$(my_register_name).REQUIRED)
+ $(KATI_obsolete_var ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED)
+ $(KATI_obsolete_var ALL_MODULES.$(my_register_name).HOST_REQUIRED)
+ $(KATI_obsolete_var ALL_MODULES.$(my_register_name).TARGET_REQUIRED)
+endif
+
ALL_MODULES += $(my_register_name)
# Don't use += on subvars, or else they'll end up being
@@ -639,15 +786,19 @@ ALL_MODULES.$(my_register_name).CHECKED := \
$(ALL_MODULES.$(my_register_name).CHECKED) $(my_checked_module)
ALL_MODULES.$(my_register_name).BUILT := \
$(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
+ifndef LOCAL_IS_HOST_MODULE
+ALL_MODULES.$(my_register_name).TARGET_BUILT := \
+ $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
+endif
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
ALL_MODULES.$(my_register_name).INSTALLED := \
$(strip $(ALL_MODULES.$(my_register_name).INSTALLED) \
$(LOCAL_INSTALLED_MODULE) $(my_init_rc_installed) $(my_installed_symlinks) \
- $(my_installed_test_data))
+ $(my_installed_test_data) $(my_vintf_installed))
ALL_MODULES.$(my_register_name).BUILT_INSTALLED := \
$(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) \
$(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE) \
- $(my_init_rc_pairs) $(my_test_data_pairs))
+ $(my_init_rc_pairs) $(my_test_data_pairs) $(my_vintf_pairs))
endif
ifdef LOCAL_PICKUP_FILES
# Files or directories ready to pick up by the build system
@@ -655,22 +806,68 @@ ifdef LOCAL_PICKUP_FILES
ALL_MODULES.$(my_register_name).PICKUP_FILES := \
$(ALL_MODULES.$(my_register_name).PICKUP_FILES) $(LOCAL_PICKUP_FILES)
endif
+
my_required_modules := $(LOCAL_REQUIRED_MODULES) \
$(LOCAL_REQUIRED_MODULES_$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
ifdef LOCAL_IS_HOST_MODULE
my_required_modules += $(LOCAL_REQUIRED_MODULES_$($(my_prefix)OS))
endif
-ALL_MODULES.$(my_register_name).REQUIRED := \
- $(strip $(ALL_MODULES.$(my_register_name).REQUIRED) $(my_required_modules))
-ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED := \
- $(strip $(ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED)\
- $(my_required_modules))
-ALL_MODULES.$(my_register_name).TARGET_REQUIRED := \
- $(strip $(ALL_MODULES.$(my_register_name).TARGET_REQUIRED)\
- $(LOCAL_TARGET_REQUIRED_MODULES))
-ALL_MODULES.$(my_register_name).HOST_REQUIRED := \
- $(strip $(ALL_MODULES.$(my_register_name).HOST_REQUIRED)\
- $(LOCAL_HOST_REQUIRED_MODULES))
+
+###############################################################################
+## When compiling against the VNDK, add the .vendor suffix to required modules.
+###############################################################################
+ifneq ($(LOCAL_USE_VNDK),)
+ ####################################################
+ ## Soong modules may be built twice, once for /system
+ ## and once for /vendor. If we're using the VNDK,
+ ## switch all soong libraries over to the /vendor
+ ## variant.
+ ####################################################
+ ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ # We don't do this renaming for soong-defined modules since they already
+ # have correct names (with .vendor suffix when necessary) in their
+ # LOCAL_*_LIBRARIES.
+ my_required_modules := $(foreach l,$(my_required_modules),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ endif
+endif
+
+ifdef LOCAL_IS_HOST_MODULE
+ ifneq ($(my_host_cross),true)
+ ALL_MODULES.$(my_register_name).REQUIRED_FROM_HOST := \
+ $(strip $(ALL_MODULES.$(my_register_name).REQUIRED_FROM_HOST) $(my_required_modules))
+ ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_HOST := \
+ $(strip $(ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_HOST)\
+ $(my_required_modules))
+ ALL_MODULES.$(my_register_name).TARGET_REQUIRED_FROM_HOST := \
+ $(strip $(ALL_MODULES.$(my_register_name).TARGET_REQUIRED_FROM_HOST)\
+ $(LOCAL_TARGET_REQUIRED_MODULES))
+ else
+ ALL_MODULES.$(my_register_name).REQUIRED_FROM_HOST_CROSS := \
+ $(strip $(ALL_MODULES.$(my_register_name).REQUIRED_FROM_HOST_CROSS) $(my_required_modules))
+ ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_HOST_CROSS := \
+ $(strip $(ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_HOST_CROSS)\
+ $(my_required_modules))
+ ifdef LOCAL_TARGET_REQUIRED_MODULES
+ $(call pretty-error,LOCAL_TARGET_REQUIRED_MODULES may not be used from host_cross modules)
+ endif
+ endif
+ ifdef LOCAL_HOST_REQUIRED_MODULES
+ $(call pretty-error,LOCAL_HOST_REQUIRED_MODULES may not be used from host modules. Use LOCAL_REQUIRED_MODULES instead)
+ endif
+else
+ ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET := \
+ $(strip $(ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET) $(my_required_modules))
+ ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_TARGET := \
+ $(strip $(ALL_MODULES.$(my_register_name).EXPLICITLY_REQUIRED_FROM_TARGET)\
+ $(my_required_modules))
+ ALL_MODULES.$(my_register_name).HOST_REQUIRED_FROM_TARGET := \
+ $(strip $(ALL_MODULES.$(my_register_name).HOST_REQUIRED_FROM_TARGET)\
+ $(LOCAL_HOST_REQUIRED_MODULES))
+ ifdef LOCAL_TARGET_REQUIRED_MODULES
+ $(call pretty-error,LOCAL_TARGET_REQUIRED_MODULES may not be used from target modules. Use LOCAL_REQUIRED_MODULES instead)
+ endif
+endif
ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
$(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
ALL_MODULES.$(my_register_name).MAKEFILE := \
@@ -683,14 +880,17 @@ ifdef LOCAL_2ND_ARCH_VAR_PREFIX
ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
endif
ALL_MODULES.$(my_register_name).FOR_HOST_CROSS := $(my_host_cross)
+ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := $(LOCAL_COMPATIBILITY_SUITE)
+ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
+test_config :=
INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
##########################################################
# Track module-level dependencies.
# Use $(LOCAL_MODULE) instead of $(my_register_name) to ignore module's bitness.
-ALL_DEPS.MODULES := $(sort $(ALL_DEPS.MODULES) $(LOCAL_MODULE))
+ALL_DEPS.MODULES := $(ALL_DEPS.MODULES) $(LOCAL_MODULE)
ALL_DEPS.$(LOCAL_MODULE).ALL_DEPS := $(sort \
$(ALL_MODULES.$(LOCAL_MODULE).ALL_DEPS) \
$(LOCAL_STATIC_LIBRARIES) \
@@ -744,8 +944,10 @@ ifneq (,$(filter $(my_module_tags),tests))
$(j_or_n)-$(h_or_t)-tests $(j_or_n)-tests $(h_or_t)-tests : $(my_checked_module)
endif
$(LOCAL_MODULE)-$(h_or_hc_or_t) : $(my_all_targets)
+.PHONY: $(LOCAL_MODULE)-$(h_or_hc_or_t)
ifeq ($(j_or_n),native)
$(LOCAL_MODULE)-$(h_or_hc_or_t)$(my_32_64_bit_suffix) : $(my_all_targets)
+.PHONY: $(LOCAL_MODULE)-$(h_or_hc_or_t)$(my_32_64_bit_suffix)
endif
endif
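A small, hypothetical module showing the new LOCAL_VINTF_FRAGMENTS handling introduced in base_rules.mk above (all names are placeholders):

  include $(CLEAR_VARS)
  LOCAL_MODULE := android.hardware.example@1.0-service
  LOCAL_SRC_FILES := service.cpp
  LOCAL_INIT_RC := android.hardware.example@1.0-service.rc
  # Copied to $(TARGET_OUT_VENDOR_ETC)/vintf/manifest/ by the rules above.
  LOCAL_VINTF_FRAGMENTS := android.hardware.example@1.0-service.xml
  LOCAL_VENDOR_MODULE := true
  include $(BUILD_EXECUTABLE)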
diff --git a/core/binary.mk b/core/binary.mk
index 61cd5cf73f..87a8a92b6d 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -7,6 +7,7 @@
#######################################
include $(BUILD_SYSTEM)/base_rules.mk
+include $(BUILD_SYSTEM)/use_lld_setup.mk
#######################################
##################################################
@@ -45,14 +46,15 @@ my_header_libraries := $(LOCAL_HEADER_LIBRARIES)
my_cflags := $(LOCAL_CFLAGS)
my_conlyflags := $(LOCAL_CONLYFLAGS)
my_cppflags := $(LOCAL_CPPFLAGS)
-my_cflags_no_override := $(GLOBAL_CFLAGS_NO_OVERRIDE)
-my_cppflags_no_override := $(GLOBAL_CPPFLAGS_NO_OVERRIDE)
+my_cflags_no_override := $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
+my_cppflags_no_override := $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
my_ldflags := $(LOCAL_LDFLAGS)
my_ldlibs := $(LOCAL_LDLIBS)
my_asflags := $(LOCAL_ASFLAGS)
my_cc := $(LOCAL_CC)
my_cc_wrapper := $(CC_WRAPPER)
my_cxx := $(LOCAL_CXX)
+my_cxx_link := $(LOCAL_CXX)
my_cxx_ldlibs :=
my_cxx_wrapper := $(CXX_WRAPPER)
my_c_includes := $(LOCAL_C_INCLUDES)
@@ -71,6 +73,35 @@ endif
else
my_native_coverage := false
endif
+ifneq ($(NATIVE_COVERAGE),true)
+ my_native_coverage := false
+endif
+
+ifneq ($(strip $(ENABLE_XOM)),false)
+ ifndef LOCAL_IS_HOST_MODULE
+ my_xom := true
+ # Disable XOM in excluded paths.
+ combined_xom_exclude_paths := $(XOM_EXCLUDE_PATHS) \
+ $(PRODUCT_XOM_EXCLUDE_PATHS)
+ ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_xom_exclude_paths)),\
+ $(filter $(dir)%,$(LOCAL_PATH)))),)
+ my_xom := false
+ endif
+
+ # Allow LOCAL_XOM to override the above
+ ifdef LOCAL_XOM
+ my_xom := $(LOCAL_XOM)
+ endif
+
+ ifeq ($(strip $(my_xom)),true)
+ ifeq (arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+ ifeq ($(my_use_clang_lld),true)
+ my_ldflags += -Wl,-execute-only
+ endif
+ endif
+ endif
+ endif
+endif
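Execute-only memory (XOM) is only requested for arm64 target modules linked with lld, and LOCAL_XOM overrides the path-based exclusion above. A hypothetical opt-out in a module's Android.mk:

    include $(CLEAR_VARS)
    LOCAL_MODULE := libjit_runtime      # hypothetical: writes code into its own .text
    LOCAL_SRC_FILES := jit.cpp
    LOCAL_XOM := false                  # keep this module's text segment readable
    include $(BUILD_SHARED_LIBRARY)

Whole trees can instead be excluded through XOM_EXCLUDE_PATHS or PRODUCT_XOM_EXCLUDE_PATHS in the board/product configuration.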
my_allow_undefined_symbols := $(strip $(LOCAL_ALLOW_UNDEFINED_SYMBOLS))
ifdef SANITIZE_HOST
@@ -200,7 +231,6 @@ ifneq ($(LOCAL_SDK_VERSION),)
$(my_ndk_source_root)/cxx-stl/llvm-libc++/include
my_ndk_stl_include_path += \
$(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
- my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
my_libcxx_libdir := \
$(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)
@@ -213,7 +243,13 @@ ifneq ($(LOCAL_SDK_VERSION),)
my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
endif
- my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
+ ifneq ($(my_ndk_api),current)
+ ifeq ($(call math_lt,$(my_ndk_api),21),true)
+ my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
+ my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
+ endif
+ endif
+
ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
endif
@@ -222,6 +258,28 @@ ifneq ($(LOCAL_SDK_VERSION),)
else # LOCAL_NDK_STL_VARIANT must be none
# Do nothing.
endif
+
+ # Clang's coverage/profile runtime needs symbols like 'stderr' that were not
+ # exported from libc prior to API level 23
+ ifneq ($(my_ndk_api),current)
+ ifeq ($(call math_lt, $(my_ndk_api),23),true)
+ my_native_coverage := false
+ endif
+ endif
+endif
+
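Both API-level gates above use math_lt from the build system's math helpers, which expands to "true" when its first argument is numerically less than its second (and to nothing otherwise, as the ifeq comparisons imply). Illustrative expansions:

    $(call math_lt,19,21)   # -> true    : pre-21 NDK target, add libandroid_support
    $(call math_lt,24,21)   # -> (empty) : skip libandroid_support
    $(call math_lt,21,23)   # -> true    : API < 23, force my_native_coverage := false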
+ifeq ($(NATIVE_COVERAGE),true)
+ ifndef LOCAL_IS_HOST_MODULE
+ my_ldflags += -Wl,--wrap,getenv
+
+ ifneq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
+ ifeq ($(LOCAL_SDK_VERSION),)
+ my_whole_static_libraries += libprofile-extras
+ else
+ my_whole_static_libraries += libprofile-extras_ndk
+ endif
+ endif
+ endif
endif
ifneq ($(LOCAL_USE_VNDK),)
@@ -255,11 +313,9 @@ my_ldlibs := $(filter $(my_allowed_ldlibs),$(my_ldlibs))
else # LOCAL_IS_HOST_MODULE
# Add -ldl, -lpthread, -lm and -lrt to host builds to match the default behavior of
# device builds
- ifneq ($($(my_prefix)OS),windows)
- my_ldlibs += -ldl -lpthread -lm
- ifneq ($(HOST_OS),darwin)
- my_ldlibs += -lrt
- endif
+ my_ldlibs += -ldl -lpthread -lm
+ ifneq ($(HOST_OS),darwin)
+ my_ldlibs += -lrt
endif
endif
@@ -280,13 +336,16 @@ endif
# all code is position independent, and then those warnings get promoted to
# errors.
ifneq ($(LOCAL_NO_PIC),true)
-ifneq ($($(my_prefix)OS),windows)
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-my_cflags += -fPIE
-else
-my_cflags += -fPIC
-endif
-endif
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ my_cflags += -fPIE
+ ifndef BUILD_HOST_static
+ ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+ my_ldflags += -pie
+ endif
+ endif
+ else
+ my_cflags += -fPIC
+ endif
endif
ifdef LOCAL_IS_HOST_MODULE
@@ -331,36 +390,8 @@ endif
ifdef LOCAL_CLANG_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
my_clang := $(strip $(LOCAL_CLANG_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
endif
-
-# if custom toolchain is in use, default is not to use clang, if not explicitly required
-ifneq ($(my_cc)$(my_cxx),)
- ifeq ($(my_clang),)
- my_clang := false
- endif
-endif
-# Issue warning if LOCAL_CLANG* is set to false and the local makefile is not found
-# in the exception project list.
ifeq ($(my_clang),false)
- ifeq ($(call find_in_local_clang_exception_projects,$(LOCAL_MODULE_MAKEFILE))$(LOCAL_IS_AUX_MODULE),)
- $(error $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): LOCAL_CLANG is set to false)
- endif
-endif
-
-# clang is enabled by default for host builds
-# enable it unless we've specifically disabled clang above
-ifdef LOCAL_IS_HOST_MODULE
- ifneq ($($(my_prefix)CLANG_SUPPORTED),true)
- ifeq ($(my_clang),true)
- $(call pretty-error,Clang is not yet supported for $($(my_prefix)OS) binaries)
- endif
- my_clang := false
- else
- ifeq ($(my_clang),)
- my_clang := true
- endif
- endif
-else ifeq ($(my_clang),)
- my_clang := true
+ $(call pretty-error,LOCAL_CLANG false is no longer supported)
endif
ifeq ($(LOCAL_C_STD),)
@@ -379,21 +410,6 @@ else
my_cpp_std_version := $(LOCAL_CPP_STD)
endif
-ifneq ($(my_clang),true)
- # GCC uses an invalid C++14 ABI (emits calls to
- # __cxa_throw_bad_array_length, which is not a valid C++ RT ABI).
- # http://b/25022512
- my_cpp_std_version := $(DEFAULT_GCC_CPP_STD_VERSION)
-endif
-
-ifdef LOCAL_IS_HOST_MODULE
- ifneq ($(my_clang),true)
- # The host GCC doesn't support C++14 (and is deprecated, so likely
- # never will). Build these modules with C++11.
- my_cpp_std_version := $(DEFAULT_GCC_CPP_STD_VERSION)
- endif
-endif
-
my_c_std_conlyflags :=
my_cpp_std_cppflags :=
ifneq (,$(my_c_std_version))
@@ -404,15 +420,17 @@ ifneq (,$(my_cpp_std_version))
my_cpp_std_cppflags := -std=$(my_cpp_std_version)
endif
+# Extra cflags for projects under external/ directory
+ifneq ($(filter external/%,$(LOCAL_PATH)),)
+ my_cflags += $(CLANG_EXTERNAL_CFLAGS)
+endif
+
# arch-specific static libraries go first so that generic ones can depend on them
my_static_libraries := $(LOCAL_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_static_libraries)
my_whole_static_libraries := $(LOCAL_WHOLE_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_WHOLE_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_whole_static_libraries)
my_header_libraries := $(LOCAL_HEADER_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_HEADER_LIBRARIES_$(my_32_64_bit_suffix)) $(my_header_libraries)
-# soong defined modules already have done through this
-ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
include $(BUILD_SYSTEM)/cxx_stl_setup.mk
-endif
# Add static HAL libraries
ifdef LOCAL_HAL_STATIC_LIBRARIES
@@ -429,32 +447,14 @@ else
my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LINKER)
endif
-# Modules from soong do not need this since the dependencies are already handled there.
-ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
include $(BUILD_SYSTEM)/config_sanitizers.mk
-ifneq ($(LOCAL_NO_LIBCOMPILER_RT),true)
-# Add in libcompiler_rt for all regular device builds
-ifeq (,$(WITHOUT_LIBCOMPILER_RT))
- my_static_libraries += $(COMPILER_RT_CONFIG_EXTRA_STATIC_LIBRARIES)
-endif
-endif
-
-# Statically link libwinpthread when cross compiling win32.
-ifeq ($($(my_prefix)OS),windows)
- my_static_libraries += libwinpthread
-endif
-endif # this module is not from soong
-
ifneq ($(filter ../%,$(my_src_files)),)
my_soong_problems += dotdot_srcs
endif
ifneq ($(foreach i,$(my_c_includes),$(filter %/..,$(i))$(findstring /../,$(i))),)
my_soong_problems += dotdot_incs
endif
-ifneq ($(filter %.arm,$(my_src_files)),)
-my_soong_problems += srcs_dotarm
-endif
####################################################
## Add FDO flags if FDO is turned on and supported
@@ -484,167 +484,6 @@ endif
my_asflags += -D__ASSEMBLY__
###########################################################
-## Define PRIVATE_ variables from global vars
-###########################################################
-ifndef LOCAL_IS_HOST_MODULE
-ifdef LOCAL_USE_VNDK
-my_target_global_c_includes := \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES)
-my_target_global_c_system_includes := \
- $(TARGET_OUT_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES)
-else ifdef LOCAL_SDK_VERSION
-my_target_global_c_includes :=
-my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
-else ifdef BOARD_VNDK_VERSION
-my_target_global_c_includes := $(SRC_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
-else
-my_target_global_c_includes := $(SRC_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) $(TARGET_OUT_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
-endif
-
-ifeq ($(my_clang),true)
-my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
-my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
-my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
-my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
-else
-my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CFLAGS)
-my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
-my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
-my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_LDFLAGS)
-endif # my_clang
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_INCLUDES := $(my_target_global_c_includes)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_SYSTEM_INCLUDES := $(my_target_global_c_system_includes)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CFLAGS := $(my_target_global_cflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CONLYFLAGS := $(my_target_global_conlyflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CPPFLAGS := $(my_target_global_cppflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
-
-else # LOCAL_IS_HOST_MODULE
-
-my_host_global_c_includes := $(SRC_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-my_host_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
-
-ifeq ($(my_clang),true)
-my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
-my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
-my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
-my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
-else
-my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CFLAGS)
-my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
-my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
-my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_LDFLAGS)
-endif # my_clang
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_INCLUDES := $(my_host_global_c_includes)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_SYSTEM_INCLUDES := $(my_host_global_c_system_includes)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CFLAGS := $(my_host_global_cflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CONLYFLAGS := $(my_host_global_conlyflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CPPFLAGS := $(my_host_global_cppflags)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_LDFLAGS := $(my_host_global_ldflags)
-endif # LOCAL_IS_HOST_MODULE
-
-# To enable coverage for a given module, set LOCAL_NATIVE_COVERAGE=true and
-# build with NATIVE_COVERAGE=true in your enviornment. Note that the build
-# system is not sensitive to changes to NATIVE_COVERAGE, so you should do a
-# clean build of your module after toggling it.
-ifeq ($(NATIVE_COVERAGE),true)
- ifeq ($(my_native_coverage),true)
- # Note that clang coverage doesn't play nicely with acov out of the box.
- # Clang apparently generates .gcno files that aren't compatible with
- # gcov-4.8. This can be solved by installing gcc-4.6 and invoking lcov
- # with `--gcov-tool /usr/bin/gcov-4.6`.
- #
- # http://stackoverflow.com/questions/17758126/clang-code-coverage-invalid-output
- my_cflags += --coverage -O0
- my_ldflags += --coverage
- endif
-
- ifeq ($(my_clang),true)
- my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
- else
- my_coverage_lib := $(call intermediates-dir-for,STATIC_LIBRARIES,libgcov,$(filter AUX,$(my_kind)),,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libgcov.a
- endif
-
- $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_COVERAGE_LIB := $(my_coverage_lib)
- $(LOCAL_INTERMEDIATE_TARGETS): $(my_coverage_lib)
-else
- my_native_coverage := false
-endif
-
-###########################################################
-## Define PRIVATE_ variables used by multiple module types
-###########################################################
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_NO_DEFAULT_COMPILER_FLAGS := \
- $(strip $(LOCAL_NO_DEFAULT_COMPILER_FLAGS))
-
-ifeq ($(strip $(WITH_STATIC_ANALYZER)),)
- LOCAL_NO_STATIC_ANALYZER := true
-endif
-
-# Clang does not recognize all gcc flags.
-# Use static analyzer only if clang is used.
-ifneq ($(my_clang),true)
- LOCAL_NO_STATIC_ANALYZER := true
-endif
-
-ifneq ($(strip $(LOCAL_IS_HOST_MODULE)),)
- my_syntax_arch := host
-else
- my_syntax_arch := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-endif
-
-ifeq ($(strip $(my_cc)),)
- ifeq ($(my_clang),true)
- my_cc := $(CLANG)
- else
- my_cc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)CC)
- endif
- my_cc := $(my_cc_wrapper) $(my_cc)
-endif
-
-SYNTAX_TOOLS_PREFIX := \
- $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/libexec
-
-ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
- my_cc := CCC_CC=$(CLANG) CLANG=$(CLANG) \
- $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer
-endif
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CC := $(my_cc)
-
-ifeq ($(strip $(my_cxx)),)
- ifeq ($(my_clang),true)
- my_cxx := $(CLANG_CXX)
- else
- my_cxx := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)CXX)
- endif
- my_cxx := $(my_cxx_wrapper) $(my_cxx)
-endif
-
-ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
- my_cxx := CCC_CXX=$(CLANG_CXX) CLANG_CXX=$(CLANG_CXX) \
- $(SYNTAX_TOOLS_PREFIX)/c++-analyzer
-endif
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LINKER := $(my_linker)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX := $(my_cxx)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLANG := $(my_clang)
-
# TODO: support a mix of standard extensions so that this isn't necessary
LOCAL_CPP_EXTENSION := $(strip $(LOCAL_CPP_EXTENSION))
ifeq ($(LOCAL_CPP_EXTENSION),)
@@ -679,23 +518,15 @@ endif
###########################################################
LOCAL_ARM_MODE := $(strip $(LOCAL_ARM_MODE))
ifeq ($($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH),arm)
-arm_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),arm)
normal_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),thumb)
# Read the values from something like TARGET_arm_CFLAGS or
# TARGET_thumb_CFLAGS. HOST_(arm|thumb)_CFLAGS values aren't
# actually used (although they are usually empty).
-arm_objects_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)$(arm_objects_mode)_CFLAGS)
normal_objects_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)$(normal_objects_mode)_CFLAGS)
-ifeq ($(my_clang),true)
-arm_objects_cflags := $(call convert-to-clang-flags,$(arm_objects_cflags))
-normal_objects_cflags := $(call convert-to-clang-flags,$(normal_objects_cflags))
-endif
else
-arm_objects_mode :=
normal_objects_mode :=
-arm_objects_cflags :=
normal_objects_cflags :=
endif
@@ -795,7 +626,7 @@ $(RenderScript_file_stamp): PRIVATE_RS_CC := $(LOCAL_RENDERSCRIPT_CC)
$(RenderScript_file_stamp): PRIVATE_RS_FLAGS := $(renderscript_flags)
$(RenderScript_file_stamp): PRIVATE_RS_SOURCE_FILES := $(renderscript_sources_fullpath)
$(RenderScript_file_stamp): PRIVATE_RS_OUTPUT_DIR := $(renderscript_intermediate)
-$(RenderScript_file_stamp): PRIVATE_RS_TARGET_API := $(renderscript_target_api)
+$(RenderScript_file_stamp): PRIVATE_RS_TARGET_API := $(patsubst current,0,$(renderscript_target_api))
$(RenderScript_file_stamp): PRIVATE_DEP_FILES := $(bc_dep_files)
$(RenderScript_file_stamp): $(renderscript_sources_fullpath) $(LOCAL_RENDERSCRIPT_CC)
$(transform-renderscripts-to-cpp-and-bc)
@@ -834,11 +665,11 @@ proto_gen_dir := $(generated_sources_dir)/proto
proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources))
my_rename_cpp_ext :=
-ifneq (,$(filter nanopb-c nanopb-c-enable_malloc, $(LOCAL_PROTOC_OPTIMIZE_TYPE)))
+ifneq (,$(filter nanopb-c nanopb-c-enable_malloc nanopb-c-16bit nanopb-c-enable_malloc-16bit nanopb-c-32bit nanopb-c-enable_malloc-32bit, $(LOCAL_PROTOC_OPTIMIZE_TYPE)))
my_proto_source_suffix := .c
my_proto_c_includes := external/nanopb-c
my_protoc_flags := --nanopb_out=$(proto_gen_dir) \
- --plugin=external/nanopb-c/generator/protoc-gen-nanopb
+ --plugin=$(HOST_OUT_EXECUTABLES)/protoc-gen-nanopb
my_protoc_deps := $(NANOPB_SRCS) $(proto_sources_fullpath:%.proto=%.options)
else
my_proto_source_suffix := $(LOCAL_CPP_EXTENSION)
@@ -889,6 +720,14 @@ ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c-enable_malloc)
my_static_libraries += libprotobuf-c-nano-enable_malloc
else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c)
my_static_libraries += libprotobuf-c-nano
+else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c-enable_malloc-16bit)
+ my_static_libraries += libprotobuf-c-nano-enable_malloc-16bit
+else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c-16bit)
+ my_static_libraries += libprotobuf-c-nano-16bit
+else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c-enable_malloc-32bit)
+ my_static_libraries += libprotobuf-c-nano-enable_malloc-32bit
+else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nanopb-c-32bit)
+ my_static_libraries += libprotobuf-c-nano-32bit
else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),full)
ifdef LOCAL_SDK_VERSION
my_static_libraries += libprotobuf-cpp-full-ndk
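The added nanopb variants only change which prebuilt runtime gets linked and reuse the host protoc-gen-nanopb plugin above. A hypothetical module selecting the 16-bit, malloc-enabled flavour:

    include $(CLEAR_VARS)
    LOCAL_MODULE := sensors_nanopb_client           # hypothetical
    LOCAL_SRC_FILES := client.c messages.proto
    LOCAL_PROTOC_OPTIMIZE_TYPE := nanopb-c-enable_malloc-16bit
    # -> statically links libprotobuf-c-nano-enable_malloc-16bit
    include $(BUILD_STATIC_LIBRARY)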
@@ -954,8 +793,6 @@ vts_gen_include_root := $(intermediates)/vts-generated/include
# Thus we'll actually generate source for each architecture.
$(foreach s,$(vts_src),\
$(eval $(call define-vts-cpp-rule,$(s),$(vts_gen_cpp_root),vts_gen_cpp)))
-$(foreach cpp,$(vts_gen_cpp), \
- $(call include-depfile,$(addsuffix .vts.P,$(basename $(cpp))),$(cpp)))
$(call track-src-file-gen,$(vts_src),$(vts_gen_cpp))
$(vts_gen_cpp) : PRIVATE_MODULE := $(LOCAL_MODULE)
@@ -1032,22 +869,9 @@ endif
## C++: Compile .cpp files to .o.
###########################################################
-# we also do this on host modules, even though
-# it's not really arm, because there are files that are shared.
-cpp_arm_sources := $(patsubst %$(LOCAL_CPP_EXTENSION).arm,%$(LOCAL_CPP_EXTENSION),$(filter %$(LOCAL_CPP_EXTENSION).arm,$(my_src_files)))
-dotdot_arm_sources := $(filter ../%,$(cpp_arm_sources))
-cpp_arm_sources := $(filter-out ../%,$(cpp_arm_sources))
-cpp_arm_objects := $(addprefix $(intermediates)/,$(cpp_arm_sources:$(LOCAL_CPP_EXTENSION)=.o))
-$(call track-src-file-obj,$(patsubst %,%.arm,$(cpp_arm_sources)),$(cpp_arm_objects))
-
-# For source files starting with ../, we remove all the ../ in the object file path,
-# to avoid object file escaping the intermediate directory.
-dotdot_arm_objects :=
-$(foreach s,$(dotdot_arm_sources),\
- $(eval $(call compile-dotdot-cpp-file,$(s),\
- $(my_additional_dependencies),\
- dotdot_arm_objects)))
-$(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
+ifneq ($(filter %$(LOCAL_CPP_EXTENSION).arm,$(my_src_files)),)
+$(call pretty-error,Files ending in $(LOCAL_CPP_EXTENSION).arm are deprecated. See $(CHANGES_URL)#file_arm)
+endif
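Per-file .arm suffixes are rejected now; arm-vs-thumb is a per-module choice. A sketch of the migration for a module that used them (file names hypothetical):

    # before: LOCAL_SRC_FILES := hot_loop.cpp.arm misc.cpp
    LOCAL_SRC_FILES := hot_loop.cpp misc.cpp
    LOCAL_ARM_MODE := arm    # only if the whole module really needs arm mode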
dotdot_sources := $(filter ../%$(LOCAL_CPP_EXTENSION),$(my_src_files))
dotdot_objects :=
@@ -1058,15 +882,11 @@ $(foreach s,$(dotdot_sources),\
$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
cpp_normal_sources := $(filter-out ../%,$(filter %$(LOCAL_CPP_EXTENSION),$(my_src_files)))
-cpp_normal_objects := $(addprefix $(intermediates)/,$(cpp_normal_sources:$(LOCAL_CPP_EXTENSION)=.o))
-$(call track-src-file-obj,$(cpp_normal_sources),$(cpp_normal_objects))
-
-$(dotdot_arm_objects) $(cpp_arm_objects): PRIVATE_ARM_MODE := $(arm_objects_mode)
-$(dotdot_arm_objects) $(cpp_arm_objects): PRIVATE_ARM_CFLAGS := $(arm_objects_cflags)
-$(dotdot_objects) $(cpp_normal_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(dotdot_objects) $(cpp_normal_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
+cpp_objects := $(addprefix $(intermediates)/,$(cpp_normal_sources:$(LOCAL_CPP_EXTENSION)=.o))
+$(call track-src-file-obj,$(cpp_normal_sources),$(cpp_objects))
-cpp_objects := $(cpp_arm_objects) $(cpp_normal_objects)
+$(dotdot_objects) $(cpp_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
+$(dotdot_objects) $(cpp_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
ifneq ($(strip $(cpp_objects)),)
$(cpp_objects): $(intermediates)/%.o: \
@@ -1076,7 +896,7 @@ $(cpp_objects): $(intermediates)/%.o: \
$(call include-depfiles-for-objs, $(cpp_objects))
endif
-cpp_objects += $(dotdot_arm_objects) $(dotdot_objects)
+cpp_objects += $(dotdot_objects)
###########################################################
## C++: Compile generated .cpp files to .o.
@@ -1088,7 +908,6 @@ $(call track-gen-file-obj,$(gen_cpp_sources),$(gen_cpp_objects))
ifneq ($(strip $(gen_cpp_objects)),)
# Compile all generated files as thumb.
-# TODO: support compiling certain generated files as arm.
$(gen_cpp_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
$(gen_cpp_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
$(gen_cpp_objects): $(intermediates)/%.o: \
@@ -1136,20 +955,9 @@ gen_o_objects := $(filter %.o,$(my_generated_sources))
## C: Compile .c files to .o.
###########################################################
-c_arm_sources := $(patsubst %.c.arm,%.c,$(filter %.c.arm,$(my_src_files)))
-dotdot_arm_sources := $(filter ../%,$(c_arm_sources))
-c_arm_sources := $(filter-out ../%,$(c_arm_sources))
-c_arm_objects := $(addprefix $(intermediates)/,$(c_arm_sources:.c=.o))
-$(call track-src-file-obj,$(patsubst %,%.arm,$(c_arm_sources)),$(c_arm_objects))
-
-# For source files starting with ../, we remove all the ../ in the object file path,
-# to avoid object file escaping the intermediate directory.
-dotdot_arm_objects :=
-$(foreach s,$(dotdot_arm_sources),\
- $(eval $(call compile-dotdot-c-file,$(s),\
- $(my_additional_dependencies),\
- dotdot_arm_objects)))
-$(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
+ifneq ($(filter %.c.arm,$(my_src_files)),)
+$(call pretty-error,Files ending in .c.arm are deprecated. See $(CHANGES_URL)#file_arm)
+endif
dotdot_sources := $(filter ../%.c, $(my_src_files))
dotdot_objects :=
@@ -1160,15 +968,11 @@ $(foreach s, $(dotdot_sources),\
$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
c_normal_sources := $(filter-out ../%,$(filter %.c,$(my_src_files)))
-c_normal_objects := $(addprefix $(intermediates)/,$(c_normal_sources:.c=.o))
-$(call track-src-file-obj,$(c_normal_sources),$(c_normal_objects))
-
-$(dotdot_arm_objects) $(c_arm_objects): PRIVATE_ARM_MODE := $(arm_objects_mode)
-$(dotdot_arm_objects) $(c_arm_objects): PRIVATE_ARM_CFLAGS := $(arm_objects_cflags)
-$(dotdot_objects) $(c_normal_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(dotdot_objects) $(c_normal_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
+c_objects := $(addprefix $(intermediates)/,$(c_normal_sources:.c=.o))
+$(call track-src-file-obj,$(c_normal_sources),$(c_objects))
-c_objects := $(c_arm_objects) $(c_normal_objects)
+$(dotdot_objects) $(c_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
+$(dotdot_objects) $(c_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
ifneq ($(strip $(c_objects)),)
$(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c \
@@ -1177,7 +981,7 @@ $(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c \
$(call include-depfiles-for-objs, $(c_objects))
endif
-c_objects += $(dotdot_arm_objects) $(dotdot_objects)
+c_objects += $(dotdot_objects)
###########################################################
## C: Compile generated .c files to .o.
@@ -1189,7 +993,6 @@ $(call track-gen-file-obj,$(gen_c_sources),$(gen_c_objects))
ifneq ($(strip $(gen_c_objects)),)
# Compile all generated files as thumb.
-# TODO: support compiling certain generated files as arm.
$(gen_c_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
$(gen_c_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
$(gen_c_objects): $(intermediates)/%.o: $(intermediates)/%.c \
@@ -1304,6 +1107,24 @@ ifneq ($(filter cfi,$(my_sanitize)),)
$(my_static_libraries))
endif
+ifneq ($(LOCAL_USE_VNDK),)
+ my_soong_hwasan_static_libraries := $(SOONG_HWASAN_VENDOR_STATIC_LIBRARIES)
+else
+ my_soong_hwasan_static_libraries = $(SOONG_HWASAN_STATIC_LIBRARIES)
+endif
+
+define use_soong_hwasan_static_libraries
+ $(foreach l,$(1),$(if $(filter $(l),$(my_soong_hwasan_static_libraries)),\
+ $(l).hwasan,$(l)))
+endef
+
+ifneq ($(filter hwaddress,$(my_sanitize)),)
+ my_whole_static_libraries := $(call use_soong_hwasan_static_libraries,\
+ $(my_whole_static_libraries))
+ my_static_libraries := $(call use_soong_hwasan_static_libraries,\
+ $(my_static_libraries))
+endif
+
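use_soong_hwasan_static_libraries is a straight word-by-word substitution: any static library that Soong also exports as an .hwasan variant is renamed, everything else passes through unchanged. An illustrative expansion (library names chosen for the example):

    SOONG_HWASAN_STATIC_LIBRARIES := libbase libutils
    my_static_libraries           := libbase libfoo libutils
    # with hwaddress in my_sanitize, the call yields:
    #   libbase.hwasan libfoo libutils.hwasan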
###########################################################
## When compiling against the VNDK, use LL-NDK libraries
###########################################################
@@ -1314,29 +1135,23 @@ ifneq ($(LOCAL_USE_VNDK),)
## switch all soong libraries over to the /vendor
## variant.
####################################################
- ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- # We don't do this renaming for soong-defined modules since they already have correct
- # names (with .vendor suffix when necessary) in their LOCAL_*_LIBRARIES.
- my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
- $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_static_libraries := $(foreach l,$(my_static_libraries),\
- $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_shared_libraries := $(foreach l,$(my_shared_libraries),\
- $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
- $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_header_libraries := $(foreach l,$(my_header_libraries),\
- $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
- endif
+ my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
+ $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_static_libraries := $(foreach l,$(my_static_libraries),\
+ $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_header_libraries := $(foreach l,$(my_header_libraries),\
+ $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
endif
# Platform can use vendor public libraries. If a required shared lib is one of
# the vendor public libraries, the lib is switched to the stub version of the lib.
ifeq ($(LOCAL_USE_VNDK),)
- ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- my_shared_libraries := $(foreach l,$(my_shared_libraries),\
- $(if $(filter $(l),$(VENDOR_PUBLIC_LIBRARIES)),$(l).vendorpublic,$(l)))
- endif
+ my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+ $(if $(filter $(l),$(VENDOR_PUBLIC_LIBRARIES)),$(l).vendorpublic,$(l)))
endif
##########################################################
@@ -1414,12 +1229,17 @@ else ifdef LOCAL_USE_VNDK
# with vendor_available: false
my_link_type := native:vendor
my_warn_types :=
- my_allowed_types := native:vendor native:vndk
+ my_allowed_types := native:vendor native:vndk native:platform_vndk
endif
+else ifneq ($(filter $(TARGET_RECOVERY_OUT)/%,$(call get_non_asan_path,$(LOCAL_MODULE_PATH))),)
+my_link_type := native:recovery
+my_warn_types :=
+# TODO(b/113303515) remove native:platform and my_allowed_ndk_types
+my_allowed_types := native:recovery native:platform native:platform_vndk $(my_allowed_ndk_types)
else
my_link_type := native:platform
my_warn_types := $(my_warn_ndk_types)
-my_allowed_types := $(my_allowed_ndk_types) native:platform
+my_allowed_types := $(my_allowed_ndk_types) native:platform native:platform_vndk
endif
my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
@@ -1469,6 +1289,8 @@ normal_objects += $(addprefix $(TOPDIR)$(LOCAL_PATH)/,$(LOCAL_PREBUILT_OBJ_FILES
all_objects := $(normal_objects) $(gen_o_objects)
+LOCAL_INTERMEDIATE_TARGETS += $(all_objects)
+
# Cleanup file tracking
$(foreach f,$(my_tracked_gen_files),$(eval my_src_file_gen_$(s):=))
my_tracked_gen_files :=
@@ -1531,9 +1353,9 @@ a_suffix := $($(my_prefix)STATIC_LIB_SUFFIX)
ifneq ($(LOCAL_SDK_VERSION),)
built_shared_libraries := \
- $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
- $(addsuffix $(so_suffix), \
- $(my_shared_libraries)))
+ $(foreach lib,$(my_shared_libraries), \
+ $(call intermediates-dir-for, \
+ SHARED_LIBRARIES,$(lib),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/$(lib)$(so_suffix))
built_shared_library_deps := $(addsuffix .toc, $(built_shared_libraries))
# Add the NDK libraries to the built module dependency
@@ -1557,9 +1379,9 @@ built_shared_libraries += \
else
built_shared_libraries := \
- $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
- $(addsuffix $(so_suffix), \
- $(installed_shared_library_module_names)))
+ $(foreach lib,$(installed_shared_library_module_names), \
+ $(call intermediates-dir-for, \
+ SHARED_LIBRARIES,$(lib),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/$(lib)$(so_suffix))
built_shared_library_deps := $(addsuffix .toc, $(built_shared_libraries))
my_system_shared_libraries_fullpath :=
endif
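Built shared libraries are now resolved per-library through intermediates-dir-for rather than a single OUT_INTERMEDIATE_LIBRARIES directory, so each .so and its .toc come from that library's own intermediates. Roughly, for a primary-arch device library (path shown for illustration, assuming the usual out/ layout):

    $(call intermediates-dir-for,SHARED_LIBRARIES,libfoo)/libfoo.so
    # e.g. out/target/product/<device>/obj/SHARED_LIBRARIES/libfoo_intermediates/libfoo.so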
@@ -1582,10 +1404,15 @@ built_whole_libraries := \
# libraries have already been linked into the module at that point.
# We do, however, care about the NOTICE files for any static
# libraries that we use. (see notice_files.mk)
-
+#
+# Don't do this in mm, since many of the targets won't exist.
+ifeq ($(ONE_SHOT_MAKEFILE),)
installed_static_library_notice_file_targets := \
$(foreach lib,$(my_static_libraries) $(my_whole_static_libraries), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST,TARGET)-STATIC_LIBRARIES-$(lib))
+ NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-STATIC_LIBRARIES-$(lib))
+else
+installed_static_library_notice_file_targets :=
+endif
$(notice_target): | $(installed_static_library_notice_file_targets)
$(LOCAL_INSTALLED_MODULE): | $(notice_target)
@@ -1599,12 +1426,9 @@ endif
# Rule-specific variable definitions
###########################################################
-ifeq ($(my_clang),true)
my_cflags += $(LOCAL_CLANG_CFLAGS)
my_conlyflags += $(LOCAL_CLANG_CONLYFLAGS)
my_cppflags += $(LOCAL_CLANG_CPPFLAGS)
-my_cflags_no_override += $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
-my_cppflags_no_override += $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
my_asflags += $(LOCAL_CLANG_ASFLAGS)
my_ldflags += $(LOCAL_CLANG_LDFLAGS)
my_cflags += $(LOCAL_CLANG_CFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CLANG_CFLAGS_$(my_32_64_bit_suffix))
@@ -1616,11 +1440,6 @@ my_cflags := $(call convert-to-clang-flags,$(my_cflags))
my_cppflags := $(call convert-to-clang-flags,$(my_cppflags))
my_asflags := $(call convert-to-clang-flags,$(my_asflags))
my_ldflags := $(call convert-to-clang-flags,$(my_ldflags))
-else
-# gcc does not handle hidden functions in a manner compatible with LLVM libcxx
-# see b/27908145
-my_cflags += -Wno-attributes
-endif
ifeq ($(my_fdo_build), true)
my_cflags := $(patsubst -Os,-O2,$(my_cflags))
@@ -1653,26 +1472,23 @@ ifeq ($(my_strict),true)
endif
# Check if -Werror or -Wno-error is used in C compiler flags.
-# Modules defined in $(SOONG_ANDROID_MK) are checked in soong's cc.go.
-ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- # Header libraries do not need cflags.
- ifneq (HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS))
- # Prebuilt modules do not need cflags.
- ifeq (,$(LOCAL_PREBUILT_MODULE_FILE))
- my_all_cflags := $(my_cflags) $(my_cppflags) $(my_cflags_no_override)
- # Issue warning if -Wno-error is used.
- ifneq (,$(filter -Wno-error,$(my_all_cflags)))
- $(eval MODULES_USING_WNO_ERROR := $(MODULES_USING_WNO_ERROR) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
- else
- # Issue warning if -Werror is not used. Add it.
- ifeq (,$(filter -Werror,$(my_all_cflags)))
- # Add -Wall -Werror unless the project is in the WARNING_ALLOWED project list.
- ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
- my_cflags := -Wall -Werror $(my_cflags)
- else
- $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
- my_cflags := -Wall $(my_cflags)
- endif
+# Header libraries do not need cflags.
+ifneq (HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS))
+ # Prebuilt modules do not need cflags.
+ ifeq (,$(LOCAL_PREBUILT_MODULE_FILE))
+ my_all_cflags := $(my_cflags) $(my_cppflags) $(my_cflags_no_override)
+ # Issue warning if -Wno-error is used.
+ ifneq (,$(filter -Wno-error,$(my_all_cflags)))
+ $(eval MODULES_USING_WNO_ERROR := $(MODULES_USING_WNO_ERROR) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+ else
+ # Issue warning if -Werror is not used. Add it.
+ ifeq (,$(filter -Werror,$(my_all_cflags)))
+ # Add -Wall -Werror unless the project is in the WARNING_ALLOWED project list.
+ ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
+ my_cflags := -Wall -Werror $(my_cflags)
+ else
+ $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+ my_cflags := -Wall $(my_cflags)
endif
endif
endif
@@ -1694,40 +1510,53 @@ endif
my_tidy_checks :=
my_tidy_flags :=
ifneq (,$(filter 1 true,$(my_tidy_enabled)))
- ifneq ($(my_clang),true)
- # Disable clang-tidy if clang is disabled.
- my_tidy_enabled := false
- else
- tidy_only: $(cpp_objects) $(c_objects)
- # Set up global default checks
- my_tidy_checks := $(WITH_TIDY_CHECKS)
- ifeq ($(my_tidy_checks),)
- my_tidy_checks := $(call default_global_tidy_checks,$(LOCAL_PATH))
- endif
- # Append local clang-tidy checks.
- ifneq ($(LOCAL_TIDY_CHECKS),)
- my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
- endif
- my_tidy_flags += $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS)
- # If tidy flags are not specified, default to check all header files.
- ifeq ($(my_tidy_flags),)
- my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
- endif
- # If clang-tidy is not enabled globally, add the -quiet flag.
- ifeq (,$(filter 1 true,$(WITH_TIDY)))
- my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
- endif
+ tidy_only: $(cpp_objects) $(c_objects) $(gen_c_objects) $(gen_cpp_objects)
+ # Set up global default checks
+ my_tidy_checks := $(WITH_TIDY_CHECKS)
+ ifeq ($(my_tidy_checks),)
+ my_tidy_checks := $(call default_global_tidy_checks,$(LOCAL_PATH))
+ endif
+ # Append local clang-tidy checks.
+ ifneq ($(LOCAL_TIDY_CHECKS),)
+ my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
+ endif
+ my_tidy_flags := $(strip $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS))
+ # If tidy flags are not specified, default to check all header files.
+ ifeq ($(my_tidy_flags),)
+ my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
+ endif
+ # If clang-tidy is not enabled globally, add the -quiet flag.
+ ifeq (,$(filter 1 true,$(WITH_TIDY)))
+ my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
+ endif
+ ifneq ($(my_tidy_checks),)
# We might be using the static analyzer through clang-tidy.
# https://bugs.llvm.org/show_bug.cgi?id=32914
- ifneq ($(my_tidy_checks),)
- my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
- endif
+ my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
+
+ # A recent change in clang-tidy (r328258) enabled destructor inlining,
+ # which appears to cause a number of false positives. Until that's
+ # resolved, this turns off the effects of r328258.
+ # https://bugs.llvm.org/show_bug.cgi?id=37459
+ my_tidy_flags += -extra-arg-before=-Xclang
+ my_tidy_flags += -extra-arg-before=-analyzer-config
+ my_tidy_flags += -extra-arg-before=-Xclang
+ my_tidy_flags += -extra-arg-before=c++-temp-dtor-inlining=false
endif
endif
my_tidy_checks := $(subst $(space),,$(my_tidy_checks))
+# Add dependencies on clang-tidy and clang-tidy.sh
+ifneq ($(my_tidy_checks),)
+ my_clang_tidy_programs := $(PATH_TO_CLANG_TIDY) $(PATH_TO_CLANG_TIDY_SHELL)
+ $(cpp_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
+ $(c_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
+ $(gen_cpp_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
+ $(gen_c_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
+endif
+
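Tidy is now tied to the clang-only toolchain: it is driven globally by WITH_TIDY / WITH_TIDY_CHECKS / WITH_TIDY_FLAGS and per module by the LOCAL_TIDY_* variables (the per-module switch feeding my_tidy_enabled is assumed to be LOCAL_TIDY; it is not shown in this hunk). A hypothetical module-level configuration:

    LOCAL_TIDY := true                            # assumed per-module enable switch
    LOCAL_TIDY_CHECKS := -google-runtime-int      # comma-joined onto the default checks
    LOCAL_TIDY_FLAGS := -header-filter=$(LOCAL_PATH)/include/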
# Move -l* entries from ldflags to ldlibs, and everything else to ldflags
my_ldlib_flags := $(my_ldflags) $(my_ldlibs)
my_ldlibs := $(filter -l%,$(my_ldlib_flags))
@@ -1752,6 +1581,148 @@ endif
# (start-group/end-group), so append after the check above.
my_ldlibs += $(my_cxx_ldlibs)
+###########################################################
+## Define PRIVATE_ variables from global vars
+###########################################################
+ifndef LOCAL_IS_HOST_MODULE
+ifdef LOCAL_USE_VNDK
+my_target_global_c_includes := \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES)
+my_target_global_c_system_includes := \
+ $(TARGET_OUT_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES)
+else ifdef LOCAL_SDK_VERSION
+my_target_global_c_includes :=
+my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
+else ifdef BOARD_VNDK_VERSION
+my_target_global_c_includes := $(SRC_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
+my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
+else
+my_target_global_c_includes := $(SRC_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
+my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) $(TARGET_OUT_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
+endif
+
+my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
+my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
+my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
+ifeq ($(my_use_clang_lld),true)
+ my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LLDFLAGS)
+ include $(BUILD_SYSTEM)/pack_dyn_relocs_setup.mk
+ ifeq ($(my_pack_module_relocations),false)
+ my_target_global_ldflags += -Wl,--pack-dyn-relocs=none
+ endif
+else
+ my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
+endif # my_use_clang_lld
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_INCLUDES := $(my_target_global_c_includes)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_SYSTEM_INCLUDES := $(my_target_global_c_system_includes)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CFLAGS := $(my_target_global_cflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CONLYFLAGS := $(my_target_global_conlyflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_CPPFLAGS := $(my_target_global_cppflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
+
+else # LOCAL_IS_HOST_MODULE
+
+my_host_global_c_includes := $(SRC_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
+my_host_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
+
+my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
+my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
+my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
+ifeq ($(my_use_clang_lld),true)
+ my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LLDFLAGS)
+else
+ my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
+endif # my_use_clang_lld
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_INCLUDES := $(my_host_global_c_includes)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_GLOBAL_C_SYSTEM_INCLUDES := $(my_host_global_c_system_includes)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CFLAGS := $(my_host_global_cflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CONLYFLAGS := $(my_host_global_conlyflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_CPPFLAGS := $(my_host_global_cppflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_GLOBAL_LDFLAGS := $(my_host_global_ldflags)
+endif # LOCAL_IS_HOST_MODULE
+
+# To enable coverage for a given module, set LOCAL_NATIVE_COVERAGE=true and
+# build with NATIVE_COVERAGE=true in your environment.
+ifeq ($(NATIVE_COVERAGE),true)
+ ifeq ($(my_native_coverage),true)
+ # Note that clang coverage doesn't play nicely with acov out of the box.
+ # Clang apparently generates .gcno files that aren't compatible with
+ # gcov-4.8. This can be solved by installing gcc-4.6 and invoking lcov
+ # with `--gcov-tool /usr/bin/gcov-4.6`.
+ #
+ # http://stackoverflow.com/questions/17758126/clang-code-coverage-invalid-output
+ my_cflags += --coverage -O0
+ my_ldflags += --coverage
+ endif
+
+ my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
+
+ $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_COVERAGE_LIB := $(my_coverage_lib)
+ $(LOCAL_INTERMEDIATE_TARGETS): $(my_coverage_lib)
+endif
+
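Coverage therefore needs both switches: NATIVE_COVERAGE=true for the build as a whole and LOCAL_NATIVE_COVERAGE := true in the module, which is what my_native_coverage reflects above. A sketch, with a hypothetical module name:

    # Android.mk
    LOCAL_NATIVE_COVERAGE := true

    # shell
    $ NATIVE_COVERAGE=true m libfoo_coverage_tests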
+###########################################################
+## Define PRIVATE_ variables used by multiple module types
+###########################################################
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_NO_DEFAULT_COMPILER_FLAGS := \
+ $(strip $(LOCAL_NO_DEFAULT_COMPILER_FLAGS))
+
+ifeq ($(strip $(WITH_STATIC_ANALYZER)),)
+ LOCAL_NO_STATIC_ANALYZER := true
+endif
+
+ifneq ($(strip $(LOCAL_IS_HOST_MODULE)),)
+ my_syntax_arch := host
+else
+ my_syntax_arch := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+endif
+
+ifeq ($(strip $(my_cc)),)
+ my_cc := $(my_cc_wrapper) $(CLANG)
+endif
+
+SYNTAX_TOOLS_PREFIX := \
+ $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/libexec
+
+ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
+ my_cc := CCC_CC=$(CLANG) CLANG=$(CLANG) \
+ $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer
+endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CC := $(my_cc)
+
+ifeq ($(strip $(my_cxx)),)
+ my_cxx := $(my_cxx_wrapper) $(CLANG_CXX)
+endif
+
+ifeq ($(strip $(my_cxx_link)),)
+ my_cxx_link := $(CLANG_CXX)
+endif
+
+ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
+ my_cxx := CCC_CXX=$(CLANG_CXX) CLANG_CXX=$(CLANG_CXX) \
+ $(SYNTAX_TOOLS_PREFIX)/c++-analyzer
+ my_cxx_link := CCC_CXX=$(CLANG_CXX) CLANG_CXX=$(CLANG_CXX) \
+ $(SYNTAX_TOOLS_PREFIX)/c++-analyzer
+endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LINKER := $(my_linker)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX := $(my_cxx)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX_LINK := $(my_cxx_link)
+
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_YACCFLAGS := $(LOCAL_YACCFLAGS)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASFLAGS := $(my_asflags)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CONLYFLAGS := $(my_conlyflags)
@@ -1791,12 +1762,6 @@ all_libraries := \
###########################################################
export_includes := $(intermediates)/export_includes
export_cflags := $(foreach d,$(my_export_c_include_dirs),-I $(d))
-# Soong exports cflags instead of include dirs, so that -isystem can be included.
-ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-export_cflags += $(LOCAL_EXPORT_CFLAGS)
-else ifdef LOCAL_EXPORT_CFLAGS
-$(call pretty-error,LOCAL_EXPORT_CFLAGS can only be used by Soong, use LOCAL_EXPORT_C_INCLUDE_DIRS instead)
-endif
$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
# Headers exported by whole static libraries are also exported by this library.
export_include_deps := $(strip \
@@ -1841,7 +1806,6 @@ export_cflags :=
# Make sure export_includes gets generated when you are running mm/mmm
$(LOCAL_BUILT_MODULE) : | $(export_includes)
-ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
ifneq (,$(filter-out $(LOCAL_PATH)/%,$(my_export_c_include_dirs)))
my_soong_problems += non_local__export_c_include_dirs
endif
@@ -1857,7 +1821,6 @@ SOONG_CONV.$(LOCAL_MODULE).DEPS := \
$(my_system_shared_libraries))
SOONG_CONV.$(LOCAL_MODULE).TYPE := native
SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
-endif
###########################################################
# Coverage packaging.
diff --git a/core/board_config.mk b/core/board_config.mk
new file mode 100644
index 0000000000..ac0f27da53
--- /dev/null
+++ b/core/board_config.mk
@@ -0,0 +1,526 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ###############################################################
+# This file includes BoardConfig.mk for the device being built,
+# and sanity-checks the variables defined therein.
+# ###############################################################
+
+_board_strip_readonly_list := \
+ BOARD_EGL_CFG \
+ BOARD_HAVE_BLUETOOTH \
+ BOARD_INSTALLER_CMDLINE \
+ BOARD_KERNEL_CMDLINE \
+ BOARD_KERNEL_BASE \
+ BOARD_USES_GENERIC_AUDIO \
+ BOARD_VENDOR_USE_AKMD \
+ BOARD_WPA_SUPPLICANT_DRIVER \
+ BOARD_WLAN_DEVICE \
+ TARGET_ARCH \
+ TARGET_ARCH_VARIANT \
+ TARGET_CPU_ABI \
+ TARGET_CPU_ABI2 \
+ TARGET_CPU_VARIANT \
+ TARGET_CPU_VARIANT_RUNTIME \
+ TARGET_2ND_ARCH \
+ TARGET_2ND_ARCH_VARIANT \
+ TARGET_2ND_CPU_ABI \
+ TARGET_2ND_CPU_ABI2 \
+ TARGET_2ND_CPU_VARIANT \
+ TARGET_2ND_CPU_VARIANT_RUNTIME \
+ TARGET_BOARD_PLATFORM \
+ TARGET_BOARD_PLATFORM_GPU \
+ TARGET_BOOTLOADER_BOARD_NAME \
+ TARGET_FS_CONFIG_GEN \
+ TARGET_NO_BOOTLOADER \
+ TARGET_NO_KERNEL \
+ TARGET_NO_RECOVERY \
+ TARGET_NO_RADIOIMAGE \
+ TARGET_HARDWARE_3D \
+ WITH_DEXPREOPT \
+
+# File system variables
+_board_strip_readonly_list += \
+ BOARD_FLASH_BLOCK_SIZE \
+ BOARD_BOOTIMAGE_PARTITION_SIZE \
+ BOARD_RECOVERYIMAGE_PARTITION_SIZE \
+ BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+ BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_USERDATAIMAGE_PARTITION_SIZE \
+ BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_CACHEIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_PRODUCTIMAGE_PARTITION_SIZE \
+ BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE \
+ BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_ODMIMAGE_PARTITION_SIZE \
+ BOARD_ODMIMAGE_FILE_SYSTEM_TYPE \
+
+# Logical partitions related variables.
+_dynamic_partitions_var_list += \
+ BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_SUPER_PARTITION_SIZE \
+ BOARD_SUPER_PARTITION_GROUPS \
+
+_board_strip_readonly_list += $(_dynamic_partitions_var_list)
+
+_build_broken_var_list := \
+ BUILD_BROKEN_ANDROIDMK_EXPORTS \
+ BUILD_BROKEN_DUP_COPY_HEADERS \
+ BUILD_BROKEN_DUP_RULES \
+ BUILD_BROKEN_PHONY_TARGETS \
+ BUILD_BROKEN_ENG_DEBUG_TAGS \
+ BUILD_BROKEN_USES_NETWORK \
+
+_board_true_false_vars := $(_build_broken_var_list)
+_board_strip_readonly_list += $(_build_broken_var_list)
+
+# Only when building on linux, as dex2oat currently does not work on darwin.
+ifeq ($(HOST_OS),linux)
+ WITH_DEXPREOPT := true
+endif
+
+# ###############################################################
+# Broken build defaults
+# ###############################################################
+$(foreach v,$(_build_broken_var_list),$(eval $(v) :=))
+
+# Boards may be defined under $(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)
+# or under vendor/*/$(TARGET_DEVICE). Search in both places, but
+# make sure only one exists.
+# Real boards should always be associated with an OEM vendor.
+ifdef TARGET_DEVICE_DIR
+ ifneq ($(origin TARGET_DEVICE_DIR),command line)
+ $(error TARGET_DEVICE_DIR may not be set manually)
+ endif
+ board_config_mk := $(TARGET_DEVICE_DIR)/BoardConfig.mk
+else
+ board_config_mk := \
+ $(strip $(sort $(wildcard \
+ $(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)/BoardConfig.mk \
+ $(shell test -d device && find -L device -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
+ $(shell test -d vendor && find -L vendor -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
+ )))
+ ifeq ($(board_config_mk),)
+ $(error No config file found for TARGET_DEVICE $(TARGET_DEVICE))
+ endif
+ ifneq ($(words $(board_config_mk)),1)
+ $(error Multiple board config files for TARGET_DEVICE $(TARGET_DEVICE): $(board_config_mk))
+ endif
+ TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
+ .KATI_READONLY := TARGET_DEVICE_DIR
+endif
+include $(board_config_mk)
+ifeq ($(TARGET_ARCH),)
+ $(error TARGET_ARCH not defined by board config: $(board_config_mk))
+endif
+ifneq ($(MALLOC_IMPL),)
+ $(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
+ $(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
+endif
+board_config_mk :=
+
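The lookup is intentionally strict: exactly one BoardConfig.mk may match TARGET_DEVICE across device/ and vendor/, and TARGET_DEVICE_DIR is then derived from it and made read-only. An illustrative resolution with a hypothetical device:

    # TARGET_DEVICE := acme_phone
    # match: device/acme/acme_phone/BoardConfig.mk
    TARGET_DEVICE_DIR := device/acme/acme_phone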
+# Clean up and verify BoardConfig variables
+$(foreach var,$(_board_strip_readonly_list),$(eval $(var) := $$(strip $$($(var)))))
+$(foreach var,$(_board_true_false_vars), \
+ $(if $(filter-out true false,$($(var))), \
+ $(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
+
+# Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
+TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
+TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
+
+# The combo makefiles sanity-check and set defaults for various CPU configurations
+combo_target := TARGET_
+combo_2nd_arch_prefix :=
+include $(BUILD_SYSTEM)/combo/select.mk
+
+ifdef TARGET_2ND_ARCH
+ combo_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+endif
+
+.KATI_READONLY := $(_board_strip_readonly_list)
+
+INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
+ifeq ($(TARGET_CPU_ABI),)
+ $(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
+endif
+ifneq ($(filter %64,$(TARGET_ARCH)),)
+ TARGET_IS_64_BIT := true
+endif
+
+ifeq (,$(filter true,$(TARGET_SUPPORTS_32_BIT_APPS) $(TARGET_SUPPORTS_64_BIT_APPS)))
+ TARGET_SUPPORTS_32_BIT_APPS := true
+endif
+
+# Sanity check to warn about likely cryptic errors later in the build.
+ifeq ($(TARGET_IS_64_BIT),true)
+ ifeq (,$(filter true false,$(TARGET_SUPPORTS_64_BIT_APPS)))
+ $(warning Building a 32-bit-app-only product on a 64-bit device. \
+ If this is intentional, set TARGET_SUPPORTS_64_BIT_APPS := false)
+ endif
+endif
+
+# "ro.product.cpu.abilist32" and "ro.product.cpu.abilist64" are
+# comma separated lists of the 32 and 64 bit ABIs (in order of
+# preference) that the target supports. If TARGET_CPU_ABI_LIST_{32,64}_BIT
+# are defined by the board config, we use them. Else, we construct
+# these lists based on whether TARGET_IS_64_BIT is set.
+#
+# Note that this assumes that the 2ND_CPU_ABI for a 64 bit target
+# is always 32 bits. If this isn't the case, these variables should
+# be overridden in the board configuration.
+ifeq (,$(TARGET_CPU_ABI_LIST_64_BIT))
+ ifeq (true|true,$(TARGET_IS_64_BIT)|$(TARGET_SUPPORTS_64_BIT_APPS))
+ TARGET_CPU_ABI_LIST_64_BIT := $(TARGET_CPU_ABI) $(TARGET_CPU_ABI2)
+ endif
+endif
+
+ifeq (,$(TARGET_CPU_ABI_LIST_32_BIT))
+ ifneq (true,$(TARGET_IS_64_BIT))
+ TARGET_CPU_ABI_LIST_32_BIT := $(TARGET_CPU_ABI) $(TARGET_CPU_ABI2)
+ else
+ ifeq (true,$(TARGET_SUPPORTS_32_BIT_APPS))
+ # For a 64 bit target, assume that the 2ND_CPU_ABI
+ # is a 32 bit ABI.
+ TARGET_CPU_ABI_LIST_32_BIT := $(TARGET_2ND_CPU_ABI) $(TARGET_2ND_CPU_ABI2)
+ endif
+ endif
+endif
+
+# "ro.product.cpu.abilist" is a comma separated list of ABIs (in order
+# of preference) that the target supports. If a TARGET_CPU_ABI_LIST
+# is specified by the board configuration, we use that. If not, we
+# build a list out of the TARGET_CPU_ABIs specified by the config.
+ifeq (,$(TARGET_CPU_ABI_LIST))
+ ifeq ($(TARGET_IS_64_BIT)|$(TARGET_PREFER_32_BIT_APPS),true|true)
+ TARGET_CPU_ABI_LIST := $(TARGET_CPU_ABI_LIST_32_BIT) $(TARGET_CPU_ABI_LIST_64_BIT)
+ else
+ TARGET_CPU_ABI_LIST := $(TARGET_CPU_ABI_LIST_64_BIT) $(TARGET_CPU_ABI_LIST_32_BIT)
+ endif
+endif
+
+# Strip whitespace from the ABI list string.
+TARGET_CPU_ABI_LIST := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST)))
+TARGET_CPU_ABI_LIST_32_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_32_BIT)))
+TARGET_CPU_ABI_LIST_64_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_64_BIT)))
+
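Putting the pieces together for a typical 64-bit-primary board (ABI values shown for illustration):

    # board/product configuration
    TARGET_CPU_ABI              := arm64-v8a
    TARGET_2ND_CPU_ABI          := armeabi-v7a
    TARGET_2ND_CPU_ABI2         := armeabi
    TARGET_SUPPORTS_32_BIT_APPS := true
    TARGET_SUPPORTS_64_BIT_APPS := true
    # derived above (TARGET_PREFER_32_BIT_APPS unset)
    TARGET_CPU_ABI_LIST_64_BIT  := arm64-v8a
    TARGET_CPU_ABI_LIST_32_BIT  := armeabi-v7a,armeabi
    TARGET_CPU_ABI_LIST         := arm64-v8a,armeabi-v7a,armeabi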
+ifneq ($(BUILD_BROKEN_ANDROIDMK_EXPORTS),true)
+$(KATI_obsolete_export It is a global setting. See $(CHANGES_URL)#export_keyword)
+endif
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_RAMDISK
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+TARGET_COPY_OUT_RAMDISK := $(TARGET_COPY_OUT_ROOT)
+endif
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
+endif
+
+###########################################
+# Configure whether we're building the system image
+BUILDING_SYSTEM_IMAGE := true
+ifeq ($(PRODUCT_BUILD_SYSTEM_IMAGE),)
+ ifndef PRODUCT_USE_DYNAMIC_PARTITION_SIZE
+ ifndef BOARD_SYSTEMIMAGE_PARTITION_SIZE
+ BUILDING_SYSTEM_IMAGE :=
+ endif
+ endif
+else ifeq ($(PRODUCT_BUILD_SYSTEM_IMAGE),false)
+ BUILDING_SYSTEM_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_SYSTEM_IMAGE
+
+# Are we building a system_other image
+BUILDING_SYSTEM_OTHER_IMAGE :=
+ifeq ($(PRODUCT_BUILD_SYSTEM_OTHER_IMAGE),)
+ ifdef BUILDING_SYSTEM_IMAGE
+ ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
+ BUILDING_SYSTEM_OTHER_IMAGE := true
+ endif
+ endif
+else ifeq ($(PRODUCT_BUILD_SYSTEM_OTHER_IMAGE),true)
+ BUILDING_SYSTEM_OTHER_IMAGE := true
+ ifndef BUILDING_SYSTEM_IMAGE
+ $(error PRODUCT_BUILD_SYSTEM_OTHER_IMAGE = true requires building the system image)
+ endif
+endif
+.KATI_READONLY := BUILDING_SYSTEM_OTHER_IMAGE
+
+# Are we building a cache image
+BUILDING_CACHE_IMAGE :=
+ifeq ($(PRODUCT_BUILD_CACHE_IMAGE),)
+ ifdef BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+ BUILDING_CACHE_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_CACHE_IMAGE),true)
+ BUILDING_CACHE_IMAGE := true
+ ifndef BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+ $(error PRODUCT_BUILD_CACHE_IMAGE set to true, but BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE not defined)
+ endif
+endif
+.KATI_READONLY := BUILDING_CACHE_IMAGE
+
+# TODO: Add BUILDING_BOOT_IMAGE / BUILDING_RECOVERY_IMAGE
+# This gets complicated with BOARD_USES_RECOVERY_AS_BOOT, so skipping for now.
+
+# Are we building a ramdisk image
+BUILDING_RAMDISK_IMAGE := true
+ifeq ($(PRODUCT_BUILD_RAMDISK_IMAGE),)
+  # TODO: Be smarter about this. This probably only needs to happen when one of the following is true:
+ # BUILDING_BOOT_IMAGE
+ # BUILDING_RECOVERY_IMAGE
+else ifeq ($(PRODUCT_BUILD_RAMDISK_IMAGE),false)
+ BUILDING_RAMDISK_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_RAMDISK_IMAGE
+
+# Are we building a userdata image
+BUILDING_USERDATA_IMAGE :=
+ifeq ($(PRODUCT_BUILD_USERDATA_IMAGE),)
+ ifdef BOARD_USERDATAIMAGE_PARTITION_SIZE
+ BUILDING_USERDATA_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_USERDATA_IMAGE),true)
+ BUILDING_USERDATA_IMAGE := true
+endif
+.KATI_READONLY := BUILDING_USERDATA_IMAGE
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_VENDOR
+ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder))
+ TARGET_COPY_OUT_VENDOR := system/vendor
+else ifeq ($(filter vendor system/vendor,$(TARGET_COPY_OUT_VENDOR)),)
+ $(error TARGET_COPY_OUT_VENDOR must be either 'vendor' or 'system/vendor', seeing '$(TARGET_COPY_OUT_VENDOR)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_VENDORIMAGE :=
+ifdef BOARD_PREBUILT_VENDORIMAGE
+ BOARD_USES_VENDORIMAGE := true
+endif
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ BOARD_USES_VENDORIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_VENDOR),vendor)
+ BOARD_USES_VENDORIMAGE := true
+else ifdef BOARD_USES_VENDORIMAGE
+ $(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
+endif
+.KATI_READONLY := BOARD_USES_VENDORIMAGE
+
+BUILDING_VENDOR_IMAGE :=
+ifeq ($(PRODUCT_BUILD_VENDOR_IMAGE),)
+ ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ BUILDING_VENDOR_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_VENDOR_IMAGE),true)
+ BUILDING_VENDOR_IMAGE := true
+ ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ $(error PRODUCT_BUILD_VENDOR_IMAGE set to true, but BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE not defined)
+ endif
+endif
+ifdef BOARD_PREBUILT_VENDORIMAGE
+ BUILDING_VENDOR_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_VENDOR_IMAGE
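+# Board config sketch (values are assumptions): a device that builds its own
+# vendor partition typically sets
+#   TARGET_COPY_OUT_VENDOR := vendor
+#   BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+# which makes both BOARD_USES_VENDORIMAGE and BUILDING_VENDOR_IMAGE true; a
+# device shipping a prebuilt image would set BOARD_PREBUILT_VENDORIMAGE instead.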
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT
+ifeq ($(TARGET_COPY_OUT_PRODUCT),$(_product_path_placeholder))
+TARGET_COPY_OUT_PRODUCT := system/product
+else ifeq ($(filter product system/product,$(TARGET_COPY_OUT_PRODUCT)),)
+$(error TARGET_COPY_OUT_PRODUCT must be either 'product' or 'system/product', seeing '$(TARGET_COPY_OUT_PRODUCT)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_PRODUCTIMAGE :=
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+ BOARD_USES_PRODUCTIMAGE := true
+endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ BOARD_USES_PRODUCTIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_PRODUCT),product)
+ BOARD_USES_PRODUCTIMAGE := true
+else ifdef BOARD_USES_PRODUCTIMAGE
+ $(error TARGET_COPY_OUT_PRODUCT must be set to 'product' to use a product image)
+endif
+.KATI_READONLY := BOARD_USES_PRODUCTIMAGE
+
+BUILDING_PRODUCT_IMAGE :=
+ifeq ($(PRODUCT_BUILD_PRODUCT_IMAGE),)
+ ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ BUILDING_PRODUCT_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_PRODUCT_IMAGE),true)
+ BUILDING_PRODUCT_IMAGE := true
+ ifndef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+ $(error PRODUCT_BUILD_PRODUCT_IMAGE set to true, but BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE not defined)
+ endif
+endif
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+ BUILDING_PRODUCT_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_PRODUCT_IMAGE
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT_SERVICES
+MERGE_PRODUCT_SERVICES_INTO_PRODUCT :=
+ifeq ($(TARGET_COPY_OUT_PRODUCT_SERVICES),$(_product_services_path_placeholder))
+ TARGET_COPY_OUT_PRODUCT_SERVICES := $(TARGET_COPY_OUT_PRODUCT)
+ MERGE_PRODUCT_SERVICES_INTO_PRODUCT := true
+else ifeq ($(TARGET_COPY_OUT_PRODUCT),$(TARGET_COPY_OUT_PRODUCT_SERVICES))
+ MERGE_PRODUCT_SERVICES_INTO_PRODUCT := true
+else ifeq ($(filter system/product_services,$(TARGET_COPY_OUT_PRODUCT_SERVICES)),)
+ $(error TARGET_COPY_OUT_PRODUCT_SERVICES must be either '$(TARGET_COPY_OUT_PRODUCT)'\
+ or 'system/product_services', seeing '$(TARGET_COPY_OUT_PRODUCT_SERVICES)'.)
+endif
+.KATI_READONLY := MERGE_PRODUCT_SERVICES_INTO_PRODUCT
+PRODUCT_COPY_FILES := $(subst $(_product_services_path_placeholder),$(TARGET_COPY_OUT_PRODUCT_SERVICES),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_PRODUCT_SERVICESIMAGE :=
+ifdef BOARD_PREBUILT_PRODUCT_SERVICESIMAGE
+ BOARD_USES_PRODUCT_SERVICESIMAGE := true
+endif
+ifdef BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE
+ BOARD_USES_PRODUCT_SERVICESIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_PRODUCT_SERVICES),product_services)
+ BOARD_USES_PRODUCT_SERVICESIMAGE := true
+else ifdef BOARD_USES_PRODUCT_SERVICESIMAGE
+ $(error A 'product_services' partition should not be used. Use 'system/product_services' instead.)
+endif
+
+BUILDING_PRODUCT_SERVICES_IMAGE :=
+ifeq ($(PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE),)
+ ifdef BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE
+ BUILDING_PRODUCT_SERVICES_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE),true)
+ BUILDING_PRODUCT_SERVICES_IMAGE := true
+ ifndef BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE
+ $(error PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE set to true, but BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE not defined)
+ endif
+endif
+ifdef BOARD_PREBUILT_PRODUCT_SERVICESIMAGE
+ BUILDING_PRODUCT_SERVICES_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_PRODUCT_SERVICES_IMAGE
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_ODM
+ifeq ($(TARGET_COPY_OUT_ODM),$(_odm_path_placeholder))
+ TARGET_COPY_OUT_ODM := vendor/odm
+else ifeq ($(filter odm vendor/odm,$(TARGET_COPY_OUT_ODM)),)
+ $(error TARGET_COPY_OUT_ODM must be either 'odm' or 'vendor/odm', seeing '$(TARGET_COPY_OUT_ODM)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_odm_path_placeholder),$(TARGET_COPY_OUT_ODM),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_ODMIMAGE :=
+ifdef BOARD_PREBUILT_ODMIMAGE
+ BOARD_USES_ODMIMAGE := true
+endif
+ifdef BOARD_ODMIMAGE_FILE_SYSTEM_TYPE
+ BOARD_USES_ODMIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_ODM),odm)
+ BOARD_USES_ODMIMAGE := true
+else ifdef BOARD_USES_ODMIMAGE
+ $(error TARGET_COPY_OUT_ODM must be set to 'odm' to use an odm image)
+endif
+
+BUILDING_ODM_IMAGE :=
+ifeq ($(PRODUCT_BUILD_ODM_IMAGE),)
+ ifdef BOARD_ODMIMAGE_FILE_SYSTEM_TYPE
+ BUILDING_ODM_IMAGE := true
+ endif
+else ifeq ($(PRODUCT_BUILD_ODM_IMAGE),true)
+ BUILDING_ODM_IMAGE := true
+ ifndef BOARD_ODMIMAGE_FILE_SYSTEM_TYPE
+ $(error PRODUCT_BUILD_ODM_IMAGE set to true, but BOARD_ODMIMAGE_FILE_SYSTEM_TYPE not defined)
+ endif
+endif
+ifdef BOARD_PREBUILT_ODMIMAGE
+ BUILDING_ODM_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_ODM_IMAGE
+
+###########################################
+# Ensure that only TARGET_RECOVERY_UPDATER_LIBS *or* AB_OTA_UPDATER is set.
+TARGET_RECOVERY_UPDATER_LIBS ?=
+AB_OTA_UPDATER ?=
+.KATI_READONLY := TARGET_RECOVERY_UPDATER_LIBS AB_OTA_UPDATER
+ifeq ($(AB_OTA_UPDATER),true)
+ ifneq ($(strip $(TARGET_RECOVERY_UPDATER_LIBS)),)
+ $(error Do not use TARGET_RECOVERY_UPDATER_LIBS when using AB_OTA_UPDATER)
+ endif
+endif
+
+ifdef BOARD_PREBUILT_DTBIMAGE_DIR
+ ifneq ($(BOARD_INCLUDE_DTB_IN_BOOTIMG),true)
+ $(error BOARD_PREBUILT_DTBIMAGE_DIR with 'BOARD_INCLUDE_DTB_IN_BOOTIMG != true' is not supported)
+ endif
+endif
+
+# Check BOARD_VNDK_VERSION
+define check_vndk_version
+ $(eval vndk_path := prebuilts/vndk/v$(1)) \
+ $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
+endef
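+# Usage sketch (assuming a snapshot exists under prebuilts/vndk/v28):
+#   $(call check_vndk_version,28)
+# fails with an error unless prebuilts/vndk/v28/*/Android.bp is present.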
+
+ifdef BOARD_VNDK_VERSION
+ ifneq ($(BOARD_VNDK_VERSION),current)
+ $(error BOARD_VNDK_VERSION: Only "current" is implemented)
+ endif
+
+ TARGET_VENDOR_TEST_SUFFIX := /vendor
+else
+ TARGET_VENDOR_TEST_SUFFIX :=
+endif
+
+###########################################
+# APEXes are flattened by default, i.e. non-updatable.
+# They can be unflattened (and made updatable) by inheriting from
+# updatable_apex.mk.
+ifeq (,$(TARGET_FLATTEN_APEX))
+TARGET_FLATTEN_APEX := true
+endif
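+# Sketch (path is an assumption): a product opts into unflattened, updatable
+# APEXes by inheriting the dedicated makefile rather than setting the flag
+# directly, e.g.
+#   $(call inherit-product, $(SRC_TARGET_DIR)/product/updatable_apex.mk)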
+
+ifeq (,$(TARGET_BUILD_APPS))
+ifdef PRODUCT_EXTRA_VNDK_VERSIONS
+ $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
+endif
+endif
+
+# Ensure that BOARD_SYSTEMSDK_VERSIONS are all within PLATFORM_SYSTEMSDK_VERSIONS
+_unsupported_systemsdk_versions := $(filter-out $(PLATFORM_SYSTEMSDK_VERSIONS),$(BOARD_SYSTEMSDK_VERSIONS))
+ifneq (,$(_unsupported_systemsdk_versions))
+ $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
+ Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
+endif
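+# Worked example: with PLATFORM_SYSTEMSDK_VERSIONS set to "28 29", a board
+# declaring BOARD_SYSTEMSDK_VERSIONS := 27 29 fails here, since 27 survives
+# the filter-out and is therefore unsupported.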
diff --git a/core/build-system.html b/core/build-system.html
index 3d86e24a9b..3a11a477dc 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -147,12 +147,6 @@ figure out exactly how this will work.</p>
.c, .cpp, .h, .java, java libraries, etc., should all work without intervention
in the Android.mk file.</p>
-<h3>Hiding command lines</h3>
-<p>The default of the build system will be to hide the command lines being
-executed for make steps. It will be possible to override this by specifying
-the showcommands pseudo-target, and possibly by setting an environment
-variable.</p>
-
<h3>Wildcard source files</h3>
<p>Wildcarding source file will be discouraged. It may be useful in some
scenarios. The default <code>$(wildcard *)</code> will not work due to the
@@ -326,19 +320,6 @@ and intermediate files for all configurations. This is the same as
directory inside the current combo directory. This is especially useful on the
simulator and emulator, where the persistent data remains present between
builds.</li>
-<li><b>showcommands</b> - <code>showcommands</code> is a modifier target
-which causes the build system to show the actual command lines for the build
-steps, instead of the brief descriptions. Most people don't like seeing the
-actual commands, because they're quite long and hard to read, but if you need
-to for debugging purposes, you can add <code>showcommands</code> to the list
-of targets you build. For example <code>make showcommands</code> will build
-the default android configuration, and <code>make runtime showcommands</code>
-will build just the runtime, and targets that it depends on, while displaying
-the full command lines. Please note that there are a couple places where the
-commands aren't shown here. These are considered bugs, and should be fixed,
-but they're often hard to track down. Please let
-<a href="mailto:android-build-team">android-build-team</a> know if you find
-any.</li>
<li><b>LOCAL_MODULE</b> - Anything you specify as a <code>LOCAL_MODULE</code>
in an Android.mk is made into a pseudotarget. For example, <code>make
runtime</code> might be shorthand for <code>make
diff --git a/core/build_id.mk b/core/build_id.mk
index 932e2144a7..2329288076 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,4 +18,4 @@
# (like "CRB01"). It must be a single word, and is
# capitalized by convention.
-export BUILD_ID=PI
+BUILD_ID=QT
diff --git a/core/build_rro_package.mk b/core/build_rro_package.mk
index ffefb9cbfe..a6921d5bad 100644
--- a/core/build_rro_package.mk
+++ b/core/build_rro_package.mk
@@ -15,11 +15,23 @@ ifneq ($(LOCAL_SRC_FILES),)
$(error runtime resource overlay package should not contain sources)
endif
+partition :=
+ifeq ($(strip $(LOCAL_ODM_MODULE)),true)
+ partition := $(TARGET_OUT_ODM)
+else ifeq ($(strip $(LOCAL_VENDOR_MODULE)),true)
+ partition := $(TARGET_OUT_VENDOR)
+else ifeq ($(strip $(LOCAL_PRODUCT_SERVICES_MODULE)),true)
+ partition := $(TARGET_OUT_PRODUCT_SERVICES)
+else
+ partition := $(TARGET_OUT_PRODUCT)
+endif
+
ifeq ($(LOCAL_RRO_THEME),)
- LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay
+ LOCAL_MODULE_PATH := $(partition)/overlay
else
- LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_THEME)
+ LOCAL_MODULE_PATH := $(partition)/overlay/$(LOCAL_RRO_THEME)
endif
-include $(BUILD_SYSTEM)/package.mk
+partition :=
+include $(BUILD_SYSTEM)/package.mk
diff --git a/core/ccache.mk b/core/ccache.mk
index d67bce6c7b..d10aceb01c 100644
--- a/core/ccache.mk
+++ b/core/ccache.mk
@@ -14,39 +14,48 @@
# limitations under the License.
#
+# We no longer provide a ccache prebuilt.
+#
+# Ours was old, and had a number of issues that triggered non-reproducible
+# results and other failures. Newer ccache versions may fix some of those
+# issues, but at the large scale of our build servers, we weren't seeing
+# significant performance gains from using ccache -- you end up needing very
+# good locality and/or very large caches if you're building many different
+# configurations.
+#
+# Local no-change full rebuilds were showing better results, but why not just
+# use incremental builds at that point?
+#
+# So if you still want to use ccache, continue setting USE_CCACHE, but also set
+# the CCACHE_EXEC environment variable to the path to your ccache executable.
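+#
+# For example (illustrative, assuming ccache is installed at the given path),
+# set these in your environment before running the build:
+#   export USE_CCACHE=1
+#   export CCACHE_EXEC=/usr/bin/ccache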
+ifneq ($(CCACHE_EXEC),)
ifneq ($(filter-out false,$(USE_CCACHE)),)
# The default check uses size and modification time, causing false misses
# since the mtime depends when the repo was checked out
- export CCACHE_COMPILERCHECK ?= content
+ CCACHE_COMPILERCHECK ?= content
# See man page, optimizations to get more cache hits
# implies that __DATE__ and __TIME__ are not critical for functionality.
# Ignore include file modification time since it will depend on when
# the repo was checked out
- export CCACHE_SLOPPINESS := time_macros,include_file_mtime,file_macro
+ CCACHE_SLOPPINESS := time_macros,include_file_mtime,file_macro
# Turn all preprocessor absolute paths into relative paths.
# Fixes absolute paths in preprocessed source due to use of -g.
# We don't really use system headers much so the rootdir is
# fine; ensures these paths are relative for all Android trees
# on a workstation.
- export CCACHE_BASEDIR := /
+ CCACHE_BASEDIR := /
# Workaround for ccache with clang.
# See http://petereisentraut.blogspot.com/2011/09/ccache-and-clang-part-2.html
- export CCACHE_CPP2 := true
+ CCACHE_CPP2 := true
- CCACHE_HOST_TAG := $(HOST_PREBUILT_TAG)
- ccache := prebuilts/misc/$(CCACHE_HOST_TAG)/ccache/ccache
- # Check that the executable is here.
- ccache := $(strip $(wildcard $(ccache)))
- ifdef ccache
- ifndef CC_WRAPPER
- CC_WRAPPER := $(ccache)
- endif
- ifndef CXX_WRAPPER
- CXX_WRAPPER := $(ccache)
- endif
- ccache =
+ ifndef CC_WRAPPER
+ CC_WRAPPER := $(CCACHE_EXEC)
endif
+ ifndef CXX_WRAPPER
+ CXX_WRAPPER := $(CCACHE_EXEC)
+ endif
+endif
endif
diff --git a/core/check_elf_file.mk b/core/check_elf_file.mk
new file mode 100644
index 0000000000..0faaadd3fe
--- /dev/null
+++ b/core/check_elf_file.mk
@@ -0,0 +1,49 @@
+# Check the correctness of the prebuilt ELF files
+#
+# This check ensures that DT_SONAME matches the filename, DT_NEEDED
+# matches the shared libraries specified in LOCAL_SHARED_LIBRARIES, and all
+# undefined symbols in the prebuilt binary can be found in one of the shared
+# libraries specified in LOCAL_SHARED_LIBRARIES.
+#
+# Inputs:
+# - LOCAL_ALLOW_UNDEFINED_SYMBOLS
+# - LOCAL_BUILT_MODULE
+# - LOCAL_IS_HOST_MODULE
+# - LOCAL_MODULE_CLASS
+# - intermediates
+# - my_installed_module_stem
+# - my_prebuilt_src_file
+# - my_check_elf_file_shared_lib_files
+
+ifndef LOCAL_IS_HOST_MODULE
+ifneq ($(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS),)
+check_elf_files_stamp := $(intermediates)/check_elf_files.timestamp
+$(check_elf_files_stamp): PRIVATE_SONAME := $(if $(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES),$(my_installed_module_stem))
+$(check_elf_files_stamp): PRIVATE_ALLOW_UNDEFINED_SYMBOLS := $(LOCAL_ALLOW_UNDEFINED_SYMBOLS)
+# PRIVATE_SHARED_LIBRARY_FILES are file paths to built shared libraries.
+# In addition to $(my_check_elf_file_shared_lib_files), some file paths are
+# added by `resolve-shared-libs-for-elf-file-check` from `core/main.mk`.
+$(check_elf_files_stamp): PRIVATE_SHARED_LIBRARY_FILES := $(my_check_elf_file_shared_lib_files)
+$(check_elf_files_stamp): $(my_prebuilt_src_file) $(my_check_elf_file_shared_lib_files) $(CHECK_ELF_FILE) $(LLVM_READOBJ)
+ @echo Check prebuilt ELF binary: $<
+ $(hide) mkdir -p $(dir $@)
+ $(hide) rm -f $@
+ $(hide) $(CHECK_ELF_FILE) \
+ --skip-bad-elf-magic \
+ --skip-unknown-elf-machine \
+ $(if $(PRIVATE_SONAME),--soname $(PRIVATE_SONAME)) \
+ $(foreach l,$(PRIVATE_SHARED_LIBRARY_FILES),--shared-lib $(l)) \
+ $(if $(PRIVATE_ALLOW_UNDEFINED_SYMBOLS),--allow-undefined-symbols) \
+ --llvm-readobj=$(LLVM_READOBJ) \
+ $<
+ $(hide) touch $@
+
+ifneq ($(PRODUCT_CHECK_ELF_FILES)$(CHECK_ELF_FILES),)
+ifneq ($(strip $(LOCAL_CHECK_ELF_FILES)),false)
+$(LOCAL_BUILT_MODULE): $(check_elf_files_stamp)
+check-elf-files: $(check_elf_files_stamp)
+endif # LOCAL_CHECK_ELF_FILES
+endif # PRODUCT_CHECK_ELF_FILES or CHECK_ELF_FILES
+
+endif # SHARED_LIBRARIES, EXECUTABLES, NATIVE_TESTS
+endif # !LOCAL_IS_HOST_MODULE
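+
+# Usage sketch: the check is wired up when either PRODUCT_CHECK_ELF_FILES or
+# CHECK_ELF_FILES is non-empty (e.g. PRODUCT_CHECK_ELF_FILES := true in a
+# product makefile), and an individual module can opt out with
+#   LOCAL_CHECK_ELF_FILES := false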
diff --git a/core/clang/HOST_CROSS_x86.mk b/core/clang/HOST_CROSS_x86.mk
deleted file mode 100644
index ffd78119a6..0000000000
--- a/core/clang/HOST_CROSS_x86.mk
+++ /dev/null
@@ -1 +0,0 @@
-$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
diff --git a/core/clang/HOST_CROSS_x86_64.mk b/core/clang/HOST_CROSS_x86_64.mk
deleted file mode 100644
index f921a1c1c3..0000000000
--- a/core/clang/HOST_CROSS_x86_64.mk
+++ /dev/null
@@ -1 +0,0 @@
-$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64.a
diff --git a/core/clang/HOST_x86.mk b/core/clang/HOST_x86.mk
index 28035173f0..2e0865bbeb 100644
--- a/core/clang/HOST_x86.mk
+++ b/core/clang/HOST_x86.mk
@@ -1 +1,2 @@
$(clang_2nd_arch_prefix)HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
+$(clang_2nd_arch_prefix)HOST_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-i386.a
diff --git a/core/clang/HOST_x86_64.mk b/core/clang/HOST_x86_64.mk
index 4fdffd844d..3fd0541d3a 100644
--- a/core/clang/HOST_x86_64.mk
+++ b/core/clang/HOST_x86_64.mk
@@ -1 +1,2 @@
HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64.a
+HOST_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-x86_64.a
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index 9c1a8368fd..f18747a44b 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -3,6 +3,8 @@ $(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
$(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := armv7-none-linux-gnueabi
$(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-arm-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-arm-android.a
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 9a67b6b15b..42bed0aaed 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -3,6 +3,8 @@ RS_TRIPLE_CFLAGS :=
RS_COMPAT_TRIPLE := aarch64-linux-android
TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-aarch64-android.a
+TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-aarch64-android.a
# Address sanitizer clang config
ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
+ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
index dfd99b2816..3e54a6626a 100644
--- a/core/clang/TARGET_mips.mk
+++ b/core/clang/TARGET_mips.mk
@@ -6,3 +6,4 @@ $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.p
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
diff --git a/core/clang/TARGET_mips64.mk b/core/clang/TARGET_mips64.mk
index a3684cc3da..cb6a3cde23 100644
--- a/core/clang/TARGET_mips64.mk
+++ b/core/clang/TARGET_mips64.mk
@@ -6,3 +6,4 @@ TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mips64el-android.
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index 1b9c78c91c..5491a05978 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -3,6 +3,8 @@ $(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS := -D__i386__
$(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := i686-linux-android
$(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-i686-android.a
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
diff --git a/core/clang/TARGET_x86_64.mk b/core/clang/TARGET_x86_64.mk
index 0d3ee3fbbb..167db72e74 100644
--- a/core/clang/TARGET_x86_64.mk
+++ b/core/clang/TARGET_x86_64.mk
@@ -3,3 +3,8 @@ RS_TRIPLE_CFLAGS := -D__x86_64__
RS_COMPAT_TRIPLE := x86_64-linux-android
TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64-android.a
+TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-x86_64-android.a
+
+# Address sanitizer clang config
+ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
+ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 408f688c66..28a75ecd07 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -1,9 +1,8 @@
## Clang configurations.
-LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_PATH)/../lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
+LLVM_READOBJ := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-readobj
-CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
-LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
+LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
define convert-to-clang-flags
$(strip $(filter-out $(CLANG_CONFIG_UNKNOWN_CFLAGS),$(1)))
@@ -45,15 +44,6 @@ clang_2nd_arch_prefix := $(HOST_2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/clang/HOST_$(HOST_2ND_ARCH).mk
endif
-ifdef HOST_CROSS_ARCH
-clang_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/clang/HOST_CROSS_$(HOST_CROSS_ARCH).mk
-ifdef HOST_CROSS_2ND_ARCH
-clang_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/clang/HOST_CROSS_$(HOST_CROSS_2ND_ARCH).mk
-endif
-endif
-
# TARGET config
clang_2nd_arch_prefix :=
include $(BUILD_SYSTEM)/clang/TARGET_$(TARGET_ARCH).mk
@@ -64,32 +54,4 @@ clang_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/clang/TARGET_$(TARGET_2ND_ARCH).mk
endif
-# This allows us to use the superset of functionality that compiler-rt
-# provides to Clang (for supporting features like -ftrapv).
-COMPILER_RT_CONFIG_EXTRA_STATIC_LIBRARIES := libcompiler_rt-extras
-
-# A list of projects that are allowed to set LOCAL_CLANG to false.
-# INTERNAL_LOCAL_CLANG_EXCEPTION_PROJECTS is defined later in other config.mk.
-LOCAL_CLANG_EXCEPTION_PROJECTS = \
- bionic/tests/ \
- device/google/contexthub/ \
- device/huawei/angler/ \
- device/lge/bullhead/ \
- external/gentoo/integration/ \
- hardware/qcom/ \
- test/vts/hals/camera/bullhead/ \
- test/vts/hals/etc/libqdutils/ \
- vendor/huawei/angler/ \
- vendor/lge/bullhead/ \
- $(INTERNAL_LOCAL_CLANG_EXCEPTION_PROJECTS)
-
-# Find $1 in the exception project list.
-define find_in_local_clang_exception_projects
-$(subst $(space),, \
- $(foreach project,$(LOCAL_CLANG_EXCEPTION_PROJECTS), \
- $(if $(filter $(project)%,$(1)),$(project)) \
- ) \
-)
-endef
-
include $(BUILD_SYSTEM)/clang/tidy.mk
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index bd86cfbd11..d3afc65b91 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -3,12 +3,14 @@
###########################################################
# '',true
+LOCAL_2ND_ARCH_VAR_PREFIX:=
LOCAL_32_BIT_ONLY:=
LOCAL_AAPT2_ONLY:=
LOCAL_AAPT_FLAGS:=
LOCAL_AAPT_INCLUDE_ALL_RESOURCES:=
LOCAL_AAPT_NAMESPACES:=
LOCAL_ADDITIONAL_CERTIFICATES:=
+LOCAL_ADDITIONAL_CHECKED_MODULE:=
LOCAL_ADDITIONAL_DEPENDENCIES:=
LOCAL_ADDITIONAL_HTML_DIR:=
LOCAL_ADDITIONAL_JAVA_DIR:=
@@ -19,6 +21,8 @@ LOCAL_ANNOTATION_PROCESSOR_CLASSES:=
LOCAL_APIDIFF_NEWAPI:=
LOCAL_APIDIFF_OLDAPI:=
LOCAL_APK_LIBRARIES:=
+LOCAL_APK_SET_MASTER_FILE:=
+LOCAL_APKCERTS_FILE:=
LOCAL_ARM_MODE:=
LOCAL_ASFLAGS:=
LOCAL_ASSET_DIR:=
@@ -42,7 +46,6 @@ LOCAL_COMPRESSED_MODULE:=
LOCAL_CONLYFLAGS:=
LOCAL_COPY_HEADERS:=
LOCAL_COPY_HEADERS_TO:=
-LOCAL_COPY_TO_INTERMEDIATE_LIBRARIES:=
LOCAL_CPP_EXTENSION:=
LOCAL_CPPFLAGS:=
LOCAL_CPP_STD:=
@@ -51,11 +54,9 @@ LOCAL_CTS_TEST_PACKAGE:=
LOCAL_CTS_TEST_RUNNER:=
LOCAL_CXX:=
LOCAL_CXX_STL := default
-LOCAL_DATA_BINDING:=
LOCAL_DEX_PREOPT_APP_IMAGE:=
LOCAL_DEX_PREOPT_FLAGS:=
LOCAL_DEX_PREOPT_GENERATE_PROFILE:=
-LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING:=
LOCAL_DEX_PREOPT:= # '',true,false,nostripping
LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG:=
@@ -63,22 +64,24 @@ LOCAL_DISABLE_RESOLVE_SUPPORT_LIBRARIES:=
LOCAL_DONT_CHECK_MODULE:=
# Don't delete the META_INF dir when merging static Java libraries.
LOCAL_DONT_DELETE_JAR_META_INF:=
+LOCAL_DONT_MERGE_MANIFESTS:=
LOCAL_DPI_FILE_STEM:=
LOCAL_DPI_VARIANTS:=
+LOCAL_DROIDDOC_ANNOTATIONS_ZIP :=
+LOCAL_DROIDDOC_API_VERSIONS_XML :=
LOCAL_DROIDDOC_ASSET_DIR:=
LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=
LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:=
+LOCAL_DROIDDOC_DOC_ZIP :=
+LOCAL_DROIDDOC_JDIFF_DOC_ZIP :=
LOCAL_DROIDDOC_HTML_DIR:=
+LOCAL_DROIDDOC_METADATA_ZIP:=
LOCAL_DROIDDOC_OPTIONS:=
LOCAL_DROIDDOC_SOURCE_PATH:=
LOCAL_DROIDDOC_STUB_OUT_DIR:=
+LOCAL_DROIDDOC_STUBS_SRCJAR :=
LOCAL_DROIDDOC_TEMPLATE_DIR:=
LOCAL_DROIDDOC_USE_STANDARD_DOCLET:=
-LOCAL_DROIDDOC_USE_METALAVA:=
-LOCAL_DROIDDOC_METALAVA_PREVIOUS_API:=
-LOCAL_DROIDDOC_METALAVA_ANNOTATIONS_ENABLED:=
-LOCAL_DROIDDOC_METALAVA_MERGE_ANNOTATIONS_DIR:=
-LOCAL_DROIDDOC_METALAVA_DOCS_STUB_OUT_DIR:=
LOCAL_DX_FLAGS:=
LOCAL_EMMA_COVERAGE_FILTER:=
LOCAL_EMMA_INSTRUMENT:=
@@ -90,6 +93,7 @@ LOCAL_EXPORT_C_INCLUDE_DIRS:=
LOCAL_EXPORT_HEADER_LIBRARY_HEADERS:=
LOCAL_EXPORT_PACKAGE_RESOURCES:=
LOCAL_EXPORT_PROGUARD_FLAG_FILES:=
+LOCAL_EXPORT_SDK_LIBRARIES:=
LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
LOCAL_EXTRACT_APK:=
@@ -97,10 +101,11 @@ LOCAL_EXTRACT_DPI_APK:=
LOCAL_FDO_SUPPORT:=
LOCAL_FINDBUGS_FLAGS:=
LOCAL_FORCE_STATIC_EXECUTABLE:=
-LOCAL_FULL_LIBS_MANIFEST_FILES:=
-LOCAL_FULL_MANIFEST_FILE:=
LOCAL_FULL_CLASSES_JACOCO_JAR:=
LOCAL_FULL_CLASSES_PRE_JACOCO_JAR:=
+LOCAL_FULL_LIBS_MANIFEST_FILES:=
+LOCAL_FULL_MANIFEST_FILE:=
+LOCAL_FULL_TEST_CONFIG:=
LOCAL_FUZZ_ENGINE:=
LOCAL_GCNO_FILES:=
LOCAL_GENERATED_SOURCES:=
@@ -109,6 +114,7 @@ LOCAL_GROUP_STATIC_LIBRARIES:=
LOCAL_GTEST:=true
LOCAL_HAL_STATIC_LIBRARIES:=
LOCAL_HEADER_LIBRARIES:=
+LOCAL_HOST_PREFIX:=
LOCAL_HOST_REQUIRED_MODULES:=
LOCAL_INIT_RC:=
LOCAL_INSTALLED_MODULE:=
@@ -156,7 +162,6 @@ LOCAL_MODULE_CLASS:=
LOCAL_MODULE_HOST_ARCH:=
LOCAL_MODULE_HOST_ARCH_WARN:=
LOCAL_MODULE_HOST_CROSS_ARCH:=
-LOCAL_MODULE_HOST_CROSS_ARCH_WARN:=
LOCAL_MODULE_HOST_OS:=
LOCAL_MODULE_OWNER:=
LOCAL_MODULE_PATH:=
@@ -169,18 +174,17 @@ LOCAL_MODULE_TARGET_ARCH:=
LOCAL_MODULE_TARGET_ARCH_WARN:=
LOCAL_MODULE_UNSUPPORTED_HOST_ARCH:=
LOCAL_MODULE_UNSUPPORTED_HOST_ARCH_WARN:=
-LOCAL_MODULE_UNSUPPORTED_HOST_CROSS_ARCH:=
-LOCAL_MODULE_UNSUPPORTED_HOST_CROSS_ARCH_WARN:=
LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH:=
LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH_WARN:=
LOCAL_MULTILIB:=
+LOCAL_NATIVE_BENCHMARK:=
LOCAL_NDK_STL_VARIANT:=
LOCAL_NDK_VERSION:=current
LOCAL_NO_CRT:=
LOCAL_NO_DEFAULT_COMPILER_FLAGS:=
LOCAL_NO_FPIE :=
-LOCAL_NO_LIBCOMPILER_RT:=
LOCAL_NO_LIBGCC:=
+LOCAL_NO_LIBCRT_BUILTINS:=
LOCAL_NO_NOTICE_FILE:=
LOCAL_NO_PIC:=
LOCAL_NOSANITIZE:=
@@ -208,16 +212,20 @@ LOCAL_PREBUILT_MODULE_FILE:=
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
LOCAL_PREBUILT_STRIP_COMMENTS:=
+LOCAL_USE_EMBEDDED_DEX:=
+LOCAL_USE_EMBEDDED_NATIVE_LIBS:=
+LOCAL_PRESUBMIT_DISABLED:=
LOCAL_PRIVATE_PLATFORM_APIS:=
LOCAL_PRIVILEGED_MODULE:=
# '',full,custom,disabled,obfuscation,optimization
LOCAL_PRODUCT_MODULE:=
+LOCAL_PRODUCT_SERVICES_MODULE:=
LOCAL_PROGUARD_ENABLED:=
LOCAL_PROGUARD_FLAG_FILES:=
LOCAL_PROGUARD_FLAGS:=
LOCAL_PROPRIETARY_MODULE:=
LOCAL_PROTOC_FLAGS:=
-# lite(default),micro,nano,stream,full,nanopb-c,nanopb-c-enable_malloc
+# lite(default),micro,nano,stream,full,nanopb-c,nanopb-c-enable_malloc,nanopb-c-16bit,nanopb-c-enable_malloc-16bit,nanopb-c-32bit,nanopb-c-enable_malloc-32bit
LOCAL_PROTOC_OPTIMIZE_TYPE:=
LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
LOCAL_R8_FLAG_FILES:=
@@ -240,22 +248,30 @@ LOCAL_RTTI_FLAG:=
LOCAL_SANITIZE:=
LOCAL_SANITIZE_DIAG:=
LOCAL_SANITIZE_RECOVER:=
+LOCAL_SANITIZE_NO_RECOVER:=
LOCAL_SANITIZE_BLACKLIST :=
+LOCAL_SDK_LIBRARIES :=
LOCAL_SDK_RES_VERSION:=
LOCAL_SDK_VERSION:=
LOCAL_SHARED_ANDROID_LIBRARIES:=
LOCAL_SHARED_LIBRARIES:=
+LOCAL_SOONG_AAR :=
+LOCAL_SOONG_BUILT_INSTALLED :=
+LOCAL_SOONG_BUNDLE :=
LOCAL_SOONG_CLASSES_JAR :=
LOCAL_SOONG_DEX_JAR :=
LOCAL_SOONG_EXPORT_PROGUARD_FLAGS :=
LOCAL_SOONG_HEADER_JAR :=
LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
+LOCAL_SOONG_LINK_TYPE :=
LOCAL_SOONG_PROGUARD_DICT :=
LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
-LOCAL_SOONG_RRO_DIRS :=
+LOCAL_SOONG_DEVICE_RRO_DIRS :=
+LOCAL_SOONG_PRODUCT_RRO_DIRS :=
LOCAL_SOONG_STATIC_LIBRARY_EXTRA_PACKAGES :=
-LOCAL_DROIDDOC_STUBS_JAR :=
-LOCAL_DROIDDOC_DOC_ZIP :=
+LOCAL_SOONG_SYMBOL_PATH :=
+LOCAL_SOONG_TOC :=
+LOCAL_SOONG_UNSTRIPPED_BINARY :=
# '',true
LOCAL_SOURCE_FILES_ALL_GENERATED:=
LOCAL_SRC_FILES:=
@@ -268,24 +284,32 @@ LOCAL_STATIC_LIBRARIES:=
LOCAL_STRIP_MODULE:=
LOCAL_SYSTEM_SHARED_LIBRARIES:=none
LOCAL_TARGET_REQUIRED_MODULES:=
+LOCAL_TEST_CONFIG:=
LOCAL_TEST_CONFIG_OPTIONS:=
LOCAL_TEST_DATA:=
LOCAL_TEST_MODULE_TO_PROGUARD_WITH:=
LOCAL_TIDY:=
LOCAL_TIDY_CHECKS:=
LOCAL_TIDY_FLAGS:=
+LOCAL_UNCOMPRESS_DEX:=
LOCAL_UNINSTALLABLE_MODULE:=
LOCAL_UNSTRIPPED_PATH:=
LOCAL_USE_AAPT2:=$(USE_AAPT2)
+LOCAL_USE_CLANG_LLD:=
LOCAL_USE_VNDK:=
LOCAL_USES_LIBRARIES:=
LOCAL_VENDOR_MODULE:=
+LOCAL_VINTF_FRAGMENTS:=
+LOCAL_VNDK_DEPEND_ON_CORE_VARIANT:=
LOCAL_VTSC_FLAGS:=
LOCAL_VTS_INCLUDES:=
LOCAL_VTS_MODE:=
LOCAL_WARNINGS_ENABLE:=
LOCAL_WHOLE_STATIC_LIBRARIES:=
+LOCAL_XOM:=
LOCAL_YACCFLAGS:=
+LOCAL_CHECK_ELF_FILES:=
+# TODO: deprecate, it does nothing
OVERRIDE_BUILT_MODULE_PATH:=
# arch specific variables
@@ -305,6 +329,7 @@ LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_ARCH):=
LOCAL_PREBUILT_JNI_LIBS_$(TARGET_ARCH):=
LOCAL_REQUIRED_MODULES_$(TARGET_ARCH):=
LOCAL_SHARED_LIBRARIES_$(TARGET_ARCH):=
+LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH):=
LOCAL_SRC_FILES_EXCLUDE_$(TARGET_ARCH):=
LOCAL_SRC_FILES_$(TARGET_ARCH):=
LOCAL_STATIC_LIBRARIES_$(TARGET_ARCH):=
@@ -327,6 +352,7 @@ LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_2ND_ARCH):=
LOCAL_PREBUILT_JNI_LIBS_$(TARGET_2ND_ARCH):=
LOCAL_REQUIRED_MODULES_$(TARGET_2ND_ARCH):=
LOCAL_SHARED_LIBRARIES_$(TARGET_2ND_ARCH):=
+LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH):=
LOCAL_SRC_FILES_EXCLUDE_$(TARGET_2ND_ARCH):=
LOCAL_SRC_FILES_$(TARGET_2ND_ARCH):=
LOCAL_STATIC_LIBRARIES_$(TARGET_2ND_ARCH):=
@@ -385,31 +411,10 @@ LOCAL_SHARED_LIBRARIES_$(HOST_OS):=
LOCAL_SRC_FILES_$(HOST_OS):=
LOCAL_STATIC_LIBRARIES_$(HOST_OS):=
-ifdef HOST_CROSS_OS
-LOCAL_ASFLAGS_$(HOST_CROSS_OS):=
-LOCAL_CFLAGS_$(HOST_CROSS_OS):=
-LOCAL_C_INCLUDES_$(HOST_CROSS_OS):=
-LOCAL_CPPFLAGS_$(HOST_CROSS_OS):=
-LOCAL_GENERATED_SOURCES_$(HOST_CROSS_OS):=
-LOCAL_HEADER_LIBRARIES_$(HOST_CROSS_OS):=
-LOCAL_LDFLAGS_$(HOST_CROSS_OS):=
-LOCAL_LDLIBS_$(HOST_CROSS_OS):=
-LOCAL_REQUIRED_MODULES_$(HOST_CROSS_OS):=
-LOCAL_SHARED_LIBRARIES_$(HOST_CROSS_OS):=
-LOCAL_SRC_FILES_$(HOST_CROSS_OS):=
-LOCAL_STATIC_LIBRARIES_$(HOST_CROSS_OS):=
-endif
-
LOCAL_SRC_FILES_$(HOST_OS)_$(HOST_ARCH):=
ifdef HOST_2ND_ARCH
LOCAL_SRC_FILES_$(HOST_OS)_$(HOST_2ND_ARCH):=
endif
-ifdef HOST_CROSS_OS
-LOCAL_SRC_FILES_$(HOST_CROSS_OS)_$(HOST_CROSS_ARCH):=
-ifdef HOST_CROSS_2ND_ARCH
-LOCAL_SRC_FILES_$(HOST_CROSS_OS)_$(HOST_CROSS_2ND_ARCH):=
-endif
-endif
LOCAL_ASFLAGS_32:=
LOCAL_ASFLAGS_64:=
@@ -472,6 +477,9 @@ LOCAL_CUSTOM_BUILD_STEP_INPUT:=
LOCAL_CUSTOM_BUILD_STEP_OUTPUT:=
LOCAL_IS_AUX_MODULE :=
+full_android_manifest :=
+non_system_module :=
+
# Trim MAKEFILE_LIST so that $(call my-dir) doesn't need to
# iterate over thousands of entries every time.
# Leave the current makefile to make sure we don't break anything
diff --git a/core/combo/HOST_darwin-x86.mk b/core/combo/HOST_darwin-x86.mk
deleted file mode 100644
index 9a55cb540b..0000000000
--- a/core/combo/HOST_darwin-x86.mk
+++ /dev/null
@@ -1,69 +0,0 @@
-#
-# Copyright (C) 2006 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Darwin (Mac OS X) on x86.
-# Included by combo/select.mk
-
-define $(combo_var_prefix)transform-shared-lib-to-toc
-$(call _gen_toc_command_for_macho,$(1),$(2))
-endef
-
-$(combo_2nd_arch_prefix)HOST_GLOBAL_ARFLAGS := cqs
-
-############################################################
-## Macros after this line are shared by the 64-bit config.
-
-HOST_CUSTOM_LD_COMMAND := true
-
-define transform-host-o-to-shared-lib-inner
-$(hide) $(PRIVATE_CXX) \
- -dynamiclib -single_module -read_only_relocs suppress \
- $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
- $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
- ) \
- $(PRIVATE_ALL_OBJECTS) \
- $(addprefix -force_load , $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES)) \
- $(PRIVATE_ALL_SHARED_LIBRARIES) \
- $(PRIVATE_ALL_STATIC_LIBRARIES) \
- $(PRIVATE_LDLIBS) \
- -o $@ \
- -install_name @rpath/$(notdir $@) \
- -Wl,-rpath,@loader_path/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
- -Wl,-rpath,@loader_path/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
- $(PRIVATE_LDFLAGS)
-endef
-
-define transform-host-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) \
- $(foreach path,$(PRIVATE_RPATHS), \
- -Wl,-rpath,@loader_path/$(path)) \
- -o $@ \
- -Wl,-headerpad_max_install_names \
- $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
- $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
- ) \
- $(PRIVATE_ALL_SHARED_LIBRARIES) \
- $(PRIVATE_ALL_OBJECTS) \
- $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
- $(PRIVATE_ALL_STATIC_LIBRARIES) \
- $(PRIVATE_LDFLAGS) \
- $(PRIVATE_LDLIBS)
-endef
-
-# $(1): The file to check
-define get-file-size
-stat -f "%z" $(1)
-endef
diff --git a/core/combo/HOST_darwin-x86_64.mk b/core/combo/HOST_darwin-x86_64.mk
index 6cca167e48..07f8d9f78a 100644
--- a/core/combo/HOST_darwin-x86_64.mk
+++ b/core/combo/HOST_darwin-x86_64.mk
@@ -23,7 +23,44 @@ endef
HOST_GLOBAL_ARFLAGS := cqs
-# We Reuse the following functions with the same name from HOST_darwin-x86.mk:
-# transform-host-o-to-shared-lib-inner
-# transform-host-o-to-executable-inner
-# get-file-size
+HOST_CUSTOM_LD_COMMAND := true
+
+define transform-host-o-to-shared-lib-inner
+$(hide) $(PRIVATE_CXX) \
+ -dynamiclib -single_module -read_only_relocs suppress \
+ $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
+ $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
+ ) \
+ $(PRIVATE_ALL_OBJECTS) \
+ $(addprefix -force_load , $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES)) \
+ $(PRIVATE_ALL_SHARED_LIBRARIES) \
+ $(PRIVATE_ALL_STATIC_LIBRARIES) \
+ $(PRIVATE_LDLIBS) \
+ -o $@ \
+ -install_name @rpath/$(notdir $@) \
+ -Wl,-rpath,@loader_path/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
+ -Wl,-rpath,@loader_path/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
+ $(PRIVATE_LDFLAGS)
+endef
+
+define transform-host-o-to-executable-inner
+$(hide) $(PRIVATE_CXX) \
+ $(foreach path,$(PRIVATE_RPATHS), \
+ -Wl,-rpath,@loader_path/$(path)) \
+ -o $@ \
+ -Wl,-headerpad_max_install_names \
+ $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
+ $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
+ ) \
+ $(PRIVATE_ALL_SHARED_LIBRARIES) \
+ $(PRIVATE_ALL_OBJECTS) \
+ $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
+ $(PRIVATE_ALL_STATIC_LIBRARIES) \
+ $(PRIVATE_LDFLAGS) \
+ $(PRIVATE_LDLIBS)
+endef
+
+# $(1): The file to check
+define get-file-size
+stat -f "%z" $(1)
+endef
diff --git a/core/combo/HOST_linux-x86.mk b/core/combo/HOST_linux-x86.mk
index 4e83dc4c0b..deed943a61 100644
--- a/core/combo/HOST_linux-x86.mk
+++ b/core/combo/HOST_linux-x86.mk
@@ -26,5 +26,5 @@ endef
# $(1): The file to check
define get-file-size
-stat --format "%s" "$(1)" | tr -d '\n'
+stat -c "%s" "$(1)" | tr -d '\n'
endef
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 3ce64f984e..cbca1fbfab 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -29,33 +29,38 @@
# include defines, and compiler settings for the given architecture
# version.
#
-ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT := generic
-endif
-KNOWN_ARMv8_CORES := cortex-a53 cortex-a53.a57 cortex-a55 cortex-a73 cortex-a75
-KNOWN_ARMv8_CORES += kryo denver64 exynos-m1 exynos-m2
+KNOWN_ARMv8_CORES := cortex-a53 cortex-a53.a57 cortex-a55 cortex-a73 cortex-a75 cortex-a76
+KNOWN_ARMv8_CORES += kryo kryo385 exynos-m1 exynos-m2
+
+KNOWN_ARMv82a_CORES := cortex-a55 cortex-a75 kryo385
-# Many devices (incorrectly) use armv7-a-neon as the 2nd architecture variant
-# for cores that implement armv8-a ISAs. The following sets it to armv8-a.
-ifneq (,$(filter $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT), $(KNOWN_ARMv8_CORES)))
- ifneq ($(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT),armv8-a)
- $(warning $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT) is armv8-a.)
- ifneq (,$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
- $(warning TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT, $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT), ignored! Use armv8-a instead.)
- endif
- # Overwrite TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT
+ifeq (,$(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
+ TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT := generic
+endif
+
+# Sanity-check TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT against the CPU variant lists above.
+ifneq (,$(filter $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT), $(KNOWN_ARMv82a_CORES)))
+ ifeq (,$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
+ TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv8-2a
+ else ifneq (armv8-2a,$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
+ $(error Incorrect TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT, $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT). Use armv8-2a instead.)
+ endif
+else ifneq (,$(filter $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT), $(KNOWN_ARMv8_CORES)))
+ ifeq (,$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv8-a
+ else ifneq ($(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT),armv8-a)
+ $(error Incorrect TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT, $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT). Use armv8-a instead.)
endif
endif
ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-$(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
+ $(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
endif
TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
-$(error Unknown ARM architecture version: $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
+ $(error Unknown ARM architecture version: $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT))
endif
include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
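+# Board config sketch (values are assumptions): a board that sets
+#   TARGET_2ND_CPU_VARIANT := cortex-a75
+# with no explicit TARGET_2ND_ARCH_VARIANT gets armv8-2a selected by the
+# check above, while declaring any other variant for that core is an error.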
diff --git a/core/combo/arch/arm/armv7-a.mk b/core/combo/arch/arm/armv7-a.mk
deleted file mode 100644
index 0c2f04d24a..0000000000
--- a/core/combo/arch/arm/armv7-a.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-# Configuration for Linux on ARM.
-# Generating binaries for the ARMv7-a architecture and higher
-#
-ARCH_ARM_HAVE_ARMV7A := true
-ARCH_ARM_HAVE_VFP := true
diff --git a/core/combo/arch/arm/armv8-2a.mk b/core/combo/arch/arm/armv8-2a.mk
new file mode 100644
index 0000000000..c1d818243e
--- /dev/null
+++ b/core/combo/arch/arm/armv8-2a.mk
@@ -0,0 +1,9 @@
+# Configuration for Linux on ARM.
+# Generating binaries for the ARMv8-2a architecture
+#
+# Many libraries are not aware of armv8-2a, and AArch32 is (almost) a superset
+# of armv7-a-neon, so just let them treat this configuration like armv7.
+ARCH_ARM_HAVE_ARMV7A := true
+ARCH_ARM_HAVE_VFP := true
+ARCH_ARM_HAVE_VFP_D32 := true
+ARCH_ARM_HAVE_NEON := true
diff --git a/core/combo/arch/x86/amberlake.mk b/core/combo/arch/x86/amberlake.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86/amberlake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/broadwell.mk b/core/combo/arch/x86/broadwell.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86/broadwell.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/icelake.mk b/core/combo/arch/x86/icelake.mk
new file mode 100644
index 0000000000..76fe212969
--- /dev/null
+++ b/core/combo/arch/x86/icelake.mk
@@ -0,0 +1,14 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/kabylake.mk b/core/combo/arch/x86/kabylake.mk
new file mode 100644
index 0000000000..50518d6859
--- /dev/null
+++ b/core/combo/arch/x86/kabylake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that support the AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/skylake.mk b/core/combo/arch/x86/skylake.mk
new file mode 100644
index 0000000000..03705c0d79
--- /dev/null
+++ b/core/combo/arch/x86/skylake.mk
@@ -0,0 +1,15 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that support the AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
+
diff --git a/core/combo/arch/x86/stoneyridge.mk b/core/combo/arch/x86/stoneyridge.mk
new file mode 100644
index 0000000000..30405a173d
--- /dev/null
+++ b/core/combo/arch/x86/stoneyridge.mk
@@ -0,0 +1,12 @@
+# Configuration for Linux on x86.
+# Generating binaries for Stoney Ridge processors.
+#
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AES_NI := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/tigerlake.mk b/core/combo/arch/x86/tigerlake.mk
new file mode 100644
index 0000000000..76fe212969
--- /dev/null
+++ b/core/combo/arch/x86/tigerlake.mk
@@ -0,0 +1,14 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/whiskeylake.mk b/core/combo/arch/x86/whiskeylake.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86/whiskeylake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86/x86.mk b/core/combo/arch/x86/x86.mk
index a55cc7aa59..db55ff813c 100644
--- a/core/combo/arch/x86/x86.mk
+++ b/core/combo/arch/x86/x86.mk
@@ -11,3 +11,6 @@
ARCH_X86_HAVE_SSSE3 := false
ARCH_X86_HAVE_MOVBE := false
ARCH_X86_HAVE_POPCNT := false
+ARCH_X86_HAVE_AVX := false
+ARCH_X86_HAVE_AVX2 := false
+ARCH_X86_HAVE_AVX512 := false
diff --git a/core/combo/arch/x86_64/amberlake.mk b/core/combo/arch/x86_64/amberlake.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86_64/amberlake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/broadwell.mk b/core/combo/arch/x86_64/broadwell.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86_64/broadwell.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/icelake.mk b/core/combo/arch/x86_64/icelake.mk
new file mode 100644
index 0000000000..76fe212969
--- /dev/null
+++ b/core/combo/arch/x86_64/icelake.mk
@@ -0,0 +1,14 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/kabylake.mk b/core/combo/arch/x86_64/kabylake.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86_64/kabylake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/skylake.mk b/core/combo/arch/x86_64/skylake.mk
new file mode 100644
index 0000000000..76fe212969
--- /dev/null
+++ b/core/combo/arch/x86_64/skylake.mk
@@ -0,0 +1,14 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/stoneyridge.mk b/core/combo/arch/x86_64/stoneyridge.mk
new file mode 100644
index 0000000000..f7d95835ec
--- /dev/null
+++ b/core/combo/arch/x86_64/stoneyridge.mk
@@ -0,0 +1,12 @@
+# Configuration for Linux on x86_64.
+# Generating binaries for Stoney Ridge processors.
+#
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AES_NI := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/tigerlake.mk b/core/combo/arch/x86_64/tigerlake.mk
new file mode 100644
index 0000000000..76fe212969
--- /dev/null
+++ b/core/combo/arch/x86_64/tigerlake.mk
@@ -0,0 +1,14 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_AVX512 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/whiskeylake.mk b/core/combo/arch/x86_64/whiskeylake.mk
new file mode 100644
index 0000000000..37100a40ef
--- /dev/null
+++ b/core/combo/arch/x86_64/whiskeylake.mk
@@ -0,0 +1,13 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/x86_64.mk b/core/combo/arch/x86_64/x86_64.mk
index 26a9d0f45a..e7c89283a3 100755
--- a/core/combo/arch/x86_64/x86_64.mk
+++ b/core/combo/arch/x86_64/x86_64.mk
@@ -11,3 +11,6 @@ ARCH_X86_HAVE_POPCNT := true
ARCH_X86_HAVE_SSE4 := true
ARCH_X86_HAVE_SSE4_1 := true
ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AVX := false
+ARCH_X86_HAVE_AVX2 := false
+ARCH_X86_HAVE_AVX512 := false
diff --git a/core/config.mk b/core/config.mk
index d2184088c5..8c73281032 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -17,49 +17,24 @@ $(warning )
$(error done)
endif
-# Only use ANDROID_BUILD_SHELL to wrap around bash.
-# DO NOT use other shells such as zsh.
-ifdef ANDROID_BUILD_SHELL
-SHELL := $(ANDROID_BUILD_SHELL)
-else
-# Use bash, not whatever shell somebody has installed as /bin/sh
-# This is repeated from main.mk, since envsetup.sh runs this file
-# directly.
-SHELL := /bin/bash
-endif
-
-# Utility variables.
-empty :=
-space := $(empty) $(empty)
-comma := ,
-# Note that make will eat the newline just before endef.
-define newline
-
-
-endef
-# The pound character "#"
-define pound
-#
-endef
-# Unfortunately you can't simply define backslash as \ or \\.
-backslash := \a
-backslash := $(patsubst %a,%,$(backslash))
-
-# this turns off the suffix rules built into make
-.SUFFIXES:
+BUILD_SYSTEM :=$= build/make/core
+BUILD_SYSTEM_COMMON :=$= build/make/common
-# this turns off the RCS / SCCS implicit rules of GNU Make
-% : RCS/%,v
-% : RCS/%
-% : %,v
-% : s.%
-% : SCCS/s.%
+include $(BUILD_SYSTEM_COMMON)/core.mk
-# If a rule fails, delete $@.
-.DELETE_ON_ERROR:
+# Mark variables that should be coming as environment variables from soong_ui
+# as readonly
+.KATI_READONLY := OUT_DIR TMPDIR BUILD_DATETIME_FILE
+ifdef CALLED_FROM_SETUP
+ .KATI_READONLY := CALLED_FROM_SETUP
+endif
+ifdef KATI_PACKAGE_MK_DIR
+ .KATI_READONLY := KATI_PACKAGE_MK_DIR
+endif
# Mark variables deprecated/obsolete
CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
+.KATI_READONLY := CHANGES_URL
$(KATI_obsolete_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
$(KATI_obsolete_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
$(KATI_obsolete_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
@@ -77,8 +52,50 @@ $(KATI_obsolete_var \
,See $(CHANGES_URL)#other_envsetup_variables)
$(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
$(KATI_obsolete_var USE_CLANG_PLATFORM_BUILD,Clang is the only supported Android compiler. See $(CHANGES_URL)#USE_CLANG_PLATFORM_BUILD)
-
-CHANGES_URL :=
+$(KATI_obsolete_var BUILD_DROIDDOC,Droiddoc is only supported in Soong. See details on build/soong/java/droiddoc.go)
+$(KATI_obsolete_var BUILD_APIDIFF,Apidiff is only supported in Soong. See details on build/soong/java/droiddoc.go)
+$(KATI_obsolete_var \
+ DEFAULT_GCC_CPP_STD_VERSION \
+ HOST_GLOBAL_CFLAGS 2ND_HOST_GLOBAL_CFLAGS \
+ HOST_GLOBAL_CONLYFLAGS 2ND_HOST_GLOBAL_CONLYFLAGS \
+ HOST_GLOBAL_CPPFLAGS 2ND_HOST_GLOBAL_CPPFLAGS \
+ HOST_GLOBAL_LDFLAGS 2ND_HOST_GLOBAL_LDFLAGS \
+ HOST_GLOBAL_LLDFLAGS 2ND_HOST_GLOBAL_LLDFLAGS \
+ HOST_CLANG_SUPPORTED 2ND_HOST_CLANG_SUPPORTED \
+ HOST_CC 2ND_HOST_CC \
+ HOST_CXX 2ND_HOST_CXX \
+ HOST_CROSS_GLOBAL_CFLAGS 2ND_HOST_CROSS_GLOBAL_CFLAGS \
+ HOST_CROSS_GLOBAL_CONLYFLAGS 2ND_HOST_CROSS_GLOBAL_CONLYFLAGS \
+ HOST_CROSS_GLOBAL_CPPFLAGS 2ND_HOST_CROSS_GLOBAL_CPPFLAGS \
+ HOST_CROSS_GLOBAL_LDFLAGS 2ND_HOST_CROSS_GLOBAL_LDFLAGS \
+ HOST_CROSS_GLOBAL_LLDFLAGS 2ND_HOST_CROSS_GLOBAL_LLDFLAGS \
+ HOST_CROSS_CLANG_SUPPORTED 2ND_HOST_CROSS_CLANG_SUPPORTED \
+ HOST_CROSS_CC 2ND_HOST_CROSS_CC \
+ HOST_CROSS_CXX 2ND_HOST_CROSS_CXX \
+ TARGET_GLOBAL_CFLAGS 2ND_TARGET_GLOBAL_CFLAGS \
+ TARGET_GLOBAL_CONLYFLAGS 2ND_TARGET_GLOBAL_CONLYFLAGS \
+ TARGET_GLOBAL_CPPFLAGS 2ND_TARGET_GLOBAL_CPPFLAGS \
+ TARGET_GLOBAL_LDFLAGS 2ND_TARGET_GLOBAL_LDFLAGS \
+ TARGET_GLOBAL_LLDFLAGS 2ND_TARGET_GLOBAL_LLDFLAGS \
+ TARGET_CLANG_SUPPORTED 2ND_TARGET_CLANG_SUPPORTED \
+ TARGET_CC 2ND_TARGET_CC \
+ TARGET_CXX 2ND_TARGET_CXX \
+ TARGET_TOOLCHAIN_ROOT 2ND_TARGET_TOOLCHAIN_ROOT \
+ HOST_TOOLCHAIN_ROOT 2ND_HOST_TOOLCHAIN_ROOT \
+ HOST_CROSS_TOOLCHAIN_ROOT 2ND_HOST_CROSS_TOOLCHAIN_ROOT \
+ HOST_TOOLS_PREFIX 2ND_HOST_TOOLS_PREFIX \
+ HOST_CROSS_TOOLS_PREFIX 2ND_HOST_CROSS_TOOLS_PREFIX \
+ HOST_GCC_VERSION 2ND_HOST_GCC_VERSION \
+ HOST_CROSS_GCC_VERSION 2ND_HOST_CROSS_GCC_VERSION \
+ TARGET_NDK_GCC_VERSION 2ND_TARGET_NDK_GCC_VERSION \
+ GLOBAL_CFLAGS_NO_OVERRIDE GLOBAL_CPPFLAGS_NO_OVERRIDE \
+ ,GCC support has been removed. Use Clang instead)
+$(KATI_obsolete_var DIST_DIR dist_goal,Use dist-for-goals instead. See $(CHANGES_URL)#dist)
+$(KATI_obsolete_var TARGET_ANDROID_FILESYSTEM_CONFIG_H,Use TARGET_FS_CONFIG_GEN instead)
+$(KATI_deprecated_var USER,Use BUILD_USERNAME instead. See $(CHANGES_URL)#USER)
+
+# This is marked as obsolete in envsetup.mk after reading the BoardConfig.mk
+$(KATI_deprecate_export It is a global setting. See $(CHANGES_URL)#export_keyword)
# Used to force goals to build. Only use for conditionally defined goals.
.PHONY: FORCE
@@ -86,22 +103,25 @@ FORCE:
ORIGINAL_MAKECMDGOALS := $(MAKECMDGOALS)
-dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
-MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-
UNAME := $(shell uname -sm)
SRC_TARGET_DIR := $(TOPDIR)build/target
-SRC_API_DIR := $(TOPDIR)prebuilts/sdk/api
-SRC_SYSTEM_API_DIR := $(TOPDIR)prebuilts/sdk/system-api
-SRC_TEST_API_DIR := $(TOPDIR)prebuilts/sdk/test-api
# Some specific paths to tools
SRC_DROIDDOC_DIR := $(TOPDIR)build/make/tools/droiddoc
+# Mark some inputs as readonly
+ifdef TARGET_DEVICE_DIR
+ .KATI_READONLY := TARGET_DEVICE_DIR
+endif
+
# Set up efficient math functions which are used in make.
# Here since this file is included by envsetup as well as during build.
-include $(BUILD_SYSTEM)/math.mk
+include $(BUILD_SYSTEM_COMMON)/math.mk
+
+include $(BUILD_SYSTEM_COMMON)/strings.mk
+
+include $(BUILD_SYSTEM_COMMON)/json.mk
# Various mappings to avoid hard-coding paths all over the place
include $(BUILD_SYSTEM)/pathmap.mk
@@ -134,8 +154,6 @@ BUILD_MULTI_PREBUILT:= $(BUILD_SYSTEM)/multi_prebuilt.mk
BUILD_JAVA_LIBRARY:= $(BUILD_SYSTEM)/java_library.mk
BUILD_STATIC_JAVA_LIBRARY:= $(BUILD_SYSTEM)/static_java_library.mk
BUILD_HOST_JAVA_LIBRARY:= $(BUILD_SYSTEM)/host_java_library.mk
-BUILD_DROIDDOC:= $(BUILD_SYSTEM)/droiddoc.mk
-BUILD_APIDIFF:= $(BUILD_SYSTEM)/apidiff.mk
BUILD_COPY_HEADERS := $(BUILD_SYSTEM)/copy_headers.mk
BUILD_NATIVE_TEST := $(BUILD_SYSTEM)/native_test.mk
BUILD_NATIVE_BENCHMARK := $(BUILD_SYSTEM)/native_benchmark.mk
@@ -155,13 +173,6 @@ BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY := $(BUILD_SYSTEM)/host_dalvik_static_java
BUILD_HOST_TEST_CONFIG := $(BUILD_SYSTEM)/host_test_config.mk
BUILD_TARGET_TEST_CONFIG := $(BUILD_SYSTEM)/target_test_config.mk
-INSTRUMENTATION_TEST_CONFIG_TEMPLATE := $(BUILD_SYSTEM)/instrumentation_test_config_template.xml
-NATIVE_TEST_CONFIG_TEMPLATE := $(BUILD_SYSTEM)/native_test_config_template.xml
-EMPTY_TEST_CONFIG := $(BUILD_SYSTEM)/empty_test_config.xml
-
-# Tool to generate TradeFed test config file automatically.
-AUTOGEN_TEST_CONFIG_SCRIPT := build/make/tools/auto_gen_test_config.py
-
# ###############################################################
# Parse out any modifier targets.
# ###############################################################
@@ -196,11 +207,6 @@ else
JAVA_TMPDIR_ARG :=
endif
-# Default to remove the org.apache.http.legacy from bootclasspath
-ifeq ($(REMOVE_OAHL_FROM_BCP),)
-REMOVE_OAHL_FROM_BCP := true
-endif
-
# ###############################################################
# Include sub-configuration files
# ###############################################################
@@ -281,17 +287,7 @@ $(call validate-kernel-headers,$(TARGET_BOARD_KERNEL_HEADERS))
TARGET_PRODUCT_KERNEL_HEADERS := $(strip $(wildcard $(PRODUCT_VENDOR_KERNEL_HEADERS)))
TARGET_PRODUCT_KERNEL_HEADERS := $(patsubst %/,%,$(TARGET_PRODUCT_KERNEL_HEADERS))
$(call validate-kernel-headers,$(TARGET_PRODUCT_KERNEL_HEADERS))
-
-# Clean up/verify variables defined by the board config file.
-TARGET_BOOTLOADER_BOARD_NAME := $(strip $(TARGET_BOOTLOADER_BOARD_NAME))
-TARGET_CPU_ABI := $(strip $(TARGET_CPU_ABI))
-ifeq ($(TARGET_CPU_ABI),)
- $(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
-endif
-TARGET_CPU_ABI2 := $(strip $(TARGET_CPU_ABI2))
-
-BOARD_KERNEL_BASE := $(strip $(BOARD_KERNEL_BASE))
-BOARD_KERNEL_PAGESIZE := $(strip $(BOARD_KERNEL_PAGESIZE))
+.KATI_READONLY := TARGET_DEVICE_KERNEL_HEADERS TARGET_BOARD_KERNEL_HEADERS TARGET_PRODUCT_KERNEL_HEADERS
# Commands to generate .toc file common to ELF .so files.
define _gen_toc_command_for_elf
@@ -301,55 +297,14 @@ endef
# Commands to generate .toc file from Darwin dynamic library.
define _gen_toc_command_for_macho
-$(hide) otool -l $(1) | grep LC_ID_DYLIB -A 5 > $(2)
-$(hide) nm -gP $(1) | cut -f1-2 -d" " | (grep -v U$$ >> $(2) || true)
+$(hide) $(HOST_OTOOL) -l $(1) | grep LC_ID_DYLIB -A 5 > $(2)
+$(hide) $(HOST_NM) -gP $(1) | cut -f1-2 -d" " | (grep -v U$$ >> $(2) || true)
endef
-combo_target := HOST_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-# Load the 2nd host arch if it's needed.
-ifdef HOST_2ND_ARCH
-combo_target := HOST_
-combo_2nd_arch_prefix := $(HOST_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-
-# Load the windows cross compiler under Linux
-ifdef HOST_CROSS_OS
-combo_target := HOST_CROSS_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-ifdef HOST_CROSS_2ND_ARCH
-combo_target := HOST_CROSS_
-combo_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-endif
-
-# on windows, the tools have .exe at the end, and we depend on the
-# host config stuff being done first
-
-combo_target := TARGET_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-# Load the 2nd target arch if it's needed.
-ifdef TARGET_2ND_ARCH
-combo_target := TARGET_
-combo_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-
ifeq ($(CALLED_FROM_SETUP),true)
include $(BUILD_SYSTEM)/ccache.mk
include $(BUILD_SYSTEM)/goma.mk
-
-export CC_WRAPPER
-export CXX_WRAPPER
-export JAVAC_WRAPPER
+include $(BUILD_SYSTEM)/rbe.mk
endif
ifdef TARGET_PREFER_32_BIT
@@ -357,54 +312,6 @@ TARGET_PREFER_32_BIT_APPS := true
TARGET_PREFER_32_BIT_EXECUTABLES := true
endif
-ifeq (,$(TARGET_SUPPORTS_32_BIT_APPS)$(TARGET_SUPPORTS_64_BIT_APPS))
- TARGET_SUPPORTS_32_BIT_APPS := true
-endif
-
-# "ro.product.cpu.abilist32" and "ro.product.cpu.abilist64" are
-# comma separated lists of the 32 and 64 bit ABIs (in order of
-# preference) that the target supports. If TARGET_CPU_ABI_LIST_{32,64}_BIT
-# are defined by the board config, we use them. Else, we construct
-# these lists based on whether TARGET_IS_64_BIT is set.
-#
-# Note that this assumes that the 2ND_CPU_ABI for a 64 bit target
-# is always 32 bits. If this isn't the case, these variables should
-# be overriden in the board configuration.
-ifeq (,$(TARGET_CPU_ABI_LIST_64_BIT))
- ifeq (true|true,$(TARGET_IS_64_BIT)|$(TARGET_SUPPORTS_64_BIT_APPS))
- TARGET_CPU_ABI_LIST_64_BIT := $(TARGET_CPU_ABI) $(TARGET_CPU_ABI2)
- endif
-endif
-
-ifeq (,$(TARGET_CPU_ABI_LIST_32_BIT))
- ifneq (true,$(TARGET_IS_64_BIT))
- TARGET_CPU_ABI_LIST_32_BIT := $(TARGET_CPU_ABI) $(TARGET_CPU_ABI2)
- else
- ifeq (true,$(TARGET_SUPPORTS_32_BIT_APPS))
- # For a 64 bit target, assume that the 2ND_CPU_ABI
- # is a 32 bit ABI.
- TARGET_CPU_ABI_LIST_32_BIT := $(TARGET_2ND_CPU_ABI) $(TARGET_2ND_CPU_ABI2)
- endif
- endif
-endif
-
-# "ro.product.cpu.abilist" is a comma separated list of ABIs (in order
-# of preference) that the target supports. If a TARGET_CPU_ABI_LIST
-# is specified by the board configuration, we use that. If not, we
-# build a list out of the TARGET_CPU_ABIs specified by the config.
-ifeq (,$(TARGET_CPU_ABI_LIST))
- ifeq ($(TARGET_IS_64_BIT)|$(TARGET_PREFER_32_BIT_APPS),true|true)
- TARGET_CPU_ABI_LIST := $(TARGET_CPU_ABI_LIST_32_BIT) $(TARGET_CPU_ABI_LIST_64_BIT)
- else
- TARGET_CPU_ABI_LIST := $(TARGET_CPU_ABI_LIST_64_BIT) $(TARGET_CPU_ABI_LIST_32_BIT)
- endif
-endif
-
-# Strip whitespace from the ABI list string.
-TARGET_CPU_ABI_LIST := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST)))
-TARGET_CPU_ABI_LIST_32_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_32_BIT)))
-TARGET_CPU_ABI_LIST_64_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_64_BIT)))
-
# GCC version selection
TARGET_GCC_VERSION := 4.9
ifdef TARGET_2ND_ARCH
@@ -507,8 +414,8 @@ endif # pdk or fusion
ifdef PDK_FUSION_PLATFORM_ZIP
TARGET_BUILD_PDK := true
ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
- ifneq (,$(wildcard $(dir $(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
- PDK_FUSION_PLATFORM_DIR := $(dir $(PDK_FUSION_PLATFORM_ZIP))
+ ifneq (,$(wildcard $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
+ PDK_FUSION_PLATFORM_DIR := $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))
PDK_FUSION_PLATFORM_ZIP :=
else
$(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
@@ -542,6 +449,13 @@ ALLOW_MISSING_DEPENDENCIES := true
endif
.KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
+TARGET_BUILD_APPS_USE_PREBUILT_SDK :=
+ifdef TARGET_BUILD_APPS
+ ifndef UNBUNDLED_BUILD_SDKS_FROM_SOURCE
+ TARGET_BUILD_APPS_USE_PREBUILT_SDK := true
+ endif
+endif
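+# For illustration only (module name hypothetical): an unbundled build would
+# normally reach this branch via something like
+#   m TARGET_BUILD_APPS=MyApp                                        # apps built against prebuilt SDKs
+#   m TARGET_BUILD_APPS=MyApp UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true  # SDKs built from source
+# (usually set up through tapas; the exact invocation may differ).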
+
prebuilt_sdk_tools := prebuilts/sdk/tools
prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
@@ -549,8 +463,9 @@ prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
prebuilt_build_tools := prebuilts/build-tools
prebuilt_build_tools_wrappers := prebuilts/build-tools/common/bin
prebuilt_build_tools_jars := prebuilts/build-tools/common/framework
+prebuilt_build_tools_bin_noasan := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/bin
ifeq ($(filter address,$(SANITIZE_HOST)),)
-prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/bin
+prebuilt_build_tools_bin := $(prebuilt_build_tools_bin_noasan)
else
prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/asan/bin
endif
@@ -559,14 +474,8 @@ USE_PREBUILT_SDK_TOOLS_IN_PLACE := true
# Workaround for b/68406220
# This should match the soong version.
-ifndef USE_D8
- USE_D8 := true
-endif
-
-# Default R8 behavior when USE_R8 is not specified.
-ifndef USE_R8
- USE_R8 := false
-endif
+USE_D8 := true
+.KATI_READONLY := USE_D8
#
# Tools that are prebuilts for TARGET_BUILD_APPS
@@ -575,7 +484,6 @@ ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
AIDL := $(HOST_OUT_EXECUTABLES)/aidl
AAPT := $(HOST_OUT_EXECUTABLES)/aapt
AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
- DESUGAR := $(HOST_OUT_JAVA_LIBRARIES)/desugar.jar
MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
SIGNAPK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
SIGNAPK_JNI_LIBRARY_PATH := $(HOST_OUT_SHARED_LIBRARIES)
@@ -585,7 +493,6 @@ else # TARGET_BUILD_APPS || TARGET_BUILD_PDK
AIDL := $(prebuilt_build_tools_bin)/aidl
AAPT := $(prebuilt_sdk_tools_bin)/aapt
AAPT2 := $(prebuilt_sdk_tools_bin)/aapt2
- DESUGAR := $(prebuilt_build_tools_jars)/desugar.jar
MAINDEXCLASSES := $(prebuilt_sdk_tools)/mainDexClasses
SIGNAPK_JAR := $(prebuilt_sdk_tools)/lib/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
SIGNAPK_JNI_LIBRARY_PATH := $(prebuilt_sdk_tools)/$(HOST_OS)/lib64
@@ -608,6 +515,7 @@ ACP := $(prebuilt_build_tools_bin)/acp
CKATI := $(prebuilt_build_tools_bin)/ckati
DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
FILESLIST := $(SOONG_HOST_OUT_EXECUTABLES)/fileslist
+HOST_INIT_VERIFIER := $(HOST_OUT_EXECUTABLES)/host_init_verifier
MAKEPARALLEL := $(prebuilt_build_tools_bin)/makeparallel
SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
@@ -619,13 +527,13 @@ ZIPTIME := $(prebuilt_build_tools_bin)/ziptime
# ---------------------------------------------------------------
# Generic tools.
-LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
+LEX := $(prebuilt_build_tools_bin_noasan)/flex
# The default PKGDATADIR built in the prebuilt bison is a relative path
# prebuilts/build-tools/common/bison.
# To run bison from elsewhere you need to set the environment variable
# BISON_PKGDATADIR.
BISON_PKGDATADIR := $(PWD)/prebuilts/build-tools/common/bison
-BISON := prebuilts/build-tools/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bin/bison
+BISON := $(prebuilt_build_tools_bin_noasan)/bison
YACC := $(BISON) -d
BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
@@ -639,10 +547,7 @@ else
BREAKPAD_GENERATE_SYMBOLS := false
endif
PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
-NANOPB_SRCS := external/nanopb-c/generator/protoc-gen-nanopb \
- $(wildcard external/nanopb-c/generator/*.py \
- external/nanopb-c/generator/google/*.py \
- external/nanopb-c/generator/proto/*.py)
+NANOPB_SRCS := $(HOST_OUT_EXECUTABLES)/protoc-gen-nanopb
VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
@@ -662,10 +567,10 @@ AVBTOOL := $(HOST_OUT_EXECUTABLES)/avbtool$(HOST_EXECUTABLE_SUFFIX)
else
AVBTOOL := $(BOARD_CUSTOM_AVBTOOL)
endif
-APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX)
+APICHECK := $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX)
FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/mke2fs$(HOST_EXECUTABLE_SUFFIX)
-MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs.sh
+MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX)
MAKE_SQUASHFS := $(HOST_OUT_EXECUTABLES)/mksquashfs$(HOST_EXECUTABLE_SUFFIX)
@@ -681,13 +586,19 @@ JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
FAT16COPY := build/make/tools/fat16copy.py
CHECK_LINK_TYPE := build/make/tools/check_link_type.py
+CHECK_ELF_FILE := build/make/tools/check_elf_file.py
+LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
+BUILD_SUPER_IMAGE := build/make/tools/releasetools/build_super_image.py
-PROGUARD := external/proguard/bin/proguard.sh
+PROGUARD_HOME := external/proguard
+PROGUARD := $(PROGUARD_HOME)/bin/proguard.sh
+PROGUARD_DEPS := $(PROGUARD) $(PROGUARD_HOME)/lib/proguard.jar
JAVATAGS := build/make/tools/java-event-log-tags.py
MERGETAGS := build/make/tools/merge-event-log-tags.py
BUILD_IMAGE_SRCS := $(wildcard build/make/tools/releasetools/*.py)
APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
+BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata.py
BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
@@ -697,43 +608,21 @@ BRILLO_UPDATE_PAYLOAD := $(HOST_OUT_EXECUTABLES)/brillo_update_payload
DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
-HIDDENAPI := $(HOST_OUT_EXECUTABLES)/hiddenapi
-
-# relocation packer
-RELOCATION_PACKER := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/relocation_packer/relocation_packer
FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
FINDBUGS := $(FINDBUGS_DIR)/findbugs
JETIFIER := prebuilts/sdk/tools/jetifier/jetifier-standalone/bin/jetifier-standalone
-# Tool to merge AndroidManifest.xmls
-ANDROID_MANIFEST_MERGER_CLASSPATH := \
- prebuilts/gradle-plugin/com/android/tools/build/manifest-merger/26.0.0-beta2/manifest-merger-26.0.0-beta2.jar \
- prebuilts/gradle-plugin/com/android/tools/sdk-common/26.0.0-beta2/sdk-common-26.0.0-beta2.jar \
- prebuilts/gradle-plugin/com/android/tools/common/26.0.0-beta2/common-26.0.0-beta2.jar \
- prebuilts/misc/common/guava/guava-21.0.jar
-ANDROID_MANIFEST_MERGER := $(JAVA) \
- -classpath $(subst $(space),:,$(strip $(ANDROID_MANIFEST_MERGER_CLASSPATH))) \
- com.android.manifmerger.Merger
+EXTRACT_KERNEL := build/make/tools/extract_kernel.py
-COLUMN:= column
+USE_OPENJDK9 := true
ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
-ifeq ($(RUN_ERROR_PRONE),true)
-USE_OPENJDK9 :=
-else
-USE_OPENJDK9 := true
-endif
-TARGET_OPENJDK9 :=
-else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),false)
-USE_OPENJDK9 :=
TARGET_OPENJDK9 :=
else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),1.8)
-USE_OPENJDK9 := true
TARGET_OPENJDK9 :=
else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),true)
-USE_OPENJDK9 := true
TARGET_OPENJDK9 := true
endif
@@ -747,14 +636,7 @@ else
MD5SUM:=md5sum
endif
-APICHECK_CLASSPATH_ENTRIES := \
- $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(HOST_JDK_TOOLS_JAR) \
- )
-APICHECK_CLASSPATH := $(subst $(space),:,$(strip $(APICHECK_CLASSPATH_ENTRIES)))
-
-APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
+APICHECK_COMMAND := $(JAVA) -Xmx4g -jar $(APICHECK) --no-banner --compatible-output=yes
# Boolean variable determining if the whitelist for compatible properties is enabled
PRODUCT_COMPATIBLE_PROPERTY := false
@@ -802,12 +684,16 @@ $(foreach req,$(requirements),$(eval \
PRODUCT_FULL_TREBLE_OVERRIDE ?=
$(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
+# TODO(b/114488870): disallow PRODUCT_FULL_TREBLE_OVERRIDE from being used.
.KATI_READONLY := \
PRODUCT_FULL_TREBLE_OVERRIDE \
$(foreach req,$(requirements),$(req)_OVERRIDE) \
$(requirements) \
PRODUCT_FULL_TREBLE \
+$(KATI_obsolete_var $(foreach req,$(requirements),$(req)_OVERRIDE) \
+ ,This should be referenced without the _OVERRIDE suffix.)
+
requirements :=
# BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED can be true only if early-mount of
@@ -856,11 +742,6 @@ endif
ifdef PRODUCT_SHIPPING_API_LEVEL
- ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),27),)
- ifneq ($(TARGET_USES_MKE2FS),true)
- $(error When PRODUCT_SHIPPING_API_LEVEL >= 27, TARGET_USES_MKE2FS must be true)
- endif
- endif
ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
$(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
endif
@@ -870,10 +751,10 @@ ifdef PRODUCT_SHIPPING_API_LEVEL
$(error When PRODUCT_SHIPPING_API_LEVEL >= 28, TARGET_USES_64_BIT_BINDER must be true)
endif
endif
- ifeq ($(PRODUCT_FULL_TREBLE),true)
- ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE), true)
- $(error When PRODUCT_SHIPPING_API_LEVEL >= 28, BOARD_BUILD_SYSTEM_ROOT_IMAGE must be true)
- endif
+ endif
+ ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
+ ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
+ $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
endif
endif
endif
@@ -884,6 +765,7 @@ ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
else
DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey
endif
+.KATI_READONLY := DEFAULT_SYSTEM_DEV_CERTIFICATE
BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -900,7 +782,7 @@ BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
# is made which breaks compatibility with the previous platform sepolicy version,
# not just on every increase in PLATFORM_SDK_VERSION. The minor version should
# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 28
+sepolicy_major_vers := 29
sepolicy_minor_vers := 0
ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -919,13 +801,199 @@ sepolicy_minor_vers :=
# A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
26.0 \
- 27.0
+ 27.0 \
+ 28.0 \
.KATI_READONLY := \
PLATFORM_SEPOLICY_COMPAT_VERSIONS \
PLATFORM_SEPOLICY_VERSION \
TOT_SEPOLICY_VERSION \
+ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
+ ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+ $(error PRODUCT_USE_DYNAMIC_PARTITIONS must be true when PRODUCT_RETROFIT_DYNAMIC_PARTITIONS \
+ is set)
+ endif
+ ifdef PRODUCT_SHIPPING_API_LEVEL
+ ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29))
+ $(error Devices with shipping API level $(PRODUCT_SHIPPING_API_LEVEL) must not set \
+ PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)
+ endif
+ endif
+endif
+
+ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+ ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE cannot be true for devices with dynamic partitions)
+ endif
+ ifneq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
+ $(error PRODUCT_USE_DYNAMIC_PARTITION_SIZE must be true for devices with dynamic partitions)
+ endif
+endif
+
+ifeq ($(PRODUCT_BUILD_SUPER_PARTITION),true)
+ ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+ $(error Can only build super partition for devices with dynamic partitions)
+ endif
+endif
+
+
+ifeq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
+
+ifneq ($(BOARD_SYSTEMIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_SYSTEMIMAGE_PARTITION_SIZE and \
+ BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
+ifneq ($(BOARD_VENDORIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_VENDORIMAGE_PARTITION_SIZE and \
+ BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
+ifneq ($(BOARD_ODMIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_ODMIMAGE_PARTITION_SIZE and \
+ BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
+ifneq ($(BOARD_PRODUCTIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_PRODUCTIMAGE_PARTITION_SIZE and \
+ BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
+ifneq ($(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE and \
+ BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
+endif # PRODUCT_USE_DYNAMIC_PARTITION_SIZE
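+# A minimal BoardConfig.mk sketch consistent with the checks above (the
+# reserved-size value is hypothetical):
+#   PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+#   BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
+#   # BOARD_SYSTEMIMAGE_PARTITION_SIZE must then be left unset.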
+
+ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+
+# BOARD_SUPER_PARTITION_GROUPS defines a list of "updatable groups". Each updatable group is a
+# group of partitions that share the same pool of free spaces.
+# For each group in BOARD_SUPER_PARTITION_GROUPS, a BOARD_{GROUP}_SIZE and
+# BOARD_{GROUP}_PARTITION_LIST may be defined.
+# - BOARD_{GROUP}_SIZE: The maximum sum of sizes of all partitions in the group.
+# Must not be empty.
+# - BOARD_{GROUP}_PARTITION_LIST: the list of partitions that belong to this group.
+# If empty, no partitions belong to this group, and the sum of sizes is effectively 0.
+$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+ $(eval BOARD_$(group)_PARTITION_LIST ?=) \
+ $(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
+)
+ifeq ($(PRODUCT_BUILD_SUPER_PARTITION),true)
+$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+ $(eval BOARD_$(group)_SIZE := $(strip $(BOARD_$(group)_SIZE))) \
+ $(if $(BOARD_$(group)_SIZE),,$(error BOARD_$(group)_SIZE must not be empty)) \
+ $(eval .KATI_READONLY := BOARD_$(group)_SIZE) \
+)
+endif # PRODUCT_BUILD_SUPER_PARTITION
+
+# BOARD_*_PARTITION_LIST: a list of the following tokens
+valid_super_partition_list := system vendor product product_services odm
+$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+ $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
+ $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
+ $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
+ Valid names are $(valid_super_partition_list))))
+valid_super_partition_list :=
+
+
+# Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
+ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
+$(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
+ BOARD_SUPER_PARTITION_GROUPS and BOARD_*_PARTITION_LIST)
+endif
+BOARD_SUPER_PARTITION_PARTITION_LIST := \
+ $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+ $(BOARD_$(group)_PARTITION_LIST))
+.KATI_READONLY := BOARD_SUPER_PARTITION_PARTITION_LIST
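+# A minimal sketch of the expected BoardConfig.mk inputs (group name and size
+# are hypothetical):
+#   BOARD_SUPER_PARTITION_GROUPS := example_dynamic_partitions
+#   BOARD_EXAMPLE_DYNAMIC_PARTITIONS_SIZE := 4294967296
+#   BOARD_EXAMPLE_DYNAMIC_PARTITIONS_PARTITION_LIST := system vendor product
+# With those values, BOARD_SUPER_PARTITION_PARTITION_LIST above evaluates to
+# "system vendor product".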
+
+endif # PRODUCT_USE_DYNAMIC_PARTITIONS
+
+ifeq ($(PRODUCT_BUILD_SUPER_PARTITION),true)
+
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
+
+# The metadata device must be specified manually for retrofitting.
+ifeq ($(BOARD_SUPER_PARTITION_METADATA_DEVICE),)
+$(error Must specify BOARD_SUPER_PARTITION_METADATA_DEVICE if PRODUCT_RETROFIT_DYNAMIC_PARTITIONS=true.)
+endif
+
+# The super partition block device list must be specified manually for retrofitting.
+ifeq ($(BOARD_SUPER_PARTITION_BLOCK_DEVICES),)
+$(error Must specify BOARD_SUPER_PARTITION_BLOCK_DEVICES if PRODUCT_RETROFIT_DYNAMIC_PARTITIONS=true.)
+endif
+
+# The metadata device must be included in the super partition block device list.
+ifeq (,$(filter $(BOARD_SUPER_PARTITION_METADATA_DEVICE),$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)))
+$(error BOARD_SUPER_PARTITION_METADATA_DEVICE is not listed in BOARD_SUPER_PARTITION_BLOCK_DEVICES.)
+endif
+
+# The metadata device must be supplied to init via the kernel command-line.
+INTERNAL_KERNEL_CMDLINE += androidboot.super_partition=$(BOARD_SUPER_PARTITION_METADATA_DEVICE)
+
+BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE := true
+
+# If "vendor" is listed as one of the dynamic partitions but its image is not available (e.g. an
+# AOSP target built without a vendor image), don't build the retrofit full OTA package, because
+# we won't be able to build meaningful super_* images for retrofitting purposes.
+ifneq (,$(filter vendor,$(BOARD_SUPER_PARTITION_PARTITION_LIST)))
+ifndef BUILDING_VENDOR_IMAGE
+ifndef BOARD_PREBUILT_VENDORIMAGE
+BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE :=
+endif # BOARD_PREBUILT_VENDORIMAGE
+endif # BUILDING_VENDOR_IMAGE
+endif # BOARD_SUPER_PARTITION_PARTITION_LIST
+
+else # PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+
+# For normal devices, we populate BOARD_SUPER_PARTITION_BLOCK_DEVICES so the
+# build can handle both cases consistently.
+ifeq ($(BOARD_SUPER_PARTITION_METADATA_DEVICE),)
+BOARD_SUPER_PARTITION_METADATA_DEVICE := super
+endif
+
+ifeq ($(BOARD_SUPER_PARTITION_BLOCK_DEVICES),)
+BOARD_SUPER_PARTITION_BLOCK_DEVICES := $(BOARD_SUPER_PARTITION_METADATA_DEVICE)
+endif
+
+# If there is only one super block device, default its size to the super partition size.
+ifeq ($(word 2,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)),)
+BOARD_SUPER_PARTITION_$(call to-upper,$(strip $(BOARD_SUPER_PARTITION_BLOCK_DEVICES)))_DEVICE_SIZE ?= \
+ $(BOARD_SUPER_PARTITION_SIZE)
+endif
+
+ifneq ($(BOARD_SUPER_PARTITION_METADATA_DEVICE),super)
+INTERNAL_KERNEL_CMDLINE += androidboot.super_partition=$(BOARD_SUPER_PARTITION_METADATA_DEVICE)
+endif
+BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE :=
+
+endif # PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+endif # BOARD_SUPER_PARTITION_SIZE
+.KATI_READONLY := BOARD_SUPER_PARTITION_BLOCK_DEVICES
+.KATI_READONLY := BOARD_SUPER_PARTITION_METADATA_DEVICE
+.KATI_READONLY := BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE
+
+$(foreach device,$(call to-upper,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES)), \
+ $(eval BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE := $(strip $(BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE))) \
+ $(if $(BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE),, \
+ $(error BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE must not be empty)) \
+ $(eval .KATI_READONLY := BOARD_SUPER_PARTITION_$(device)_DEVICE_SIZE))
+
+endif # PRODUCT_BUILD_SUPER_PARTITION
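+# Sketch of a retrofit configuration that satisfies the checks above (block
+# device names and sizes are hypothetical):
+#   PRODUCT_RETROFIT_DYNAMIC_PARTITIONS := true
+#   BOARD_SUPER_PARTITION_SIZE := 4294967296
+#   BOARD_SUPER_PARTITION_METADATA_DEVICE := system
+#   BOARD_SUPER_PARTITION_BLOCK_DEVICES := system vendor
+#   BOARD_SUPER_PARTITION_SYSTEM_DEVICE_SIZE := 3221225472
+#   BOARD_SUPER_PARTITION_VENDOR_DEVICE_SIZE := 1073741824
+# This also appends androidboot.super_partition=system to INTERNAL_KERNEL_CMDLINE.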
+
# ###############################################################
# Set up final options.
# ###############################################################
@@ -954,11 +1022,13 @@ endif
first_non_empty_of_three = $(if $(1),$(1),$(if $(2),$(2),$(3)))
DEX2OAT_TARGET_ARCH := $(TARGET_ARCH)
DEX2OAT_TARGET_CPU_VARIANT := $(call first_non_empty_of_three,$(TARGET_CPU_VARIANT),$(TARGET_ARCH_VARIANT),default)
+DEX2OAT_TARGET_CPU_VARIANT_RUNTIME := $(call first_non_empty_of_three,$(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_ARCH_VARIANT),default)
DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
ifdef TARGET_2ND_ARCH
$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH := $(TARGET_2ND_ARCH)
$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT := $(call first_non_empty_of_three,$(TARGET_2ND_CPU_VARIANT),$(TARGET_2ND_ARCH_VARIANT),default)
+$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME := $(call first_non_empty_of_three,$(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_ARCH_VARIANT),default)
$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
endif
@@ -977,6 +1047,29 @@ else
SUPPORT_LIBRARY_ROOT := frameworks/support
endif
+get-sdk-version = $(if $(findstring _,$(1)),$(subst core_,,$(subst system_,,$(subst test_,,$(1)))),$(1))
+get-sdk-api = $(if $(findstring _,$(1)),$(patsubst %_$(call get-sdk-version,$(1)),%,$(1)),public)
+get-prebuilt-sdk-dir = $(HISTORICAL_SDK_VERSIONS_ROOT)/$(call get-sdk-version,$(1))/$(call get-sdk-api,$(1))
+
+# Resolve LOCAL_SDK_VERSION to prebuilt module name, e.g.:
+# 23 -> sdk_public_23_android
+# system_current -> sdk_system_current_android
+# $(1): An sdk version (LOCAL_SDK_VERSION)
+# $(2): optional library name (default: android)
+define resolve-prebuilt-sdk-module
+$(if $(findstring _,$(1)),\
+ sdk_$(1)_$(or $(2),android),\
+ sdk_public_$(1)_$(or $(2),android))
+endef
+
+# Resolve LOCAL_SDK_VERSION to prebuilt android.jar
+# $(1): LOCAL_SDK_VERSION
+resolve-prebuilt-sdk-jar-path = $(call get-prebuilt-sdk-dir,$(1))/android.jar
+
+# Resolve LOCAL_SDK_VERSION to prebuilt framework.aidl
+# $(1): An sdk version (LOCAL_SDK_VERSION)
+resolve-prebuilt-sdk-aidl-path = $(call get-prebuilt-sdk-dir,$(call get-sdk-version,$(1)))/framework.aidl
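+# For example (paths purely illustrative):
+#   $(call resolve-prebuilt-sdk-module,28)             -> sdk_public_28_android
+#   $(call resolve-prebuilt-sdk-module,system_current) -> sdk_system_current_android
+#   $(call resolve-prebuilt-sdk-jar-path,system_28)    -> $(HISTORICAL_SDK_VERSIONS_ROOT)/28/system/android.jar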
+
# Historical SDK version N is stored in $(HISTORICAL_SDK_VERSIONS_ROOT)/N.
# The 'current' version is whatever this source tree is.
#
@@ -993,39 +1086,32 @@ $(shell function sgrax() { \
( sgrax $(1) | sort -g ) )
endef
-TARGET_AVAILABLE_SDK_VERSIONS := $(call numerically_sort,\
- $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android.jar,%, \
- $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android.jar)))
-
-TARGET_AVAILABLE_SDK_VERSIONS := $(addprefix system_,$(call numerically_sort,\
- $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android_system.jar,%, \
- $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android_system.jar)))) \
- $(TARGET_AVAILABLE_SDK_VERSIONS)
-
-# We don't have prebuilt test_current and core_current SDK yet.
-TARGET_AVAILABLE_SDK_VERSIONS := test_current core_current $(TARGET_AVAILABLE_SDK_VERSIONS)
+# This produces a list like "current/core current/public current/system 4/public"
+TARGET_AVAILABLE_SDK_VERSIONS := $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/*/android.jar)
+TARGET_AVAILABLE_SDK_VERSIONS := $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android.jar,%,$(TARGET_AVAILABLE_SDK_VERSIONS))
+# Strips and reorganizes the "public", "core" and "system" subdirs.
+TARGET_AVAILABLE_SDK_VERSIONS := $(subst /public,,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_AVAILABLE_SDK_VERSIONS := $(patsubst %/core,core_%,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_AVAILABLE_SDK_VERSIONS := $(patsubst %/system,system_%,$(TARGET_AVAILABLE_SDK_VERSIONS))
+# No prebuilt for test_current.
+TARGET_AVAILABLE_SDK_VERSIONS += test_current
+TARGET_AVAILABLE_SDK_VERSIONS := $(call numerically_sort,$(TARGET_AVAILABLE_SDK_VERSIONS))
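+# As a sketch: a prebuilt tree containing 28/public, 28/system, current/public,
+# current/system and current/core android.jar files would produce roughly
+#   28 system_28 current system_current core_current test_current
+# (before the numerical sort above; the actual contents depend on the checked-out prebuilts).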
TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,27,$(TARGET_AVAILABLE_SDK_VERSIONS))
-INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
-INTERNAL_PLATFORM_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public-dex.txt
+ifndef INTERNAL_PLATFORM_PRIVATE_API_FILE
INTERNAL_PLATFORM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private.txt
+endif
+ifndef INTERNAL_PLATFORM_PRIVATE_DEX_API_FILE
INTERNAL_PLATFORM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private-dex.txt
-INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
-INTERNAL_PLATFORM_REMOVED_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed-dex.txt
-INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
+endif
+ifndef INTERNAL_PLATFORM_SYSTEM_PRIVATE_API_FILE
INTERNAL_PLATFORM_SYSTEM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private.txt
+endif
+ifndef INTERNAL_PLATFORM_SYSTEM_PRIVATE_DEX_API_FILE
INTERNAL_PLATFORM_SYSTEM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private-dex.txt
-INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
-INTERNAL_PLATFORM_SYSTEM_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-exact.txt
-INTERNAL_PLATFORM_TEST_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-api.txt
-INTERNAL_PLATFORM_TEST_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-removed.txt
-INTERNAL_PLATFORM_TEST_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-exact.txt
-
-INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-light-greylist.txt
-INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-dark-greylist.txt
-INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-blacklist.txt
+endif
# Missing optional uses-libraries so that the platform doesn't create build rules that depend on
# them. See setup_one_odex.mk.
@@ -1073,9 +1159,13 @@ dont_bother_goals := out \
bptimage-nodeps \
vnod vendorimage-nodeps \
pnod productimage-nodeps \
+ psnod productservicesimage-nodeps \
+ onod odmimage-nodeps \
systemotherimage-nodeps \
ramdisk-nodeps \
+ ramdisk_debug-nodeps \
bootimage-nodeps \
+ bootimage_debug-nodeps \
recoveryimage-nodeps \
vbmetaimage-nodeps \
product-graph dump-products
@@ -1085,4 +1175,9 @@ include $(BUILD_SYSTEM)/ninja_config.mk
include $(BUILD_SYSTEM)/soong_config.mk
endif
+-include external/linux-kselftest/android/kselftest_test_list.mk
+-include external/ltp/android/ltp_package_list.mk
+DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages) $(kselftest_modules)
+.KATI_READONLY := DEFAULT_DATA_OUT_MODULES
+
include $(BUILD_SYSTEM)/dumpvar.mk
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index d570ccda8e..d3adee5ae2 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -5,21 +5,18 @@
my_sanitize := $(strip $(LOCAL_SANITIZE))
my_sanitize_diag := $(strip $(LOCAL_SANITIZE_DIAG))
-# SANITIZE_HOST is only in effect if the module is already using clang (host
-# modules that haven't set `LOCAL_CLANG := false` and device modules that
-# have set `LOCAL_CLANG := true`.
my_global_sanitize :=
my_global_sanitize_diag :=
-ifeq ($(my_clang),true)
- ifdef LOCAL_IS_HOST_MODULE
+ifdef LOCAL_IS_HOST_MODULE
+ ifneq ($($(my_prefix)OS),windows)
my_global_sanitize := $(strip $(SANITIZE_HOST))
# SANITIZE_HOST=true is a deprecated way to say SANITIZE_HOST=address.
my_global_sanitize := $(subst true,address,$(my_global_sanitize))
- else
- my_global_sanitize := $(strip $(SANITIZE_TARGET))
- my_global_sanitize_diag := $(strip $(SANITIZE_TARGET_DIAG))
endif
+else
+ my_global_sanitize := $(strip $(SANITIZE_TARGET))
+ my_global_sanitize_diag := $(strip $(SANITIZE_TARGET_DIAG))
endif
# Disable global integer_overflow in excluded paths.
@@ -34,6 +31,16 @@ ifneq ($(filter integer_overflow, $(my_global_sanitize)),)
endif
endif
+# Global integer sanitization doesn't support static modules.
+ifeq ($(filter SHARED_LIBRARIES EXECUTABLES,$(LOCAL_MODULE_CLASS)),)
+ my_global_sanitize := $(filter-out integer_overflow,$(my_global_sanitize))
+ my_global_sanitize_diag := $(filter-out integer_overflow,$(my_global_sanitize_diag))
+endif
+ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+ my_global_sanitize := $(filter-out integer_overflow,$(my_global_sanitize))
+ my_global_sanitize_diag := $(filter-out integer_overflow,$(my_global_sanitize_diag))
+endif
+
# Disable global CFI in excluded paths
ifneq ($(filter cfi, $(my_global_sanitize)),)
combined_exclude_paths := $(CFI_EXCLUDE_PATHS) \
@@ -105,7 +112,6 @@ ifeq ($(filter cfi, $(my_sanitize)),)
ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_include_paths)),\
$(filter $(dir)%,$(LOCAL_PATH)))),)
my_sanitize := cfi $(my_sanitize)
- my_sanitize_diag := cfi $(my_sanitize_diag)
endif
endif
endif
@@ -134,10 +140,12 @@ ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
-# Disable CFI for host targets
+# Disable sanitizers which need the UBSan runtime for host targets.
ifdef LOCAL_IS_HOST_MODULE
my_sanitize := $(filter-out cfi,$(my_sanitize))
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
+ my_sanitize := $(filter-out signed-integer-overflow unsigned-integer-overflow integer_overflow,$(my_sanitize))
+ my_sanitize_diag := $(filter-out signed-integer-overflow unsigned-integer-overflow integer_overflow,$(my_sanitize_diag))
endif
# Support for local sanitize blacklist paths.
@@ -160,6 +168,25 @@ ifneq ($(my_nosanitize),)
my_sanitize := $(filter-out $(my_nosanitize),$(my_sanitize))
endif
+ifneq ($(filter arm x86 x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
+ my_sanitize := $(filter-out hwaddress,$(my_sanitize))
+endif
+
+ifneq ($(filter hwaddress,$(my_sanitize)),)
+ my_sanitize := $(filter-out address,$(my_sanitize))
+ my_sanitize := $(filter-out thread,$(my_sanitize))
+ my_sanitize := $(filter-out cfi,$(my_sanitize))
+endif
+
+ifneq ($(filter hwaddress,$(my_sanitize)),)
+ my_shared_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)HWADDRESS_SANITIZER_RUNTIME_LIBRARY)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+ my_static_libraries := $(my_static_libraries) $($(LOCAL_2ND_ARCH_VAR_PREFIX)HWADDRESS_SANITIZER_STATIC_LIBRARY)
+ endif
+ endif
+endif
+
# TSAN is not supported on 32-bit architectures. For non-multilib cases, make
# its use an error. For multilib cases, don't use it for the 32-bit case.
ifneq ($(filter thread,$(my_sanitize)),)
@@ -180,6 +207,16 @@ ifneq ($(filter safe-stack,$(my_sanitize)),)
endif
endif
+# Disable Scudo if ASan or TSan is enabled.
+ifneq ($(filter address thread hwaddress,$(my_sanitize)),)
+ my_sanitize := $(filter-out scudo,$(my_sanitize))
+endif
+
+# Or if disabled globally.
+ifeq ($(PRODUCT_DISABLE_SCUDO),true)
+ my_sanitize := $(filter-out scudo,$(my_sanitize))
+endif
+
# Undefined symbols can occur if a non-sanitized library links
# sanitized static libraries. That's OK, because the executable
# always depends on the ASan runtime library, which defines these
@@ -194,13 +231,6 @@ ifneq ($(filter address thread,$(strip $(SANITIZE_TARGET))),)
endif
endif
-# Sanitizers can only be used with clang.
-ifneq ($(my_clang),true)
- ifneq ($(my_sanitize),)
- $(error $(LOCAL_PATH): $(LOCAL_MODULE): Use of sanitizers requires LOCAL_CLANG := true)
- endif
-endif
-
ifneq ($(filter default-ub,$(my_sanitize)),)
my_sanitize := $(CLANG_DEFAULT_UB_CHECKS)
endif
@@ -214,23 +244,26 @@ ifneq ($(filter coverage,$(my_sanitize)),)
endif
ifneq ($(filter integer_overflow,$(my_sanitize)),)
- ifneq ($(filter SHARED_LIBRARIES EXECUTABLES,$(LOCAL_MODULE_CLASS)),)
- ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-
- # Respect LOCAL_NOSANITIZE for integer-overflow flags.
- ifeq ($(filter signed-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
- my_sanitize += signed-integer-overflow
- endif
- ifeq ($(filter unsigned-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
- my_sanitize += unsigned-integer-overflow
- endif
- my_cflags += $(INTEGER_OVERFLOW_EXTRA_CFLAGS)
+ # Respect LOCAL_NOSANITIZE for integer-overflow flags.
+ ifeq ($(filter signed-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
+ my_sanitize += signed-integer-overflow
+ endif
+ ifeq ($(filter unsigned-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
+ my_sanitize += unsigned-integer-overflow
+ endif
+ my_cflags += $(INTEGER_OVERFLOW_EXTRA_CFLAGS)
- # Check for diagnostics mode (on by default).
- ifneq ($(filter integer_overflow,$(my_sanitize_diag)),)
+ # Check for diagnostics mode.
+ ifneq ($(filter integer_overflow,$(my_sanitize_diag)),)
+ ifneq ($(filter SHARED_LIBRARIES EXECUTABLES,$(LOCAL_MODULE_CLASS)),)
+ ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
my_sanitize_diag += signed-integer-overflow
my_sanitize_diag += unsigned-integer-overflow
+ else
+ $(call pretty-error,Make cannot apply integer overflow diagnostics to static binary.)
endif
+ else
+ $(call pretty-error,Make cannot apply integer overflow diagnostics to static library.)
endif
endif
my_sanitize := $(filter-out integer_overflow,$(my_sanitize))
@@ -245,6 +278,7 @@ endif
ifneq ($(my_sanitize),)
fsanitize_arg := $(subst $(space),$(comma),$(my_sanitize))
my_cflags += -fsanitize=$(fsanitize_arg)
+ my_asflags += -fsanitize=$(fsanitize_arg)
ifdef LOCAL_IS_HOST_MODULE
my_cflags += -fno-sanitize-recover=all
@@ -265,6 +299,7 @@ ifneq ($(filter cfi,$(my_sanitize)),)
# entire module.
LOCAL_ARM_MODE := thumb
my_cflags += $(CFI_EXTRA_CFLAGS)
+ my_asflags += $(CFI_EXTRA_ASFLAGS)
# Only append the default visibility flag if -fvisibility has not already been
# set to hidden.
ifeq ($(filter -fvisibility=hidden,$(LOCAL_CFLAGS)),)
@@ -306,11 +341,11 @@ ifneq ($(filter address,$(my_global_sanitize) $(my_sanitize)),)
my_ldflags += -Wl,--as-needed
endif
- ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
# Make sure linker_asan get installed.
- $(LOCAL_INSTALLED_MODULE) : | $(PRODUCT_OUT)$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
+ $(LOCAL_INSTALLED_MODULE) : | $(PRODUCT_OUT)$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER_FILE)
endif
endif
endif
@@ -326,6 +361,11 @@ ifneq ($(filter address,$(my_sanitize)),)
endif
endif
+# If local module needs HWASAN, add compiler flags.
+ifneq ($(filter hwaddress,$(my_sanitize)),)
+ my_cflags += $(HWADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
+endif
+
# Use minimal diagnostics when integer overflow is enabled; never do it for HOST or AUX modules
ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_IS_AUX_MODULE),)
  # Pre-emptively add UBSAN minimal runtime in case a static library dependency requires it
@@ -336,9 +376,9 @@ ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_IS_AUX_MODULE),)
endif
endif
ifneq ($(filter unsigned-integer-overflow signed-integer-overflow integer,$(my_sanitize)),)
- ifeq ($(filter unsigned-integer-overflow signed-integer overflow integer,$(my_sanitize_diag)),)
+ ifeq ($(filter unsigned-integer-overflow signed-integer-overflow integer,$(my_sanitize_diag)),)
ifeq ($(filter cfi,$(my_sanitize_diag)),)
- ifeq ($(filter address,$(my_sanitize)),)
+ ifeq ($(filter address hwaddress,$(my_sanitize)),)
my_cflags += -fsanitize-minimal-runtime
my_cflags += -fno-sanitize-trap=integer
my_cflags += -fno-sanitize-recover=integer
@@ -348,11 +388,28 @@ ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_IS_AUX_MODULE),)
endif
endif
+# For Scudo, we opt for the minimal runtime, unless some diagnostics are enabled.
+ifneq ($(filter scudo,$(my_sanitize)),)
+ ifeq ($(filter unsigned-integer-overflow signed-integer-overflow integer cfi,$(my_sanitize_diag)),)
+ my_cflags += -fsanitize-minimal-runtime
+ endif
+ ifneq ($(filter -fsanitize-minimal-runtime,$(my_cflags)),)
+ my_shared_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)SCUDO_MINIMAL_RUNTIME_LIBRARY)
+ else
+ my_shared_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)SCUDO_RUNTIME_LIBRARY)
+ endif
+endif
+
ifneq ($(strip $(LOCAL_SANITIZE_RECOVER)),)
recover_arg := $(subst $(space),$(comma),$(LOCAL_SANITIZE_RECOVER)),
my_cflags += -fsanitize-recover=$(recover_arg)
endif
+ifneq ($(strip $(LOCAL_SANITIZE_NO_RECOVER)),)
+ no_recover_arg := $(subst $(space),$(comma),$(LOCAL_SANITIZE_NO_RECOVER)),
+ my_cflags += -fno-sanitize-recover=$(no_recover_arg)
+endif
+
ifneq ($(my_sanitize_diag),)
# TODO(vishwath): Add diagnostic support for static executables once
# we switch to clang-4393122 (which adds the static ubsan runtime
@@ -361,9 +418,20 @@ ifneq ($(my_sanitize_diag),)
notrap_arg := $(subst $(space),$(comma),$(my_sanitize_diag)),
my_cflags += -fno-sanitize-trap=$(notrap_arg)
# Diagnostic requires a runtime library, unless ASan or TSan are also enabled.
- ifeq ($(filter address thread,$(my_sanitize)),)
+ ifeq ($(filter address thread scudo hwaddress,$(my_sanitize)),)
# Does not have to be the first DT_NEEDED unlike ASan.
my_shared_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_RUNTIME_LIBRARY)
endif
endif
endif
+
+# http://b/119329758, Android core does not boot up with this sanitizer yet.
+# Previously sanitized modules might not pass the new implicit-integer-sign-change check.
+# Disable this check unless it has been explicitly specified.
+ifneq ($(findstring fsanitize,$(my_cflags)),)
+ ifneq ($(findstring integer,$(my_cflags)),)
+ ifeq ($(findstring sanitize=implicit-integer-sign-change,$(my_cflags)),)
+ my_cflags += -fno-sanitize=implicit-integer-sign-change
+ endif
+ endif
+endif
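+# A hypothetical Android.mk fragment exercising the per-module controls handled
+# above (module name and sanitizer choices are illustrative only):
+#   LOCAL_MODULE := libexample
+#   LOCAL_SANITIZE := cfi integer_overflow
+#   LOCAL_SANITIZE_DIAG := cfi
+#   LOCAL_NOSANITIZE := unsigned-integer-overflow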
diff --git a/core/configure_module_stem.mk b/core/configure_module_stem.mk
index 48b77871a0..30df8ea1eb 100644
--- a/core/configure_module_stem.mk
+++ b/core/configure_module_stem.mk
@@ -1,20 +1,26 @@
my_multilib_stem := $(LOCAL_MODULE_STEM_$(if $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT),64,32))
ifdef my_multilib_stem
my_module_stem := $(my_multilib_stem)
+ $(call verify-module-stem,my_multilib_stem)
else ifdef LOCAL_MODULE_STEM
my_module_stem := $(LOCAL_MODULE_STEM)
+ $(call verify-module-stem,LOCAL_MODULE_STEM)
else
my_module_stem := $(LOCAL_MODULE)
endif
ifdef LOCAL_BUILT_MODULE_STEM
my_built_module_stem := $(LOCAL_BUILT_MODULE_STEM)
+ $(call verify-module-stem,LOCAL_BUILT_MODULE_STEM)
else
my_built_module_stem := $(my_module_stem)$(LOCAL_MODULE_SUFFIX)
+ $(call verify-module-stem,LOCAL_MODULE_SUFFIX)
endif
ifdef LOCAL_INSTALLED_MODULE_STEM
my_installed_module_stem := $(LOCAL_INSTALLED_MODULE_STEM)
+ $(call verify-module-stem,LOCAL_INSTALLED_MODULE_STEM)
else
my_installed_module_stem := $(my_module_stem)$(LOCAL_MODULE_SUFFIX)
+ $(call verify-module-stem,LOCAL_MODULE_SUFFIX)
endif
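# For illustration, with hypothetical values
#   LOCAL_MODULE := libfoo_v2
#   LOCAL_MODULE_STEM := libfoo
#   LOCAL_MODULE_SUFFIX := .so
# both my_built_module_stem and my_installed_module_stem resolve to libfoo.so.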
diff --git a/core/construct_context.sh b/core/construct_context.sh
index b4ae519942..399c15d629 100755
--- a/core/construct_context.sh
+++ b/core/construct_context.sh
@@ -16,39 +16,54 @@
set -e
-# inputs:
-# $1 is PRIVATE_CONDITIONAL_USES_LIBRARIES_HOST
-# $2 is PRIVATE_CONDITIONAL_USES_LIBRARIES_TARGET
-
-# class_loader_context: library paths on the host
-# stored_class_loader_context_libs: library paths on device
-# these are both comma separated paths, example: lib1.jar:lib2.jar or /system/framework/lib1.jar:/system/framework/lib2.jar
-
# target_sdk_version: parsed from manifest
-# my_conditional_host_libs: libraries conditionally added for non P
-# my_conditional_target_libs: target libraries conditionally added for non P
#
# outputs
# class_loader_context_arg: final class loader context arg
# stored_class_loader_context_arg: final stored class loader context arg
-my_conditional_host_libs=$1
-my_conditional_target_libs=$2
+# The hidl.manager shared library has a dependency on hidl.base. We'll manually
+# add that information to the class loader context if we see those libraries.
+hidl_manager="android.hidl.manager-V1.0-java"
+hidl_base="android.hidl.base-V1.0-java"
+
+function add_to_contexts {
+ for i in $1; do
+ if [[ -z "${class_loader_context}" ]]; then
+ export class_loader_context="PCL[$i]"
+ else
+ export class_loader_context+="#PCL[$i]"
+ fi
+ if [[ $i == *"$hidl_manager"* ]]; then
+ export class_loader_context+="{PCL[${i/$hidl_manager/$hidl_base}]}"
+ fi
+ done
+
+ for i in $2; do
+ if [[ -z "${stored_class_loader_context}" ]]; then
+ export stored_class_loader_context="PCL[$i]"
+ else
+ export stored_class_loader_context+="#PCL[$i]"
+ fi
+ if [[ $i == *"$hidl_manager"* ]]; then
+ export stored_class_loader_context+="{PCL[${i/$hidl_manager/$hidl_base}]}"
+ fi
+ done
+}
+
+# The order below must match what the package manager also computes for
+# class loader context.
-# Note that SDK 28 is P.
if [[ "${target_sdk_version}" -lt "28" ]]; then
- if [[ -z "${class_loader_context}" ]]; then
- export class_loader_context="${my_conditional_host_libs}"
- else
- export class_loader_context="${my_conditional_host_libs}:${class_loader_context}"
- fi
- if [[ -z "${stored_class_loader_context_libs}" ]]; then
- export stored_class_loader_context_libs="${my_conditional_target_libs}";
- else
- export stored_class_loader_context_libs="${my_conditional_target_libs}:${stored_class_loader_context_libs}";
- fi
+ add_to_contexts "${conditional_host_libs_28}" "${conditional_target_libs_28}"
+fi
+
+if [[ "${target_sdk_version}" -lt "29" ]]; then
+ add_to_contexts "${conditional_host_libs_29}" "${conditional_target_libs_29}"
fi
+add_to_contexts "${dex_preopt_host_libraries}" "${dex_preopt_target_libraries}"
+
# Generate the actual context string.
-export class_loader_context_arg="--class-loader-context=PCL[${class_loader_context}]"
-export stored_class_loader_context_arg="--stored-class-loader-context=PCL[${stored_class_loader_context_libs}]"
+export class_loader_context_arg="--class-loader-context=PCL[]{${class_loader_context}}"
+export stored_class_loader_context_arg="--stored-class-loader-context=PCL[]{${stored_class_loader_context}}"
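+# Rough illustration of the resulting strings (paths are hypothetical): with
+#   dex_preopt_host_libraries="out/a.jar out/android.hidl.manager-V1.0-java.jar"
+# class_loader_context becomes
+#   PCL[out/a.jar]#PCL[out/android.hidl.manager-V1.0-java.jar]{PCL[out/android.hidl.base-V1.0-java.jar]}
+# and class_loader_context_arg wraps it as --class-loader-context=PCL[]{<that string>}.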
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index 5171b8a9a7..7d3ca5c562 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -13,11 +13,6 @@ ifeq ($(strip $(LOCAL_CXX_STL)),default)
ifneq (,$(BUILD_HOST_static))
my_cxx_stl := libc++_static
endif
-
- ifeq ($($(my_prefix)OS),windows)
- # libc++ is not supported on mingw.
- my_cxx_stl := libstdc++
- endif
endif
else
my_cxx_stl := ndk
@@ -36,14 +31,6 @@ else
# the two options use different names for the STLs.
$(error $(LOCAL_PATH): $(LOCAL_MODULE): Must use LOCAL_NDK_STL_VARIANT rather than LOCAL_CXX_STL for NDK binaries)
endif
- ifdef LOCAL_IS_HOST_MODULE
- ifeq ($($(my_prefix)OS),windows)
- ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
- # libc++ is not supported on mingw.
- my_cxx_stl := libstdc++
- endif
- endif
- endif
endif
# Yes, this is actually what the clang driver does.
@@ -51,10 +38,6 @@ linux_dynamic_gcclibs := -lgcc_s -lgcc -lc -lgcc_s -lgcc
linux_static_gcclibs := -Wl,--start-group -lgcc -lgcc_eh -lc -Wl,--end-group
darwin_dynamic_gcclibs := -lc -lSystem
darwin_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_DARWIN
-windows_dynamic_gcclibs := \
- -lmsvcr110 -lmingw32 -lgcc -lmoldname -lmingwex -lmsvcrt -ladvapi32 \
- -lshell32 -luser32 -lkernel32 -lmingw32 -lgcc -lmoldname -lmingwex -lmsvcrt
-windows_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_WINDOWS
my_link_type := dynamic
ifdef LOCAL_IS_HOST_MODULE
@@ -113,11 +96,7 @@ ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
else ifeq ($(my_cxx_stl),ndk)
# Using an NDK STL. Handled in binary.mk.
else ifeq ($(my_cxx_stl),libstdc++)
- ifndef LOCAL_IS_HOST_MODULE
- $(error $(LOCAL_PATH): $(LOCAL_MODULE): libstdc++ is not supported for device modules)
- else ifneq ($($(my_prefix)OS),windows)
- $(error $(LOCAL_PATH): $(LOCAL_MODULE): libstdc++ is not supported on $($(my_prefix)OS))
- endif
+ $(error $(LOCAL_PATH): $(LOCAL_MODULE): libstdc++ is not supported)
else ifeq ($(my_cxx_stl),none)
ifdef LOCAL_IS_HOST_MODULE
my_cppflags += -nostdinc++
diff --git a/core/definitions.mk b/core/definitions.mk
index 599ab49e2a..2c9bb0e24d 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -77,6 +77,9 @@ ALL_FINDBUGS_FILES:=
# GPL module license files
ALL_GPL_MODULE_LICENSE_FILES:=
+# Packages with certificate violation
+CERTIFICATE_VIOLATION_MODULES :=
+
# Target and host installed module's dependencies on shared libraries.
# They are list of "<module_name>:<installed_file>:lib1,lib2...".
TARGET_DEPENDENCIES_ON_SHARED_LIBRARIES :=
@@ -99,6 +102,12 @@ HOST_CROSS_DISPLAY := host cross
# All installed initrc files
ALL_INIT_RC_INSTALLED_PAIRS :=
+# All installed vintf manifest fragments for each partition
+ALL_VINTF_MANIFEST_FRAGMENTS_LIST:=
+
+# All tests that should be skipped in presubmit check.
+ALL_DISABLED_PRESUBMIT_TESTS :=
+
###########################################################
## Debugging; prints a variable list to stdout
###########################################################
@@ -471,8 +480,8 @@ endef
###########################################################
## Find test data in a form required by LOCAL_TEST_DATA
## $(1): the base dir, relative to the root of the source tree.
-## $(3): the file name pattern to be passed to find as "-name"
-## $(2): a list of subdirs of the base dir
+## $(2): the file name pattern to be passed to find as "-name"
+## $(3): a list of subdirs of the base dir
###########################################################
define find-test-data-in-subdirs
@@ -642,6 +651,18 @@ endef
###########################################################
## Convert a list of short modules names (e.g., "framework", "Browser")
+## into the list of files that are built *for the target* for those modules.
+## NOTE: this won't return reliable results until after all
+## sub-makefiles have been included.
+## $(1): target list
+###########################################################
+
+define module-target-built-files
+$(foreach module,$(1),$(ALL_MODULES.$(module).TARGET_BUILT))
+endef
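+# For example, $(call module-target-built-files,framework Browser) expands to the
+# target built files recorded for those modules once all Android.mk files have
+# been parsed.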
+
+###########################################################
+## Convert a list of short modules names (e.g., "framework", "Browser")
## into the list of files that should be used when linking
## against that module as a public API.
## TODO: Allow this for more than JAVA_LIBRARIES modules
@@ -651,7 +672,8 @@ endef
###########################################################
define module-stubs-files
-$(foreach module,$(1),$(ALL_MODULES.$(module).STUBS))
+$(foreach module,$(1),$(if $(filter $(module),$(JAVA_SDK_LIBRARIES)),\
+$(call java-lib-files,$(module).stubs),$(ALL_MODULES.$(module).STUBS)))
endef
###########################################################
@@ -725,77 +747,11 @@ $(call app-lib-files,$(1))
endef
endif
-###########################################################
-## Returns true if $(1) and $(2) are equal. Returns
-## the empty string if they are not equal.
-###########################################################
-define streq
-$(strip $(if $(strip $(1)),\
- $(if $(strip $(2)),\
- $(if $(filter-out __,_$(subst $(strip $(1)),,$(strip $(2)))$(subst $(strip $(2)),,$(strip $(1)))_),,true), \
- ),\
- $(if $(strip $(2)),\
- ,\
- true)\
- ))
-endef
-
-###########################################################
-## Convert "a b c" into "a:b:c"
-###########################################################
-define normalize-path-list
-$(subst $(space),:,$(strip $(1)))
-endef
-
-###########################################################
-## Convert "a b c" into "a,b,c"
-###########################################################
-define normalize-comma-list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-###########################################################
-## Read the word out of a colon-separated list of words.
-## This has the same behavior as the built-in function
-## $(word n,str).
-##
-## The individual words may not contain spaces.
-##
-## $(1): 1 based index
-## $(2): value of the form a:b:c...
-###########################################################
-
-define word-colon
-$(word $(1),$(subst :,$(space),$(2)))
-endef
-
-###########################################################
-## Convert "a=b c= d e = f" into "a=b c=d e=f"
-##
-## $(1): list to collapse
-## $(2): if set, separator word; usually "=", ":", or ":="
-## Defaults to "=" if not set.
-###########################################################
-
-define collapse-pairs
-$(eval _cpSEP := $(strip $(if $(2),$(2),=)))\
-$(subst $(space)$(_cpSEP)$(space),$(_cpSEP),$(strip \
- $(subst $(_cpSEP), $(_cpSEP) ,$(1))))
-endef
-
-###########################################################
-## Given a list of pairs, if multiple pairs have the same
-## first components, keep only the first pair.
-##
-## $(1): list of pairs
-## $(2): the separator word, such as ":", "=", etc.
-define uniq-pairs-by-first-component
-$(eval _upbfc_fc_set :=)\
-$(strip $(foreach w,$(1), $(eval _first := $(word 1,$(subst $(2),$(space),$(w))))\
- $(if $(filter $(_upbfc_fc_set),$(_first)),,$(w)\
- $(eval _upbfc_fc_set += $(_first)))))\
-$(eval _upbfc_fc_set :=)\
-$(eval _first:=)
+# Get the exported-sdk-libs files, which collectively give the list of exported Java SDK
+# library names that are (transitively) exported from the given set of Java libs.
+# $(1): library name list
+define exported-sdk-libs-files
+$(foreach lib,$(1),$(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/exported-sdk-libs)
endef
###########################################################
@@ -856,73 +812,22 @@ ESC_RESET := \033[0m
# $(1): path (and optionally line) information
# $(2): message to print
define echo-warning
-echo -e "$(ESC_BOLD)$(1): $(ESC_WARNING)warning:$(ESC_RESET)$(ESC_BOLD)" $(2) "$(ESC_RESET)" >&2
+echo -e "$(ESC_BOLD)$(1): $(ESC_WARNING)warning:$(ESC_RESET)$(ESC_BOLD)" '$(subst ','\'',$(2))' "$(ESC_RESET)" >&2
endef
# $(1): path (and optionally line) information
# $(2): message to print
define echo-error
-echo -e "$(ESC_BOLD)$(1): $(ESC_ERROR)error:$(ESC_RESET)$(ESC_BOLD)" $(2) "$(ESC_RESET)" >&2
-endef
-
-# $(1): message to print
-define pretty-warning
-$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-endef
-
-# $(1): message to print
-define pretty-error
-$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-$(error done)
+echo -e "$(ESC_BOLD)$(1): $(ESC_ERROR)error:$(ESC_RESET)$(ESC_BOLD)" '$(subst ','\'',$(2))' "$(ESC_RESET)" >&2
endef
###########################################################
-## Package filtering
+## Legacy showcommands compatibility
###########################################################
-# Given a list of installed modules (short or long names)
-# return a list of the packages (yes, .apk packages, not
-# modules in general) that are overridden by this list and,
-# therefore, should not be installed.
-# $(1): mixed list of installed modules
-# TODO: This is fragile; find a reliable way to get this information.
-define _get-package-overrides
- $(eval ### Discard any words containing slashes, unless they end in .apk, \
- ### in which case trim off the directory component and the suffix. \
- ### If there are no slashes, keep the entire word.)
- $(eval _gpo_names := $(subst /,@@@ @@@,$(1)))
- $(eval _gpo_names := \
- $(filter %.apk,$(_gpo_names)) \
- $(filter-out %@@@ @@@%,$(_gpo_names)))
- $(eval _gpo_names := $(patsubst %.apk,%,$(_gpo_names)))
- $(eval _gpo_names := $(patsubst @@@%,%,$(_gpo_names)))
-
- $(eval ### Remove any remaining words that contain dots.)
- $(eval _gpo_names := $(subst .,@@@ @@@,$(_gpo_names)))
- $(eval _gpo_names := $(filter-out %@@@ @@@%,$(_gpo_names)))
-
- $(eval ### Now we have a list of any words that could possibly refer to \
- ### packages, although there may be words that do not. Only \
- ### real packages will be present under PACKAGES.*, though.)
- $(foreach _gpo_name,$(_gpo_names),$(PACKAGES.$(_gpo_name).OVERRIDES))
-endef
-
-define get-package-overrides
-$(sort $(strip $(call _get-package-overrides,$(1))))
-endef
-
-###########################################################
-## Output the command lines, or not
-###########################################################
-
-ifeq ($(strip $(SHOW_COMMANDS)),)
define pretty
@echo $1
endef
-else
-define pretty
-endef
-endif
###########################################################
## Commands for including the dependency files the compiler generates
@@ -1046,11 +951,11 @@ define transform-bc-to-so
$(hide) mkdir -p $(dir $@)
$(hide) $(BCC_COMPAT) -O3 -o $(dir $@)/$(notdir $(<:.bc=.o)) -fPIC -shared \
-rt-path $(RS_PREBUILT_CLCORE) -mtriple $(RS_COMPAT_TRIPLE) $<
-$(hide) $(PRIVATE_CXX) -shared -Wl,-soname,$(notdir $@) -nostdlib \
+$(hide) $(PRIVATE_CXX_LINK) -shared -Wl,-soname,$(notdir $@) -nostdlib \
-Wl,-rpath,\$$ORIGIN/../lib \
$(dir $@)/$(notdir $(<:.bc=.o)) \
$(RS_PREBUILT_COMPILER_RT) \
- -o $@ $(TARGET_GLOBAL_LDFLAGS) -Wl,--hash-style=sysv \
+ -o $@ $(CLANG_TARGET_GLOBAL_LDFLAGS) -Wl,--hash-style=sysv \
-L $(SOONG_OUT_DIR)/ndk/platforms/android-$(PRIVATE_SDK_VERSION)/arch-$(TARGET_ARCH)/usr/lib64 \
-L $(SOONG_OUT_DIR)/ndk/platforms/android-$(PRIVATE_SDK_VERSION)/arch-$(TARGET_ARCH)/usr/lib \
$(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupport)/libRSSupport.so \
@@ -1095,7 +1000,7 @@ define transform-aidl-to-cpp
@mkdir -p $(dir $@)
@mkdir -p $(PRIVATE_HEADER_OUTPUT_DIR)
@echo "Generating C++ from AIDL: $(PRIVATE_MODULE) <= $<"
-$(hide) $(AIDL_CPP) -d$(basename $@).aidl.d -ninja $(PRIVATE_AIDL_FLAGS) \
+$(hide) $(AIDL_CPP) -d$(basename $@).aidl.d --ninja $(PRIVATE_AIDL_FLAGS) \
$< $(PRIVATE_HEADER_OUTPUT_DIR) $@
endef
@@ -1133,7 +1038,7 @@ define transform-vts-to-cpp
@mkdir -p $(dir $@)
@mkdir -p $(PRIVATE_HEADER_OUTPUT_DIR)
@echo "Generating C++ from VTS: $(PRIVATE_MODULE) <= $<"
-$(hide) $(VTSC) -d$(basename $@).vts.P $(PRIVATE_VTS_FLAGS) \
+$(hide) $(VTSC) -TODO_b/120496070 $(PRIVATE_VTS_FLAGS) \
$< $(PRIVATE_HEADER_OUTPUT_DIR) $@
endef
@@ -1164,7 +1069,6 @@ endef
###########################################################
## Commands for running protoc to compile .proto into .java
###########################################################
-# PATH contains HOST_OUT_EXECUTABLES to allow protoc-gen-* plugins
define transform-proto-to-java
@mkdir -p $(dir $@)
@@ -1172,7 +1076,6 @@ define transform-proto-to-java
@rm -rf $(PRIVATE_PROTO_JAVA_OUTPUT_DIR)
@mkdir -p $(PRIVATE_PROTO_JAVA_OUTPUT_DIR)
$(hide) for f in $(PRIVATE_PROTO_SRC_FILES); do \
- PATH=$$PATH:$(HOST_OUT_EXECUTABLES) \
$(PROTOC) \
$(addprefix --proto_path=, $(PRIVATE_PROTO_INCLUDES)) \
$(PRIVATE_PROTO_JAVA_OUTPUT_OPTION)="$(PRIVATE_PROTO_JAVA_OUTPUT_PARAMS):$(PRIVATE_PROTO_JAVA_OUTPUT_DIR)" \
@@ -1185,13 +1088,11 @@ endef
######################################################################
## Commands for running protoc to compile .proto into .pb.cc (or.pb.c) and .pb.h
######################################################################
-# PATH contains HOST_OUT_EXECUTABLES to allow protoc-gen-* plugins
define transform-proto-to-cc
@echo "Protoc: $@ <= $<"
@mkdir -p $(dir $@)
$(hide) \
- PATH=$$PATH:$(HOST_OUT_EXECUTABLES) \
$(PROTOC) \
$(addprefix --proto_path=, $(PRIVATE_PROTO_INCLUDES)) \
$(PRIVATE_PROTOC_FLAGS) \
@@ -1236,10 +1137,16 @@ define transform-cpp-to-o-compiler-args
$(PRIVATE_CPPFLAGS_NO_OVERRIDE)
endef
+# PATH_TO_CLANG_TIDY_SHELL is defined in build/soong
+define call-clang-tidy
+CLANG_TIDY=$(PATH_TO_CLANG_TIDY) \
+ $(PATH_TO_CLANG_TIDY_SHELL) \
+ $(PRIVATE_TIDY_FLAGS) \
+ -checks=$(PRIVATE_TIDY_CHECKS)
+endef
+
define clang-tidy-cpp
-$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
- -checks=$(PRIVATE_TIDY_CHECKS) \
- $< -- $(transform-cpp-to-o-compiler-args)
+$(hide) $(call-clang-tidy) $< -- $(transform-cpp-to-o-compiler-args)
endef
ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
@@ -1285,9 +1192,7 @@ $(call transform-c-or-s-to-o-compiler-args, \
endef
define clang-tidy-c
-$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
- -checks=$(PRIVATE_TIDY_CHECKS) \
- $< -- $(transform-c-to-o-compiler-args)
+$(hide) $(call-clang-tidy) $< -- $(transform-c-to-o-compiler-args)
endef
ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
@@ -1355,9 +1260,7 @@ define transform-host-cpp-to-o-compiler-args
endef
define clang-tidy-host-cpp
-$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
- -checks=$(PRIVATE_TIDY_CHECKS) \
- $< -- $(transform-host-cpp-to-o-compiler-args)
+$(hide) $(call-clang-tidy) $< -- $(transform-host-cpp-to-o-compiler-args)
endef
ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
@@ -1407,9 +1310,7 @@ define transform-host-c-to-o-compiler-args
endef
define clang-tidy-host-c
-$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
- -checks=$(PRIVATE_TIDY_CHECKS) \
- $< -- $(transform-host-c-to-o-compiler-args)
+$(hide) $(call-clang-tidy) $< -- $(transform-host-c-to-o-compiler-args)
endef
ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
@@ -1633,7 +1534,7 @@ $(hide) mv -f $@.tmp $@
endef
define transform-o-to-aux-executable-inner
-$(hide) $(PRIVATE_CXX) -pie \
+$(hide) $(PRIVATE_CXX_LINK) -pie \
-Bdynamic \
-Wl,--gc-sections \
$(PRIVATE_ALL_OBJECTS) \
@@ -1652,7 +1553,7 @@ $(transform-o-to-aux-executable-inner)
endef
define transform-o-to-aux-static-executable-inner
-$(hide) $(PRIVATE_CXX) \
+$(hide) $(PRIVATE_CXX_LINK) \
-Bstatic \
-Wl,--gc-sections \
$(PRIVATE_ALL_OBJECTS) \
@@ -1753,8 +1654,7 @@ endef
# it to be overridden en masse; see combo/linux-arm.make for an example.
ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
define transform-host-o-to-shared-lib-inner
-$(hide) $(PRIVATE_CXX) \
- -Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_INTERMEDIATE_LIBRARIES) \
+$(hide) $(PRIVATE_CXX_LINK) \
-Wl,-rpath,\$$ORIGIN/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
-Wl,-rpath,\$$ORIGIN/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
-shared -Wl,-soname,$(notdir $@) \
@@ -1795,10 +1695,10 @@ endef
###########################################################
define transform-o-to-shared-lib-inner
-$(hide) $(PRIVATE_CXX) \
+$(hide) $(PRIVATE_CXX_LINK) \
-nostdlib -Wl,-soname,$(notdir $@) \
-Wl,--gc-sections \
- $(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl$(comma)-shared) \
+ -shared \
$(PRIVATE_TARGET_CRTBEGIN_SO_O) \
$(PRIVATE_ALL_OBJECTS) \
-Wl,--whole-archive \
@@ -1808,6 +1708,7 @@ $(hide) $(PRIVATE_CXX) \
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
+ $(PRIVATE_TARGET_LIBCRT_BUILTINS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(PRIVATE_TARGET_LIBGCC) \
$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -1825,71 +1726,15 @@ $(transform-o-to-shared-lib-inner)
endef
###########################################################
-## Commands for filtering a target executable or library
-###########################################################
-
-ifneq ($(TARGET_BUILD_VARIANT),user)
- TARGET_STRIP_EXTRA = && $(PRIVATE_OBJCOPY) --add-gnu-debuglink=$< $@
- TARGET_STRIP_KEEP_SYMBOLS_EXTRA = --add-gnu-debuglink=$<
-endif
-
-define transform-to-stripped
-@echo "$($(PRIVATE_PREFIX)DISPLAY) Strip: $(PRIVATE_MODULE) ($@)"
-@mkdir -p $(dir $@)
-$(hide) $(PRIVATE_STRIP) --strip-all $< -o $@ \
- $(if $(PRIVATE_NO_DEBUGLINK),,$(TARGET_STRIP_EXTRA))
-endef
-
-define transform-to-stripped-keep-mini-debug-info
-@echo "$($(PRIVATE_PREFIX)DISPLAY) Strip (mini debug info): $(PRIVATE_MODULE) ($@)"
-@mkdir -p $(dir $@)
-$(hide) rm -f $@ $@.dynsyms $@.funcsyms $@.keep_symbols $@.debug $@.mini_debuginfo.xz
-if $(PRIVATE_STRIP) --strip-all -R .comment $< -o $@; then \
- $(PRIVATE_OBJCOPY) --only-keep-debug $< $@.debug && \
- $(PRIVATE_NM) -D $< --format=posix --defined-only | awk '{ print $$1 }' | sort >$@.dynsyms && \
- $(PRIVATE_NM) $< --format=posix --defined-only | awk '{ if ($$2 == "T" || $$2 == "t" || $$2 == "D") print $$1 }' | sort >$@.funcsyms && \
- comm -13 $@.dynsyms $@.funcsyms >$@.keep_symbols && \
- echo >>$@.keep_symbols && \
- $(PRIVATE_OBJCOPY) --rename-section .debug_frame=saved_debug_frame $@.debug $@.mini_debuginfo && \
- $(PRIVATE_OBJCOPY) -S --remove-section .gdb_index --remove-section .comment --keep-symbols=$@.keep_symbols $@.mini_debuginfo && \
- $(PRIVATE_OBJCOPY) --rename-section saved_debug_frame=.debug_frame $@.mini_debuginfo && \
- rm -f $@.mini_debuginfo.xz && \
- xz $@.mini_debuginfo && \
- $(PRIVATE_OBJCOPY) --add-section .gnu_debugdata=$@.mini_debuginfo.xz $@; \
-else \
- cp -f $< $@; \
-fi
-endef
-
-define transform-to-stripped-keep-symbols
-@echo "$($(PRIVATE_PREFIX)DISPLAY) Strip (keep symbols): $(PRIVATE_MODULE) ($@)"
-@mkdir -p $(dir $@)
-$(hide) $(PRIVATE_OBJCOPY) \
- `$(PRIVATE_READELF) -S $< | awk '/.debug_/ {print "-R " $$2}' | xargs` \
- $(TARGET_STRIP_KEEP_SYMBOLS_EXTRA) $< $@
-endef
-
-###########################################################
-## Commands for packing a target executable or library
-###########################################################
-
-define pack-elf-relocations
-@echo "$($(PRIVATE_PREFIX)DISPLAY) Pack Relocations: $(PRIVATE_MODULE) ($@)"
-$(copy-file-to-target)
-$(hide) $(RELOCATION_PACKER) $@
-endef
-
-###########################################################
## Commands for running gcc to link an executable
###########################################################
define transform-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) -pie \
+$(hide) $(PRIVATE_CXX_LINK) -pie \
-nostdlib -Bdynamic \
-Wl,-dynamic-linker,$(PRIVATE_LINKER) \
-Wl,--gc-sections \
-Wl,-z,nocopyreloc \
- -Wl,-rpath-link=$(PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES) \
$(PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O) \
$(PRIVATE_ALL_OBJECTS) \
-Wl,--whole-archive \
@@ -1899,6 +1744,7 @@ $(hide) $(PRIVATE_CXX) -pie \
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
+ $(PRIVATE_TARGET_LIBCRT_BUILTINS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(PRIVATE_TARGET_LIBGCC) \
$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -1927,7 +1773,7 @@ endef
###########################################################
define transform-o-to-static-executable-inner
-$(hide) $(PRIVATE_CXX) \
+$(hide) $(PRIVATE_CXX_LINK) \
-nostdlib -Bstatic \
$(if $(filter $(PRIVATE_LDFLAGS),-shared),,-static) \
-Wl,--gc-sections \
@@ -1939,13 +1785,14 @@ $(hide) $(PRIVATE_CXX) \
-Wl,--whole-archive \
$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
-Wl,--no-whole-archive \
- $(filter-out %libcompiler_rt.a,$(filter-out %libc_nomalloc.a,$(filter-out %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES)))) \
+ $(filter-out %libcompiler_rt.hwasan.a %libc_nomalloc.hwasan.a %libc.hwasan.a %libcompiler_rt.a %libc_nomalloc.a %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
-Wl,--start-group \
- $(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
- $(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
+ $(filter %libc.a %libc.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
+ $(filter %libc_nomalloc.a %libc_nomalloc.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
$(PRIVATE_TARGET_LIBATOMIC) \
- $(filter %libcompiler_rt.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
+ $(filter %libcompiler_rt.a %libcompiler_rt.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
+ $(PRIVATE_TARGET_LIBCRT_BUILTINS) \
$(PRIVATE_TARGET_LIBGCC) \
-Wl,--end-group \
$(PRIVATE_TARGET_CRTEND_O)
@@ -1961,19 +1808,10 @@ endef
###########################################################
## Commands for running gcc to link a host executable
###########################################################
-ifdef BUILD_HOST_static
-HOST_FPIE_FLAGS :=
-else
-HOST_FPIE_FLAGS := -pie
-# Force the correct entry point to workaround a bug in binutils that manifests with -pie
-ifeq ($(HOST_CROSS_OS),windows)
-HOST_CROSS_FPIE_FLAGS += -Wl,-e_mainCRTStartup
-endif
-endif
ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
define transform-host-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) \
+$(hide) $(PRIVATE_CXX_LINK) \
$(PRIVATE_ALL_OBJECTS) \
-Wl,--whole-archive \
$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
@@ -1984,7 +1822,6 @@ $(hide) $(PRIVATE_CXX) \
$(if $(filter true,$(NATIVE_COVERAGE)),-lgcov) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
- -Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_INTERMEDIATE_LIBRARIES) \
$(foreach path,$(PRIVATE_RPATHS), \
-Wl,-rpath,\$$ORIGIN/$(path)) \
$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
@@ -2002,6 +1839,16 @@ define transform-host-o-to-executable
$(transform-host-o-to-executable-inner)
endef
+###########################################################
+## Commands for packaging native coverage files
+###########################################################
+define package-coverage-files
+ @rm -f $@ $@.lst $@.premerged
+ @touch $@.lst
+ $(foreach obj,$(strip $(PRIVATE_ALL_OBJECTS)), $(hide) echo $(obj) >> $@.lst$(newline))
+ $(hide) $(SOONG_ZIP) -o $@.premerged -C $(OUT_DIR) -l $@.lst
+ $(hide) $(MERGE_ZIPS) -ignore-duplicates $@ $@.premerged $(strip $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES))
+endef
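+# A minimal sketch of a rule that could invoke this (the target and variable
+# names here are illustrative):
+#   $(my_coverage_zip): PRIVATE_ALL_OBJECTS := $(my_objects)
+#   $(my_coverage_zip): PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(my_whole_static_lib_zips)
+#   $(my_coverage_zip): $(my_objects) $(SOONG_ZIP) $(MERGE_ZIPS)
+#           $(package-coverage-files)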
###########################################################
## Commands for running javac to make .class files
@@ -2108,7 +1955,9 @@ endef
# $(2): the base dir of the output file path
# Returns: the compiled output file path
define aapt2-compiled-resource-out-file
-$(eval _p_w := $(strip $(subst /,$(space),$(dir $(1)))))$(2)/$(subst $(space),/,$(_p_w))_$(if $(filter values%,$(lastword $(_p_w))),$(patsubst %.xml,%.arsc,$(notdir $(1))),$(notdir $(1))).flat
+$(strip \
+ $(eval _p_w := $(strip $(subst /,$(space),$(dir $(call clean-path,$(1))))))
+ $(2)/$(subst $(space),/,$(_p_w))_$(if $(filter values%,$(lastword $(_p_w))),$(patsubst %.xml,%.arsc,$(notdir $(1))),$(notdir $(1))).flat)
endef
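+# For example (paths are illustrative), a call such as
+#   $(call aapt2-compiled-resource-out-file,pkg/res/values/strings.xml,$(my_out))
+# evaluates to
+#   $(my_out)/pkg/res/values_strings.arsc.flat
+# which matches aapt2's naming scheme for compiled resources.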
define aapt2-link
@@ -2141,6 +1990,19 @@ $(SOONG_ZIP) -o $(PRIVATE_SRCJAR) -C $(PRIVATE_JAVA_GEN_DIR) -D $(PRIVATE_JAVA_G
$(EXTRACT_JAR_PACKAGES) -i $(PRIVATE_SRCJAR) -o $(PRIVATE_AAPT_EXTRA_PACKAGES) --prefix '--extra-packages '
endef
+define _create-default-manifest-file
+$(1):
+ rm -f $1
+ (echo '<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="missing.manifest">' && \
+ echo ' <uses-sdk android:minSdkVersion="$(2)" />' && \
+ echo '</manifest>' ) > $1
+endef
+
+define create-default-manifest-file
+ $(eval $(call _create-default-manifest-file,$(1),$(2)))
+endef
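+# A minimal usage sketch (the output path and minSdkVersion are illustrative):
+#   $(call create-default-manifest-file,$(intermediates)/manifest/AndroidManifest.xml,21)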
+
+
###########################################################
xlint_unchecked := -Xlint:unchecked
@@ -2295,12 +2157,9 @@ $(hide) if [ -s $(PRIVATE_JAVA_SOURCE_LIST) -o -n "$(PRIVATE_SRCJARS)" ] ; then
$(JAVA) -jar $(TURBINE) \
--output $@.premerged --temp_dir $(dir $@)/classes-turbine \
--sources \@$(PRIVATE_JAVA_SOURCE_LIST) --source_jars $(PRIVATE_SRCJARS) \
- --javacopts $(PRIVATE_JAVACFLAGS) $(COMMON_JDK_FLAGS) \
- $(addprefix --bootclasspath ,$(strip \
- $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH)) \
- $(PRIVATE_EMPTY_BOOTCLASSPATH))) \
- $(addprefix --classpath ,$(strip \
- $(call normalize-path-list,$(PRIVATE_ALL_JAVA_HEADER_LIBRARIES)))) \
+ --javacopts $(PRIVATE_JAVACFLAGS) $(COMMON_JDK_FLAGS) -- \
+ $(addprefix --bootclasspath ,$(strip $(PRIVATE_BOOTCLASSPATH))) \
+ $(addprefix --classpath ,$(strip $(PRIVATE_ALL_JAVA_HEADER_LIBRARIES))) \
|| ( rm -rf $(dir $@)/classes-turbine ; exit 41 ) && \
$(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $@.premerged $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
else \
@@ -2358,87 +2217,46 @@ define codename-or-sdk-to-sdk
$(if $(filter $(1),$(PLATFORM_VERSION_CODENAME)),10000,$(1))
endef
-# --add-opens is required because desugar reflects via java.lang.invoke.MethodHandles.Lookup
-define desugar-classes-jar
-@echo Desugar: $@
-@mkdir -p $(dir $@)
-$(hide) rm -f $@ $@.tmp
-@rm -rf $(dir $@)/desugar_dumped_classes
-@mkdir $(dir $@)/desugar_dumped_classes
-$(hide) $(JAVA) \
- $(if $(USE_OPENJDK9),--add-opens java.base/java.lang.invoke=ALL-UNNAMED,) \
- -Djdk.internal.lambda.dumpProxyClasses=$(abspath $(dir $@))/desugar_dumped_classes \
- -jar $(DESUGAR) \
- $(addprefix --bootclasspath_entry ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix --classpath_entry ,$(PRIVATE_SHARED_JAVA_HEADER_LIBRARIES)) \
- --min_sdk_version $(call codename-or-sdk-to-sdk,$(PRIVATE_MIN_SDK_VERSION)) \
- --allow_empty_bootclasspath \
- $(if $(filter --core-library,$(PRIVATE_DX_FLAGS)),--core_library) \
- -i $< -o $@.tmp
- mv $@.tmp $@
+# Uses LOCAL_SDK_VERSION and PLATFORM_SDK_VERSION to determine a compileSdkVersion
+# in the form of a number or a codename (28 or P).
+define module-sdk-version
+$(strip \
+ $(if $(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION)), \
+ $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION)), \
+ $(PLATFORM_SDK_VERSION)))
+endef
+
+# Uses LOCAL_SDK_VERSION and DEFAULT_APP_TARGET_SDK to determine
+# a targetSdkVersion in the form of a number or a codename (28 or P).
+define module-target-sdk-version
+$(strip \
+ $(if $(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION)), \
+ $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION)), \
+ $(DEFAULT_APP_TARGET_SDK)))
+endef
+
+# Uses LOCAL_MIN_SDK_VERSION, LOCAL_SDK_VERSION and DEFAULT_APP_TARGET_SDK to determine
+# a minSdkVersion in the form of a number or a codename (28 or P).
+define module-min-sdk-version
+$(if $(LOCAL_MIN_SDK_VERSION),$(LOCAL_MIN_SDK_VERSION),$(call module-target-sdk-version))
endef
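+# Sketch of how these resolve (assuming, for illustration, LOCAL_SDK_VERSION := 28
+# and no LOCAL_MIN_SDK_VERSION):
+#   $(call module-sdk-version)        -> 28
+#   $(call module-target-sdk-version) -> 28
+#   $(call module-min-sdk-version)    -> 28
+# With LOCAL_SDK_VERSION := current, module-sdk-version falls back to
+# PLATFORM_SDK_VERSION and the other two to DEFAULT_APP_TARGET_SDK.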
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
-@mkdir -p $(dir $@)
-$(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX_COMMAND) \
- --dex --output=$(dir $@) \
- --min-sdk-version=$(PRIVATE_MIN_SDK_VERSION) \
- $(if $(NO_OPTIMIZE_DX), \
- --no-optimize) \
- $(if $(GENERATE_DEX_DEBUG), \
- --debug --verbose \
- --dump-to=$(@:.dex=.lst) \
- --dump-width=1000) \
- $(PRIVATE_DX_FLAGS) \
- $<
-endef
-
-
-define transform-classes-d8.jar-to-dex
-@echo "target Dex: $(PRIVATE_MODULE)"
-@mkdir -p $(dir $@)
+@mkdir -p $(dir $@)tmp
$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
-$(hide) $(DX_COMMAND) \
- --output $(dir $@) \
+$(hide) $(D8_WRAPPER) $(DX_COMMAND) $(DEX_FLAGS) \
+ --output $(dir $@)tmp \
+ $(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
$(subst --main-dex-list=, --main-dex-list , \
$(filter-out --core-library --multi-dex --minimal-main-dex,$(PRIVATE_DX_FLAGS))) \
$(dir $@)d8_input.jar
+$(hide) mv $(dir $@)tmp/* $(dir $@)
$(hide) rm -f $(dir $@)d8_input.jar
-endef
-
-# Create a mostly-empty .jar file that we'll add to later.
-# The MacOS jar tool doesn't like creating empty jar files,
-# so we need to give it something.
-# $(1) package to create
-define create-empty-package-at
-@mkdir -p $(dir $(1))
-$(hide) touch $(dir $(1))zipdummy
-$(hide) $(JAR) cf $(1) -C $(dir $(1)) zipdummy
-$(hide) zip -qd $(1) zipdummy
-$(hide) rm $(dir $(1))zipdummy
-endef
-
-# Create a mostly-empty .jar file that we'll add to later.
-# The MacOS jar tool doesn't like creating empty jar files,
-# so we need to give it something.
-define create-empty-package
-$(call create-empty-package-at,$@)
-endef
-
-# Copy an arhchive file and delete any class files and empty folders inside.
-# $(1): the source archive file.
-# $(2): the destination archive file.
-define initialize-package-file
-@mkdir -p $(dir $(2))
-$(hide) cp -f $(1) $(2)
-$(hide) zip -qd $(2) "*.class" \
- $(if $(strip $(PRIVATE_DONT_DELETE_JAR_DIRS)),,"*/") \
- || true # Ignore the error when nothing to delete.
+$(hide) rm -rf $(dir $@)tmp
endef
#TODO: we kinda want to build different asset packages for
@@ -2450,8 +2268,9 @@ endef
#Note that the version numbers are given to aapt as simple default
#values; applications can override these by explicitly stating
#them in their manifest.
-define add-assets-to-package
-$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
+# $(1) the package file
+define create-assets-package
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) \
$(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
$(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
$(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2466,55 +2285,54 @@ $(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
$(addprefix --rename-manifest-package , $(PRIVATE_MANIFEST_PACKAGE_NAME)) \
$(addprefix --rename-instrumentation-target-package , $(PRIVATE_MANIFEST_INSTRUMENTATION_FOR)) \
--skip-symbols-without-default-localization \
- -F $@
+ -F $(1)
# So that we re-run aapt when the list of input files changes
$(hide) echo $(PRIVATE_RESOURCE_LIST) >/dev/null
endef
# We need the extra blank line, so that the command will be on a separate line.
-# $(1): the ABI name
-# $(2): the list of shared libraies
+# $(1): the package
+# $(2): the ABI name
+# $(3): the list of shared libraries
define _add-jni-shared-libs-to-package-per-abi
-$(hide) cp $(2) $(dir $@)lib/$(1)
+$(hide) cp $(3) $(dir $(1))lib/$(2)
endef
-# For apps_only build, don't uncompress/page-align the jni libraries,
-# because the apk may be run on older platforms that don't support loading jni directly from apk.
-ifdef TARGET_BUILD_APPS
-JNI_COMPRESS_FLAGS :=
-ZIPALIGN_PAGE_ALIGN_FLAGS :=
-else
-JNI_COMPRESS_FLAGS := -0
-ZIPALIGN_PAGE_ALIGN_FLAGS := -p
-endif
-
-define add-jni-shared-libs-to-package
-$(hide) rm -rf $(dir $@)lib
-$(hide) mkdir -p $(addprefix $(dir $@)lib/,$(PRIVATE_JNI_SHARED_LIBRARIES_ABI))
+# $(1): the package file
+# $(2): if true, uncompress jni libs
+define create-jni-shared-libs-package
+rm -rf $(dir $(1))lib
+mkdir -p $(addprefix $(dir $(1))lib/,$(PRIVATE_JNI_SHARED_LIBRARIES_ABI))
$(foreach abi,$(PRIVATE_JNI_SHARED_LIBRARIES_ABI),\
- $(call _add-jni-shared-libs-to-package-per-abi,$(abi),\
+ $(call _add-jni-shared-libs-to-package-per-abi,$(1),$(abi),\
$(patsubst $(abi):%,%,$(filter $(abi):%,$(PRIVATE_JNI_SHARED_LIBRARIES)))))
-$(hide) (cd $(dir $@) && zip -qrX $(JNI_COMPRESS_FLAGS) $(notdir $@) lib)
-$(hide) rm -rf $(dir $@)lib
+$(SOONG_ZIP) $(if $(2),-L 0) -o $(1) -C $(dir $(1)) -D $(dir $(1))lib
+rm -rf $(dir $(1))lib
endef
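+# A minimal usage sketch (the zip path and the uncompress flag are illustrative);
+# the calling rule is expected to set PRIVATE_JNI_SHARED_LIBRARIES and
+# PRIVATE_JNI_SHARED_LIBRARIES_ABI:
+#   $(call create-jni-shared-libs-package,$(my_jni_zip),true)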
-#TODO: update the manifest to point to the dex file
-define add-dex-to-package
-$(call add-dex-to-package-arg,$@)
+# $(1): the jar file.
+# $(2): the classes.dex file.
+define create-dex-jar
+find $(dir $(2)) -maxdepth 1 -name "classes*.dex" | sort > $(1).lst
+$(SOONG_ZIP) -o $(1) -C $(dir $(2)) -l $(1).lst
endef
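+# A minimal usage sketch (paths are illustrative): zip up the classes*.dex files
+# that sit next to the given classes.dex:
+#   $(call create-dex-jar,$(intermediates)/dex.jar,$(intermediates)/classes.dex)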
-# $(1): the package file.
-define add-dex-to-package-arg
-$(hide) find $(dir $(PRIVATE_DEX_FILE)) -maxdepth 1 -name "classes*.dex" | sort | xargs zip -qjX $(1)
+# Add java resources added by the current module to an existing package.
+# $(1) destination package.
+define add-java-resources-to
+ $(call _java-resources,$(1),u)
endef
-# Add java resources added by the current module.
-# $(1) destination package
-#
-define add-java-resources-to
+# Add java resources added by the current module to a new jar.
+# $(1) destination jar.
+define create-java-resources-jar
+ $(call _java-resources,$(1),c)
+endef
+
+define _java-resources
$(call dump-words-to-file, $(PRIVATE_EXTRA_JAR_ARGS), $(1).jar-arg-list)
-$(hide) $(JAR) uf $(1) @$(1).jar-arg-list
+$(hide) $(JAR) $(2)f $(1) @$(1).jar-arg-list
@rm -f $(1).jar-arg-list
endef
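+# Both wrappers read PRIVATE_EXTRA_JAR_ARGS; they differ only in the jar mode
+# ("u" updates an existing archive, "c" creates a new one). Sketch (the .res
+# suffix is illustrative):
+#   $(call add-java-resources-to,$@)
+#   $(call create-java-resources-jar,$@.res)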
@@ -2529,6 +2347,12 @@ define add-jar-resources-to-package
$(JAR) uf $(1) $(call jar-args-sorted-files-in-directory,$(3))
endef
+# $(1): the output resources jar.
+# $(2): the input jar
+define extract-resources-jar
+ $(ZIP2ZIP) -i $(2) -o $(1) -x '**/*.class' -x '**/*/'
+endef
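+# A minimal usage sketch (file names are illustrative): copy everything except
+# .class entries and directory entries from an input jar into a resources-only jar:
+#   $(call extract-resources-jar,$(intermediates)/java-res.jar,$(my_classes_jar))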
+
# Sign a package using the specified key/cert.
#
define sign-package
@@ -2551,7 +2375,7 @@ $(hide) if ! $(ZIPALIGN) -c $(ZIPALIGN_PAGE_ALIGN_FLAGS) 4 $@ >/dev/null ; then
mv $@ $@.unaligned; \
$(ZIPALIGN) \
-f \
- $(ZIPALIGN_PAGE_ALIGN_FLAGS) \
+ -p \
4 \
$@.unaligned $@.aligned; \
mv $@.aligned $@; \
@@ -2567,6 +2391,42 @@ $(hide) \
mv $@.compressed $@;
endef
+ifeq ($(HOST_OS),linux)
+# Runs appcompat and store logs in $(PRODUCT_OUT)/appcompat
+define extract-package
+$(if $(filter aapt2, $(1)), \
+ $(AAPT2) dump resources $@ | awk -F ' |=' '/^Package/{print $$3}' >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log &&, \
+ $(AAPT) dump badging $@ | awk -F \' '/^package/{print $$2}' >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log &&)
+endef
+define appcompat-header
+$(hide) \
+ mkdir -p $(PRODUCT_OUT)/appcompat && \
+ rm -f $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ echo -n "Package name: " >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ $(call extract-package, $(1)) \
+ echo "Module name in Android tree: $(PRIVATE_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ echo "Local path in Android tree: $(PRIVATE_PATH)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ echo "Install path on $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT): $(PRIVATE_INSTALLED_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+endef
+define run-appcompat
+$(hide) \
+ echo "appcompat.sh output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" art/tools/veridex/appcompat.sh --dex-file=$@ --api-flags=$(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+endef
+appcompat-files = \
+ art/tools/veridex/appcompat.sh \
+ $(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) \
+ $(HOST_OUT_EXECUTABLES)/veridex \
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/core_dex_intermediates/classes.dex \
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/oahl_dex_intermediates/classes.dex
+else
+appcompat-header =
+run-appcompat =
+appcompat-files =
+endif # HOST_OS == linux
+.KATI_READONLY: appcompat-header run-appcompat appcompat-files
+
# Remove dynamic timestamps from packages
#
define remove-timestamps-from-package
@@ -2576,25 +2436,21 @@ endef
# Uncompress dex files embedded in an apk.
#
define uncompress-dexs
-$(hide) if (zipinfo $@ '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
- tmpdir=$@.tmpdir; \
- rm -rf $$tmpdir && mkdir $$tmpdir; \
- unzip -q $@ '*.dex' -d $$tmpdir && \
- zip -qd $@ '*.dex' && \
- ( cd $$tmpdir && find . -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
- rm -rf $$tmpdir; \
+ if (zipinfo $@ '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
+ $(ZIP2ZIP) -i $@ -o $@.tmp -0 "classes*.dex" && \
+ mv -f $@.tmp $@ ; \
fi
endef
-# Uncompress shared libraries embedded in an apk.
+# Uncompress shared JNI libraries embedded in an apk.
#
-define uncompress-shared-libs
-$(hide) if (zipinfo $@ $(PRIVATE_EMBEDDED_JNI_LIBS) 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
- rm -rf $(dir $@)uncompressedlibs && mkdir $(dir $@)uncompressedlibs; \
- unzip -q $@ $(PRIVATE_EMBEDDED_JNI_LIBS) -d $(dir $@)uncompressedlibs && \
- zip -qd $@ 'lib/*.so' && \
- ( cd $(dir $@)uncompressedlibs && find lib -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
- rm -rf $(dir $@)uncompressedlibs; \
+define uncompress-prebuilt-embedded-jni-libs
+ if (zipinfo $@ 'lib/*.so' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
+ $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' \
+ $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
+ -x 'lib/**/*.so' \
+ $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS))) && \
+ mv -f $@.tmp $@ ; \
fi
endef
@@ -2639,22 +2495,72 @@ $(2): $(1)
endef
define copy-and-uncompress-dexs
-$(2): $(1) $(ZIPALIGN)
+$(2): $(1) $(ZIPALIGN) $(ZIP2ZIP)
@echo "Uncompress dexs in: $$@"
$$(copy-file-to-target)
$$(uncompress-dexs)
$$(align-package)
endef
+# Create a copy pair for a compatibility suite.
+# Filter out $(LOCAL_INSTALLED_MODULE) to avoid overriding the install target.
+# $(1): source path
+# $(2): destination path
+# The format of a copy pair is src:dst
+define compat-copy-pair
+$(if $(filter-out $(2), $(LOCAL_INSTALLED_MODULE)), $(1):$(2))
+endef
+
+# Create a copy pair for $(1) and $(2).
+# If $(2) is a substring of $(3), do nothing.
+# $(1): source path
+# $(2): destination path
+# $(3): filter-out target
+# The format of a copy pair is src:dst
+define filter-copy-pair
+$(if $(findstring $(2), $(3)),,$(1):$(2))
+endef
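+# Sketch of the difference between the two helpers (paths and the variable name
+# are illustrative):
+#   $(call compat-copy-pair,out/foo.apk,suite-dir/foo.apk)
+#     -> "out/foo.apk:suite-dir/foo.apk", unless suite-dir/foo.apk is exactly
+#        $(LOCAL_INSTALLED_MODULE), in which case nothing is emitted.
+#   $(call filter-copy-pair,out/foo.apk,suite-dir/foo.apk,$(my_filter_targets))
+#     -> empty when suite-dir/foo.apk already appears in $(my_filter_targets).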
+
# Copies many files.
# $(1): The files to copy. Each entry is a ':' separated src:dst pair
+# $(2): An optional directory to prepend to the destination
# Evaluates to the list of the dst files (ie suitable for a dependency list)
define copy-many-files
$(foreach f, $(1), $(strip \
$(eval _cmf_tuple := $(subst :, ,$(f))) \
$(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
$(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
- $(eval $(call copy-one-file,$(_cmf_src),$(_cmf_dest))) \
+ $(if $(strip $(2)), \
+ $(eval _cmf_dest := $(patsubst %/,%,$(strip $(2)))/$(patsubst /%,%,$(_cmf_dest)))) \
+ $(if $(filter-out $(_cmf_src), $(_cmf_dest)), \
+ $(eval $(call copy-one-file,$(_cmf_src),$(_cmf_dest)))) \
+ $(_cmf_dest)))
+endef
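+# A minimal usage sketch (the pair list and prefix directory are illustrative):
+#   my_installed := $(call copy-many-files,$(my_src_dst_pairs),$(PRODUCT_OUT)/system/etc)
+# The optional second argument is prepended to every destination path, and the
+# call evaluates to the resulting list of destination files.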
+
+# Copy the file only if it's a well-formed init script file. For use via $(eval).
+# $(1): source file
+# $(2): destination file
+define copy-init-script-file-checked
+# Host init verifier doesn't exist on darwin.
+ifneq ($(HOST_OS),darwin)
+$(2): $(1) $(HOST_INIT_VERIFIER) $(call intermediates-dir-for,ETC,passwd)/passwd
+ $(hide) $(HOST_INIT_VERIFIER) $$< $(call intermediates-dir-for,ETC,passwd)/passwd
+else
+$(2): $(1)
+endif
+ @echo "Copy init script: $$@"
+ $$(copy-file-to-target)
+endef
+
+# Copies many init script files and checks that they are well-formed.
+# $(1): The init script files to copy. Each entry is a ':' separated src:dst pair.
+# Evaluates to the list of the dst files. (ie suitable for a dependency list.)
+define copy-many-init-script-files-checked
+$(foreach f, $(1), $(strip \
+ $(eval _cmf_tuple := $(subst :, ,$(f))) \
+ $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
+ $(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
+ $(eval $(call copy-init-script-file-checked,$(_cmf_src),$(_cmf_dest))) \
$(_cmf_dest)))
endef
@@ -2668,6 +2574,40 @@ $(2): $(1) $(XMLLINT)
$$(copy-file-to-target)
endef
+# Copies many xml files and checks that they are well-formed.
+# $(1): The xml files to copy. Each entry is a ':' separated src:dst pair.
+# Evaluates to the list of the dst files. (ie suitable for a dependency list.)
+define copy-many-xml-files-checked
+$(foreach f, $(1), $(strip \
+ $(eval _cmf_tuple := $(subst :, ,$(f))) \
+ $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
+ $(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
+ $(eval $(call copy-xml-file-checked,$(_cmf_src),$(_cmf_dest))) \
+ $(_cmf_dest)))
+endef
+
+# Copy the file only if it is a well-formed manifest file. For use via $(eval).
+# $(1): source file
+# $(2): destination file
+define copy-vintf-manifest-checked
+$(2): $(1) $(HOST_OUT_EXECUTABLES)/assemble_vintf
+ @echo "Copy xml: $$@"
+ $(hide) $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $$< >/dev/null # Don't print the xml file to stdout.
+ $$(copy-file-to-target)
+endef
+
+# Copies many vintf manifest files and checks that they are well-formed.
+# $(1): The files to copy. Each entry is a ':' separated src:dst pair
+# Evaluates to the list of the dst files (ie suitable for a dependency list)
+define copy-many-vintf-manifest-files-checked
+$(foreach f, $(1), $(strip \
+ $(eval _cmf_tuple := $(subst :, ,$(f))) \
+ $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
+ $(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
+ $(eval $(call copy-vintf-manifest-checked,$(_cmf_src),$(_cmf_dest))) \
+ $(_cmf_dest)))
+endef
+
# The -t option to acp and the -p option to cp are
# required for OSX. OSX has a ridiculous restriction
# where it's an error for a .a file's modification time
@@ -2764,12 +2704,12 @@ endef
# Copy an apk to a target location while removing classes*.dex
# $(1): source file
# $(2): destination file
-# $(3): LOCAL_DEX_PREOPT, if nostripping then leave classes*.dex
+# $(3): LOCAL_STRIP_DEX; if non-empty, strip classes*.dex
define dexpreopt-copy-jar
$(2): $(1)
@echo "Copy: $$@"
$$(copy-file-to-target)
- $(if $(filter nostripping,$(3)),,$$(call dexpreopt-remove-classes.dex,$$@))
+ $(if $(3),$$(call dexpreopt-remove-classes.dex,$$@))
endef
# $(1): the .jar or .apk to remove classes.dex. Note that if all dex files
@@ -2785,65 +2725,31 @@ done \
fi
endef
-define hiddenapi-copy-dex-files
-$(2): $(1) $(HIDDENAPI) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
- $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
- @rm -rf $(dir $(2))
- @mkdir -p $(dir $(2))
- find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | sort | \
- xargs -I{} cp -f {} $(dir $(2))
- find $(dir $(2)) -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
- xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
- --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
- --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
-endef
-
-define hiddenapi-copy-soong-jar
-$(2): PRIVATE_FOLDER := $(dir $(2))dex-hiddenapi
-$(2): $(1) $(HIDDENAPI) $(SOONG_ZIP) $(MERGE_ZIPS) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
- $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
- @echo "Hidden API: $$@"
- $$(copy-file-to-target)
- @rm -rf $${PRIVATE_FOLDER}
- @mkdir -p $${PRIVATE_FOLDER}
- unzip -q $(2) 'classes*.dex' -d $${PRIVATE_FOLDER}
- find $${PRIVATE_FOLDER} -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
- xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
- --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
- --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
- $(SOONG_ZIP) -o $${PRIVATE_FOLDER}/classes.dex.jar -C $${PRIVATE_FOLDER} -D $${PRIVATE_FOLDER}
- $(MERGE_ZIPS) -D -zipToNotStrip $${PRIVATE_FOLDER}/classes.dex.jar -stripFile "classes*.dex" \
- $(2) $${PRIVATE_FOLDER}/classes.dex.jar $(1)
-endef
###########################################################
-## Commands to call Proguard
+## Commands to call R8
###########################################################
-ifdef TARGET_OPENJDK9
-define transform-jar-to-proguard
-@echo Skipping Proguard: $< $@
-$(hide) cp '$<' $@
-endef
+
+# Use --debug flag for eng builds by default
+ifeq (eng,$(TARGET_BUILD_VARIANT))
+R8_DEBUG_MODE := --debug
else
-define transform-jar-to-proguard
-@echo Proguard: $@
-$(hide) $(PROGUARD) -injars $< -outjars $@ $(PRIVATE_PROGUARD_FLAGS) \
- $(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR))
-endef
+R8_DEBUG_MODE :=
endif
-
-###########################################################
-## Commands to call R8
-###########################################################
define transform-jar-to-dex-r8
@echo R8: $@
-$(hide) $(R8_COMPAT_PROGUARD) -injars '$<' \
+$(hide) rm -f $(PRIVATE_PROGUARD_DICTIONARY)
+$(hide) $(R8_WRAPPER) $(R8_COMPAT_PROGUARD) $(DEX_FLAGS) \
+ -injars '$<' \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
+ --no-data-resources \
--force-proguard-compatibility --output $(subst classes.dex,,$@) \
+ $(R8_DEBUG_MODE) \
$(PRIVATE_PROGUARD_FLAGS) \
$(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR)) \
$(PRIVATE_DX_FLAGS)
+$(hide) touch $(PRIVATE_PROGUARD_DICTIONARY)
endef
###########################################################
@@ -2931,17 +2837,18 @@ endef
# $(5): New LOCAL_CERTIFICATE value.
# $(6): New LOCAL_INSTRUMENTATION_FOR value.
# $(7): New LOCAL_MANIFEST_INSTRUMENTATION_FOR value.
+# $(8): New LOCAL_COMPATIBILITY_SUITE value.
#
# Note that LOCAL_PACKAGE_OVERRIDES is NOT cleared in
# clear_vars.mk.
###########################################################
define inherit-package
- $(eval $(call inherit-package-internal,$(1),$(2),$(3),$(4),$(5),$(6),$(7)))
+ $(eval $(call inherit-package-internal,$(1),$(2),$(3),$(4),$(5),$(6),$(7),$(8)))
endef
define inherit-package-internal
LOCAL_PACKAGE_OVERRIDES \
- := $(strip $(1))||$(strip $(2))||$(strip $(3))||$(strip $(4))||&&$(strip $(5))||&&$(strip $(6))||&&$(strip $(7)) $(LOCAL_PACKAGE_OVERRIDES)
+ := $(strip $(1))||$(strip $(2))||$(strip $(3))||$(strip $(4))||&&$(strip $(5))||&&$(strip $(6))||&&$(strip $(7))||&&$(strip $(8)) $(LOCAL_PACKAGE_OVERRIDES)
include $(1)
LOCAL_PACKAGE_OVERRIDES \
:= $(wordlist 1,$(words $(LOCAL_PACKAGE_OVERRIDES)), $(LOCAL_PACKAGE_OVERRIDES))
@@ -2966,6 +2873,7 @@ define set-inherited-package-variables-internal
$(call keep-or-override,LOCAL_CERTIFICATE,$(patsubst &&%,%,$(word 5,$(_o)))) \
$(call keep-or-override,LOCAL_INSTRUMENTATION_FOR,$(patsubst &&%,%,$(word 6,$(_o)))) \
$(call keep-or-override,LOCAL_MANIFEST_INSTRUMENTATION_FOR,$(patsubst &&%,%,$(word 7,$(_o)))) \
+ $(call keep-or-override,LOCAL_COMPATIBILITY_SUITE,$(patsubst &&%,%,$(word 8,$(_o)))) \
$(eval LOCAL_OVERRIDES_PACKAGES := $(sort $(LOCAL_OVERRIDES_PACKAGES) $(word 2,$(_o)))) \
true \
,)
@@ -2990,7 +2898,7 @@ endef
define check-api
$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp: $(2) $(3) $(4) $(APICHECK) $(9)
@echo "Checking API:" $(1)
- $(hide) ( $(APICHECK_COMMAND) $(6) $(2) $(3) $(4) $(5) || ( $(7) ; exit 38 ) )
+ $(hide) ( $(APICHECK_COMMAND) --check-api-files $(6) $(2) $(3) $(4) $(5) || ( $(7) ; exit 38 ) )
$(hide) mkdir -p $$(dir $$@)
$(hide) touch $$@
$(8): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp
@@ -3089,15 +2997,23 @@ endef
# For each suite:
# 1. Copy the files to the many suite output directories.
+#    For test config files, also check that the .xml is well-formed before copying.
# 2. Add all the files to each suite's dependent files list.
-# 3. Do the dependency addition to my_all_targets
-# Requires for each suite: my_compat_dist_$(suite) to be defined.
+# 3. Do the dependency addition to my_all_targets.
+# 4. Save the module name to COMPATIBILITY.$(suite).MODULES for each suite.
+# Requires for each suite: my_compat_dist_config_$(suite) to define the test config
+# and my_compat_dist_$(suite) to define the other files.
define create-suite-dependencies
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval COMPATIBILITY.$(suite).FILES := \
- $$(COMPATIBILITY.$(suite).FILES) $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))))) \
+ $$(COMPATIBILITY.$(suite).FILES) $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
+ $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f)))) \
+ $(eval COMPATIBILITY.$(suite).MODULES := \
+ $$(COMPATIBILITY.$(suite).MODULES) $$(my_register_name))) \
$(eval $(my_all_targets) : $(call copy-many-files, \
- $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))))
+ $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))) \
+ $(call copy-many-xml-files-checked, \
+ $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_config_$(suite))))))
endef
###########################################################
@@ -3242,7 +3158,7 @@ endif
##
## $(1): path to validate
define try-validate-path-is-subdir
-$(strip
+$(strip \
$(if $(filter /%,$(1)),
$(1) starts with a slash
)
@@ -3408,10 +3324,12 @@ include $(BUILD_SYSTEM)/distdir.mk
# $(4): Whether LOCAL_EXPORT_PACKAGE_RESOURCES is set or
# not for the source module.
# $(5): Resource overlay list.
+# $(6): Target partition
###########################################################
define append_enforce_rro_sources
$(eval ENFORCE_RRO_SOURCES += \
- $(strip $(1))||$(strip $(2))||$(strip $(3))||$(strip $(4))||$(call normalize-path-list, $(strip $(5))))
+ $(strip $(1))||$(strip $(2))||$(strip $(3))||$(strip $(4))||$(call normalize-path-list, $(strip $(5)))||$(strip $(6)) \
+ )
endef
###########################################################
@@ -3426,9 +3344,9 @@ $(foreach source,$(ENFORCE_RRO_SOURCES), \
$(eval enforce_rro_source_manifest_package_info := $(word 3,$(_o))) \
$(eval enforce_rro_use_res_lib := $(word 4,$(_o))) \
$(eval enforce_rro_source_overlays := $(subst :, ,$(word 5,$(_o)))) \
- $(eval enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro) \
+ $(eval enforce_rro_partition := $(word 6,$(_o))) \
$(eval include $(BUILD_SYSTEM)/generate_enforce_rro.mk) \
- $(eval ALL_MODULES.$(enforce_rro_source_module).REQUIRED += $(enforce_rro_module)) \
+ $(eval ALL_MODULES.$$(enforce_rro_source_module).REQUIRED_FROM_TARGET += $$(LOCAL_PACKAGE_NAME)) \
)
endef
@@ -3451,23 +3369,63 @@ $(filter-out current,\
$(if $(call has-system-sdk-version,$(1)),$(patsubst system_%,%,$(1)),$(1)))
endef
-# Convert to lower case without requiring a shell, which isn't cacheable.
-to-lower=$(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
-
-# Convert to upper case without requiring a shell, which isn't cacheable.
-to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
-
-# Sanity-check to-lower and to-upper
-lower := abcdefghijklmnopqrstuvwxyz-_
-upper := ABCDEFGHIJKLMNOPQRSTUVWXYZ-_
+###########################################################
+## Verify module name meets character requirements:
+## a-z A-Z 0-9
+## _.+-,@~
+##
+## This is a subset of bazel's target name restrictions:
+## https://docs.bazel.build/versions/master/build-ref.html#name
+##
+## Kati has problems with '=': https://github.com/google/kati/issues/138
+###########################################################
+define verify-module-name
+$(if $(filter-out $(LOCAL_MODULE),$(subst /,,$(LOCAL_MODULE))), \
+ $(call pretty-warning,Module name contains a /$(comma) use LOCAL_MODULE_STEM and LOCAL_MODULE_RELATIVE_PATH instead)) \
+$(if $(call _invalid-name-chars,$(LOCAL_MODULE)), \
+ $(call pretty-error,Invalid characters in module name: $(call _invalid-name-chars,$(LOCAL_MODULE))))
+endef
+define _invalid-name-chars
+$(subst _,,$(subst .,,$(subst +,,$(subst -,,$(subst $(comma),,$(subst @,,$(subst ~,,$(subst 0,,$(subst 1,,$(subst 2,,$(subst 3,,$(subst 4,,$(subst 5,,$(subst 6,,$(subst 7,,$(subst 8,,$(subst 9,,$(subst a,,$(subst b,,$(subst c,,$(subst d,,$(subst e,,$(subst f,,$(subst g,,$(subst h,,$(subst i,,$(subst j,,$(subst k,,$(subst l,,$(subst m,,$(subst n,,$(subst o,,$(subst p,,$(subst q,,$(subst r,,$(subst s,,$(subst t,,$(subst u,,$(subst v,,$(subst w,,$(subst x,,$(subst y,,$(subst z,,$(call to-lower,$(1)))))))))))))))))))))))))))))))))))))))))))))
+endef
+.KATI_READONLY := verify-module-name _invalid-name-chars
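+# Sketch: _invalid-name-chars strips every allowed character and returns whatever
+# is left over, e.g. (module names are illustrative)
+#   $(call _invalid-name-chars,libfoo-bar_1.0)  -> empty, so the name is accepted
+#   $(call _invalid-name-chars,lib*foo)         -> *, so verify-module-name errors out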
-ifneq ($(lower),$(call to-lower,$(upper)))
- $(error to-lower sanity check failure)
-endif
+###########################################################
+## Verify module stem meets character requirements:
+## a-z A-Z 0-9
+## _.+-,@~
+##
+## This is a subset of bazel's target name restrictions:
+## https://docs.bazel.build/versions/master/build-ref.html#name
+##
+## $(1): The module stem variable to check
+###########################################################
+define verify-module-stem
+$(if $(filter-out $($(1)),$(subst /,,$($(1)))), \
+ $(call pretty-warning,Module stem \($(1)\) contains a /$(comma) use LOCAL_MODULE_RELATIVE_PATH instead)) \
+$(if $(call _invalid-name-chars,$($(1))), \
+ $(call pretty-error,Invalid characters in module stem \($(1)\): $(call _invalid-name-chars,$($(1)))))
+endef
+.KATI_READONLY := verify-module-stem
-ifneq ($(upper),$(call to-upper,$(lower)))
- $(error to-upper sanity check failure)
-endif
+$(KATI_obsolete_var \
+ create-empty-package \
+ initialize-package-file \
+ add-jni-shared-libs-to-package,\
+ These functions have been removed)
-lower :=
-upper :=
+###########################################################
+## Verify the variants of a VNDK library are identical
+##
+## $(1): Path to the core variant shared library file.
+## $(2): Path to the vendor variant shared library file.
+## $(3): TOOLS_PREFIX
+###########################################################
+LIBRARY_IDENTITY_CHECK_SCRIPT := build/make/tools/check_identical_lib.sh
+define verify-vndk-libs-identical
+@echo "Checking VNDK vendor variant: $(2)"
+$(hide) CLANG_BIN="$(LLVM_PREBUILTS_PATH)" \
+ CROSS_COMPILE="$(strip $(3))" \
+ XZ="$(XZ)" \
+ $(LIBRARY_IDENTITY_CHECK_SCRIPT) $(SOONG_STRIP_PATH) $(1) $(2)
+endef
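+# A hypothetical invocation (the library paths and tools prefix are illustrative):
+#   $(call verify-vndk-libs-identical,\
+#       $(core_variant_dir)/libfoo.so,\
+#       $(vendor_variant_dir)/libfoo.so,\
+#       $(my_tools_prefix))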
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 7298bdee09..180edafbbb 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -3,25 +3,7 @@
#
####################################
-# list of boot classpath jars for dexpreopt
-DEXPREOPT_BOOT_JARS := $(subst $(space),:,$(PRODUCT_BOOT_JARS))
-DEXPREOPT_BOOT_JARS_MODULES := $(PRODUCT_BOOT_JARS)
-PRODUCT_BOOTCLASSPATH := $(subst $(space),:,$(foreach m,$(DEXPREOPT_BOOT_JARS_MODULES),/system/framework/$(m).jar))
-
-PRODUCT_SYSTEM_SERVER_CLASSPATH := $(subst $(space),:,$(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),/system/framework/$(m).jar))
-
-DEXPREOPT_BUILD_DIR := $(OUT_DIR)
-DEXPREOPT_PRODUCT_DIR_FULL_PATH := $(PRODUCT_OUT)/dex_bootjars
-DEXPREOPT_PRODUCT_DIR := $(patsubst $(DEXPREOPT_BUILD_DIR)/%,%,$(DEXPREOPT_PRODUCT_DIR_FULL_PATH))
-DEXPREOPT_BOOT_JAR_DIR := system/framework
-DEXPREOPT_BOOT_JAR_DIR_FULL_PATH := $(DEXPREOPT_PRODUCT_DIR_FULL_PATH)/$(DEXPREOPT_BOOT_JAR_DIR)
-
-# The default value for LOCAL_DEX_PREOPT
-DEX_PREOPT_DEFAULT ?= true
-
-# The default filter for which files go into the system_other image (if it is
-# being used). To bundle everything one should set this to '%'
-SYSTEM_OTHER_ODEX_FILTER ?= app/% priv-app/%
+include $(BUILD_SYSTEM)/dex_preopt_config.mk
# Method returning whether the install path $(1) should be for system_other.
# Under SANITIZE_LITE, we do not want system_other. Just put things under /data/asan.
@@ -31,78 +13,37 @@ else
install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1)))))
endif
-# The default values for pre-opting: always preopt PIC.
-# Conditional to building on linux, as dex2oat currently does not work on darwin.
-ifeq ($(HOST_OS),linux)
- WITH_DEXPREOPT ?= true
- ifeq (eng,$(TARGET_BUILD_VARIANT))
- # Don't strip for quick development turnarounds.
- DEX_PREOPT_DEFAULT := nostripping
- # For an eng build only pre-opt the boot image and system server. This gives reasonable performance
- # and still allows a simple workflow: building in frameworks/base and syncing.
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= true
- endif
- # Add mini-debug-info to the boot classpath unless explicitly asked not to.
- ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
- PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
- endif
-
- # Non eng linux builds must have preopt enabled so that system server doesn't run as interpreter
- # only. b/74209329
- ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
- ifneq (true,$(WITH_DEXPREOPT))
- ifneq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
- $(call pretty-error, DEXPREOPT must be enabled for user and userdebug builds)
- endif
- endif
- endif
-endif
-
-GLOBAL_DEXPREOPT_FLAGS :=
-
-# Special rules for building stripped boot jars that override java_library.mk rules
-
-# $(1): boot jar module name
-define _dexpreopt-boot-jar-remove-classes.dex
-_dbj_jar_no_dex := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(1)_nodex.jar
-_dbj_src_jar := $(call intermediates-dir-for,JAVA_LIBRARIES,$(1),,COMMON)/javalib.jar
-
-$(call dexpreopt-copy-jar,$$(_dbj_src_jar),$$(_dbj_jar_no_dex),$(DEX_PREOPT_DEFAULT))
-
-_dbj_jar_no_dex :=
-_dbj_src_jar :=
-endef
-
-$(foreach b,$(DEXPREOPT_BOOT_JARS_MODULES),$(eval $(call _dexpreopt-boot-jar-remove-classes.dex,$(b))))
-
-include $(BUILD_SYSTEM)/dex_preopt_libart.mk
-
-# Define dexpreopt-one-file based on current default runtime.
-# $(1): the input .jar or .apk file
-# $(2): the output .odex file
-define dexpreopt-one-file
-$(call dex2oat-one-file,$(1),$(2))
-endef
-
-DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS := $(DEX2OAT_DEPENDENCY)
-DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT := $(DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)
-ifdef TARGET_2ND_ARCH
-$(TARGET_2ND_ARCH_VAR_PREFIX)DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT := $($(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)
-endif # TARGET_2ND_ARCH
-
-ifeq ($(PRODUCT_DIST_BOOT_AND_SYSTEM_JARS),true)
-boot_profile_jars_zip := $(PRODUCT_OUT)/boot_profile_jars.zip
-all_boot_jars := \
- $(foreach m,$(DEXPREOPT_BOOT_JARS_MODULES),$(PRODUCT_OUT)/system/framework/$(m).jar) \
- $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),$(PRODUCT_OUT)/system/framework/$(m).jar)
-
-$(boot_profile_jars_zip): PRIVATE_JARS := $(all_boot_jars)
-$(boot_profile_jars_zip): $(all_boot_jars) $(SOONG_ZIP)
- echo "Create boot profiles package: $@"
+# We want to install the profile even if we are not using preopt, since it is
+# required to generate the image on the device.
+ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT))
+
+# Install boot images. Note that there can be multiple.
+DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
+$(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+
+# Build the boot.zip, which contains the boot jars and their compilation output.
+# We can do this only if preopt is enabled and the product uses the libart config
+# (which sets the default properties for preopting).
+ifeq ($(WITH_DEXPREOPT), true)
+ifeq ($(PRODUCT_USES_ART), true)
+
+boot_zip := $(PRODUCT_OUT)/boot.zip
+bootclasspath_jars := $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+system_server_jars := $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),$(PRODUCT_OUT)/system/framework/$(m).jar)
+
+$(boot_zip): PRIVATE_BOOTCLASSPATH_JARS := $(bootclasspath_jars)
+$(boot_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
+$(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot)
+ @echo "Create boot package: $@"
rm -f $@
- $(SOONG_ZIP) -o $@ -C $(PRODUCT_OUT) $(PRIVATE_JARS)
+ $(SOONG_ZIP) -o $@.tmp \
+ -C $(dir $(firstword $(PRIVATE_BOOTCLASSPATH_JARS)))/.. $(addprefix -f ,$(PRIVATE_BOOTCLASSPATH_JARS)) \
+ -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS))
+ $(MERGE_ZIPS) $@ $@.tmp $(DEXPREOPT_IMAGE_ZIP_boot)
+ rm -f $@.tmp
-droidcore: $(boot_profile_jars_zip)
+$(call dist-for-goals, droidcore, $(boot_zip))
-$(call dist-for-goals, droidcore, $(boot_profile_jars_zip))
-endif
+endif #PRODUCT_USES_ART
+endif #WITH_DEXPREOPT
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
new file mode 100644
index 0000000000..b5834b08b3
--- /dev/null
+++ b/core/dex_preopt_config.mk
@@ -0,0 +1,188 @@
+DEX_PREOPT_CONFIG := $(PRODUCT_OUT)/dexpreopt.config
+
+# The default value for LOCAL_DEX_PREOPT
+DEX_PREOPT_DEFAULT ?= true
+
+# The default filter for which files go into the system_other image (if it is
+# being used). To bundle everything one should set this to '%'
+SYSTEM_OTHER_ODEX_FILTER ?= \
+ app/% \
+ priv-app/% \
+ product_services/app/% \
+ product_services/priv-app/% \
+ product/app/% \
+ product/priv-app/% \
+
+# The default values for pre-opting. To support the runtime module, we ensure that
+# no dex files get stripped.
+ifeq ($(PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING),)
+ PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING := true
+endif
+# Conditional to building on linux, as dex2oat currently does not work on darwin.
+ifeq ($(HOST_OS),linux)
+ ifeq (eng,$(TARGET_BUILD_VARIANT))
+ # Don't strip for quick development turnarounds.
+ DEX_PREOPT_DEFAULT := nostripping
+ # For an eng build only pre-opt the boot image and system server. This gives reasonable performance
+ # and still allows a simple workflow: building in frameworks/base and syncing.
+ WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= true
+ endif
+ # Add mini-debug-info to the boot classpath unless explicitly asked not to.
+ ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
+ PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
+ endif
+
+ # Non eng linux builds must have preopt enabled so that system server doesn't run as interpreter
+ # only. b/74209329
+ ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
+ ifneq (true,$(WITH_DEXPREOPT))
+ ifneq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
+ $(call pretty-error, DEXPREOPT must be enabled for user and userdebug builds)
+ endif
+ endif
+ endif
+endif
+
+# Default to the debug version of dex2oat to help find bugs.
+# Set USE_DEX2OAT_DEBUG to false to build only the non-debug version.
+ifeq ($(USE_DEX2OAT_DEBUG),false)
+DEX2OAT := $(SOONG_HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
+else
+DEX2OAT := $(SOONG_HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+endif
+
+DEX2OAT_DEPENDENCY += $(DEX2OAT)
+
+# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
+PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
+ $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
+
+# Use the first dirty-image-objects file in PRODUCT_COPY_FILES.
+DIRTY_IMAGE_OBJECTS := $(call word-colon,1,$(firstword \
+ $(filter %system/etc/dirty-image-objects,$(PRODUCT_COPY_FILES))))
+
+define get-product-default-property
+$(strip \
+ $(eval _prop := $(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))))\
+ $(if $(_prop),$(_prop),$(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_SYSTEM_DEFAULT_PROPERTIES)))))
+endef
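+# Sketch: look a default property up in the product configuration, e.g.
+#   $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
+# returns the value from PRODUCT_DEFAULT_PROPERTY_OVERRIDES if it is set there,
+# otherwise the value from PRODUCT_SYSTEM_DEFAULT_PROPERTIES.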
+
+DEX2OAT_IMAGE_XMS := $(call get-product-default-property,dalvik.vm.image-dex2oat-Xms)
+DEX2OAT_IMAGE_XMX := $(call get-product-default-property,dalvik.vm.image-dex2oat-Xmx)
+DEX2OAT_XMS := $(call get-product-default-property,dalvik.vm.dex2oat-Xms)
+DEX2OAT_XMX := $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
+
+ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),mips mips64))
+# MIPS specific overrides.
+# For MIPS the ART image is loaded at a lower address. This causes issues
+# with the image overlapping with memory on the host cross-compiling and
+# building the image. We therefore limit the Xmx value. This isn't done
+# via a property as we want the larger Xmx value if we're running on a
+# MIPS device.
+DEX2OAT_XMX := 128m
+endif
+
+ifeq ($(WRITE_SOONG_VARIABLES),true)
+
+ $(call json_start)
+
+ $(call add_json_bool, DefaultNoStripping, $(filter nostripping,$(DEX_PREOPT_DEFAULT)))
+ $(call add_json_bool, DisablePreopt, $(call invert_bool,$(filter true,$(WITH_DEXPREOPT))))
+ $(call add_json_list, DisablePreoptModules, $(DEXPREOPT_DISABLED_MODULES))
+ $(call add_json_bool, OnlyPreoptBootImageAndSystemServer, $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
+ $(call add_json_bool, GenerateApexImage, $(filter true,$(DEXPREOPT_GENERATE_APEX_IMAGE)))
+ $(call add_json_bool, UseApexImage, $(filter true,$(DEXPREOPT_USE_APEX_IMAGE)))
+ $(call add_json_bool, DontUncompressPrivAppsDex, $(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS)))
+ $(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
+ $(call add_json_bool, HasSystemOther, $(BOARD_USES_SYSTEM_OTHER_ODEX))
+ $(call add_json_list, PatternsOnSystemOther, $(SYSTEM_OTHER_ODEX_FILTER))
+ $(call add_json_bool, DisableGenerateProfile, $(filter false,$(WITH_DEX_PREOPT_GENERATE_PROFILE)))
+ $(call add_json_str, ProfileDir, $(PRODUCT_DEX_PREOPT_PROFILE_DIR))
+ $(call add_json_list, BootJars, $(PRODUCT_BOOT_JARS))
+ $(call add_json_list, RuntimeApexJars, $(RUNTIME_APEX_JARS))
+ $(call add_json_list, ProductUpdatableBootModules, $(PRODUCT_UPDATABLE_BOOT_MODULES))
+ $(call add_json_list, ProductUpdatableBootLocations, $(PRODUCT_UPDATABLE_BOOT_LOCATIONS))
+ $(call add_json_list, SystemServerJars, $(PRODUCT_SYSTEM_SERVER_JARS))
+ $(call add_json_list, SystemServerApps, $(PRODUCT_SYSTEM_SERVER_APPS))
+ $(call add_json_list, SpeedApps, $(PRODUCT_DEXPREOPT_SPEED_APPS))
+ $(call add_json_list, PreoptFlags, $(PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
+ $(call add_json_str, DefaultCompilerFilter, $(PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
+ $(call add_json_str, SystemServerCompilerFilter, $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
+ $(call add_json_bool, GenerateDmFiles, $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
+ $(call add_json_bool, NeverAllowStripping, $(PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING))
+ $(call add_json_bool, NoDebugInfo, $(filter false,$(WITH_DEXPREOPT_DEBUG_INFO)))
+ $(call add_json_bool, DontResolveStartupStrings, $(filter false,$(PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS)))
+ $(call add_json_bool, AlwaysSystemServerDebugInfo, $(filter true,$(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)))
+ $(call add_json_bool, NeverSystemServerDebugInfo, $(filter false,$(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)))
+ $(call add_json_bool, AlwaysOtherDebugInfo, $(filter true,$(PRODUCT_OTHER_JAVA_DEBUG_INFO)))
+ $(call add_json_bool, NeverOtherDebugInfo, $(filter false,$(PRODUCT_OTHER_JAVA_DEBUG_INFO)))
+ $(call add_json_list, MissingUsesLibraries, $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES))
+ $(call add_json_bool, IsEng, $(filter eng,$(TARGET_BUILD_VARIANT)))
+ $(call add_json_bool, SanitizeLite, $(SANITIZE_LITE))
+ $(call add_json_bool, DefaultAppImages, $(WITH_DEX_PREOPT_APP_IMAGE))
+ $(call add_json_str, Dex2oatXmx, $(DEX2OAT_XMX))
+ $(call add_json_str, Dex2oatXms, $(DEX2OAT_XMS))
+ $(call add_json_str, EmptyDirectory, $(OUT_DIR)/empty)
+
+ $(call add_json_map, CpuVariant)
+ $(call add_json_str, $(TARGET_ARCH), $(DEX2OAT_TARGET_CPU_VARIANT))
+ ifdef TARGET_2ND_ARCH
+ $(call add_json_str, $(TARGET_2ND_ARCH), $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT))
+ endif
+ $(call end_json_map)
+
+ $(call add_json_map, InstructionSetFeatures)
+ $(call add_json_str, $(TARGET_ARCH), $(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+ ifdef TARGET_2ND_ARCH
+ $(call add_json_str, $(TARGET_2ND_ARCH), $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+ endif
+ $(call end_json_map)
+
+ $(call add_json_str, DirtyImageObjects, $(DIRTY_IMAGE_OBJECTS))
+ $(call add_json_str, PreloadedClasses, $(PRELOADED_CLASSES))
+ $(call add_json_list, BootImageProfiles, $(PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION))
+ $(call add_json_bool, UseProfileForBootImage, $(call invert_bool,$(filter false,$(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE))))
+ $(call add_json_str, BootFlags, $(PRODUCT_DEX_PREOPT_BOOT_FLAGS))
+ $(call add_json_str, Dex2oatImageXmx, $(DEX2OAT_IMAGE_XMX))
+ $(call add_json_str, Dex2oatImageXms, $(DEX2OAT_IMAGE_XMS))
+
+ $(call add_json_map, Tools)
+ $(call add_json_str, Profman, $(SOONG_HOST_OUT_EXECUTABLES)/profman)
+ $(call add_json_str, Dex2oat, $(DEX2OAT))
+ $(call add_json_str, Aapt, $(SOONG_HOST_OUT_EXECUTABLES)/aapt)
+ $(call add_json_str, SoongZip, $(SOONG_ZIP))
+ $(call add_json_str, Zip2zip, $(ZIP2ZIP))
+ $(call add_json_str, VerifyUsesLibraries, $(BUILD_SYSTEM)/verify_uses_libraries.sh)
+ $(call add_json_str, ConstructContext, $(BUILD_SYSTEM)/construct_context.sh)
+ $(call end_json_map)
+
+ $(call json_end)
+
+ $(shell mkdir -p $(dir $(DEX_PREOPT_CONFIG)))
+ $(file >$(DEX_PREOPT_CONFIG).tmp,$(json_contents))
+
+ $(shell \
+ if ! cmp -s $(DEX_PREOPT_CONFIG).tmp $(DEX_PREOPT_CONFIG); then \
+ mv $(DEX_PREOPT_CONFIG).tmp $(DEX_PREOPT_CONFIG); \
+ else \
+ rm $(DEX_PREOPT_CONFIG).tmp; \
+ fi)
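+  # Note: $(file ...) writes the new contents to a .tmp file at parse time, and the cmp/mv
+  # shell step only replaces $(DEX_PREOPT_CONFIG) when the contents actually changed, so the
+  # file's timestamp is preserved and rules that depend on it are not rebuilt unnecessarily.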
+endif
+
+# Dummy rule to create dexpreopt.config; it will already have been created
+# by the $(file) call above, but a rule needs to exist to keep the dangling
+# rule check happy.
+$(DEX_PREOPT_CONFIG):
+ @#empty
+
+DEXPREOPT_GEN_DEPS := \
+ $(SOONG_HOST_OUT_EXECUTABLES)/profman \
+ $(DEX2OAT) \
+ $(SOONG_HOST_OUT_EXECUTABLES)/aapt \
+ $(SOONG_ZIP) \
+ $(ZIP2ZIP) \
+ $(BUILD_SYSTEM)/verify_uses_libraries.sh \
+ $(BUILD_SYSTEM)/construct_context.sh \
+
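+# DEXPREOPT_GEN_DEPS mirrors the Tools map written above; rules that run the generated
+# dexpreopt scripts order-depend on it (see the order-only prerequisite in
+# dex_preopt_odex_install.mk).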
+DEXPREOPT_STRIP_DEPS := \
+ $(ZIP2ZIP) \
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 9c4d55de77..85f2f3b2d0 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -1,207 +1,49 @@
####################################
-# dexpreopt support for ART
+# ART boot image installation
+# Input variable:
+# my_boot_image_name: the boot image to install
#
####################################
-# Default to debug version to help find bugs.
-# Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
-ifeq ($(USE_DEX2OAT_DEBUG),false)
-DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
-PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoat$(HOST_EXECUTABLE_SUFFIX)
-else
-DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
-PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoatd$(HOST_EXECUTABLE_SUFFIX)
-endif
-
-DEX2OAT_DEPENDENCY += $(DEX2OAT)
-PATCHOAT_DEPENDENCY += $(PATCHOAT)
-
-# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
-PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
- $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
-
-# Use the first compiled-classes file in PRODUCT_COPY_FILES.
-COMPILED_CLASSES := $(call word-colon,1,$(firstword \
- $(filter %system/etc/compiled-classes,$(PRODUCT_COPY_FILES))))
-
-# Use the first dirty-image-objects file in PRODUCT_COPY_FILES.
-DIRTY_IMAGE_OBJECTS := $(call word-colon,1,$(firstword \
- $(filter %system/etc/dirty-image-objects,$(PRODUCT_COPY_FILES))))
-
-define get-product-default-property
-$(strip \
- $(eval _prop := $(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))))\
- $(if $(_prop),$(_prop),$(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_SYSTEM_DEFAULT_PROPERTIES)))))
-endef
-
-DEX2OAT_IMAGE_XMS := $(call get-product-default-property,dalvik.vm.image-dex2oat-Xms)
-DEX2OAT_IMAGE_XMX := $(call get-product-default-property,dalvik.vm.image-dex2oat-Xmx)
-DEX2OAT_XMS := $(call get-product-default-property,dalvik.vm.dex2oat-Xms)
-DEX2OAT_XMX := $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
-
-ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),mips mips64))
-# MIPS specific overrides.
-# For MIPS the ART image is loaded at a lower address. This causes issues
-# with the image overlapping with memory on the host cross-compiling and
-# building the image. We therefore limit the Xmx value. This isn't done
-# via a property as we want the larger Xmx value if we're running on a
-# MIPS device.
-DEX2OAT_XMX := 128m
-endif
-
-########################################################################
-# The full system boot classpath
-
-# Returns the path to the .odex file
-# $(1): the arch name.
-# $(2): the full path (including file name) of the corresponding .jar or .apk.
-define get-odex-file-path
-$(dir $(2))oat/$(1)/$(basename $(notdir $(2))).odex
-endef
-
-# Returns the full path to the installed .odex file.
-# This handles BOARD_USES_SYSTEM_OTHER_ODEX to install odex files into another
-# partition.
-# $(1): the arch name.
-# $(2): the full install path (including file name) of the corresponding .apk.
-ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
-define get-odex-installed-file-path
-$(if $(call install-on-system-other, $(2)),
- $(call get-odex-file-path,$(1),$(patsubst $(TARGET_OUT)/%,$(TARGET_OUT_SYSTEM_OTHER)/%,$(2))),
- $(call get-odex-file-path,$(1),$(2)))
-endef
-else
-get-odex-installed-file-path = $(get-odex-file-path)
-endif
-
-# Returns the path to the image file (such as "/system/framework/<arch>/boot.art"
-# $(1): the arch name (such as "arm")
-# $(2): the image location (such as "/system/framework/boot.art")
-define get-image-file-path
-$(dir $(2))$(1)/$(notdir $(2))
-endef
+# Install primary arch vdex files into a shared location, and then symlink them to both the primary
+# and secondary arch directories.
+my_vdex_copy_pairs := $(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_ARCH))
+my_installed := $(foreach v,$(my_vdex_copy_pairs),$(PRODUCT_OUT)$(call word-colon,2,$(v)))
+$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
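+# Make the first installed vdex file depend on the rest, so that depending on just the first
+# file (see my_dexpreopt_image_extra_deps below) pulls in all of them.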
-# note we use core-libart.jar in place of core.jar for ART.
-LIBART_TARGET_BOOT_JARS := $(patsubst core, core-libart,$(DEXPREOPT_BOOT_JARS_MODULES))
-LIBART_TARGET_BOOT_DEX_LOCATIONS := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),/$(DEXPREOPT_BOOT_JAR_DIR)/$(jar).jar)
-LIBART_TARGET_BOOT_DEX_FILES := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),$(call intermediates-dir-for,JAVA_LIBRARIES,$(jar),,COMMON)/javalib.jar)
-
-# dex preopt on the bootclasspath produces multiple files. The first dex file
-# is converted into to boot.art (to match the legacy assumption that boot.art
-# exists), and the rest are converted to boot-<name>.art.
-# In addition, each .art file has an associated .oat file.
-LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).art.rel boot-$(jar).oat)
-LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.art.rel boot.oat
-LIBART_TARGET_BOOT_ART_VDEX_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).vdex)
-LIBART_TARGET_BOOT_ART_VDEX_FILES += boot.vdex
-
-# If we use a boot image profile.
-my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
-ifeq (,$(my_use_profile_for_boot_image))
-# If not set, set the default to true if we are not a PDK build. PDK builds
-# can't build the profile since they don't have frameworks/base.
-ifneq (true,$(TARGET_BUILD_PDK))
-my_use_profile_for_boot_image := true
-endif
-endif
-
-ifeq (true,$(my_use_profile_for_boot_image))
-
-# Location of text based profile for the boot image.
-my_boot_image_profile_location := $(PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION)
-ifeq (,$(my_boot_image_profile_location))
-# If not set, use the default.
-my_boot_image_profile_location := frameworks/base/config/boot-image-profile.txt
-endif
-
-# Code to create the boot image profile, not in dex_preopt_libart_boot.mk since the profile is the same for all archs.
-my_out_boot_image_profile_location := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/boot.prof
-$(my_out_boot_image_profile_location): PRIVATE_PROFILE_INPUT_LOCATION := $(my_boot_image_profile_location)
-$(my_out_boot_image_profile_location): $(PROFMAN) $(LIBART_TARGET_BOOT_DEX_FILES) $(my_boot_image_profile_location)
- @echo "target profman: $@"
- @mkdir -p $(dir $@)
- ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
- --create-profile-from=$(PRIVATE_PROFILE_INPUT_LOCATION) \
- $(addprefix --apk=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
- $(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
- --reference-profile-file=$@
-
-# We want to install the profile even if we are not using preopt since it is required to generate
-# the image on the device.
-my_installed_profile := $(TARGET_OUT)/etc/boot-image.prof
-$(eval $(call copy-one-file,$(my_out_boot_image_profile_location),$(my_installed_profile)))
-ALL_DEFAULT_INSTALLED_MODULES += $(my_installed_profile)
+my_built_vdex_dir := $(dir $(call word-colon,1,$(firstword $(my_vdex_copy_pairs))))
+my_installed_vdex_dir := $(PRODUCT_OUT)$(dir $(call word-colon,2,$(firstword $(my_vdex_copy_pairs))))
+$(my_installed): $(my_installed_vdex_dir)% : $(my_built_vdex_dir)%
+ @echo "Install: $@"
+ @rm -f $@
+ $(copy-file-to-target)
+ mkdir -p $(dir $@)/$(TARGET_ARCH)
+ ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_ARCH)
+ifdef TARGET_2ND_ARCH
+ ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ mkdir -p $(dir $@)/$(TARGET_2ND_ARCH)
+ ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_2ND_ARCH)
+ endif
endif
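+# The shared copy typically ends up as /system/framework/boot*.vdex, with per-arch symlinks
+# such as /system/framework/<arch>/boot*.vdex -> ../boot*.vdex (the exact layout is an
+# example; the actual directories come from the Soong-provided install paths).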
-LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES := $(addprefix $(PRODUCT_OUT)/$(DEXPREOPT_BOOT_JAR_DIR)/,$(LIBART_TARGET_BOOT_ART_VDEX_FILES))
+my_dexpreopt_image_extra_deps := $(firstword $(my_installed))
my_2nd_arch_prefix :=
include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
ifdef TARGET_2ND_ARCH
-my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-endif
+ ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
+ endif
endif
-# Copy shared vdex to the directory and create corresponding symlinks in primary and secondary arch.
-$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : PRIMARY_ARCH_DIR := $(dir $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
-$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : SECOND_ARCH_DIR := $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
-$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : $(DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)
- @echo "Install: $@"
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) cp "$(dir $<)$(notdir $@)" "$@"
- # Make symlink for both the archs. In the case its single arch the symlink will just get overridden.
- @mkdir -p $(PRIMARY_ARCH_DIR)
- $(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(PRIMARY_ARCH_DIR)$(notdir $@)
- @mkdir -p $(SECOND_ARCH_DIR)
- $(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(SECOND_ARCH_DIR)$(notdir $@)
-
my_2nd_arch_prefix :=
-########################################################################
-# For a single jar or APK
-# $(1): the input .jar or .apk file
-# $(2): the output .odex file
-# In the case where LOCAL_ENFORCE_USES_LIBRARIES is true, PRIVATE_DEX2OAT_CLASS_LOADER_CONTEXT
-# contains the normalized path list of the libraries. This makes it easier to conditionally prepend
-# org.apache.http.legacy.boot based on the SDK level if required.
-define dex2oat-one-file
-$(hide) rm -f $(2)
-$(hide) mkdir -p $(dir $(2))
-stored_class_loader_context_libs=$(PRIVATE_DEX2OAT_STORED_CLASS_LOADER_CONTEXT_LIBS) && \
-class_loader_context_arg=--class-loader-context=$(PRIVATE_DEX2OAT_CLASS_LOADER_CONTEXT) && \
-class_loader_context=$(PRIVATE_DEX2OAT_CLASS_LOADER_CONTEXT) && \
-stored_class_loader_context_arg="" && \
-uses_library_names="$(PRIVATE_USES_LIBRARY_NAMES)" && \
-optional_uses_library_names="$(PRIVATE_OPTIONAL_USES_LIBRARY_NAMES)" && \
-$(if $(PRIVATE_ENFORCE_USES_LIBRARIES), \
-source build/make/core/verify_uses_libraries.sh "$(1)" && \
-source build/make/core/construct_context.sh "$(PRIVATE_CONDITIONAL_USES_LIBRARIES_HOST)" "$(PRIVATE_CONDITIONAL_USES_LIBRARIES_TARGET)" && \
-,) \
-ANDROID_LOG_TAGS="*:e" $(DEX2OAT) \
- --runtime-arg -Xms$(DEX2OAT_XMS) --runtime-arg -Xmx$(DEX2OAT_XMX) \
- $${class_loader_context_arg} \
- $${stored_class_loader_context_arg} \
- --boot-image=$(PRIVATE_DEX_PREOPT_IMAGE_LOCATION) \
- --dex-file=$(1) \
- --dex-location=$(PRIVATE_DEX_LOCATION) \
- --oat-file=$(2) \
- --android-root=$(PRODUCT_OUT)/system \
- --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
- --instruction-set-variant=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT) \
- --instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
- --runtime-arg -Xnorelocate --compile-pic \
- --no-generate-debug-info --generate-build-id \
- --abort-on-hard-verifier-error \
- --force-determinism \
- --no-inline-from=core-oj.jar \
- $(PRIVATE_DEX_PREOPT_FLAGS) \
- $(PRIVATE_ART_FILE_PREOPT_FLAGS) \
- $(PRIVATE_PROFILE_PREOPT_FLAGS) \
- $(GLOBAL_DEXPREOPT_FLAGS)
-endef
+my_vdex_copy_pairs :=
+my_installed :=
+my_built_vdex_dir :=
+my_installed_vdex_dir :=
+my_dexpreopt_image_extra_deps :=
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index a5e7e881ad..34b8526581 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -1,131 +1,25 @@
-# Rules to build boot.art
+# Rules to install a boot image built by dexpreopt_bootjars.go
# Input variables:
+# my_boot_image_name: the boot image to install
# my_2nd_arch_prefix: indicates if this is to build for the 2nd arch.
+#  my_dexpreopt_image_extra_deps: extra dependencies to add to the installed boot.art
-# The image "location" is a symbolic path that with multiarchitecture
-# support doesn't really exist on the device. Typically it is
-# /system/framework/boot.art and should be the same for all supported
-# architectures on the device. The concrete architecture specific
-# content actually ends up in a "filename" that contains an
-# architecture specific directory name such as arm, arm64, mips,
-# mips64, x86, x86_64.
-#
-# Here are some example values for an x86_64 / x86 configuration:
-#
-# DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION=out/target/product/generic_x86_64/dex_bootjars/system/framework/boot.art
-# DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME=out/target/product/generic_x86_64/dex_bootjars/system/framework/x86_64/boot.art
-# LIBART_BOOT_IMAGE=/system/framework/x86_64/boot.art
-#
-# 2ND_DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION=out/target/product/generic_x86_64/dex_bootjars/system/framework/boot.art
-# 2ND_DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME=out/target/product/generic_x86_64/dex_bootjars/system/framework/x86/boot.art
-# 2ND_LIBART_BOOT_IMAGE=/system/framework/x86/boot.art
+# Install the boot images compiled by Soong
+# The first file (generally boot.art) is saved as DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
+# and the rest are added as dependencies of the first.
-$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/boot.art
-$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH)/boot.art
-$(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME := /$(DEXPREOPT_BOOT_JAR_DIR)/$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH)/boot.art
+my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(PRODUCT_OUT))
+$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
+$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE += $(firstword $(my_installed))
-# The .oat with symbols
-$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_OAT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)$(patsubst %.art,%.oat,$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME))
+# Install the unstripped boot images compiled by Soong into the symbols directory
+# The first file (generally boot.art) is made a dependency of DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
+# and the rest are added as dependencies of the first.
+my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(TARGET_OUT_UNSTRIPPED))
+$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(firstword $(my_installed))
-$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE := $(PRODUCT_OUT)$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME)
-$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
- $(LIBART_TARGET_BOOT_ART_EXTRA_FILES))
-$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
- $(LIBART_TARGET_BOOT_ART_VDEX_FILES))
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(my_dexpreopt_image_extra_deps)
-# If we have a compiled-classes file, create a parameter.
-COMPILED_CLASSES_FLAGS :=
-ifneq ($(COMPILED_CLASSES),)
- COMPILED_CLASSES_FLAGS := --compiled-classes=$(COMPILED_CLASSES)
-endif
-
-# If we have a dirty-image-objects file, create a parameter.
-DIRTY_IMAGE_OBJECTS_FLAGS :=
-ifneq ($(DIRTY_IMAGE_OBJECTS),)
- DIRTY_IMAGE_OBJECTS_FLAGS := --dirty-image-objects=$(DIRTY_IMAGE_OBJECTS)
-endif
-
-# The rule to install boot.art
-# Depends on installed boot.oat, boot-*.art, boot-*.oat
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES)
- @echo "Install: $@"
- $(copy-file-to-target)
-
-# The rule to install boot.oat, boot-*.art, boot-*.oat
-# Depends on built-but-not-installed boot.art
-$($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP)
- @echo "Install: $@"
- @mkdir -p $(dir $@)
- $(hide) $(ACP) -fp $(dir $<)$(notdir $@) $@
-
-ifeq (,$(my_out_boot_image_profile_location))
-my_boot_image_flags := $(COMPILED_CLASSES_FLAGS)
-my_boot_image_flags += --image-classes=$(PRELOADED_CLASSES)
-my_boot_image_flags += $(DIRTY_IMAGE_OBJECTS_FLAGS)
-else
-my_boot_image_flags := --compiler-filter=speed-profile
-my_boot_image_flags += --profile-file=$(my_out_boot_image_profile_location)
-endif
-
-ifneq (addresstrue,$(SANITIZE_TARGET)$(SANITIZE_LITE))
-# Skip recompiling the boot image for the second sanitization phase. We'll get separate paths
-# and invalidate first-stage artifacts which are crucial to SANITIZE_LITE builds.
-# Note: this is technically incorrect. Compiled code contains stack checks which may depend
-# on ASAN settings.
-
-# Use ANDROID_LOG_TAGS to suppress most logging by default...
-ifeq (,$(ART_BOOT_IMAGE_EXTRA_ARGS))
-DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:e"
-else
-# ...unless the boot image is generated specifically for testing, then allow all logging.
-DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:v"
-endif
-
-# An additional message to print on dex2oat failure.
-DEX2OAT_FAILURE_MESSAGE := ERROR: Dex2oat failed to compile a boot image.
-DEX2OAT_FAILURE_MESSAGE += It is likely that the boot classpath is inconsistent.
-ifeq ($(ONE_SHOT_MAKEFILE),)
- DEX2OAT_FAILURE_MESSAGE += Rebuild with ART_BOOT_IMAGE_EXTRA_ARGS="--runtime-arg -verbose:verifier" to see verification errors.
-else
- DEX2OAT_FAILURE_MESSAGE += Build with m, mma, or mmma instead of mm or mmm to remedy the situation.
-endif
-
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
-# Use dex2oat debug version for better error reporting
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(PATCHOAT_DEPENDENCY) $(my_out_boot_image_profile_location)
- @echo "target dex2oat: $@"
- @mkdir -p $(dir $@)
- @mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
- @rm -f $(dir $@)/*.art $(dir $@)/*.oat $(dir $@)/*.art.rel
- @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
- @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
- @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
- $(hide) $(DEX2OAT_BOOT_IMAGE_LOG_TAGS) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
- --runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
- $(PRIVATE_BOOT_IMAGE_FLAGS) \
- $(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
- $(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
- --oat-symbols=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED) \
- --oat-file=$(patsubst %.art,%.oat,$@) \
- --oat-location=$(patsubst %.art,%.oat,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_BOOT_IMAGE_FILENAME)) \
- --image=$@ --base=$(LIBART_IMG_TARGET_BASE_ADDRESS) \
- --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
- --instruction-set-variant=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT) \
- --instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
- --android-root=$(PRODUCT_OUT)/system \
- --runtime-arg -Xnorelocate --compile-pic \
- --no-generate-debug-info --generate-build-id \
- --multi-image --no-inline-from=core-oj.jar \
- --abort-on-hard-verifier-error \
- --abort-on-soft-verifier-error \
- $(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) \
- || ( echo "$(DEX2OAT_FAILURE_MESSAGE)" ; false ) && \
- $(DEX2OAT_BOOT_IMAGE_LOG_TAGS) ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
- --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
- --output-image-relocation-directory=$(dir $@) \
- --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
- --base-offset-delta=0x10000000
-
-endif
+my_installed :=
+my_built_installed :=
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index ce917590bf..85ddbfa36e 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -1,398 +1,295 @@
# dexpreopt_odex_install.mk is used to define odex creation rules for JARs and APKs
# This file depends on variables set in base_rules.mk
-# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX, built_odex,
-# dexpreopt_boot_jar_module
+# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX
+
+ifeq (true,$(LOCAL_USE_EMBEDDED_DEX))
+ LOCAL_UNCOMPRESS_DEX := true
+else
+ LOCAL_UNCOMPRESS_DEX :=
+endif
# We explicitly uncompress APKs of privileged apps, and of apps used by
# privileged apps.
-LOCAL_UNCOMPRESS_DEX := false
ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
- LOCAL_UNCOMPRESS_DEX := true
-else
+ ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+ LOCAL_UNCOMPRESS_DEX := true
+ endif
+
ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
LOCAL_UNCOMPRESS_DEX := true
- endif # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
-endif # LOCAL_PRIVILEGED_MODULE
+ endif
endif # DONT_UNCOMPRESS_PRIV_APPS_DEXS
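+# For example, a priv-app (LOCAL_PRIVILEGED_MODULE := true), or any module named in
+# PRODUCT_LOADED_BY_PRIVILEGED_MODULES, ends up with LOCAL_UNCOMPRESS_DEX := true here
+# unless DONT_UNCOMPRESS_PRIV_APPS_DEXS is set to true.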
# Setting LOCAL_DEX_PREOPT based on WITH_DEXPREOPT, LOCAL_DEX_PREOPT, etc
LOCAL_DEX_PREOPT := $(strip $(LOCAL_DEX_PREOPT))
-ifneq (true,$(WITH_DEXPREOPT))
- LOCAL_DEX_PREOPT :=
-else # WITH_DEXPREOPT=true
- ifeq (,$(TARGET_BUILD_APPS)) # TARGET_BUILD_APPS empty
- ifndef LOCAL_DEX_PREOPT # LOCAL_DEX_PREOPT undefined
- ifneq ($(filter $(TARGET_OUT)/%,$(my_module_path)),) # Installed to system.img.
- ifeq (,$(LOCAL_APK_LIBRARIES)) # LOCAL_APK_LIBRARIES empty
- # If we have product-specific config for this module?
- ifeq (disable,$(DEXPREOPT.$(TARGET_PRODUCT).$(LOCAL_MODULE).CONFIG))
- LOCAL_DEX_PREOPT := false
- else
- LOCAL_DEX_PREOPT := $(DEX_PREOPT_DEFAULT)
- endif
- else # LOCAL_APK_LIBRARIES not empty
- LOCAL_DEX_PREOPT := nostripping
- endif # LOCAL_APK_LIBRARIES not empty
- endif # Installed to system.img.
- endif # LOCAL_DEX_PREOPT undefined
- endif # TARGET_BUILD_APPS empty
-endif # WITH_DEXPREOPT=true
+ifndef LOCAL_DEX_PREOPT # LOCAL_DEX_PREOPT undefined
+ LOCAL_DEX_PREOPT := $(DEX_PREOPT_DEFAULT)
+endif
+
ifeq (false,$(LOCAL_DEX_PREOPT))
LOCAL_DEX_PREOPT :=
endif
+
+# Only enable preopt for non-test modules.
+ifneq (,$(filter $(LOCAL_MODULE_TAGS),tests))
+ LOCAL_DEX_PREOPT :=
+endif
+
+# Disable preopt if the module is listed in the product-specific DEXPREOPT_DISABLED_MODULES.
+ifneq (,$(filter $(LOCAL_MODULE),$(DEXPREOPT_DISABLED_MODULES)))
+ LOCAL_DEX_PREOPT :=
+endif
+
+# Disable preopt for TARGET_BUILD_APPS
+ifneq (,$(TARGET_BUILD_APPS))
+ LOCAL_DEX_PREOPT :=
+endif
+
+# Disable preopt if not WITH_DEXPREOPT
+ifneq (true,$(WITH_DEXPREOPT))
+ LOCAL_DEX_PREOPT :=
+endif
+
ifdef LOCAL_UNINSTALLABLE_MODULE
-LOCAL_DEX_PREOPT :=
+ LOCAL_DEX_PREOPT :=
endif
+
ifeq (,$(strip $(built_dex)$(my_prebuilt_src_file)$(LOCAL_SOONG_DEX_JAR))) # contains no java code
-LOCAL_DEX_PREOPT :=
+ LOCAL_DEX_PREOPT :=
endif
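+# At this point LOCAL_DEX_PREOPT is empty (preopt disabled) for test modules, modules in
+# DEXPREOPT_DISABLED_MODULES, modules that set LOCAL_DEX_PREOPT := false, unbundled app builds
+# (TARGET_BUILD_APPS), uninstallable modules, modules with no dex code, or when WITH_DEXPREOPT
+# is not true; otherwise it holds the module's own setting or DEX_PREOPT_DEFAULT.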
+
# If WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY=true and the module is not in the boot class
# path, skip preopt. System server jars are still preopted, since SELinux prevents the system
# server from loading anything from /data; without preopt they would need to be extracted, which
# is not favorable for RAM usage or performance. If my_preopt_for_extracted_apk is true, the
# boot-image-only option is ignored.
ifneq (true,$(my_preopt_for_extracted_apk))
-ifeq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
-ifeq ($(filter $(PRODUCT_SYSTEM_SERVER_JARS) $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE)),)
-LOCAL_DEX_PREOPT :=
-endif
-endif
+ ifeq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
+ ifeq ($(filter $(PRODUCT_SYSTEM_SERVER_JARS) $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE)),)
+ LOCAL_DEX_PREOPT :=
+ endif
+ endif
endif
-ifeq ($(LOCAL_DEX_PREOPT),true)
-
-# Don't strip with dexes we explicitly uncompress (dexopt will not store the dex code).
-ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
-LOCAL_DEX_PREOPT := nostripping
-endif # LOCAL_UNCOMPRESS_DEX
-
-# system_other isn't there for an OTA, so don't strip
-# if module is on system, and odex is on system_other.
-ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
-ifneq ($(call install-on-system-other, $(my_module_path)),)
-LOCAL_DEX_PREOPT := nostripping
-endif # install-on-system-other
-endif # BOARD_USES_SYSTEM_OTHER_ODEX
-
-# We also don't strip if all dexs are uncompressed (dexopt will not store the dex code),
-# but that requires to inspect the source file, which is too early at this point (as we
-# don't know if the source file will actually be used).
-# See dexpreopt-remove-classes.dex.
-
-endif # LOCAL_DEX_PREOPT
-
-built_odex :=
-built_vdex :=
-built_art :=
-installed_odex :=
-installed_vdex :=
-installed_art :=
-built_installed_odex :=
-built_installed_vdex :=
-built_installed_art :=
my_process_profile :=
my_profile_is_text_listing :=
ifeq (false,$(WITH_DEX_PREOPT_GENERATE_PROFILE))
-LOCAL_DEX_PREOPT_GENERATE_PROFILE := false
+ LOCAL_DEX_PREOPT_GENERATE_PROFILE := false
endif
ifndef LOCAL_DEX_PREOPT_GENERATE_PROFILE
-
-
-# If LOCAL_DEX_PREOPT_GENERATE_PROFILE is not defined, default it based on the existence of the
-# profile class listing. TODO: Use product specific directory here.
-my_classes_directory := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)
-LOCAL_DEX_PREOPT_PROFILE := $(my_classes_directory)/$(LOCAL_MODULE).prof
-
-ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
-my_process_profile := true
-my_profile_is_text_listing := false
-endif
+  # If LOCAL_DEX_PREOPT_GENERATE_PROFILE is not defined, default it based on the existence of the
+  # profile class listing. TODO: Use a product-specific directory here.
+ my_classes_directory := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)
+ LOCAL_DEX_PREOPT_PROFILE := $(my_classes_directory)/$(LOCAL_MODULE).prof
+
+ ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
+ my_process_profile := true
+ my_profile_is_text_listing :=
+ endif
else
-my_process_profile := $(LOCAL_DEX_PREOPT_GENERATE_PROFILE)
-my_profile_is_text_listing := true
-LOCAL_DEX_PREOPT_PROFILE := $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
+ my_process_profile := $(LOCAL_DEX_PREOPT_GENERATE_PROFILE)
+ my_profile_is_text_listing := true
+ LOCAL_DEX_PREOPT_PROFILE := $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
endif
ifeq (true,$(my_process_profile))
-
-ifeq (,$(LOCAL_DEX_PREOPT_APP_IMAGE))
-LOCAL_DEX_PREOPT_APP_IMAGE := true
+ ifndef LOCAL_DEX_PREOPT_PROFILE
+ $(call pretty-error,Must have specified class listing (LOCAL_DEX_PREOPT_PROFILE))
+ endif
+ ifeq (,$(dex_preopt_profile_src_file))
+ $(call pretty-error, Internal error: dex_preopt_profile_src_file must be set)
+ endif
endif
-ifndef LOCAL_DEX_PREOPT_PROFILE
-$(call pretty-error,Must have specified class listing (LOCAL_DEX_PREOPT_PROFILE))
-endif
-ifeq (,$(dex_preopt_profile_src_file))
-$(call pretty-error, Internal error: dex_preopt_profile_src_file must be set)
-endif
-my_built_profile := $(dir $(LOCAL_BUILT_MODULE))/profile.prof
-my_dex_location := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
-# Remove compressed APK extension.
-my_dex_location := $(patsubst %.gz,%,$(my_dex_location))
-$(my_built_profile): PRIVATE_BUILT_MODULE := $(dex_preopt_profile_src_file)
-$(my_built_profile): PRIVATE_DEX_LOCATION := $(my_dex_location)
-$(my_built_profile): PRIVATE_SOURCE_CLASSES := $(LOCAL_DEX_PREOPT_PROFILE)
-$(my_built_profile): $(LOCAL_DEX_PREOPT_PROFILE)
-$(my_built_profile): $(PROFMAN)
-$(my_built_profile): $(dex_preopt_profile_src_file)
-ifeq (true,$(my_profile_is_text_listing))
-# The profile is a test listing of classes (used for framework jars).
-# We need to generate the actual binary profile before being able to compile.
- $(hide) mkdir -p $(dir $@)
- ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
- --create-profile-from=$(PRIVATE_SOURCE_CLASSES) \
- --apk=$(PRIVATE_BUILT_MODULE) \
- --dex-location=$(PRIVATE_DEX_LOCATION) \
- --reference-profile-file=$@
-else
-# The profile is binary profile (used for apps). Run it through profman to
-# ensure the profile keys match the apk.
-$(my_built_profile):
- $(hide) mkdir -p $(dir $@)
- touch $@
- ANDROID_LOG_TAGS="*:i" $(PROFMAN) \
- --copy-and-update-profile-key \
- --profile-file=$(PRIVATE_SOURCE_CLASSES) \
- --apk=$(PRIVATE_BUILT_MODULE) \
- --dex-location=$(PRIVATE_DEX_LOCATION) \
- --reference-profile-file=$@ \
- || echo "Profile out of date for $(PRIVATE_BUILT_MODULE)"
+# If LOCAL_ENFORCE_USES_LIBRARIES is not set, default it to true if either LOCAL_USES_LIBRARIES
+# or LOCAL_OPTIONAL_USES_LIBRARIES is specified.
+ifeq (,$(LOCAL_ENFORCE_USES_LIBRARIES))
+ # Will change the default to true unconditionally in the future.
+ ifneq (,$(LOCAL_OPTIONAL_USES_LIBRARIES))
+ LOCAL_ENFORCE_USES_LIBRARIES := true
+ endif
+ ifneq (,$(LOCAL_USES_LIBRARIES))
+ LOCAL_ENFORCE_USES_LIBRARIES := true
+ endif
endif
-my_profile_is_text_listing :=
-dex_preopt_profile_src_file :=
-
-# Remove compressed APK extension.
-my_installed_profile := $(patsubst %.gz,%,$(LOCAL_INSTALLED_MODULE)).prof
-
-# my_installed_profile := $(LOCAL_INSTALLED_MODULE).prof
-$(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
-build_installed_profile:=$(my_built_profile):$(my_installed_profile)
-else
-build_installed_profile:=
-my_installed_profile :=
+my_dexpreopt_archs :=
+my_dexpreopt_images :=
+my_dexpreopt_infix := boot
+ifeq (true, $(DEXPREOPT_USE_APEX_IMAGE))
+ my_dexpreopt_infix := apex
endif
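+# my_dexpreopt_infix selects which Soong-built boot image variables are read below,
+# e.g. DEXPREOPT_IMAGE_boot_<arch> vs. DEXPREOPT_IMAGE_apex_<arch> (arch name illustrative).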
ifdef LOCAL_DEX_PREOPT
-
-dexpreopt_boot_jar_module := $(filter $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE))
-
-# Filter org.apache.http.legacy.boot.
-ifeq ($(dexpreopt_boot_jar_module),org.apache.http.legacy.boot)
-dexpreopt_boot_jar_module :=
-endif
-
-ifdef dexpreopt_boot_jar_module
-# For libart, the boot jars' odex files are replaced by $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE).
-# We use this installed_odex trick to get boot.art installed.
-installed_odex := $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)
-# Append the odex for the 2nd arch if we have one.
-installed_odex += $($(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)
-else # boot jar
-ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-# For a Java library, by default we build odex for both 1st arch and 2nd arch.
-# But it can be overridden with "LOCAL_MULTILIB := first".
-ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
-# For system server jars, we build for only "first".
-my_module_multilib := first
-else
-my_module_multilib := $(LOCAL_MULTILIB)
-endif
-# #################################################
-# Odex for the 1st arch
-my_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/setup_one_odex.mk
-# #################################################
-# Odex for the 2nd arch
-ifdef TARGET_2ND_ARCH
-ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
-ifneq (first,$(my_module_multilib))
-my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/setup_one_odex.mk
-endif # my_module_multilib is not first.
-endif # TARGET_TRANSLATE_2ND_ARCH not true
-endif # TARGET_2ND_ARCH
-# #################################################
-else # must be APPS
-# The preferred arch
-my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/setup_one_odex.mk
-ifdef TARGET_2ND_ARCH
-ifeq ($(LOCAL_MULTILIB),both)
-# The non-preferred arch
-my_2nd_arch_prefix := $(if $(LOCAL_2ND_ARCH_VAR_PREFIX),,$(TARGET_2ND_ARCH_VAR_PREFIX))
-include $(BUILD_SYSTEM)/setup_one_odex.mk
-endif # LOCAL_MULTILIB is both
-endif # TARGET_2ND_ARCH
-endif # LOCAL_MODULE_CLASS
-endif # boot jar
-
-built_odex := $(strip $(built_odex))
-built_vdex := $(strip $(built_vdex))
-built_art := $(strip $(built_art))
-installed_odex := $(strip $(installed_odex))
-installed_vdex := $(strip $(installed_vdex))
-installed_art := $(strip $(installed_art))
-
-ifdef built_odex
-ifeq (true,$(my_process_profile))
-$(built_odex): $(my_built_profile)
-$(built_odex): PRIVATE_PROFILE_PREOPT_FLAGS := --profile-file=$(my_built_profile)
-else
-$(built_odex): PRIVATE_PROFILE_PREOPT_FLAGS :=
-endif
-
-ifndef LOCAL_DEX_PREOPT_FLAGS
-LOCAL_DEX_PREOPT_FLAGS := $(DEXPREOPT.$(TARGET_PRODUCT).$(LOCAL_MODULE).CONFIG)
-ifndef LOCAL_DEX_PREOPT_FLAGS
-LOCAL_DEX_PREOPT_FLAGS := $(PRODUCT_DEX_PREOPT_DEFAULT_FLAGS)
-endif
-endif
-
-my_system_server_compiler_filter := $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-ifeq (,$(my_system_server_compiler_filter))
-my_system_server_compiler_filter := speed
-endif
-
-my_default_compiler_filter := $(PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER)
-ifeq (,$(my_default_compiler_filter))
-# If no default compiler filter is specified, default to 'quicken' to save on storage.
-my_default_compiler_filter := quicken
-endif
-
-ifeq (,$(filter --compiler-filter=%, $(LOCAL_DEX_PREOPT_FLAGS)))
- ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
- # Jars of system server, use the product option if it is set, speed otherwise.
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_system_server_compiler_filter)
- else
- ifneq (,$(filter $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(LOCAL_MODULE)))
- # Apps loaded into system server, and apps the product default to being compiled with the
- # 'speed' compiler filter.
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed
- else
- ifeq (true,$(my_process_profile))
- # For non system server jars, use speed-profile when we have a profile.
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed-profile
- else
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
- endif
+ ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
+    # Store the dex uncompressed for modules whose preopt files stay on /system (not system_other).
+ ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
+ ifeq ($(call install-on-system-other, $(my_module_path)),)
+ LOCAL_UNCOMPRESS_DEX := true
+ endif # install-on-system-other
+ else # BOARD_USES_SYSTEM_OTHER_ODEX
+ LOCAL_UNCOMPRESS_DEX := true
endif
endif
-endif
-my_generate_dm := $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES)
-ifeq (,$(filter $(LOCAL_DEX_PREOPT_FLAGS),--compiler-filter=verify))
-# Generating DM files only makes sense for verify, avoid doing for non verify compiler filter APKs.
-my_generate_dm := false
-endif
-
-# No reason to use a dm file if the dex is already uncompressed.
-ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
-my_generate_dm := false
-endif
-
-ifeq (true,$(my_generate_dm))
-LOCAL_DEX_PREOPT_FLAGS += --copy-dex-files=false
-LOCAL_DEX_PREOPT := nostripping
-my_built_dm := $(dir $(LOCAL_BUILT_MODULE))generated.dm
-my_installed_dm := $(patsubst %.apk,%,$(LOCAL_INSTALLED_MODULE)).dm
-my_copied_vdex := $(dir $(LOCAL_BUILT_MODULE))primary.vdex
-$(eval $(call copy-one-file,$(built_vdex),$(my_copied_vdex)))
-$(my_built_dm): PRIVATE_INPUT_VDEX := $(my_copied_vdex)
-$(my_built_dm): $(my_copied_vdex) $(ZIPTIME)
- $(hide) mkdir -p $(dir $@)
- $(hide) rm -f $@
- $(hide) zip -qD -j -X -9 $@ $(PRIVATE_INPUT_VDEX)
- $(ZIPTIME) $@
-$(eval $(call copy-one-file,$(my_built_dm),$(my_installed_dm)))
-endif
+ ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
+ my_module_multilib := $(LOCAL_MULTILIB)
+ # If the module is not an SDK library and it's a system server jar, only preopt the primary arch.
+ ifeq (,$(filter $(JAVA_SDK_LIBRARIES),$(LOCAL_MODULE)))
+ # For a Java library, by default we build odex for both 1st arch and 2nd arch.
+ # But it can be overridden with "LOCAL_MULTILIB := first".
+ ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
+ # For system server jars, we build for only "first".
+ my_module_multilib := first
+ endif
+ endif
-# By default, emit debug info.
-my_dexpreopt_debug_info := true
-# If the global setting suppresses mini-debug-info, disable it.
-ifeq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
- my_dexpreopt_debug_info := false
-endif
+  # When the second arch is translated, only preopt the primary arch, since only the primary
+  # arch has a boot image.
+ ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ my_module_multilib := first
+ endif
-# PRODUCT_SYSTEM_SERVER_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
-# PRODUCT_OTHER_JAVA_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
-ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
- ifeq (true,$(PRODUCT_SYSTEM_SERVER_DEBUG_INFO))
- my_dexpreopt_debug_info := true
- else ifeq (false,$(PRODUCT_SYSTEM_SERVER_DEBUG_INFO))
- my_dexpreopt_debug_info := false
+ # #################################################
+ # Odex for the 1st arch
+ my_dexpreopt_archs += $(TARGET_ARCH)
+ my_dexpreopt_images += $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_ARCH))
+ # Odex for the 2nd arch
+ ifdef TARGET_2ND_ARCH
+ ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ ifneq (first,$(my_module_multilib))
+ my_dexpreopt_archs += $(TARGET_2ND_ARCH)
+ my_dexpreopt_images += $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_2ND_ARCH))
+ endif # my_module_multilib is not first.
+ endif # TARGET_TRANSLATE_2ND_ARCH not true
+ endif # TARGET_2ND_ARCH
+ # #################################################
+ else # must be APPS
+ # The preferred arch
+ # Save the module multilib since setup_one_odex modifies it.
+ my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+ my_dexpreopt_archs += $(TARGET_$(my_2nd_arch_prefix)ARCH)
+ my_dexpreopt_images += \
+ $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_$(my_2nd_arch_prefix)ARCH))
+ ifdef TARGET_2ND_ARCH
+ ifeq ($(my_module_multilib),both)
+ # The non-preferred arch
+ my_2nd_arch_prefix := $(if $(LOCAL_2ND_ARCH_VAR_PREFIX),,$(TARGET_2ND_ARCH_VAR_PREFIX))
+ my_dexpreopt_archs += $(TARGET_$(my_2nd_arch_prefix)ARCH)
+ my_dexpreopt_images += \
+ $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_$(my_2nd_arch_prefix)ARCH))
+ endif # LOCAL_MULTILIB is both
+ endif # TARGET_2ND_ARCH
+ endif # LOCAL_MODULE_CLASS
+
+ # Record dex-preopt config.
+ DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
+ DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
+ DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
+ DEXPREOPT.$(LOCAL_MODULE).PRIVILEGED_MODULE := $(LOCAL_PRIVILEGED_MODULE)
+ DEXPREOPT.$(LOCAL_MODULE).VENDOR_MODULE := $(LOCAL_VENDOR_MODULE)
+ DEXPREOPT.$(LOCAL_MODULE).TARGET_ARCH := $(LOCAL_MODULE_TARGET_ARCH)
+ DEXPREOPT.$(LOCAL_MODULE).INSTALLED_STRIPPED := $(LOCAL_INSTALLED_MODULE)
+ DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
+ $(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
+
+ $(call json_start)
+
+  # DexPath, StripInputPath, and StripOutputPath are not set here; they will
+  # be filled in by dexpreopt_gen.
+
+ $(call add_json_str, Name, $(LOCAL_MODULE))
+ $(call add_json_str, DexLocation, $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE)))
+ $(call add_json_str, BuildPath, $(LOCAL_BUILT_MODULE))
+ $(call add_json_str, ExtrasOutputPath, $$2)
+ $(call add_json_bool, Privileged, $(filter true,$(LOCAL_PRIVILEGED_MODULE)))
+ $(call add_json_bool, UncompressedDex, $(filter true,$(LOCAL_UNCOMPRESS_DEX)))
+ $(call add_json_bool, HasApkLibraries, $(LOCAL_APK_LIBRARIES))
+ $(call add_json_list, PreoptFlags, $(LOCAL_DEX_PREOPT_FLAGS))
+ $(call add_json_str, ProfileClassListing, $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE)))
+ $(call add_json_bool, ProfileIsTextListing, $(my_profile_is_text_listing))
+ $(call add_json_bool, EnforceUsesLibraries, $(LOCAL_ENFORCE_USES_LIBRARIES))
+ $(call add_json_list, OptionalUsesLibraries, $(LOCAL_OPTIONAL_USES_LIBRARIES))
+ $(call add_json_list, UsesLibraries, $(LOCAL_USES_LIBRARIES))
+ $(call add_json_map, LibraryPaths)
+ $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
+ $(call add_json_str, $(lib), $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar))
+ $(call end_json_map)
+ $(call add_json_list, Archs, $(my_dexpreopt_archs))
+ $(call add_json_list, DexPreoptImages, $(my_dexpreopt_images))
+ $(call add_json_list, PreoptBootClassPathDexFiles, $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES))
+ $(call add_json_list, PreoptBootClassPathDexLocations,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
+ $(call add_json_bool, PreoptExtractedApk, $(my_preopt_for_extracted_apk))
+ $(call add_json_bool, NoCreateAppImage, $(filter false,$(LOCAL_DEX_PREOPT_APP_IMAGE)))
+ $(call add_json_bool, ForceCreateAppImage, $(filter true,$(LOCAL_DEX_PREOPT_APP_IMAGE)))
+ $(call add_json_bool, PresignedPrebuilt, $(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
+
+ $(call add_json_bool, NoStripping, $(filter nostripping,$(LOCAL_DEX_PREOPT)))
+
+ $(call json_end)
+
+ my_dexpreopt_config := $(intermediates)/dexpreopt.config
+ my_dexpreopt_script := $(intermediates)/dexpreopt.sh
+ my_strip_script := $(intermediates)/strip.sh
+ my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
+
+ $(my_dexpreopt_config): PRIVATE_MODULE := $(LOCAL_MODULE)
+ $(my_dexpreopt_config): PRIVATE_CONTENTS := $(json_contents)
+ $(my_dexpreopt_config):
+ @echo "$(PRIVATE_MODULE) dexpreopt.config"
+ echo -e -n '$(subst $(newline),\n,$(subst ','\'',$(subst \,\\,$(PRIVATE_CONTENTS))))' > $@
+
+ .KATI_RESTAT: $(my_dexpreopt_script) $(my_strip_script)
+ $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
+ $(my_dexpreopt_script): PRIVATE_GLOBAL_CONFIG := $(PRODUCT_OUT)/dexpreopt.config
+ $(my_dexpreopt_script): PRIVATE_MODULE_CONFIG := $(my_dexpreopt_config)
+ $(my_dexpreopt_script): PRIVATE_STRIP_SCRIPT := $(my_strip_script)
+ $(my_dexpreopt_script): .KATI_IMPLICIT_OUTPUTS := $(my_strip_script)
+ $(my_dexpreopt_script): $(DEXPREOPT_GEN)
+ $(my_dexpreopt_script): $(my_dexpreopt_config) $(PRODUCT_OUT)/dexpreopt.config
+ @echo "$(PRIVATE_MODULE) dexpreopt gen"
+ $(DEXPREOPT_GEN) -global $(PRIVATE_GLOBAL_CONFIG) -module $(PRIVATE_MODULE_CONFIG) \
+ -dexpreopt_script $@ -strip_script $(PRIVATE_STRIP_SCRIPT) \
+ -out_dir $(OUT_DIR)
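+  # dexpreopt_gen emits two scripts for this module: dexpreopt.sh (runs the generated dex2oat
+  # invocations and packs the preopt outputs into a zip) and strip.sh. .KATI_RESTAT above lets
+  # the build skip downstream work when the regenerated scripts turn out to be unchanged.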
+
+ my_dexpreopt_deps := $(my_dex_jar)
+ my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
+ my_dexpreopt_deps += \
+ $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
+ my_dexpreopt_deps += $(my_dexpreopt_images)
+ my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+
+ $(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
+ $(my_dexpreopt_zip): $(my_dexpreopt_deps)
+ $(my_dexpreopt_zip): | $(DEXPREOPT_GEN_DEPS)
+ $(my_dexpreopt_zip): .KATI_DEPFILE := $(my_dexpreopt_zip).d
+ $(my_dexpreopt_zip): PRIVATE_DEX := $(my_dex_jar)
+ $(my_dexpreopt_zip): PRIVATE_SCRIPT := $(my_dexpreopt_script)
+ $(my_dexpreopt_zip): $(my_dexpreopt_script)
+ @echo "$(PRIVATE_MODULE) dexpreopt"
+ bash $(PRIVATE_SCRIPT) $(PRIVATE_DEX) $@
+
+ ifdef LOCAL_POST_INSTALL_CMD
+ # Add a shell command separator
+ LOCAL_POST_INSTALL_CMD += &&
endif
-else
- ifeq (true,$(PRODUCT_OTHER_JAVA_DEBUG_INFO))
- my_dexpreopt_debug_info := true
- else ifeq (false,$(PRODUCT_OTHER_JAVA_DEBUG_INFO))
- my_dexpreopt_debug_info := false
- endif
-endif
-
-# Never enable on eng.
-ifeq (eng,$(filter eng, $(TARGET_BUILD_VARIANT)))
-my_dexpreopt_debug_info := false
-endif
-
-# Add dex2oat flag for debug-info/no-debug-info.
-ifeq (true,$(my_dexpreopt_debug_info))
- LOCAL_DEX_PREOPT_FLAGS += --generate-mini-debug-info
-else ifeq (false,$(my_dexpreopt_debug_info))
- LOCAL_DEX_PREOPT_FLAGS += --no-generate-mini-debug-info
-endif
-
-# Set the compiler reason to 'prebuilt' to identify the oat files produced
-# during the build, as opposed to compiled on the device.
-LOCAL_DEX_PREOPT_FLAGS += --compilation-reason=prebuilt
-
-$(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
-$(built_vdex): $(built_odex)
-$(built_art): $(built_odex)
-endif
-
-ifneq (true,$(my_generate_dm))
- # Add the installed_odex to the list of installed files for this module if we aren't generating a
- # dm file.
- ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
- ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
- ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
-
- ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
- ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
- ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
- # Make sure to install the .odex and .vdex when you run "make <module_name>"
- $(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
-else
- ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_dm)
- ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_dm) $(my_installed_dm)
+ LOCAL_POST_INSTALL_CMD += \
+ for i in $$(zipinfo -1 $(my_dexpreopt_zip)); \
+ do mkdir -p $(PRODUCT_OUT)/$$(dirname $$i); \
+ done && \
+ ( unzip -qo -d $(PRODUCT_OUT) $(my_dexpreopt_zip) 2>&1 | grep -v "zipfile is empty"; exit $${PIPESTATUS[0]} ) || \
+ ( code=$$?; if [ $$code -ne 0 -a $$code -ne 1 ]; then exit $$code; fi )
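+  # unzip may exit with status 1 for warnings (e.g. "zipfile is empty" when the module produced
+  # no preopt artifacts); the PIPESTATUS/code check above treats 0 and 1 as success and fails
+  # the install step for anything else.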
- # Make sure to install the .dm when you run "make <module_name>"
- $(my_all_targets): $(installed_dm)
-endif
+ $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
+ $(LOCAL_INSTALLED_MODULE): $(my_dexpreopt_zip)
-# Record dex-preopt config.
-DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
-DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
-DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
-DEXPREOPT.$(LOCAL_MODULE).PRIVILEGED_MODULE := $(LOCAL_PRIVILEGED_MODULE)
-DEXPREOPT.$(LOCAL_MODULE).VENDOR_MODULE := $(LOCAL_VENDOR_MODULE)
-DEXPREOPT.$(LOCAL_MODULE).TARGET_ARCH := $(LOCAL_MODULE_TARGET_ARCH)
-DEXPREOPT.$(LOCAL_MODULE).INSTALLED := $(installed_odex)
-DEXPREOPT.$(LOCAL_MODULE).INSTALLED_STRIPPED := $(LOCAL_INSTALLED_MODULE)
-DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
- $(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
+ $(my_all_targets): $(my_dexpreopt_zip)
+ my_dexpreopt_config :=
+ my_dexpreopt_script :=
+ my_strip_script :=
+ my_dexpreopt_zip :=
endif # LOCAL_DEX_PREOPT
-
-# Profile doesn't depend on LOCAL_DEX_PREOPT.
-ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_profile)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(build_installed_profile)
-
-my_process_profile :=
-
-$(my_all_targets): $(my_installed_profile)
diff --git a/core/distdir.mk b/core/distdir.mk
index c074186b8d..5f404075df 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -17,52 +17,53 @@
# When specifying "dist", the user has asked that we copy the important
# files from this build into DIST_DIR.
-ifdef dist_goal
-
-# $(1): source file
-# $(2): destination file
-# $(3): goals that should copy the file
-#
-define copy-one-dist-file
-$(3): $(2)
-$(2): $(1)
- @echo "Dist: $$@"
- $$(copy-file-to-new-target-with-cp)
-endef
-
-# A global variable to remember all dist'ed src:dst pairs.
-# So if a src:dst is already dist'ed by another goal,
-# we should just establish the dependency and don't really call the
-# copy-one-dist-file to avoid multiple rules for the same target.
+# list of all goals that depend on any dist files
+_all_dist_goals :=
+# pairs of goal:distfile
+_all_dist_goal_output_pairs :=
+# pairs of srcfile:distfile
_all_dist_src_dst_pairs :=
+
# Other parts of the system should use this function to associate
# certain files with certain goals. When those goals are built
# and "dist" is specified, the marked files will be copied to DIST_DIR.
#
-# $(1): a list of goals (e.g. droid, sdk, pdk, ndk)
+# $(1): a list of goals (e.g. droid, sdk, pdk, ndk). These must be PHONY
# $(2): the dist files to add to those goals. If the file contains ':',
# the text following the colon is the name that the file is copied
# to under the dist directory. Subdirs are ok, and will be created
# at copy time if necessary.
define dist-for-goals
+$(if $(strip $(2)), \
+ $(eval _all_dist_goals += $$(1))) \
$(foreach file,$(2), \
- $(eval fw := $(subst :,$(space),$(file))) \
- $(eval src := $(word 1,$(fw))) \
- $(eval dst := $(word 2,$(fw))) \
- $(eval dst := $(if $(dst),$(dst),$(notdir $(src)))) \
- $(if $(filter $(_all_dist_src_dst_pairs),$(src):$(dst)),\
- $(eval $(call add-dependency,$(1),$(DIST_DIR)/$(dst))),\
- $(eval $(call copy-one-dist-file,\
- $(src),$(DIST_DIR)/$(dst),$(1)))\
- $(eval _all_dist_src_dst_pairs += $(src):$(dst))\
- )\
-)
+ $(eval src := $(call word-colon,1,$(file))) \
+ $(eval dst := $(call word-colon,2,$(file))) \
+ $(if $(dst),,$(eval dst := $$(notdir $$(src)))) \
+ $(eval _all_dist_src_dst_pairs += $$(src):$$(dst)) \
+ $(foreach goal,$(1), \
+ $(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
endef
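+# Usage sketch (hypothetical file names):
+#   $(call dist-for-goals,droid,$(PRODUCT_OUT)/foo.zip:images/foo.zip)
+# records that building "droid dist" should copy foo.zip to $(DIST_DIR)/images/foo.zip; the
+# actual copy is performed later by the packaging step using the file written by dist-write-file.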
-else # !dist_goal
+#------------------------------------------------------------------
+# To be used at the end of the build to collect all the uses of
+# dist-for-goals, and write them into a file for the packaging step to use.
-# empty definition when not building dist
-define dist-for-goals
+# $(1): The file to write
+define dist-write-file
+$(strip \
+ $(KATI_obsolete_var dist-for-goals,Cannot be used after dist-write-file) \
+ $(foreach goal,$(sort $(_all_dist_goals)), \
+ $(eval $$(goal): _dist_$$(goal))) \
+ $(shell mkdir -p $(dir $(1))) \
+ $(file >$(1).tmp, \
+ DIST_GOAL_OUTPUT_PAIRS := $(sort $(_all_dist_goal_output_pairs)) \
+ $(newline)DIST_SRC_DST_PAIRS := $(sort $(_all_dist_src_dst_pairs))) \
+ $(shell if ! cmp -s $(1).tmp $(1); then \
+ mv $(1).tmp $(1); \
+ else \
+ rm $(1).tmp; \
+ fi))
endef
-endif # !dist_goal
+.KATI_READONLY := dist-for-goals dist-write-file
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index f32daf500e..ad073c7eee 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -18,11 +18,7 @@ $(built_dpi_apk): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
$(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
$(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
$(built_dpi_apk): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
-$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
-else
-$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
-endif
+$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
$(built_dpi_apk): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
$(built_dpi_apk): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_INSTRUMENTATION_FOR)
$(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
@@ -47,27 +43,31 @@ $(built_dpi_apk) : $(R_file_stamp)
$(built_dpi_apk) : $(all_library_res_package_export_deps)
$(built_dpi_apk) : $(private_key) $(certificate) $(SIGNAPK_JAR)
$(built_dpi_apk) : $(AAPT)
+$(built_dpi_apk) : $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
$(built_dpi_apk) : $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
@echo "target Package: $(PRIVATE_MODULE) ($@)"
- $(if $(PRIVATE_SOURCE_ARCHIVE),\
- $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@),\
- $(create-empty-package))
- $(add-assets-to-package)
+ rm -rf $@.parts
+ mkdir -p $@.parts
+ $(call create-assets-package,$@.parts/apk.zip)
ifneq ($(jni_shared_libraries),)
- $(add-jni-shared-libs-to-package)
+ $(call create-jni-shared-libs-package,$@.parts/jni.zip)
endif
ifeq ($(full_classes_jar),)
# We don't build a jar, so we need to add the Java resources here.
- $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@))
+ $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call create-java-resources-jar,$@.parts/res.zip))
else
- $(add-dex-to-package)
+ $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+ $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
endif
+ $(MERGE_ZIPS) $@ $@.parts/*.zip
+ rm -rf $@.parts
$(sign-package)
# Set up global variables to register this apk to the higher-level dependency graph.
ALL_MODULES += $(dpi_apk_name)
ALL_MODULES.$(dpi_apk_name).CLASS := APPS
ALL_MODULES.$(dpi_apk_name).BUILT := $(built_dpi_apk)
+ALL_MODULES.$(dpi_apk_name).TARGET_BUILT := $(built_dpi_apk)
PACKAGES := $(PACKAGES) $(dpi_apk_name)
PACKAGES.$(dpi_apk_name).PRIVATE_KEY := $(private_key)
PACKAGES.$(dpi_apk_name).CERTIFICATE := $(certificate)
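Editor's note: the new recipe assembles the APK from intermediate part zips and merges them in one deterministic pass instead of mutating a single archive in place. A rough sketch of the pattern with hypothetical names (`my_apk`, `apk_parts`); the real rule above builds each part conditionally and signs the result afterwards, and the recipe lines below need a literal tab in a real makefile.

    # Each stage writes its own zip under $@.parts; merge_zips then combines them.
    my_apk := $(intermediates)/package.apk              # hypothetical name
    apk_parts := $(foreach p,apk jni res dex,$(my_apk).parts/$(p).zip)

    $(my_apk): $(apk_parts) $(MERGE_ZIPS)
        $(MERGE_ZIPS) $@ $(filter %.zip,$^)
        rm -rf $@.parts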
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
deleted file mode 100644
index bcd2002e01..0000000000
--- a/core/droiddoc.mk
+++ /dev/null
@@ -1,367 +0,0 @@
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-$(call record-module-type,DROIDDOC)
-###########################################################
-## Common logic to both droiddoc and javadoc
-###########################################################
-LOCAL_IS_HOST_MODULE := $(call true-or-empty,$(LOCAL_IS_HOST_MODULE))
-ifeq ($(LOCAL_IS_HOST_MODULE),true)
-my_prefix := HOST_
-LOCAL_HOST_PREFIX :=
-else
-my_prefix := TARGET_
-endif
-
-LOCAL_MODULE_CLASS := $(strip $(LOCAL_MODULE_CLASS))
-ifndef LOCAL_MODULE_CLASS
-$(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined)
-endif
-
-full_src_files := $(patsubst %,$(LOCAL_PATH)/%,$(LOCAL_SRC_FILES))
-out_dir := $(OUT_DOCS)/$(LOCAL_MODULE)
-full_target := $(call doc-timestamp-for,$(LOCAL_MODULE))
-
-ifeq ($(LOCAL_DROIDDOC_SOURCE_PATH),)
-LOCAL_DROIDDOC_SOURCE_PATH := $(LOCAL_PATH)
-endif
-
-ifeq ($(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR),)
-LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR := $(SRC_DROIDDOC_DIR)/$(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)
-endif
-
-ifeq ($(LOCAL_DROIDDOC_ASSET_DIR),)
-LOCAL_DROIDDOC_ASSET_DIR := assets
-endif
-ifeq ($(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR),)
-LOCAL_DROIDDOC_CUSTOM_ASSET_DIR := assets
-endif
-
-ifeq ($(LOCAL_IS_HOST_MODULE),true)
-
-$(full_target): PRIVATE_BOOTCLASSPATH :=
-full_java_libs := $(addprefix $(HOST_OUT_JAVA_LIBRARIES)/,\
- $(addsuffix $(COMMON_JAVA_PACKAGE_SUFFIX),$(LOCAL_JAVA_LIBRARIES)))
-
-else
-
-ifneq ($(LOCAL_SDK_VERSION),)
- ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
- # Use android_stubs_current if LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
- LOCAL_JAVA_LIBRARIES := android_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
- LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_system_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
- LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_test_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
- LOCAL_JAVA_LIBRARIES := core.current.stubs $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core.current.stubs)
- else
- # core_<ver> is subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
- # use the stub for <ver> when building for apps.
- _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
- LOCAL_JAVA_LIBRARIES := sdk_v$(_version) $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(_version))
- _version :=
- endif
-else
- ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
- LOCAL_JAVA_LIBRARIES := core-oj core-libart
- else
- LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
- endif
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
-endif # LOCAL_SDK_VERSION
-LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
-
-full_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES)) $(LOCAL_CLASSPATH)
-endif # !LOCAL_IS_HOST_MODULE
-
-$(full_target): PRIVATE_CLASSPATH := $(call normalize-path-list,$(full_java_libs))
-
-intermediates.COMMON := $(call local-intermediates-dir,COMMON)
-
-$(full_target): PRIVATE_SOURCE_PATH := $(call normalize-path-list,$(LOCAL_DROIDDOC_SOURCE_PATH))
-$(full_target): PRIVATE_JAVA_FILES := $(filter %.java,$(full_src_files))
-$(full_target): PRIVATE_JAVA_FILES += $(addprefix $($(my_prefix)OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
-$(full_target): PRIVATE_JAVA_FILES += $(filter %.java,$(LOCAL_GENERATED_SOURCES))
-$(full_target): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_target): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
-$(full_target): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
-$(full_target): PRIVATE_SRC_LIST_FILE := $(intermediates.COMMON)/droiddoc-src-list
-$(full_target): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/droiddoc-srcjar-list
-
-ifneq ($(strip $(LOCAL_ADDITIONAL_JAVA_DIR)),)
-$(full_target): PRIVATE_ADDITIONAL_JAVA_DIR := $(LOCAL_ADDITIONAL_JAVA_DIR)
-endif
-
-$(full_target): PRIVATE_OUT_DIR := $(out_dir)
-$(full_target): PRIVATE_DROIDDOC_OPTIONS := $(LOCAL_DROIDDOC_OPTIONS)
-$(full_target): PRIVATE_STUB_OUT_DIR := $(LOCAL_DROIDDOC_STUB_OUT_DIR)
-$(full_target): PRIVATE_METALAVA_DOCS_STUB_OUT_DIR := $(LOCAL_DROIDDOC_METALAVA_DOCS_STUB_OUT_DIR)
-
-# Lists the input files for the doc build into a text file
-# suitable for the @ syntax of javadoc.
-# $(1): the file to create
-# $(2): files to include
-# $(3): list of directories to search for java files in
-define prepare-doc-source-list
-$(hide) mkdir -p $(dir $(1))
-$(call dump-words-to-file, $(2), $(1))
-$(hide) for d in $(3) ; do find $$d -name '*.java' -and -not -name '.*' >> $(1) 2> /dev/null ; done ; true
-endef
-
-###########################################################
-## Logic for droiddoc only
-###########################################################
-ifneq ($(strip $(LOCAL_DROIDDOC_USE_STANDARD_DOCLET)),true)
-
-droiddoc_templates := \
- $(sort $(shell find $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR) -type f $(if $(ALLOW_MISSING_DEPENDENCIES),2>/dev/null)))
-
-ifdef ALLOW_MISSING_DEPENDENCIES
- ifndef droiddoc_templates
- droiddoc_templates := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)
- endif
-endif
-
-$(full_target): PRIVATE_DOCLETPATH := $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX):$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-$(full_target): PRIVATE_CURRENT_BUILD := -hdf page.build $(BUILD_ID)-$(BUILD_NUMBER_FROM_FILE)
-$(full_target): PRIVATE_CURRENT_TIME := -hdf page.now "$$($(DATE_FROM_FILE) "+%d %b %Y %k:%M")"
-$(full_target): PRIVATE_CUSTOM_TEMPLATE_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)
-$(full_target): PRIVATE_IN_CUSTOM_ASSET_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
-$(full_target): PRIVATE_OUT_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_ASSET_DIR)
-$(full_target): PRIVATE_OUT_CUSTOM_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
-
-html_dir_files :=
-ifneq ($(strip $(LOCAL_DROIDDOC_HTML_DIR)),)
-$(full_target): PRIVATE_DROIDDOC_HTML_DIR := -htmldir $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR)
-html_dir_files := $(sort $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f))
-else
-$(full_target): PRIVATE_DROIDDOC_HTML_DIR :=
-endif
-ifneq ($(strip $(LOCAL_ADDITIONAL_HTML_DIR)),)
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR := -htmldir2 $(LOCAL_PATH)/$(LOCAL_ADDITIONAL_HTML_DIR)
-else
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR :=
-endif
-
-# TODO(nanzhang): Remove it if this is not used any more
-$(full_target): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-
-ifeq ($(strip $(LOCAL_DROIDDOC_USE_METALAVA)),true)
-ifneq ($(LOCAL_DROIDDOC_METALAVA_PREVIOUS_API),)
-$(full_target): PRIVATE_DROIDDOC_METALAVA_PREVIOUS_API := --previous-api $(LOCAL_DROIDDOC_METALAVA_PREVIOUS_API)
-else
-$(full_target): PRIVATE_DROIDDOC_METALAVA_PREVIOUS_API :=
-endif #!LOCAL_DROIDDOC_METALAVA_PREVIOUS_API
-
-metalava_annotations_deps :=
-ifeq ($(strip $(LOCAL_DROIDDOC_METALAVA_ANNOTATIONS_ENABLED)),true)
-ifeq ($(LOCAL_DROIDDOC_METALAVA_PREVIOUS_API),)
-$(error $(LOCAL_PATH): LOCAL_DROIDDOC_METALAVA_PREVIOUS_API has to be non-empty if metalava annotations was enabled!)
-endif
-ifeq ($(LOCAL_DROIDDOC_METALAVA_MERGE_ANNOTATIONS_DIR),)
-$(error $(LOCAL_PATH): LOCAL_DROIDDOC_METALAVA_MERGE_ANNOTATIONS_DIR has to be non-empty if metalava annotations was enabled!)
-endif
-
-$(full_target): PRIVATE_DROIDDOC_METALAVA_ANNOTATIONS := --include-annotations --migrate-nullness \
- --extract-annotations $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip \
- --merge-annotations $(LOCAL_DROIDDOC_METALAVA_MERGE_ANNOTATIONS_DIR) \
- --hide HiddenTypedefConstant --hide SuperfluousPrefix --hide AnnotationExtraction
-metalava_annotations_deps := $(sort $(shell find $(LOCAL_DROIDDOC_METALAVA_MERGE_ANNOTATIONS_DIR) -type f))
-else
-$(full_target): PRIVATE_DROIDDOC_METALAVA_ANNOTATIONS :=
-endif #LOCAL_DROIDDOC_METALAVA_ANNOTATIONS_ENABLED=true
-
-ifneq (,$(filter --generate-documentation,$(LOCAL_DROIDDOC_OPTIONS)))
-
-pos = $(if $(findstring $1,$2),$(call pos,$1,$(wordlist 2,$(words $2),$2),x $3),$3)
-metalava_args := $(wordlist 1, $(words $(call pos,--generate-documentation,$(LOCAL_DROIDDOC_OPTIONS))), \
- $(LOCAL_DROIDDOC_OPTIONS))
-remaining_args := $(wordlist $(words $(call pos,--generate-documentation,$(LOCAL_DROIDDOC_OPTIONS))), \
- $(words $(LOCAL_DROIDDOC_OPTIONS)), $(LOCAL_DROIDDOC_OPTIONS))
-doclava_args := $(wordlist 2, $(words $(remaining_args)), $(remaining_args))
-
-$(full_target): \
- $(full_src_files) $(LOCAL_GENERATED_SOURCES) \
- $(droiddoc_templates) \
- $(HOST_JDK_TOOLS_JAR) \
- $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(html_dir_files) \
- $(full_java_libs) \
- $(ZIPSYNC) \
- $(LOCAL_SRCJARS) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES) \
- $(LOCAL_DROIDDOC_METALAVA_PREVIOUS_API) \
- $(metalava_annotations_deps)
- @echo metalava based docs: $(PRIVATE_OUT_DIR)
- $(hide) mkdir -p $(dir $@)
- $(hide) rm -rf $(PRIVATE_STUB_OUT_DIR)
- $(hide) rm -rf $(PRIVATE_METALAVA_DOCS_STUB_OUT_DIR)
- $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
- $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
- $(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
- $(hide) ( \
- $(JAVA) -jar $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- -encoding UTF-8 -source 1.8 \@$(PRIVATE_SRC_LIST_FILE) \@$(PRIVATE_SRCJAR_LIST_FILE) \
- $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
- --sourcepath $(PRIVATE_SOURCE_PATH) \
- --no-banner --color --quiet \
- $(addprefix --stubs ,$(PRIVATE_STUB_OUT_DIR)) \
- $(addprefix --doc-stubs ,$(PRIVATE_METALAVA_DOCS_STUB_OUT_DIR)) \
- --write-stubs-source-list $(intermediates.COMMON)/stubs-src-list \
- $(metalava_args) $(PRIVATE_DROIDDOC_METALAVA_PREVIOUS_API) $(PRIVATE_DROIDDOC_METALAVA_ANNOTATIONS) \
- $(JAVADOC) -encoding UTF-8 -source 1.8 STUBS_SOURCE_LIST \
- -J-Xmx1600m -J-XX:-OmitStackTraceInFastThrow -XDignore.symbol.file \
- -quiet -doclet com.google.doclava.Doclava -docletpath $(PRIVATE_DOCLETPATH) \
- -templatedir $(PRIVATE_CUSTOM_TEMPLATE_DIR) \
- $(PRIVATE_DROIDDOC_HTML_DIR) $(PRIVATE_ADDITIONAL_HTML_DIR) \
- $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
- -sourcepath $(PRIVATE_SOURCE_PATH) \
- -d $(PRIVATE_OUT_DIR) \
- $(PRIVATE_CURRENT_BUILD) $(PRIVATE_CURRENT_TIME) $(doclava_args) \
- && touch -f $@ ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
-else
-# no docs generation
-$(full_target): \
- $(full_src_files) $(LOCAL_GENERATED_SOURCES) \
- $(full_java_libs) \
- $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(ZIPSYNC) \
- $(LOCAL_SRCJARS) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
- @echo metalava based stubs: $@
- $(hide) mkdir -p $(dir $@)
- $(hide) rm -rf $(PRIVATE_STUB_OUT_DIR)
- $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
- $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
- $(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
- $(hide) ( \
- $(JAVA) -jar $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- -encoding UTF-8 -source 1.8 \@$(PRIVATE_SRC_LIST_FILE) \@$(PRIVATE_SRCJAR_LIST_FILE) \
- $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
- --sourcepath $(PRIVATE_SOURCE_PATH) \
- $(PRIVATE_DROIDDOC_OPTIONS) $(PRIVATE_DROIDDOC_METALAVA_PREVIOUS_API) $(PRIVATE_DROIDDOC_METALAVA_ANNOTATIONS) \
- --no-banner --color --quiet \
- $(addprefix --stubs ,$(PRIVATE_STUB_OUT_DIR)) \
- && touch -f $@ ) || (rm -rf $(PRIVATE_SRC_LIST_FILE); exit 45)
-
-endif # stubs + docs generation
-ifeq ($(strip $(LOCAL_DROIDDOC_METALAVA_ANNOTATIONS_ENABLED)),true)
-$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip: $(full_target)
-endif
-
-else # doclava based droiddoc generation
-
-# TODO(tobiast): Clean this up once we move to -source 1.9.
-# OpenJDK 9 does not have the concept of a "boot classpath" so we should
-# then rename PRIVATE_BOOTCLASSPATH to PRIVATE_MODULE or similar. For now,
-# keep -bootclasspath here since it works in combination with -source 1.8.
-$(full_target): \
- $(full_src_files) $(LOCAL_GENERATED_SOURCES) \
- $(droiddoc_templates) \
- $(HOST_JDK_TOOLS_JAR) \
- $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX) \
- $(html_dir_files) \
- $(full_java_libs) \
- $(ZIPSYNC) \
- $(LOCAL_SRCJARS) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
- @echo Docs droiddoc: $(PRIVATE_OUT_DIR)
- $(hide) mkdir -p $(dir $@)
- $(hide) rm -rf $(PRIVATE_STUB_OUT_DIR)
- $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
- $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
- $(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
- $(hide) ( \
- $(JAVADOC) \
- -encoding UTF-8 -source 1.8 \@$(PRIVATE_SRC_LIST_FILE) \@$(PRIVATE_SRCJAR_LIST_FILE) \
- -J-Xmx1600m -J-XX:-OmitStackTraceInFastThrow -XDignore.symbol.file \
- -quiet -doclet com.google.doclava.Doclava -docletpath $(PRIVATE_DOCLETPATH) \
- -templatedir $(PRIVATE_CUSTOM_TEMPLATE_DIR) \
- $(PRIVATE_DROIDDOC_HTML_DIR) $(PRIVATE_ADDITIONAL_HTML_DIR) \
- $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
- -sourcepath $(PRIVATE_SOURCE_PATH)$(addprefix :,$(PRIVATE_CLASSPATH)) \
- -d $(PRIVATE_OUT_DIR) \
- $(PRIVATE_CURRENT_BUILD) $(PRIVATE_CURRENT_TIME) $(PRIVATE_DROIDDOC_OPTIONS) \
- $(addprefix -stubs ,$(PRIVATE_STUB_OUT_DIR)) \
- && touch -f $@ ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
-endif #LOCAL_DROIDDOC_USE_METALAVA
-
-else
-
-###########################################################
-## Logic for javadoc only
-###########################################################
-ifdef USE_OPENJDK9
-# For OpenJDK 9 we use --patch-module to define the core libraries code.
-# TODO(tobiast): Reorganize this when adding proper support for OpenJDK 9
-# modules. Here we treat all code in core libraries as being in java.base
-# to work around the OpenJDK 9 module system. http://b/62049770
-$(full_target): PRIVATE_BOOTCLASSPATH_ARG := --patch-module=java.base=$(PRIVATE_BOOTCLASSPATH)
-else
-# For OpenJDK 8 we can use -bootclasspath to define the core libraries code.
-$(full_target): PRIVATE_BOOTCLASSPATH_ARG := $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH))
-endif
-$(full_target): $(full_src_files) $(LOCAL_GENERATED_SOURCES) $(full_java_libs) $(ZIPSYNC) $(LOCAL_SRCJARS) $(LOCAL_ADDITIONAL_DEPENDENCIES)
- @echo Docs javadoc: $(PRIVATE_OUT_DIR)
- @mkdir -p $(dir $@)
- $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
- $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
- $(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
- $(hide) ( \
- $(JAVADOC) -encoding UTF-8 \@$(PRIVATE_SRC_LIST_FILE) \@$(PRIVATE_SRCJAR_LIST_FILE) \
- $(PRIVATE_DROIDDOC_OPTIONS) -J-Xmx1024m -XDignore.symbol.file -Xdoclint:none -quiet \
- $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) $(PRIVATE_BOOTCLASSPATH_ARG) \
- -sourcepath $(PRIVATE_SOURCE_PATH) \
- -d $(PRIVATE_OUT_DIR) \
- && touch -f $@ \
- ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
-
-endif # !LOCAL_DROIDDOC_USE_STANDARD_DOCLET
-
-ALL_DOCS += $(full_target)
-
-.PHONY: $(LOCAL_MODULE)-docs
-$(LOCAL_MODULE)-docs : $(full_target)
-
-ifeq ($(strip $(LOCAL_UNINSTALLABLE_MODULE)),)
-
-# Define a rule to create a zip of these docs.
-out_zip := $(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip
-$(out_zip): PRIVATE_DOCS_DIR := $(out_dir)
-$(out_zip): $(full_target)
- @echo Package docs: $@
- @rm -f $@
- @mkdir -p $(dir $@)
- $(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_DOCS_DIR) && zip -rqX $$F * )
-
-$(LOCAL_MODULE)-docs.zip : $(out_zip)
-
-$(call dist-for-goals,docs,$(out_zip))
-
-endif #!LOCAL_UNINSTALLABLE_MODULE
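Editor's note: one easy-to-miss piece of the deleted file is the `pos` word-index helper used to split LOCAL_DROIDDOC_OPTIONS around --generate-documentation. A small trace of what it evaluates to, using a placeholder options list:

    # `pos` returns one "x" per word up to and including the first word that
    # contains the needle, so $(words $(call pos,needle,list)) is its 1-based index.
    pos = $(if $(findstring $1,$2),$(call pos,$1,$(wordlist 2,$(words $2),$2),x $3),$3)
    sample_opts := -offline-mode --generate-documentation -federate SampleDoc
    # $(words $(call pos,--generate-documentation,$(sample_opts))) -> 2,
    # so metalava received words 1..2 and doclava words 3..4 of the list,
    # which is exactly how metalava_args/doclava_args were split above.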
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index f44b8a88f2..0accdc0fe7 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -35,49 +35,11 @@ ALL_ORIGINAL_DYNAMIC_BINARIES += $(linked_module)
LOCAL_INTERMEDIATE_TARGETS := $(linked_module)
###################################
+include $(BUILD_SYSTEM)/use_lld_setup.mk
include $(BUILD_SYSTEM)/binary.mk
###################################
###########################################################
-## Pack relocation tables
-###########################################################
-relocation_packer_input := $(linked_module)
-relocation_packer_output := $(intermediates)/PACKED/$(my_built_module_stem)
-
-my_pack_module_relocations := false
-ifneq ($(DISABLE_RELOCATION_PACKER),true)
- my_pack_module_relocations := $(firstword \
- $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
- $(LOCAL_PACK_MODULE_RELOCATIONS))
-endif
-
-ifeq ($(my_pack_module_relocations),)
- my_pack_module_relocations := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_PACK_MODULE_RELOCATIONS)
-endif
-
-# Do not pack relocations for executables. Because packing results in
-# non-zero p_vaddr which causes kernel to load executables to lower
-# address (starting at 0x8000) http://b/20665974
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
- my_pack_module_relocations := false
-endif
-
-# TODO (dimitry): Relocation packer is not yet available for darwin
-ifneq ($(HOST_OS),linux)
- my_pack_module_relocations := false
-endif
-
-ifeq (true,$(my_pack_module_relocations))
-# Pack relocations
-$(relocation_packer_output): $(relocation_packer_input)
- $(pack-elf-relocations)
-else
-$(relocation_packer_output): $(relocation_packer_input)
- @echo "target Unpacked: $(PRIVATE_MODULE) ($@)"
- $(copy-file-to-target)
-endif
-
-###########################################################
## Store a copy with symbols for symbolic debugging
###########################################################
ifeq ($(LOCAL_UNSTRIPPED_PATH),)
@@ -85,7 +47,7 @@ my_unstripped_path := $(TARGET_OUT_UNSTRIPPED)/$(patsubst $(PRODUCT_OUT)/%,%,$(m
else
my_unstripped_path := $(LOCAL_UNSTRIPPED_PATH)
endif
-symbolic_input := $(relocation_packer_output)
+symbolic_input := $(linked_module)
symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
$(symbolic_output) : $(symbolic_input)
@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
@@ -97,7 +59,7 @@ $(symbolic_output) : $(symbolic_input)
ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
my_breakpad_path := $(TARGET_OUT_BREAKPAD)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
-breakpad_input := $(relocation_packer_output)
+breakpad_input := $(linked_module)
breakpad_output := $(my_breakpad_path)/$(my_installed_module_stem).sym
$(breakpad_output) : $(breakpad_input) | $(BREAKPAD_DUMP_SYMS) $(PRIVATE_READELF)
@echo "target breakpad: $(PRIVATE_MODULE) ($@)"
@@ -124,50 +86,47 @@ ifeq ($(my_strip_module),)
my_strip_module := mini-debug-info
endif
+ifeq ($(my_strip_module),false)
+ my_strip_module :=
+endif
+
+my_strip_args :=
ifeq ($(my_strip_module),mini-debug-info)
-# Don't use mini-debug-info on mips (both 32-bit and 64-bit). objcopy checks that all
-# SH_MIPS_DWARF sections having name prefix .debug_ or .zdebug_, so there seems no easy
-# way using objcopy to remove all debug sections except .debug_frame on mips.
-ifneq ($(filter mips mips64,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
- my_strip_module := true
+ my_strip_args += --keep-mini-debug-info
+else ifeq ($(my_strip_module),keep_symbols)
+ my_strip_args += --keep-symbols
endif
+
+ifeq (,$(filter no_debuglink mini-debug-info,$(my_strip_module)))
+ ifneq ($(TARGET_BUILD_VARIANT),user)
+ my_strip_args += --add-gnu-debuglink
+ endif
endif
-$(strip_output): PRIVATE_STRIP := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
-$(strip_output): PRIVATE_OBJCOPY := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY)
-$(strip_output): PRIVATE_NM := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM)
-$(strip_output): PRIVATE_READELF := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_READELF)
-ifeq ($(my_strip_module),no_debuglink)
-$(strip_output): PRIVATE_NO_DEBUGLINK := true
-else
-$(strip_output): PRIVATE_NO_DEBUGLINK :=
+ifeq ($($(my_prefix)OS),darwin)
+ # llvm-strip does not support Darwin Mach-O yet.
+ my_strip_args += --use-gnu-strip
endif
-ifeq ($(my_strip_module),mini-debug-info)
-# Strip the binary, but keep debug frames and symbol table in a compressed .gnu_debugdata section.
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM)
- $(transform-to-stripped-keep-mini-debug-info)
-else ifneq ($(filter true no_debuglink,$(my_strip_module)),)
-# Strip the binary
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
- $(transform-to-stripped)
-else ifeq ($(my_strip_module),keep_symbols)
-# Strip only the debug frames, but leave the symbol table.
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
- $(transform-to-stripped-keep-symbols)
-
-# A product may be configured to strip everything in some build variants.
-# We do the stripping as a post-install command so that LOCAL_BUILT_MODULE
-# is still with the symbols and we don't need to clean it (and relink) when
-# you switch build variant.
-ifneq ($(filter $(STRIP_EVERYTHING_BUILD_VARIANTS),$(TARGET_BUILD_VARIANT)),)
-$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) --strip-all $(LOCAL_INSTALLED_MODULE)
+valid_strip := mini-debug-info keep_symbols true no_debuglink
+ifneq (,$(filter-out $(valid_strip),$(my_strip_module)))
+ $(call pretty-error,Invalid strip value $(my_strip_module); only one of $(valid_strip) allowed)
endif
+
+ifneq (,$(my_strip_module))
+ $(strip_output): PRIVATE_STRIP_ARGS := $(my_strip_args)
+ $(strip_output): PRIVATE_TOOLS_PREFIX := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX)
+ $(strip_output): $(strip_input) $(SOONG_STRIP_PATH)
+ @echo "$($(PRIVATE_PREFIX)DISPLAY) Strip: $(PRIVATE_MODULE) ($@)"
+ CLANG_BIN=$(LLVM_PREBUILTS_PATH) \
+ CROSS_COMPILE=$(PRIVATE_TOOLS_PREFIX) \
+ XZ=$(XZ) \
+ $(SOONG_STRIP_PATH) -i $< -o $@ -d $@.d $(PRIVATE_STRIP_ARGS)
+ $(call include-depfile,$(strip_output).d)
else
-# Don't strip the binary, just copy it. We can't skip this step
-# because a copy of the binary must appear at LOCAL_BUILT_MODULE.
-$(strip_output): $(strip_input)
+ # Don't strip the binary, just copy it. We can't skip this step
+ # because a copy of the binary must appear at LOCAL_BUILT_MODULE.
+ $(strip_output): $(strip_input)
@echo "target Unstripped: $(PRIVATE_MODULE) ($@)"
$(copy-file-to-target)
endif # my_strip_module
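Editor's note: with this change every strip flavor funnels into one rule that calls the Soong strip script, and the flavor only selects flags. A condensed map of the accepted values to the resulting invocation; the library and output paths are placeholders.

    # LOCAL_STRIP_MODULE value -> extra $(SOONG_STRIP_PATH) arguments
    #   mini-debug-info : --keep-mini-debug-info
    #   keep_symbols    : --keep-symbols  (+ --add-gnu-debuglink on non-user builds)
    #   true            : plain strip     (+ --add-gnu-debuglink on non-user builds)
    #   no_debuglink    : plain strip, never --add-gnu-debuglink
    #   false / empty   : no strip invocation; the binary is just copied
    # (on Darwin hosts --use-gnu-strip is appended as well)
    #
    # e.g. (module paths are placeholders):
    #   CLANG_BIN=$(LLVM_PREBUILTS_PATH) CROSS_COMPILE=$(PRIVATE_TOOLS_PREFIX) XZ=$(XZ) \
    #     $(SOONG_STRIP_PATH) -i obj/libfoo.so -o stripped/libfoo.so \
    #       -d stripped/libfoo.so.d --keep-mini-debug-info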
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 700189e566..5131598a94 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -25,11 +25,13 @@ endef
#$(warning $(call find_and_earlier,A B C,D))
define version-list
-$(1)PR1 $(1)PD1 $(1)PD2 $(1)PM1 $(1)PM2
+$(1)P1A $(1)P1B $(1)P2A $(1)P2B $(1)D1A $(1)D1B $(1)D2A $(1)D2B $(1)Q1A $(1)Q1B $(1)Q2A $(1)Q2B $(1)Q3A $(1)Q3B
endef
-ALL_VERSIONS := O P Q R S T U V W X Y Z
-ALL_VERSIONS := $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
+PREV_VERSIONS := OPR1 OPD1 OPD2 OPM1 OPM2 PPR1 PPD1 PPD2 PPM1 PPM2 QPR1
+ALL_VERSIONS := Q R S T U V W X Y Z
+ALL_VERSIONS := $(PREV_VERSIONS) $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
+PREV_VERSIONS :=
# Filters ALL_VERSIONS down to the range [$1, $2], and errors if $1 > $2 or $3 is
# not in [$1, $2]
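Editor's note: the version grid switches from the five old dessert-cycle suffixes to fourteen quarterly ones per letter, with the already-shipped names kept verbatim. For example (using R purely as an illustration):

    # $(call version-list,R) now expands to:
    #   RP1A RP1B RP2A RP2B RD1A RD1B RD2A RD2B RQ1A RQ1B RQ2A RQ2B RQ3A RQ3B
    # and ALL_VERSIONS prepends the frozen OPR1 ... QPR1 entries, so existing
    # min/max platform-version checks keep resolving.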
@@ -147,6 +149,50 @@ $(error Building on a 32-bit x86 host is not supported: $(UNAME)!)
endif
endif
+ifeq ($(HOST_OS),darwin)
+ # Mac no longer supports 32-bit executables
+ HOST_2ND_ARCH :=
+endif
+
+HOST_2ND_ARCH_VAR_PREFIX := 2ND_
+HOST_2ND_ARCH_MODULE_SUFFIX := _32
+HOST_CROSS_2ND_ARCH_VAR_PREFIX := 2ND_
+HOST_CROSS_2ND_ARCH_MODULE_SUFFIX := _64
+TARGET_2ND_ARCH_VAR_PREFIX := 2ND_
+.KATI_READONLY := \
+ HOST_ARCH \
+ HOST_2ND_ARCH \
+ HOST_IS_64_BIT \
+ HOST_2ND_ARCH_VAR_PREFIX \
+ HOST_2ND_ARCH_MODULE_SUFFIX \
+ HOST_CROSS_2ND_ARCH_VAR_PREFIX \
+ HOST_CROSS_2ND_ARCH_MODULE_SUFFIX \
+ TARGET_2ND_ARCH_VAR_PREFIX \
+
+combo_target := HOST_
+combo_2nd_arch_prefix :=
+include $(BUILD_COMBOS)/select.mk
+
+ifdef HOST_2ND_ARCH
+ combo_2nd_arch_prefix := $(HOST_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+endif
+
+# Load the windows cross compiler under Linux
+ifdef HOST_CROSS_OS
+ combo_target := HOST_CROSS_
+ combo_2nd_arch_prefix :=
+ include $(BUILD_SYSTEM)/combo/select.mk
+
+ ifdef HOST_CROSS_2ND_ARCH
+ combo_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+ endif
+endif
+
+# On Windows, the tools have .exe at the end, and we depend on the
+# host config being done first.
+
BUILD_ARCH := $(HOST_ARCH)
BUILD_2ND_ARCH := $(HOST_2ND_ARCH)
@@ -178,42 +224,34 @@ TARGET_COPY_OUT_SYSTEM_OTHER := system_other
TARGET_COPY_OUT_DATA := data
TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
TARGET_COPY_OUT_OEM := oem
-TARGET_COPY_OUT_ODM := odm
-TARGET_COPY_OUT_PRODUCT := product
+TARGET_COPY_OUT_RAMDISK := ramdisk
+TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
+# The directories used for optional partitions depend on the BoardConfig, so
+# they're defined as placeholder values here and swapped after reading the
+# BoardConfig, to be either the partition dir or a subdir within 'system'.
+_vendor_path_placeholder := ||VENDOR-PATH-PH||
+_product_path_placeholder := ||PRODUCT-PATH-PH||
+_product_services_path_placeholder := ||PRODUCT_SERVICES-PATH-PH||
+_odm_path_placeholder := ||ODM-PATH-PH||
+TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
+TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
+TARGET_COPY_OUT_PRODUCT_SERVICES := $(_product_services_path_placeholder)
+TARGET_COPY_OUT_ODM := $(_odm_path_placeholder)
# Returns the non-sanitized version of the path provided in $1.
define get_non_asan_path
$(patsubst $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/%,$(PRODUCT_OUT)/%,$1)
endef
-###########################################
-# Define TARGET_COPY_OUT_VENDOR to a placeholder, for at this point
-# we don't know if the device wants to build a separate vendor.img
-# or just build vendor stuff into system.img.
-# A device can set up TARGET_COPY_OUT_VENDOR to "vendor" in its
-# BoardConfig.mk.
-# We'll substitute with the real value after loading BoardConfig.mk.
-_vendor_path_placeholder := ||VENDOR-PATH-PH||
-TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
-###########################################
-
-###########################################
-# Define TARGET_COPY_OUT_PRODUCT to a placeholder, for at this point
-# we don't know if the device wants to build a separate product.img
-# or just build product stuff into system.img.
-# A device can set up TARGET_COPY_OUT_PRODUCT to "product" in its
-# BoardConfig.mk.
-# We'll substitute with the real value after loading BoardConfig.mk.
-_product_path_placeholder := ||PRODUCT-PATH-PH||
-TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
-###########################################
-
#################################################################
# Set up minimal BOOTCLASSPATH list of jars to build/execute
# java code with dalvikvm/art.
-TARGET_CORE_JARS := core-oj core-libart conscrypt okhttp bouncycastle apache-xml
+# Jars present in the runtime apex. These should match exactly the list of
+# Java libraries in the runtime apex build rule.
+RUNTIME_APEX_JARS := core-oj core-libart okhttp bouncycastle apache-xml
+TARGET_CORE_JARS := $(RUNTIME_APEX_JARS) conscrypt
ifeq ($(EMMA_INSTRUMENT),true)
ifneq ($(EMMA_INSTRUMENT_STATIC),true)
# For instrumented build, if Jacoco is not being included statically
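Editor's note: the placeholder-and-swap pattern itself is unchanged; it just moves out of this file into board_config.mk (included via the new hook further down). The vendor block deleted in the next hunk shows the shape of the substitution, roughly:

    # Sketch (variable names as in the deleted block below; the exact
    # board_config.mk code may differ):
    ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder))
      TARGET_COPY_OUT_VENDOR := system/vendor
    endif
    PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES))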
@@ -236,127 +274,9 @@ $(error must be empty or one of: eng user userdebug)
endif
SDK_HOST_ARCH := x86
-
-# Boards may be defined under $(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)
-# or under vendor/*/$(TARGET_DEVICE). Search in both places, but
-# make sure only one exists.
-# Real boards should always be associated with an OEM vendor.
-board_config_mk := \
- $(strip $(sort $(wildcard \
- $(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)/BoardConfig.mk \
- $(shell test -d device && find -L device -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
- $(shell test -d vendor && find -L vendor -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
- )))
-ifeq ($(board_config_mk),)
- $(error No config file found for TARGET_DEVICE $(TARGET_DEVICE))
-endif
-ifneq ($(words $(board_config_mk)),1)
- $(error Multiple board config files for TARGET_DEVICE $(TARGET_DEVICE): $(board_config_mk))
-endif
-include $(board_config_mk)
-ifeq ($(TARGET_ARCH),)
- $(error TARGET_ARCH not defined by board config: $(board_config_mk))
-endif
-ifneq ($(MALLOC_IMPL),)
- $(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
- $(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
-endif
-TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
-board_config_mk :=
-
-###########################################
-# Now we can substitute with the real value of TARGET_COPY_OUT_VENDOR
-ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder))
-TARGET_COPY_OUT_VENDOR := system/vendor
-else ifeq ($(filter vendor system/vendor,$(TARGET_COPY_OUT_VENDOR)),)
-$(error TARGET_COPY_OUT_VENDOR must be either 'vendor' or 'system/vendor', seeing '$(TARGET_COPY_OUT_VENDOR)'.)
-endif
-PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES))
-
-BOARD_USES_VENDORIMAGE :=
-ifdef BOARD_PREBUILT_VENDORIMAGE
-BOARD_USES_VENDORIMAGE := true
-endif
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
-BOARD_USES_VENDORIMAGE := true
-endif
-ifeq ($(TARGET_COPY_OUT_VENDOR),vendor)
-BOARD_USES_VENDORIMAGE := true
-else ifdef BOARD_USES_VENDORIMAGE
-$(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
-endif
-
-###########################################
-# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT
-ifeq ($(TARGET_COPY_OUT_PRODUCT),$(_product_path_placeholder))
-TARGET_COPY_OUT_PRODUCT := system/product
-else ifeq ($(filter product system/product,$(TARGET_COPY_OUT_PRODUCT)),)
-$(error TARGET_COPY_OUT_PRODUCT must be either 'product' or 'system/product', seeing '$(TARGET_COPY_OUT_PRODUCT)'.)
-endif
-PRODUCT_COPY_FILES := $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),$(PRODUCT_COPY_FILES))
-
-BOARD_USES_PRODUCTIMAGE :=
-ifdef BOARD_PREBUILT_PRODUCTIMAGE
-BOARD_USES_PRODUCTIMAGE := true
-endif
-ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
-BOARD_USES_PRODUCTIMAGE := true
-endif
-ifeq ($(TARGET_COPY_OUT_PRODUCT),product)
-BOARD_USES_PRODUCTIMAGE := true
-else ifdef BOARD_USES_PRODUCTIMAGE
-$(error TARGET_COPY_OUT_PRODUCT must be set to 'product' to use a product image)
-endif
-
-###########################################
-# Ensure that only TARGET_RECOVERY_UPDATER_LIBS *or* AB_OTA_UPDATER is set.
-TARGET_RECOVERY_UPDATER_LIBS ?=
-AB_OTA_UPDATER ?=
-.KATI_READONLY := TARGET_RECOVERY_UPDATER_LIBS AB_OTA_UPDATER
-ifeq ($(AB_OTA_UPDATER),true)
- ifneq ($(strip $(TARGET_RECOVERY_UPDATER_LIBS)),)
- $(error Do not use TARGET_RECOVERY_UPDATER_LIBS when using AB_OTA_UPDATER)
- endif
-endif
-
-# Check BOARD_VNDK_VERSION
-define check_vndk_version
- $(eval vndk_path := prebuilts/vndk/v$(1)) \
- $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
-endef
-
-ifdef BOARD_VNDK_VERSION
- ifneq ($(BOARD_VNDK_VERSION),current)
- $(error BOARD_VNDK_VERSION: Only "current" is implemented)
- endif
-
- TARGET_VENDOR_TEST_SUFFIX := /vendor
-else
- TARGET_VENDOR_TEST_SUFFIX :=
-endif
-
-ifdef PRODUCT_EXTRA_VNDK_VERSIONS
- $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
-endif
-
-# Ensure that BOARD_SYSTEMSDK_VERSIONS are all within PLATFORM_SYSTEMSDK_VERSIONS
-_unsupported_systemsdk_versions := $(filter-out $(PLATFORM_SYSTEMSDK_VERSIONS),$(BOARD_SYSTEMSDK_VERSIONS))
-ifneq (,$(_unsupported_systemsdk_versions))
- $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
- Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
-endif
-
-# ---------------------------------------------------------------
-# Set up configuration for target machine.
-# The following must be set:
-# TARGET_OS = { linux }
-# TARGET_ARCH = { arm | x86 | mips }
-
TARGET_OS := linux
-# TARGET_ARCH should be set by BoardConfig.mk and will be checked later
-ifneq ($(filter %64,$(TARGET_ARCH)),)
-TARGET_IS_64_BIT := true
-endif
+
+include $(BUILD_SYSTEM)/board_config.mk
# the target build type defaults to release
ifneq ($(TARGET_BUILD_TYPE),debug)
@@ -366,28 +286,22 @@ endif
# ---------------------------------------------------------------
# figure out the output directories
-ifeq (,$(strip $(OUT_DIR)))
-ifeq (,$(strip $(OUT_DIR_COMMON_BASE)))
-OUT_DIR := $(TOPDIR)out
-else
-OUT_DIR := $(OUT_DIR_COMMON_BASE)/$(notdir $(PWD))
-endif
-endif
-
SOONG_OUT_DIR := $(OUT_DIR)/soong
TARGET_OUT_ROOT := $(OUT_DIR)/target
HOST_OUT_ROOT := $(OUT_DIR)/host
+.KATI_READONLY := SOONG_OUT_DIR TARGET_OUT_ROOT HOST_OUT_ROOT
+
# We want to avoid two host bin directories in multilib build.
HOST_OUT := $(HOST_OUT_ROOT)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
SOONG_HOST_OUT := $(SOONG_OUT_DIR)/host/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
-# TODO: remove
-BUILD_OUT := $(HOST_OUT)
HOST_CROSS_OUT := $(HOST_OUT_ROOT)/windows-$(HOST_PREBUILT_ARCH)
+.KATI_READONLY := HOST_OUT SOONG_HOST_OUT HOST_CROSS_OUT
+
TARGET_PRODUCT_OUT_ROOT := $(TARGET_OUT_ROOT)/product
TARGET_COMMON_OUT_ROOT := $(TARGET_OUT_ROOT)/common
@@ -395,11 +309,17 @@ HOST_COMMON_OUT_ROOT := $(HOST_OUT_ROOT)/common
PRODUCT_OUT := $(TARGET_PRODUCT_OUT_ROOT)/$(TARGET_DEVICE)
+.KATI_READONLY := TARGET_PRODUCT_OUT_ROOT TARGET_COMMON_OUT_ROOT HOST_COMMON_OUT_ROOT PRODUCT_OUT
+
OUT_DOCS := $(TARGET_COMMON_OUT_ROOT)/docs
OUT_NDK_DOCS := $(TARGET_COMMON_OUT_ROOT)/ndk-docs
+.KATI_READONLY := OUT_DOCS OUT_NDK_DOCS
-BUILD_OUT_EXECUTABLES := $(BUILD_OUT)/bin
+$(call KATI_obsolete,BUILD_OUT,Use HOST_OUT instead)
+
+BUILD_OUT_EXECUTABLES := $(HOST_OUT)/bin
SOONG_HOST_OUT_EXECUTABLES := $(SOONG_HOST_OUT)/bin
+.KATI_READONLY := BUILD_OUT_EXECUTABLES SOONG_HOST_OUT_EXECUTABLES
HOST_OUT_EXECUTABLES := $(HOST_OUT)/bin
HOST_OUT_SHARED_LIBRARIES := $(HOST_OUT)/lib64
@@ -409,56 +329,89 @@ HOST_OUT_SDK_ADDON := $(HOST_OUT)/sdk_addon
HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64
HOST_OUT_COVERAGE := $(HOST_OUT)/coverage
HOST_OUT_TESTCASES := $(HOST_OUT)/testcases
+.KATI_READONLY := \
+ HOST_OUT_EXECUTABLES \
+ HOST_OUT_SHARED_LIBRARIES \
+ HOST_OUT_RENDERSCRIPT_BITCODE \
+ HOST_OUT_JAVA_LIBRARIES \
+ HOST_OUT_SDK_ADDON \
+ HOST_OUT_NATIVE_TESTS \
+ HOST_OUT_COVERAGE \
+ HOST_OUT_TESTCASES
HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin
HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib
HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest
HOST_CROSS_OUT_COVERAGE := $(HOST_CROSS_OUT)/coverage
HOST_CROSS_OUT_TESTCASES := $(HOST_CROSS_OUT)/testcases
+.KATI_READONLY := \
+ HOST_CROSS_OUT_EXECUTABLES \
+ HOST_CROSS_OUT_SHARED_LIBRARIES \
+ HOST_CROSS_OUT_NATIVE_TESTS \
+ HOST_CROSS_OUT_COVERAGE \
+ HOST_CROSS_OUT_TESTCASES
HOST_OUT_INTERMEDIATES := $(HOST_OUT)/obj
-HOST_OUT_INTERMEDIATE_LIBRARIES := $(HOST_OUT_INTERMEDIATES)/lib
HOST_OUT_NOTICE_FILES := $(HOST_OUT_INTERMEDIATES)/NOTICE_FILES
HOST_OUT_COMMON_INTERMEDIATES := $(HOST_COMMON_OUT_ROOT)/obj
HOST_OUT_FAKE := $(HOST_OUT)/fake_packages
+.KATI_READONLY := \
+ HOST_OUT_INTERMEDIATES \
+ HOST_OUT_NOTICE_FILES \
+ HOST_OUT_COMMON_INTERMEDIATES \
+ HOST_OUT_FAKE
# Nano environment config
include $(BUILD_SYSTEM)/aux_config.mk
HOST_CROSS_OUT_INTERMEDIATES := $(HOST_CROSS_OUT)/obj
-HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES := $(HOST_CROSS_OUT_INTERMEDIATES)/lib
HOST_CROSS_OUT_NOTICE_FILES := $(HOST_CROSS_OUT_INTERMEDIATES)/NOTICE_FILES
+.KATI_READONLY := \
+ HOST_CROSS_OUT_INTERMEDIATES \
+ HOST_CROSS_OUT_NOTICE_FILES
HOST_OUT_GEN := $(HOST_OUT)/gen
HOST_OUT_COMMON_GEN := $(HOST_COMMON_OUT_ROOT)/gen
+.KATI_READONLY := \
+ HOST_OUT_GEN \
+ HOST_OUT_COMMON_GEN
HOST_CROSS_OUT_GEN := $(HOST_CROSS_OUT)/gen
+.KATI_READONLY := HOST_CROSS_OUT_GEN
HOST_OUT_TEST_CONFIG := $(HOST_OUT)/test_config
+.KATI_READONLY := HOST_OUT_TEST_CONFIG
# Out for HOST_2ND_ARCH
-HOST_2ND_ARCH_VAR_PREFIX := 2ND_
-HOST_2ND_ARCH_MODULE_SUFFIX := _32
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES := $(HOST_OUT)/obj32
-$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES := $($(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES)/lib
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES := $(HOST_OUT)/lib
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_EXECUTABLES := $(HOST_OUT_EXECUTABLES)
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_JAVA_LIBRARIES := $(HOST_OUT_JAVA_LIBRARIES)
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_TESTCASES := $(HOST_OUT_TESTCASES)
+.KATI_READONLY := \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_EXECUTABLES \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_JAVA_LIBRARIES \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_NATIVE_TESTS \
+ $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_TESTCASES
# The default host library path.
# It always points to the path where we build libraries in the default bitness.
HOST_LIBRARY_PATH := $(HOST_OUT_SHARED_LIBRARIES)
+.KATI_READONLY := HOST_LIBRARY_PATH
# Out for HOST_CROSS_2ND_ARCH
-HOST_CROSS_2ND_ARCH_VAR_PREFIX := 2ND_
-HOST_CROSS_2ND_ARCH_MODULE_SUFFIX := _64
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES := $(HOST_CROSS_OUT)/obj64
-$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES := $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES)/lib
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib64
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT_EXECUTABLES)
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest64
+.KATI_READONLY := \
+ $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES \
+ $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_SHARED_LIBRARIES \
+ $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES \
+ $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_NATIVE_TESTS
ifneq ($(filter address,$(SANITIZE_TARGET)),)
TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_asan
@@ -466,18 +419,21 @@ else
TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
endif
TARGET_OUT_HEADERS := $(TARGET_OUT_INTERMEDIATES)/include
-TARGET_OUT_INTERMEDIATE_LIBRARIES := $(TARGET_OUT_INTERMEDIATES)/lib
+.KATI_READONLY := TARGET_OUT_INTERMEDIATES TARGET_OUT_HEADERS
ifneq ($(filter address,$(SANITIZE_TARGET)),)
TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj_asan
else
TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj
endif
+.KATI_READONLY := TARGET_OUT_COMMON_INTERMEDIATES
TARGET_OUT_GEN := $(PRODUCT_OUT)/gen
TARGET_OUT_COMMON_GEN := $(TARGET_COMMON_OUT_ROOT)/gen
+.KATI_READONLY := TARGET_OUT_GEN TARGET_OUT_COMMON_GEN
TARGET_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM)
+.KATI_READONLY := TARGET_OUT
ifneq ($(filter address,$(SANITIZE_TARGET)),)
target_out_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system
ifeq ($(SANITIZE_LITE),true)
@@ -512,6 +468,21 @@ TARGET_OUT_NOTICE_FILES := $(TARGET_OUT_INTERMEDIATES)/NOTICE_FILES
TARGET_OUT_FAKE := $(PRODUCT_OUT)/fake_packages
TARGET_OUT_TESTCASES := $(PRODUCT_OUT)/testcases
TARGET_OUT_TEST_CONFIG := $(PRODUCT_OUT)/test_config
+.KATI_READONLY := \
+ TARGET_OUT_EXECUTABLES \
+ TARGET_OUT_OPTIONAL_EXECUTABLES \
+ TARGET_OUT_SHARED_LIBRARIES \
+ TARGET_OUT_RENDERSCRIPT_BITCODE \
+ TARGET_OUT_JAVA_LIBRARIES \
+ TARGET_OUT_APPS \
+ TARGET_OUT_APPS_PRIVILEGED \
+ TARGET_OUT_KEYLAYOUT \
+ TARGET_OUT_KEYCHARS \
+ TARGET_OUT_ETC \
+ TARGET_OUT_NOTICE_FILES \
+ TARGET_OUT_FAKE \
+ TARGET_OUT_TESTCASES \
+ TARGET_OUT_TEST_CONFIG
ifeq ($(SANITIZE_LITE),true)
# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
@@ -520,22 +491,22 @@ TARGET_OUT_SYSTEM_OTHER := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_
else
TARGET_OUT_SYSTEM_OTHER := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM_OTHER)
endif
+.KATI_READONLY := TARGET_OUT_SYSTEM_OTHER
# Out for TARGET_2ND_ARCH
-TARGET_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
# With this you can reference the arm binary translation library with libfoo_arm in PRODUCT_PACKAGES.
TARGET_2ND_ARCH_MODULE_SUFFIX := _$(TARGET_2ND_ARCH)
else
TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
endif
+.KATI_READONLY := TARGET_2ND_ARCH_MODULE_SUFFIX
ifneq ($(filter address,$(SANITIZE_TARGET)),)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)_asan
else
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
endif
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)/lib
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
else
@@ -546,7 +517,20 @@ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_EXECUTABLES := $(TARGET_OUT_EXECUTABLES)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS := $(TARGET_OUT_APPS)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS_PRIVILEGED := $(TARGET_OUT_APPS_PRIVILEGED)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_TESTCASES := $(TARGET_OUT_TESTCASES)
-
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_RENDERSCRIPT_BITCODE \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS_PRIVILEGED \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_TESTCASES
+
+MODULE_CLASS_APPS := app
+MODULE_CLASS_EXECUTABLES := bin
+MODULE_CLASS_JAVA_LIBRARIES := framework
+MODULE_CLASS_NATIVE_TESTS := nativetest
+MODULE_CLASS_METRIC_TESTS := benchmarktest
TARGET_OUT_DATA := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)
TARGET_OUT_DATA_EXECUTABLES := $(TARGET_OUT_EXECUTABLES)
TARGET_OUT_DATA_SHARED_LIBRARIES := $(TARGET_OUT_SHARED_LIBRARIES)
@@ -566,7 +550,28 @@ TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest
TARGET_OUT_VENDOR_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest$(TARGET_VENDOR_TEST_SUFFIX)
TARGET_OUT_VENDOR_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest$(TARGET_VENDOR_TEST_SUFFIX)
endif
+MODULE_CLASS_FAKE := fake_packages
TARGET_OUT_DATA_FAKE := $(TARGET_OUT_DATA)/fake_packages
+.KATI_READONLY := \
+ TARGET_OUT_DATA \
+ TARGET_OUT_DATA_EXECUTABLES \
+ TARGET_OUT_DATA_SHARED_LIBRARIES \
+ TARGET_OUT_DATA_JAVA_LIBRARIES \
+ TARGET_OUT_DATA_APPS \
+ TARGET_OUT_DATA_KEYLAYOUT \
+ TARGET_OUT_DATA_KEYCHARS \
+ TARGET_OUT_DATA_ETC \
+ TARGET_OUT_DATA_NATIVE_TESTS \
+ TARGET_OUT_DATA_METRIC_TESTS \
+ TARGET_OUT_VENDOR_NATIVE_TESTS \
+ TARGET_OUT_VENDOR_METRIC_TESTS \
+ TARGET_OUT_DATA_FAKE \
+ MODULE_CLASS_APPS \
+ MODULE_CLASS_EXECUTABLES \
+ MODULE_CLASS_JAVA_LIBRARIES \
+ MODULE_CLASS_NATIVE_TESTS \
+ MODULE_CLASS_METRIC_TESTS \
+ MODULE_CLASS_FAKE
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_EXECUTABLES := $(TARGET_OUT_DATA_EXECUTABLES)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_SHARED_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
@@ -582,16 +587,26 @@ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest$(TARGET_VENDOR_TEST_SUFFIX)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest$(TARGET_VENDOR_TEST_SUFFIX)
endif
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_APPS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_NATIVE_TESTS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_METRIC_TESTS \
TARGET_OUT_CACHE := $(PRODUCT_OUT)/cache
+.KATI_READONLY := TARGET_OUT_CACHE
TARGET_OUT_VENDOR := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR)
+.KATI_READONLY := TARGET_OUT_VENDOR
ifneq ($(filter address,$(SANITIZE_TARGET)),)
-target_out_vendor_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/vendor
+target_out_vendor_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_VENDOR)
ifeq ($(SANITIZE_LITE),true)
# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
-target_out_vendor_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/vendor
+target_out_vendor_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_VENDOR)
else
target_out_vendor_app_base := $(TARGET_OUT_VENDOR)
endif
@@ -612,6 +627,15 @@ TARGET_OUT_VENDOR_JAVA_LIBRARIES := $(TARGET_OUT_VENDOR)/framework
TARGET_OUT_VENDOR_APPS := $(target_out_vendor_app_base)/app
TARGET_OUT_VENDOR_APPS_PRIVILEGED := $(target_out_vendor_app_base)/priv-app
TARGET_OUT_VENDOR_ETC := $(TARGET_OUT_VENDOR)/etc
+.KATI_READONLY := \
+ TARGET_OUT_VENDOR_EXECUTABLES \
+ TARGET_OUT_VENDOR_OPTIONAL_EXECUTABLES \
+ TARGET_OUT_VENDOR_SHARED_LIBRARIES \
+ TARGET_OUT_VENDOR_RENDERSCRIPT_BITCODE \
+ TARGET_OUT_VENDOR_JAVA_LIBRARIES \
+ TARGET_OUT_VENDOR_APPS \
+ TARGET_OUT_VENDOR_APPS_PRIVILEGED \
+ TARGET_OUT_VENDOR_ETC
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_EXECUTABLES := $(TARGET_OUT_VENDOR_EXECUTABLES)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
@@ -622,6 +646,12 @@ endif
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_RENDERSCRIPT_BITCODE := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR_APPS)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS_PRIVILEGED := $(TARGET_OUT_VENDOR_APPS_PRIVILEGED)
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_RENDERSCRIPT_BITCODE \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS_PRIVILEGED
TARGET_OUT_OEM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_OEM)
TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM)/bin
@@ -634,6 +664,12 @@ endif
# TARGET_OUT_OEM_JAVA_LIBRARIES:= $(TARGET_OUT_OEM)/framework
TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM)/app
TARGET_OUT_OEM_ETC := $(TARGET_OUT_OEM)/etc
+.KATI_READONLY := \
+ TARGET_OUT_OEM \
+ TARGET_OUT_OEM_EXECUTABLES \
+ TARGET_OUT_OEM_SHARED_LIBRARIES \
+ TARGET_OUT_OEM_APPS \
+ TARGET_OUT_OEM_ETC
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM_EXECUTABLES)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
@@ -642,32 +678,74 @@ else
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
endif
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM_APPS)
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_APPS \
TARGET_OUT_ODM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ODM)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_odm_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_OEM)
+ifeq ($(SANITIZE_LITE),true)
+# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
+# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
+target_out_odm_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_OEM)
+else
+target_out_odm_app_base := $(TARGET_OUT_ODM)
+endif
+else
+target_out_odm_shared_libraries_base := $(TARGET_OUT_ODM)
+target_out_odm_app_base := $(TARGET_OUT_ODM)
+endif
+
TARGET_OUT_ODM_EXECUTABLES := $(TARGET_OUT_ODM)/bin
+TARGET_OUT_ODM_OPTIONAL_EXECUTABLES := $(TARGET_OUT_ODM)/xbin
ifeq ($(TARGET_IS_64_BIT),true)
-TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib64
+TARGET_OUT_ODM_SHARED_LIBRARIES := $(target_out_odm_shared_libraries_base)/lib64
else
-TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib
+TARGET_OUT_ODM_SHARED_LIBRARIES := $(target_out_odm_shared_libraries_base)/lib
endif
-TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM)/app
+TARGET_OUT_ODM_RENDERSCRIPT_BITCODE := $(TARGET_OUT_ODM_SHARED_LIBRARIES)
+TARGET_OUT_ODM_JAVA_LIBRARIES := $(TARGET_OUT_ODM)/framework
+TARGET_OUT_ODM_APPS := $(target_out_odm_app_base)/app
+TARGET_OUT_ODM_APPS_PRIVILEGED := $(target_out_odm_app_base)/priv-app
TARGET_OUT_ODM_ETC := $(TARGET_OUT_ODM)/etc
+.KATI_READONLY := \
+ TARGET_OUT_ODM \
+ TARGET_OUT_ODM_EXECUTABLES \
+ TARGET_OUT_ODM_OPTIONAL_EXECUTABLES \
+ TARGET_OUT_ODM_SHARED_LIBRARIES \
+ TARGET_OUT_ODM_RENDERSCRIPT_BITCODE \
+ TARGET_OUT_ODM_JAVA_LIBRARIES \
+ TARGET_OUT_ODM_APPS \
+ TARGET_OUT_ODM_APPS_PRIVILEGED \
+ TARGET_OUT_ODM_ETC
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_EXECUTABLES := $(TARGET_OUT_ODM_EXECUTABLES)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib/$(TARGET_2ND_ARCH)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(target_out_odm_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
else
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(target_out_odm_shared_libraries_base)/lib
endif
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_RENDERSCRIPT_BITCODE := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS_PRIVILEGED := $(TARGET_OUT_ODM_APPS_PRIVILEGED)
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_RENDERSCRIPT_BITCODE \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS_PRIVILEGED
TARGET_OUT_PRODUCT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT)
+TARGET_OUT_PRODUCT_EXECUTABLES := $(TARGET_OUT_PRODUCT)/bin
+.KATI_READONLY := TARGET_OUT_PRODUCT
ifneq ($(filter address,$(SANITIZE_TARGET)),)
-target_out_product_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/product
+target_out_product_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_PRODUCT)
ifeq ($(SANITIZE_LITE),true)
# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
-target_out_product_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/product
+target_out_product_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_PRODUCT)
else
target_out_product_app_base := $(TARGET_OUT_PRODUCT)
endif
@@ -681,11 +759,19 @@ TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_bas
else
TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
endif
-TARGET_OUT_PRODUCT_JAVA_LIBRARIES:= $(TARGET_OUT_PRODUCT)/framework
+TARGET_OUT_PRODUCT_JAVA_LIBRARIES := $(TARGET_OUT_PRODUCT)/framework
TARGET_OUT_PRODUCT_APPS := $(target_out_product_app_base)/app
TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(target_out_product_app_base)/priv-app
TARGET_OUT_PRODUCT_ETC := $(TARGET_OUT_PRODUCT)/etc
-
+.KATI_READONLY := \
+ TARGET_OUT_PRODUCT_EXECUTABLES \
+ TARGET_OUT_PRODUCT_SHARED_LIBRARIES \
+ TARGET_OUT_PRODUCT_JAVA_LIBRARIES \
+ TARGET_OUT_PRODUCT_APPS \
+ TARGET_OUT_PRODUCT_APPS_PRIVILEGED \
+ TARGET_OUT_PRODUCT_ETC
+
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_EXECUTABLES := $(TARGET_OUT_PRODUCT_EXECUTABLES)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
else
@@ -693,8 +779,47 @@ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out
endif
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS := $(TARGET_OUT_PRODUCT_APPS)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(TARGET_OUT_PRODUCT_APPS_PRIVILEGED)
+.KATI_READONLY := \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_EXECUTABLES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS \
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS_PRIVILEGED
+
+TARGET_OUT_PRODUCT_SERVICES := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT_SERVICES)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_product_services_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_PRODUCT_SERVICES)
+ifeq ($(SANITIZE_LITE),true)
+# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
+# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
+target_out_product_services_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/$(TARGET_COPY_OUT_PRODUCT_SERVICES)
+else
+target_out_product_services_app_base := $(TARGET_OUT_PRODUCT_SERVICES)
+endif
+else
+target_out_product_services_shared_libraries_base := $(TARGET_OUT_PRODUCT_SERVICES)
+target_out_product_services_app_base := $(TARGET_OUT_PRODUCT_SERVICES)
+endif
+
+ifeq ($(TARGET_IS_64_BIT),true)
+TARGET_OUT_PRODUCT_SERVICES_SHARED_LIBRARIES := $(target_out_product_services_shared_libraries_base)/lib64
+else
+TARGET_OUT_PRODUCT_SERVICES_SHARED_LIBRARIES := $(target_out_product_services_shared_libraries_base)/lib
+endif
+TARGET_OUT_PRODUCT_SERVICES_JAVA_LIBRARIES := $(TARGET_OUT_PRODUCT_SERVICES)/framework
+TARGET_OUT_PRODUCT_SERVICES_APPS := $(target_out_product_services_app_base)/app
+TARGET_OUT_PRODUCT_SERVICES_APPS_PRIVILEGED := $(target_out_product_services_app_base)/priv-app
+TARGET_OUT_PRODUCT_SERVICES_ETC := $(TARGET_OUT_PRODUCT_SERVICES)/etc
+
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SERVICES_SHARED_LIBRARIES := $(target_out_product_services_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
+else
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SERVICES_SHARED_LIBRARIES := $(target_out_product_services_shared_libraries_base)/lib
+endif
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SERVICES_APPS := $(TARGET_OUT_PRODUCT_SERVICES_APPS)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SERVICES_APPS_PRIVILEGED := $(TARGET_OUT_PRODUCT_SERVICES_APPS_PRIVILEGED)
TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
+.KATI_READONLY := TARGET_OUT_BREAKPAD
TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
TARGET_OUT_EXECUTABLES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/bin
@@ -704,31 +829,59 @@ TARGET_ROOT_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
TARGET_ROOT_OUT_SBIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/sbin
TARGET_ROOT_OUT_BIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/bin
TARGET_OUT_COVERAGE := $(PRODUCT_OUT)/coverage
+.KATI_READONLY := \
+ TARGET_OUT_UNSTRIPPED \
+ TARGET_OUT_EXECUTABLES_UNSTRIPPED \
+ TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED \
+ TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED \
+ TARGET_ROOT_OUT_UNSTRIPPED \
+ TARGET_ROOT_OUT_SBIN_UNSTRIPPED \
+ TARGET_ROOT_OUT_BIN_UNSTRIPPED \
+ TARGET_OUT_COVERAGE
+
+TARGET_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_RAMDISK)
+TARGET_RAMDISK_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
+TARGET_DEBUG_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DEBUG_RAMDISK)
TARGET_ROOT_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ROOT)
TARGET_ROOT_OUT_BIN := $(TARGET_ROOT_OUT)/bin
TARGET_ROOT_OUT_SBIN := $(TARGET_ROOT_OUT)/sbin
TARGET_ROOT_OUT_ETC := $(TARGET_ROOT_OUT)/etc
TARGET_ROOT_OUT_USR := $(TARGET_ROOT_OUT)/usr
+.KATI_READONLY := \
+ TARGET_ROOT_OUT \
+ TARGET_ROOT_OUT_BIN \
+ TARGET_ROOT_OUT_SBIN \
+ TARGET_ROOT_OUT_ETC \
+ TARGET_ROOT_OUT_USR
TARGET_RECOVERY_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_RECOVERY)
TARGET_RECOVERY_ROOT_OUT := $(TARGET_RECOVERY_OUT)/root
+.KATI_READONLY := \
+ TARGET_RECOVERY_OUT \
+ TARGET_RECOVERY_ROOT_OUT
TARGET_SYSLOADER_OUT := $(PRODUCT_OUT)/sysloader
TARGET_SYSLOADER_ROOT_OUT := $(TARGET_SYSLOADER_OUT)/root
TARGET_SYSLOADER_SYSTEM_OUT := $(TARGET_SYSLOADER_OUT)/root/system
+.KATI_READONLY := \
+ TARGET_SYSLOADER_OUT \
+ TARGET_SYSLOADER_ROOT_OUT \
+ TARGET_SYSLOADER_SYSTEM_OUT
TARGET_INSTALLER_OUT := $(PRODUCT_OUT)/installer
TARGET_INSTALLER_DATA_OUT := $(TARGET_INSTALLER_OUT)/data
TARGET_INSTALLER_ROOT_OUT := $(TARGET_INSTALLER_OUT)/root
TARGET_INSTALLER_SYSTEM_OUT := $(TARGET_INSTALLER_OUT)/root/system
+.KATI_READONLY := \
+ TARGET_INSTALLER_OUT \
+ TARGET_INSTALLER_DATA_OUT \
+ TARGET_INSTALLER_ROOT_OUT \
+ TARGET_INSTALLER_SYSTEM_OUT
COMMON_MODULE_CLASSES := TARGET-NOTICE_FILES HOST-NOTICE_FILES HOST-JAVA_LIBRARIES
PER_ARCH_MODULE_CLASSES := SHARED_LIBRARIES STATIC_LIBRARIES EXECUTABLES GYP RENDERSCRIPT_BITCODE NATIVE_TESTS HEADER_LIBRARIES
-
-ifeq (,$(strip $(DIST_DIR)))
- DIST_DIR := $(OUT_DIR)/dist
-endif
+.KATI_READONLY := COMMON_MODULE_CLASSES PER_ARCH_MODULE_CLASSES
ifeq ($(CALLED_FROM_SETUP),true)
PRINT_BUILD_CONFIG ?= true
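The hunks above lean heavily on .KATI_READONLY. A minimal sketch of its effect, with a hypothetical PRODUCT_OUT (Kati-specific; plain GNU make would simply treat .KATI_READONLY as an ordinary variable name):

PRODUCT_OUT := out/target/product/generic
TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
.KATI_READONLY := TARGET_OUT_BREAKPAD
# Any later assignment, e.g. TARGET_OUT_BREAKPAD := /tmp/elsewhere, is now
# rejected by Kati as a write to a read-only variable.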
diff --git a/core/executable.mk b/core/executable.mk
index f1b2462e46..e71ff339b8 100644
--- a/core/executable.mk
+++ b/core/executable.mk
@@ -12,6 +12,8 @@ ifneq ($(filter address,$(SANITIZE_TARGET)),)
my_skip_this_target := true
else ifeq (false, $(LOCAL_CLANG))
my_skip_this_target := true
+ else ifeq (never, $(LOCAL_SANITIZE))
+ my_skip_this_target := true
endif
endif
@@ -79,7 +81,6 @@ endif
include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# non-preferred arch is supported
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index 4a62fbfbd1..558e49ba37 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -36,6 +36,11 @@ endif
endif
# Define PRIVATE_ variables from global vars
+ifeq ($(LOCAL_NO_LIBCRT_BUILTINS),true)
+my_target_libcrt_builtins :=
+else
+my_target_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+endif
ifeq ($(LOCAL_NO_LIBGCC),true)
my_target_libgcc :=
else
@@ -47,39 +52,39 @@ my_target_crtbegin_dynamic_o :=
my_target_crtbegin_static_o :=
my_target_crtend_o :=
else ifdef LOCAL_USE_VNDK
-my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.vendor.o
-my_target_crtbegin_static_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.vendor.o
-my_target_crtend_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_android.vendor.o
+my_target_crtbegin_dynamic_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_dynamic.vendor)
+my_target_crtbegin_static_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_static.vendor)
+my_target_crtend_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_android.vendor)
else
-my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
-my_target_crtbegin_static_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
-my_target_crtend_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_android.o
+my_target_crtbegin_dynamic_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_dynamic)
+my_target_crtbegin_static_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_static)
+my_target_crtend_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_android)
endif
ifneq ($(LOCAL_SDK_VERSION),)
my_target_crtbegin_dynamic_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_dynamic.o)
my_target_crtbegin_static_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_static.o)
my_target_crtend_o := $(wildcard $(my_ndk_sysroot_lib)/crtend_android.o)
endif
+$(linked_module): PRIVATE_TARGET_LIBCRT_BUILTINS := $(my_target_libcrt_builtins)
$(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
$(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O := $(my_target_crtbegin_dynamic_o)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_STATIC_O := $(my_target_crtbegin_static_o)
$(linked_module): PRIVATE_TARGET_CRTEND_O := $(my_target_crtend_o)
-$(linked_module): PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)
$(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libgcc) $(my_target_libatomic)
+$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(my_target_libgcc) $(my_target_libatomic)
$(transform-o-to-static-executable)
$(PRIVATE_POST_LINK_CMD)
else
-$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libgcc) $(my_target_libatomic)
+$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(my_target_libgcc) $(my_target_libatomic)
$(transform-o-to-executable)
$(PRIVATE_POST_LINK_CMD)
endif
ifeq ($(my_native_coverage),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
built_whole_gcno_libraries := \
$(foreach lib,$(my_whole_static_libraries), \
@@ -101,11 +106,11 @@ endif
GCNO_ARCHIVE := $(my_installed_module_stem)$(gcno_suffix)
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries)) $(strip $(built_static_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
$(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries) $(built_static_gcno_libraries)
- $(transform-o-to-static-lib)
+ $(package-coverage-files)
$(my_coverage_path)/$(GCNO_ARCHIVE) : $(intermediates)/$(GCNO_ARCHIVE)
$(copy-file-to-target)
diff --git a/core/force_aapt2.mk b/core/force_aapt2.mk
new file mode 100644
index 0000000000..db2e60f067
--- /dev/null
+++ b/core/force_aapt2.mk
@@ -0,0 +1,63 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Including this makefile will force AAPT2 on unless FORCE_AAPT2 is set to false,
+# rewriting some properties to convert standard AAPT usage to AAPT2.
+
+ifneq ($(FORCE_AAPT2),false)
+ ifeq ($(LOCAL_USE_AAPT2),)
+ # Force AAPT2 on
+ LOCAL_USE_AAPT2 := true
+ # Filter out support library resources
+ LOCAL_RESOURCE_DIR := $(filter-out \
+ prebuilts/sdk/current/% \
+ frameworks/support/%,\
+ $(LOCAL_RESOURCE_DIR))
+ # Filter out unnecessary aapt flags
+ ifneq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
+ LOCAL_AAPT_FLAGS := $(subst --extra-packages=,--extra-packages$(space), \
+ $(filter-out \
+ --extra-packages=android.support.% \
+ --extra-packages=androidx.%, \
+ $(subst --extra-packages$(space),--extra-packages=,$(LOCAL_AAPT_FLAGS))))
+ ifeq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
+ LOCAL_AAPT_FLAGS := $(filter-out --auto-add-overlay,$(LOCAL_AAPT_FLAGS))
+ endif
+ endif
+
+ # AAPT2 is pickier about missing resources. Support library may have references to resources
+ # added in current, so always treat LOCAL_SDK_VERSION as LOCAL_SDK_RES_VERSION := current.
+ ifdef LOCAL_SDK_VERSION
+ LOCAL_SDK_RES_VERSION := current
+ endif
+
+ ifeq (,$(strip $(LOCAL_MANIFEST_FILE)$(LOCAL_FULL_MANIFEST_FILE)))
+ ifeq (,$(wildcard $(LOCAL_PATH)/AndroidManifest.xml))
+ # work around missing manifests by creating a default one
+ LOCAL_FULL_MANIFEST_FILE := $(call local-intermediates-dir,COMMON)/DefaultManifest.xml
+ $(call create-default-manifest-file,$(LOCAL_FULL_MANIFEST_FILE),$(call module-min-sdk-version))
+ endif
+ endif
+ endif
+endif
+
+ifneq ($(LOCAL_USE_AAPT2),true)
+ ifneq ($(LOCAL_USE_AAPT2),false)
+ ifneq ($(LOCAL_USE_AAPT2),)
+ $(call pretty-error,Invalid value for LOCAL_USE_AAPT2: "$(LOCAL_USE_AAPT2)")
+ endif
+ endif
+endif
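A minimal standalone sketch of the --extra-packages rewrite above, using hypothetical flag values ($(space) is defined elsewhere in the real build):

empty :=
space := $(empty) $(empty)
flags := --auto-add-overlay --extra-packages android.support.v7.appcompat --extra-packages com.example.shared
# Join each "--extra-packages <pkg>" pair into one word, drop support/androidx entries, then split back.
flags := $(subst --extra-packages=,--extra-packages$(space),$(filter-out --extra-packages=android.support.% --extra-packages=androidx.%,$(subst --extra-packages$(space),--extra-packages=,$(flags))))
$(info $(flags))
# Prints: --auto-add-overlay --extra-packages com.example.shared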
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index 2cc2e2c6d5..f3bf7147da 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -65,12 +65,12 @@ ifdef LOCAL_MODULE_PATH_64
$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
endif
-LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_NATIVE_TESTS)/fuzzers/$(LOCAL_MODULE)
-LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS)/fuzzers/$(LOCAL_MODULE)
+LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_NATIVE_TESTS)/fuzzers/$(my_fuzzer)/$(LOCAL_MODULE)
+LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS)/fuzzers/$(my_fuzzer)/$(LOCAL_MODULE)
ifndef LOCAL_MULTILIB
ifndef LOCAL_32_BIT_ONLY
-LOCAL_MULTILIB := both
+LOCAL_MULTILIB := 64
endif
endif
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index 62a8c8dc90..f7877f2067 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -1,24 +1,36 @@
include $(CLEAR_VARS)
+enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro_$(enforce_rro_partition)
LOCAL_PACKAGE_NAME := $(enforce_rro_module)
intermediates := $(call intermediates-dir-for,APPS,$(LOCAL_PACKAGE_NAME),,COMMON)
rro_android_manifest_file := $(intermediates)/AndroidManifest.xml
ifeq (true,$(enforce_rro_source_is_manifest_package_name))
-$(rro_android_manifest_file): PRIVATE_PACKAGE_NAME := $(enforce_rro_source_manifest_package_info)
-$(rro_android_manifest_file): build/make/tools/generate-enforce-rro-android-manifest.py
- $(hide) build/make/tools/generate-enforce-rro-android-manifest.py -u -p $(PRIVATE_PACKAGE_NAME) -o $@
+ use_package_name_arg := --use-package-name
else
-$(rro_android_manifest_file): PRIVATE_SOURCE_MANIFEST_FILE := $(enforce_rro_source_manifest_package_info)
-$(rro_android_manifest_file): $(enforce_rro_source_manifest_package_info) build/make/tools/generate-enforce-rro-android-manifest.py
- $(hide) build/make/tools/generate-enforce-rro-android-manifest.py -p $(PRIVATE_SOURCE_MANIFEST_FILE) -o $@
+ use_package_name_arg :=
+$(rro_android_manifest_file): $(enforce_rro_source_manifest_package_info)
endif
+$(rro_android_manifest_file): PRIVATE_PACKAGE_INFO := $(enforce_rro_source_manifest_package_info)
+$(rro_android_manifest_file): PRIVATE_USE_PACKAGE_NAME := $(use_package_name_arg)
+$(rro_android_manifest_file): PRIVATE_PARTITION := $(enforce_rro_partition)
+# There should be no duplicate overrides, but just in case, set the priority of
+# /product overlays to be higher than /vendor, to at least get deterministic results.
+$(rro_android_manifest_file): PRIVATE_PRIORITY := $(if $(filter product,$(enforce_rro_partition)),1,0)
+$(rro_android_manifest_file): build/make/tools/generate-enforce-rro-android-manifest.py
+ $(hide) build/make/tools/generate-enforce-rro-android-manifest.py \
+ --package-info $(PRIVATE_PACKAGE_INFO) \
+ $(PRIVATE_USE_PACKAGE_NAME) \
+ --partition $(PRIVATE_PARTITION) \
+ --priority $(PRIVATE_PRIORITY) \
+ -o $@
+
LOCAL_PATH:= $(intermediates)
ifeq ($(enforce_rro_use_res_lib),true)
-LOCAL_RES_LIBRARIES := $(enforce_rro_source_module)
+ LOCAL_RES_LIBRARIES := $(enforce_rro_source_module)
endif
LOCAL_FULL_MANIFEST_FILE := $(rro_android_manifest_file)
@@ -27,4 +39,22 @@ LOCAL_CERTIFICATE := platform
LOCAL_AAPT_FLAGS += --auto-add-overlay
LOCAL_RESOURCE_DIR := $(enforce_rro_source_overlays)
+ifeq (product,$(enforce_rro_partition))
+ LOCAL_PRODUCT_MODULE := true
+else ifeq (vendor,$(enforce_rro_partition))
+ LOCAL_VENDOR_MODULE := true
+else
+ $(error Unsupported partition. Want: [vendor/product] Got: [$(enforce_rro_partition)])
+endif
+
+ifneq (,$(LOCAL_RES_LIBRARIES))
+  # Technically we are linking against the app (if only to grab its resources),
+  # and since the app may not be built against the SDK, we can't build against it either.
+ LOCAL_PRIVATE_PLATFORM_APIS := true
+else ifeq (framework-res,$(enforce_rro_source_module))
+ LOCAL_PRIVATE_PLATFORM_APIS := true
+else
+ LOCAL_SDK_VERSION := current
+endif
+
include $(BUILD_RRO_PACKAGE)
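A small sketch of the partition-to-priority mapping used for PRIVATE_PRIORITY above, with a hypothetical partition value:

enforce_rro_partition := product
priority := $(if $(filter product,$(enforce_rro_partition)),1,0)
$(info priority=$(priority))
# Prints priority=1; any partition other than product (e.g. vendor) yields 0.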
diff --git a/core/goma.mk b/core/goma.mk
index 2fb37a7525..c265259da0 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -14,46 +14,20 @@
# limitations under the License.
#
-# Used by the compiler wrapper, but should only be set by gomacc
-unexport GOMACC_PATH
-
# Notice: this works only with Google's Goma build infrastructure.
ifneq ($(filter-out false,$(USE_GOMA)),)
- # Goma requires a lot of processes and file descriptors.
- ifeq ($(shell echo $$(($$(ulimit -u) < 2500 || $$(ulimit -n) < 16000))),1)
- $(warning Max user processes and/or open files are insufficient)
- ifeq ($(shell uname),Darwin)
- $(error See go/ma/how-to-use-goma/how-to-use-goma-for-android to relax the limit)
- else
- $(error Adjust the limit by ulimit -u and ulimit -n)
- endif
- endif
-
ifdef GOMA_DIR
goma_dir := $(GOMA_DIR)
else
goma_dir := $(HOME)/goma
endif
- goma_ctl := $(goma_dir)/goma_ctl.py
GOMA_CC := $(goma_dir)/gomacc
- $(if $(wildcard $(goma_ctl)),, \
- $(warning You should have goma in $$GOMA_DIR or $(HOME)/goma) \
- $(error See go/ma/how-to-use-goma/how-to-use-goma-for-android for detail))
-
# Append gomacc to existing *_WRAPPER variables so it's possible to
# use both ccache and gomacc.
CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
- # gomacc can start goma client's daemon process automatically, but
- # it is safer and faster to start up it beforehand. We run this as a
- # background process so this won't slow down the build.
- ifndef NOSTART_GOMA
- $(shell ( $(goma_ctl) ensure_start ) &> /dev/null &)
- endif
-
- goma_ctl :=
goma_dir :=
endif
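A sketch of the wrapper chaining the comment above describes, with hypothetical paths; appending gomacc after an existing ccache wrapper makes each compile run through both:

CC_WRAPPER := /usr/bin/ccache
GOMA_CC := $(HOME)/goma/gomacc
CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
$(info CC_WRAPPER=$(CC_WRAPPER))
# Prints something like: CC_WRAPPER=/usr/bin/ccache /home/user/goma/gomacc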
diff --git a/core/header_library.mk b/core/header_library.mk
index 514467975c..ee65111e95 100644
--- a/core/header_library.mk
+++ b/core/header_library.mk
@@ -25,7 +25,6 @@ ifdef $(my_prefix)2ND_ARCH
ifeq ($(my_module_arch_supported),true)
# Build for 2ND_ARCH
- OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
@@ -34,39 +33,4 @@ ifdef $(my_prefix)2ND_ARCH
LOCAL_2ND_ARCH_VAR_PREFIX :=
endif # 2ND_ARCH
-ifdef LOCAL_IS_HOST_MODULE
- ifdef HOST_CROSS_OS
- my_prefix := HOST_CROSS_
- LOCAL_HOST_PREFIX := $(my_prefix)
-
- include $(BUILD_SYSTEM)/module_arch_supported.mk
-
- ifeq ($(my_module_arch_supported),true)
- # Build for 2ND_ARCH
- OVERRIDE_BUILT_MODULE_PATH :=
- LOCAL_BUILT_MODULE :=
- LOCAL_INSTALLED_MODULE :=
- LOCAL_INTERMEDIATE_TARGETS :=
- include $(BUILD_SYSTEM)/header_library_internal.mk
- endif
-
- ifdef HOST_CROSS_2ND_ARCH
- LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
- include $(BUILD_SYSTEM)/module_arch_supported.mk
-
- ifeq ($(my_module_arch_supported),true)
- # Build for HOST_CROSS_2ND_ARCH
- OVERRIDE_BUILT_MODULE_PATH :=
- LOCAL_BUILT_MODULE :=
- LOCAL_INSTALLED_MODULE :=
- LOCAL_INTERMEDIATE_TARGETS :=
- include $(BUILD_SYSTEM)/header_library_internal.mk
- endif
- LOCAL_2ND_ARCH_VAR_PREFIX :=
- endif
-
- LOCAL_HOST_PREFIX :=
- endif
-endif
-
my_module_arch_supported :=
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 1ef0ccb17e..2a251e84e5 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -33,7 +33,6 @@ full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
-full_classes_desugar_jar := $(intermediates.COMMON)/desugar.classes.jar
full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
full_classes_jar := $(intermediates.COMMON)/classes.jar
built_dex := $(intermediates.COMMON)/classes.dex
@@ -43,7 +42,6 @@ LOCAL_INTERMEDIATE_TARGETS += \
$(full_classes_turbine_jar) \
$(full_classes_compiled_jar) \
$(full_classes_combined_jar) \
- $(full_classes_desugar_jar) \
$(full_classes_jarjar_jar) \
$(full_classes_jar) \
$(built_dex) \
@@ -158,22 +156,6 @@ endif
$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
-ifneq ($(USE_D8_DESUGAR),true)
-my_desugaring :=
-ifeq ($(LOCAL_JAVA_LANGUAGE_VERSION),1.8)
-my_desugaring := true
-$(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(full_classes_jar) $(full_java_header_libs) $(DESUGAR)
- $(desugar-classes-jar)
-endif
-else
-my_desugaring :=
-endif
-
-ifndef my_desugaring
-full_classes_desugar_jar := $(full_classes_jar)
-endif
-
ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
# No dex; all we want are the .class files with resources.
$(LOCAL_BUILT_MODULE) : $(java_resource_sources)
@@ -184,40 +166,26 @@ $(LOCAL_BUILT_MODULE) : $(full_classes_jar)
else # !LOCAL_IS_STATIC_JAVA_LIBRARY
$(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
$(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_desugar_jar) $(DX) $(ZIP2ZIP)
-ifneq ($(USE_D8_DESUGAR),true)
+$(built_dex): $(full_classes_jar) $(DX) $(ZIP2ZIP)
$(transform-classes.jar-to-dex)
-else
- $(transform-classes-d8.jar-to-dex)
-endif
$(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(LOCAL_BUILT_MODULE): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
+$(LOCAL_BUILT_MODULE): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
$(LOCAL_BUILT_MODULE): $(built_dex) $(java_resource_sources)
@echo "Host Jar: $(PRIVATE_MODULE) ($@)"
- $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@)
- $(add-dex-to-package)
+ rm -rf $@.parts
+ mkdir -p $@.parts
+ $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+ $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
+ $(MERGE_ZIPS) -j $@ $@.parts/dex.zip $@.parts/res.zip
+ rm -rf $@.parts
endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
-ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
- my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
- my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
-else
- my_default_app_target_sdk := $(DEFAULT_APP_TARGET_SDK)
- my_sdk_version := $(PLATFORM_SDK_VERSION)
-endif
-
-ifdef LOCAL_MIN_SDK_VERSION
- my_min_sdk_version := $(LOCAL_MIN_SDK_VERSION)
-else
- my_min_sdk_version := $(call codename-or-sdk-to-sdk,$(my_default_app_target_sdk))
-endif
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(my_default_app_target_sdk)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(my_sdk_version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(my_min_sdk_version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(call module-sdk-version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(call codename-or-sdk-to-sdk,$(call module-min-sdk-version))
USE_CORE_LIB_BOOTCLASSPATH :=
diff --git a/core/host_executable.mk b/core/host_executable.mk
index 1480c2c0c6..8d1026cdca 100644
--- a/core/host_executable.mk
+++ b/core/host_executable.mk
@@ -11,10 +11,6 @@ my_module_multilib := first
endif
endif
-ifeq ($(LOCAL_NO_FPIE),)
-LOCAL_LDFLAGS += $(HOST_FPIE_FLAGS)
-endif
-
ifeq ($(my_module_multilib),both)
ifneq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
ifeq ($(LOCAL_MODULE_PATH_32)$(LOCAL_MODULE_STEM_32),)
@@ -40,7 +36,6 @@ LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# Build for HOST_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
@@ -50,57 +45,5 @@ endif
LOCAL_2ND_ARCH_VAR_PREFIX :=
endif # HOST_2ND_ARCH
-ifdef HOST_CROSS_OS
-my_prefix := HOST_CROSS_
-LOCAL_HOST_PREFIX := $(my_prefix)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# Build for Windows
-OVERRIDE_BUILT_MODULE_PATH :=
-# we don't want others using the cross compiled version
-saved_LOCAL_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
-saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
-saved_LOCAL_LDFLAGS := $(LOCAL_LDFLAGS)
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-ifeq ($(LOCAL_NO_FPIE),)
-LOCAL_LDFLAGS += $(HOST_CROSS_FPIE_FLAGS)
-endif
-
-include $(BUILD_SYSTEM)/host_executable_internal.mk
-LOCAL_LDFLAGS := $(saved_LOCAL_LDFLAGS)
-LOCAL_BUILT_MODULE := $(saved_LOCAL_BUILT_MODULE)
-LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
-endif
-
-ifdef HOST_CROSS_2ND_ARCH
-LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-OVERRIDE_BUILT_MODULE_PATH :=
-# we don't want others using the cross compiled version
-saved_LOCAL_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
-saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
-saved_LOCAL_LDFLAGS := $(LOCAL_LDFLAGS)
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-ifeq ($(LOCAL_NO_FPIE),)
-LOCAL_LDFLAGS += $(HOST_CROSS_FPIE_FLAGS)
-endif
-
-include $(BUILD_SYSTEM)/host_executable_internal.mk
-LOCAL_LDFLAGS := $(saved_LOCAL_LDFLAGS)
-LOCAL_BUILT_MODULE := $(saved_LOCAL_BUILT_MODULE)
-LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
-endif
-LOCAL_2ND_ARCH_VAR_PREFIX :=
-endif
-LOCAL_HOST_PREFIX :=
-endif
-
LOCAL_NO_2ND_ARCH_MODULE_SUFFIX :=
my_module_arch_supported :=
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index c4f9f66e5b..e72c4192bd 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -33,7 +33,7 @@ $(LOCAL_BUILT_MODULE): PRIVATE_HOST_LIBPROFILE_RT := $(my_host_libprofile_rt)
my_libdir := $(notdir $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))
ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
-$(LOCAL_BUILT_MODULE): PRIVATE_RPATHS := ../../$(my_libdir)
+$(LOCAL_BUILT_MODULE): PRIVATE_RPATHS := ../../$(my_libdir) ../../../$(my_libdir)
else
$(LOCAL_BUILT_MODULE): PRIVATE_RPATHS := ../$(my_libdir) $(my_libdir)
endif
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 750254f594..c8d2ee78c2 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -93,6 +93,7 @@ $(full_classes_compiled_jar): \
javac-check : $(full_classes_compiled_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
+.PHONY: javac-check-$(LOCAL_MODULE)
$(full_classes_combined_jar): $(full_classes_compiled_jar) \
$(jar_manifest_file) \
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 51e2d94f5e..0e62f60d24 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -32,15 +32,15 @@ all_res_assets :=
proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
ifneq ($(proto_sources),)
ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
- LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-micro
+ LOCAL_JAVA_LIBRARIES += libprotobuf-java-micro
else
ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nano)
- LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-nano
+ LOCAL_JAVA_LIBRARIES += libprotobuf-java-nano
else
ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),full)
- LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-full
+ LOCAL_JAVA_LIBRARIES += libprotobuf-java-full
else
- LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-lite
+ LOCAL_JAVA_LIBRARIES += libprotobuf-java-lite
endif
endif
endif
@@ -48,8 +48,3 @@ endif
LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
-
-# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
-ifeq ($(RUN_ERROR_PRONE),true)
-LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
-endif
diff --git a/core/host_shared_library.mk b/core/host_shared_library.mk
index 5da7913273..81236d1a2d 100644
--- a/core/host_shared_library.mk
+++ b/core/host_shared_library.mk
@@ -23,7 +23,6 @@ LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# Build for HOST_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
@@ -33,45 +32,6 @@ endif
LOCAL_2ND_ARCH_VAR_PREFIX :=
endif # HOST_2ND_ARCH
-ifdef HOST_CROSS_OS
-my_prefix := HOST_CROSS_
-LOCAL_HOST_PREFIX := $(my_prefix)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# Build for Windows
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_MODULE_SUFFIX :=
-# We don't want makefiles using the cross-compiled host tool
-saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-include $(BUILD_SYSTEM)/host_shared_library_internal.mk
-LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
-endif
-
-ifdef HOST_CROSS_2ND_ARCH
-LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# Build for HOST_CROSS_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_MODULE_SUFFIX :=
-# We don't want makefiles using the cross-compiled host tool
-saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-include $(BUILD_SYSTEM)/host_shared_library_internal.mk
-LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
-endif
-LOCAL_2ND_ARCH_VAR_PREFIX :=
-endif
-LOCAL_HOST_PREFIX :=
-endif
-
my_module_arch_supported :=
###########################################################
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index 0a3b317012..da2087478d 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -13,9 +13,6 @@ endif
ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),)
LOCAL_MODULE_SUFFIX := $($(my_prefix)SHLIB_SUFFIX)
endif
-ifneq ($(strip $(OVERRIDE_BUILT_MODULE_PATH)),)
-$(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
-endif
ifneq ($(strip $(LOCAL_MODULE_STEM)$(LOCAL_BUILT_MODULE_STEM)),)
$(error $(LOCAL_PATH): Cannot set module stem for a library)
endif
@@ -34,10 +31,6 @@ endif
ifndef skip_build_from_source
-# Put the built modules of all shared libraries in a common directory
-# to simplify the link line.
-OVERRIDE_BUILT_MODULE_PATH := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)
-
include $(BUILD_SYSTEM)/binary.mk
my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
diff --git a/core/host_static_library.mk b/core/host_static_library.mk
index aa0421e5ed..469da29465 100644
--- a/core/host_static_library.mk
+++ b/core/host_static_library.mk
@@ -23,7 +23,6 @@ LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# Build for HOST_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
@@ -33,37 +32,6 @@ endif
LOCAL_2ND_ARCH_VAR_PREFIX :=
endif # HOST_2ND_ARCH
-ifdef HOST_CROSS_OS
-my_prefix := HOST_CROSS_
-LOCAL_HOST_PREFIX := $(my_prefix)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# Build for Windows
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-include $(BUILD_SYSTEM)/host_static_library_internal.mk
-endif
-
-ifdef HOST_CROSS_2ND_ARCH
-LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# Build for HOST_CROSS_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-
-include $(BUILD_SYSTEM)/host_static_library_internal.mk
-endif
-LOCAL_2ND_ARCH_VAR_PREFIX :=
-endif
-LOCAL_HOST_PREFIX :=
-endif
-
my_module_arch_supported :=
###########################################################
diff --git a/core/host_test_internal.mk b/core/host_test_internal.mk
index 42e01e1e4e..dfe8cf1505 100644
--- a/core/host_test_internal.mk
+++ b/core/host_test_internal.mk
@@ -3,7 +3,6 @@
#####################################################
ifeq ($(LOCAL_GTEST),true)
- LOCAL_CFLAGS_windows += -DGTEST_OS_WINDOWS
LOCAL_CFLAGS_linux += -DGTEST_OS_LINUX
LOCAL_CFLAGS_darwin += -DGTEST_OS_MAC
diff --git a/core/install_jni_libs.mk b/core/install_jni_libs.mk
index b7d83dc137..01f7f101ea 100644
--- a/core/install_jni_libs.mk
+++ b/core/install_jni_libs.mk
@@ -18,10 +18,20 @@ endif
ifneq ($(filter tests samples, $(LOCAL_MODULE_TAGS)),)
my_embed_jni := true
endif
-ifeq ($(filter $(TARGET_OUT)/% $(TARGET_OUT_VENDOR)/% $(TARGET_OUT_OEM)/%, $(my_module_path)),)
-# If this app isn't to be installed to system partitions.
-my_embed_jni := true
+
+# If the APK is not installed in one of the following partitions, force its libraries
+# to be embedded inside the APK instead of being installed to /<partition>/lib[64]/.
+supported_partition_patterns := \
+ $(TARGET_OUT)/% \
+ $(TARGET_OUT_VENDOR)/% \
+ $(TARGET_OUT_OEM)/% \
+ $(TARGET_OUT_PRODUCT)/% \
+ $(TARGET_OUT_PRODUCT_SERVICES)/% \
+
+ifeq ($(filter $(supported_partition_patterns),$(my_module_path)),)
+ my_embed_jni := true
endif
+
# If we're installing this APP as a compressed module, we include all JNI libraries
# in the compressed artifact, rather than as separate files on the partition in question.
ifdef LOCAL_COMPRESSED_MODULE
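A standalone sketch of the partition check above; the paths are hypothetical stand-ins for the real TARGET_OUT* values:

TARGET_OUT := out/target/product/generic/system
TARGET_OUT_PRODUCT := out/target/product/generic/product
supported_partition_patterns := $(TARGET_OUT)/% $(TARGET_OUT_PRODUCT)/%
my_module_path := out/target/product/generic/data/app/Example
ifeq ($(filter $(supported_partition_patterns),$(my_module_path)),)
  # A /data install path matches none of the patterns, so the JNI libs get embedded.
  my_embed_jni := true
endif
$(info my_embed_jni=$(my_embed_jni))
# Prints: my_embed_jni=true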
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index a99d88ad7f..a79a49a7f0 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -13,9 +13,8 @@
#
my_jni_shared_libraries := \
- $(addprefix $($(my_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/, \
- $(addsuffix .so, \
- $(LOCAL_JNI_SHARED_LIBRARIES)))
+ $(foreach lib,$(LOCAL_JNI_SHARED_LIBRARIES), \
+ $(call intermediates-dir-for,SHARED_LIBRARIES,$(lib),,,$(my_2nd_arch_prefix))/$(lib).so)
# App-specific lib path.
my_app_lib_path := $(dir $(LOCAL_INSTALLED_MODULE))lib/$(TARGET_$(my_2nd_arch_prefix)ARCH)
@@ -52,11 +51,13 @@ my_jni_filenames := $(notdir $(my_jni_shared_libraries))
my_shared_library_path := $(call get_non_asan_path,\
$($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
# Do not use order-only dependency, because we want to rebuild the image if a jni is updated.
-$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
+my_installed_library := $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
+$(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
+ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
# Create symlink in the app specific lib path
# Skip creating this symlink when running the second part of a target sanitization build.
-ifndef SANITIZE_TARGET
+ifeq ($(filter address,$(SANITIZE_TARGET)),)
ifdef LOCAL_POST_INSTALL_CMD
# Add a shell command separator
LOCAL_POST_INSTALL_CMD += ;
@@ -97,7 +98,9 @@ else # not my_embed_jni
$(foreach lib, $(my_prebuilt_jni_libs), \
$(eval $(call copy-one-file, $(lib), $(my_app_lib_path)/$(notdir $(lib)))))
-$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
+my_installed_library := $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
+$(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
+ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
endif # my_embed_jni
endif # inner my_prebuilt_jni_libs
endif # outer my_prebuilt_jni_libs
@@ -110,12 +113,12 @@ my_link_type := app:sdk
my_warn_types := native:platform $(my_warn_ndk_types)
my_allowed_types := $(my_allowed_ndk_types)
ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
- my_allowed_types += native:vendor native:vndk
+ my_allowed_types += native:vendor native:vndk native:platform_vndk
endif
else
my_link_type := app:platform
my_warn_types := $(my_warn_ndk_types)
-my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private
+my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private native:platform_vndk
endif
my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
diff --git a/core/instrumentation_test_config_template.xml b/core/instrumentation_test_config_template.xml
index 18ea676af0..afaa561e7e 100644
--- a/core/instrumentation_test_config_template.xml
+++ b/core/instrumentation_test_config_template.xml
@@ -22,6 +22,8 @@
<option name="test-file-name" value="{MODULE}.apk" />
</target_preparer>
+ {EXTRA_CONFIGS}
+
<test class="com.android.tradefed.testtype.{TEST_TYPE}" >
<option name="package" value="{PACKAGE}" />
<option name="runner" value="{RUNNER}" />
diff --git a/core/jacoco.mk b/core/jacoco.mk
index 6406df45a3..148bb045ec 100644
--- a/core/jacoco.mk
+++ b/core/jacoco.mk
@@ -51,7 +51,7 @@ $(my_unzipped_timestamp_path): $(LOCAL_FULL_CLASSES_PRE_JACOCO_JAR)
-d $(PRIVATE_UNZIPPED_PATH) \
$(PRIVATE_INCLUDE_ARGS)
(cd $(PRIVATE_UNZIPPED_PATH) && rm -rf $(PRIVATE_EXCLUDE_ARGS))
- (cd $(PRIVATE_UNZIPPED_PATH) && find -not -name "*.class" -type f | xargs --no-run-if-empty rm)
+ (cd $(PRIVATE_UNZIPPED_PATH) && find -not -name "*.class" -type f -exec rm {} \;)
touch $(PRIVATE_UNZIPPED_TIMESTAMP_PATH)
# Unfortunately in the previous task above,
# 'rm -rf $(PRIVATE_EXCLUDE_ARGS)' needs to be a separate
diff --git a/core/java.mk b/core/java.mk
index 914784948e..1533963703 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -68,16 +68,12 @@ full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
full_classes_processed_jar := $(intermediates.COMMON)/classes-processed.jar
-full_classes_desugar_jar := $(intermediates.COMMON)/classes-desugar.jar
full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
-full_classes_proguard_jar := $(intermediates.COMMON)/classes-proguard.jar
full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
built_dex_intermediate := $(intermediates.COMMON)/dex/classes.dex
-built_dex_hiddenapi := $(intermediates.COMMON)/dex-hiddenapi/classes.dex
full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
java_source_list_file := $(intermediates.COMMON)/java-source-list
-
ifeq ($(LOCAL_MODULE_CLASS)$(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),APPS)
# If this is an apk without any Java code (e.g. framework-res), we should skip compiling Java.
full_classes_jar :=
@@ -90,11 +86,9 @@ endif
LOCAL_INTERMEDIATE_TARGETS += \
$(full_classes_turbine_jar) \
$(full_classes_compiled_jar) \
- $(full_classes_desugar_jar) \
$(full_classes_jarjar_jar) \
$(full_classes_jar) \
$(full_classes_combined_jar) \
- $(full_classes_proguard_jar) \
$(built_dex_intermediate) \
$(built_dex) \
$(full_classes_stubs_jar) \
@@ -112,11 +106,11 @@ ifneq ($(strip $(aidl_sources)),)
aidl_preprocess_import :=
ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
+ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)),)
# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
- aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
+ aidl_preprocess_import := $(FRAMEWORK_AIDL)
else
- aidl_preprocess_import := $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_VERSION)/framework.aidl
+ aidl_preprocess_import := $(call resolve-prebuilt-sdk-aidl-path,$(LOCAL_SDK_VERSION))
endif # not current or system_current
else
# build against the platform.
@@ -130,7 +124,7 @@ $(foreach java,$(aidl_java_sources), \
$(aidl_java_sources) : $(LOCAL_ADDITIONAL_DEPENDENCIES) $(aidl_preprocess_import)
-$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
+$(aidl_java_sources): PRIVATE_AIDL_FLAGS := $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
$(aidl_java_sources): PRIVATE_MODULE := $(LOCAL_MODULE)
endif
@@ -172,6 +166,7 @@ java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES)))
$(filter %.java,$(LOCAL_GENERATED_SOURCES))
java_intermediate_sources := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
all_java_sources := $(java_sources) $(java_intermediate_sources)
+ALL_MODULES.$(my_register_name).SRCS := $(ALL_MODULES.$(my_register_name).SRCS) $(all_java_sources)
include $(BUILD_SYSTEM)/java_common.mk
@@ -225,11 +220,6 @@ $(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE
# Deps for generated source files must be handled separately,
# via deps on the target that generates the sources.
-# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
-ifeq ($(RUN_ERROR_PRONE),true)
-LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
-endif
-
# For user / userdebug builds, strip the local variable table and the local variable
# type table. This has no bearing on stack traces, but will leave less information
# available via JDWP.
@@ -311,6 +301,7 @@ $(full_classes_compiled_jar): \
javac-check : $(full_classes_compiled_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
+.PHONY: javac-check-$(LOCAL_MODULE)
$(full_classes_combined_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
$(full_classes_combined_jar): $(full_classes_compiled_jar) \
@@ -372,23 +363,7 @@ ifdef TARGET_OPENJDK9
LOCAL_DX_FLAGS := $(filter-out --multi-dex,$(LOCAL_DX_FLAGS)) --multi-dex
endif
-ifneq ($(USE_D8_DESUGAR),true)
-my_desugaring :=
-ifndef LOCAL_IS_STATIC_JAVA_LIBRARY
-my_desugaring := true
-$(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(LOCAL_FULL_CLASSES_JACOCO_JAR) $(full_java_header_libs) $(DESUGAR)
- $(desugar-classes-jar)
-endif
-else
-my_desugaring :=
-endif
-
-ifndef my_desugaring
-full_classes_desugar_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
-endif
-
-full_classes_pre_proguard_jar := $(full_classes_desugar_jar)
+full_classes_pre_proguard_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
# Keep a copy of the jar just before proguard processing.
$(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(intermediates.COMMON)/classes-pre-proguard.jar))
@@ -400,9 +375,10 @@ ifneq ($(filter-out full custom obfuscation optimization,$(LOCAL_PROGUARD_ENABLE
$(error invalid value for LOCAL_PROGUARD_ENABLED: $(LOCAL_PROGUARD_ENABLED))
endif
proguard_dictionary := $(intermediates.COMMON)/proguard_dictionary
+proguard_configuration := $(intermediates.COMMON)/proguard_configuration
# When an app contains references to APIs that are not in the SDK specified by
-# its LOCAL_SDK_VERSION for example added by support library or by runtime
+# its LOCAL_SDK_VERSION for example added by support library or by runtime
# classes added by desugar, we artifically raise the "SDK version" "linked" by
# ProGuard, to
# - suppress ProGuard warnings of referencing symbols unknown to the lower SDK version.
@@ -412,21 +388,23 @@ my_proguard_sdk_raise :=
ifdef LOCAL_SDK_VERSION
ifdef TARGET_BUILD_APPS
ifeq (,$(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
- my_proguard_sdk_raise := $(call java-lib-header-files, sdk_vcurrent)
+ my_proguard_sdk_raise := $(call java-lib-header-files, $(call resolve-prebuilt-sdk-module,current))
endif
else
# For platform build, we can't just raise to the "current" SDK,
# that would break apps that use APIs removed from the current SDK.
my_proguard_sdk_raise := $(call java-lib-header-files,$(TARGET_DEFAULT_BOOTCLASSPATH_LIBRARIES) $(TARGET_DEFAULT_JAVA_LIBRARIES))
endif
+ifdef BOARD_SYSTEMSDK_VERSIONS
+ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+  # But for vendor or odm APKs, don't raise the SDK, as those APKs are required
+  # to use SDK APIs only.
+ my_proguard_sdk_raise :=
endif
-
-ifeq ($(USE_R8),true)
-proguard_jars_prefix := -libraryjars
-else
-proguard_jars_prefix := -systemjars
endif
-legacy_proguard_flags := $(addprefix $(proguard_jars_prefix) ,$(my_proguard_sdk_raise) \
+endif
+
+legacy_proguard_flags := $(addprefix -libraryjars ,$(my_proguard_sdk_raise) \
$(filter-out $(my_proguard_sdk_raise), \
$(full_java_bootclasspath_libs) \
$(full_shared_java_header_libs)))
@@ -435,6 +413,7 @@ legacy_proguard_lib_deps := $(my_proguard_sdk_raise) \
$(filter-out $(my_proguard_sdk_raise),$(full_shared_java_header_libs))
legacy_proguard_flags += -printmapping $(proguard_dictionary)
+legacy_proguard_flags += -printconfiguration $(proguard_configuration)
common_proguard_flags := -forceprocessing
@@ -443,7 +422,7 @@ ifneq ($(LOCAL_INSTRUMENTATION_FOR)$(filter tests,$(LOCAL_MODULE_TAGS)),)
common_proguard_flags += -dontshrink # don't shrink tests by default
endif # test package
ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
- ifdef LOCAL_USE_AAPT2
+ ifeq ($(LOCAL_USE_AAPT2),true)
common_proguard_flag_files += $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags)
endif
@@ -479,21 +458,20 @@ legacy_proguard_flags := -injars $(link_instr_classes_jar) \
-applymapping $(link_instr_intermediates_dir.COMMON)/proguard_dictionary \
-verbose \
$(legacy_proguard_flags)
+legacy_proguard_lib_deps += \
+ $(link_instr_classes_jar) \
+ $(link_instr_intermediates_dir.COMMON)/proguard_options \
+ $(link_instr_intermediates_dir.COMMON)/proguard_dictionary \
# Sometimes (test + main app) uses different keep rules from the main app -
# apply the main app's dictionary anyway.
legacy_proguard_flags += -ignorewarnings
-# Make sure we run Proguard on the main app first
-$(full_classes_proguard_jar) : $(link_instr_intermediates_dir.COMMON)/proguard.classes.jar
-
endif # no obfuscation
endif # LOCAL_INSTRUMENTATION_FOR
proguard_flag_files := $(addprefix $(LOCAL_PATH)/, $(LOCAL_PROGUARD_FLAG_FILES))
-ifeq ($(USE_R8),true)
proguard_flag_files += $(addprefix $(LOCAL_PATH)/, $(LOCAL_R8_FLAG_FILES))
-endif # USE_R8
LOCAL_PROGUARD_FLAGS += $(addprefix -include , $(proguard_flag_files))
ifdef LOCAL_TEST_MODULE_TO_PROGUARD_WITH
@@ -503,65 +481,34 @@ extra_input_jar :=
endif
ifneq ($(filter obfuscation,$(LOCAL_PROGUARD_ENABLED)),)
-ifneq ($(USE_R8),true)
- $(full_classes_proguard_jar): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary)
-else
- $(built_dex_intermediate): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary)
-endif
+ $(built_dex_intermediate): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary) $(proguard_configuration)
endif
-# If R8 is not enabled run Proguard.
-ifneq ($(USE_R8),true)
-# Changes to these dependencies need to be replicated below when using R8
-# instead of Proguard + dx.
-$(full_classes_proguard_jar): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
-$(full_classes_proguard_jar): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
-$(full_classes_proguard_jar) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) $(legacy_proguard_lib_deps) | $(PROGUARD)
- $(call transform-jar-to-proguard)
-else # !USE_R8
-# Running R8 instead of Proguard, proguarded jar is actually the pre-Proguarded jar.
-full_classes_proguard_jar := $(full_classes_pre_proguard_jar)
-endif # !USE_R8
-
else # LOCAL_PROGUARD_ENABLED not defined
proguard_flag_files :=
-full_classes_proguard_jar := $(full_classes_pre_proguard_jar)
endif # LOCAL_PROGUARD_ENABLED defined
ifneq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
$(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-my_r8 :=
ifdef LOCAL_PROGUARD_ENABLED
-ifeq ($(USE_R8),true)
-# These are the dependencies for the proguarded jar when running
-# Proguard + dx. They are used for the generated dex when using R8, as
-# R8 does Proguard + dx
-my_r8 := true
-$(built_dex_intermediate): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
-$(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
-$(built_dex_intermediate) : $(full_classes_proguard_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD)
+ $(built_dex_intermediate): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
+ $(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
+ $(built_dex_intermediate): PRIVATE_PROGUARD_DICTIONARY := $(proguard_dictionary)
+ $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD)
$(transform-jar-to-dex-r8)
-endif # USE_R8
-endif # LOCAL_PROGUARD_ENABLED
-
-ifndef my_r8
-$(built_dex_intermediate): $(full_classes_proguard_jar) $(DX) $(ZIP2ZIP)
-ifneq ($(USE_D8_DESUGAR),true)
+else # !LOCAL_PROGUARD_ENABLED
+ $(built_dex_intermediate): PRIVATE_D8_LIBS := $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
+ $(built_dex_intermediate): $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
+ $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(DX) $(ZIP2ZIP)
$(transform-classes.jar-to-dex)
-else
- $(transform-classes-d8.jar-to-dex)
-endif
endif
-ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
- $(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))
- built_dex_copy_from := $(built_dex_hiddenapi)
-else # !is_boot_jar
- built_dex_copy_from := $(built_dex_intermediate)
-endif # is_boot_jar
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)
+ $(call pretty-error,Modules in PRODUCT_BOOT_JARS must be defined in Android.bp files)
+endif
-$(built_dex): $(built_dex_copy_from)
+$(built_dex): $(built_dex_intermediate)
@echo Copying: $@
$(hide) mkdir -p $(dir $@)
$(hide) rm -f $(dir $@)/classes*.dex
@@ -587,6 +534,7 @@ ALL_FINDBUGS_FILES += $(findbugs_xml)
findbugs_html := $(PRODUCT_OUT)/findbugs/$(LOCAL_MODULE).html
$(findbugs_html) : PRIVATE_XML_FILE := $(findbugs_xml)
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
+.PHONY: $(LOCAL_MODULE)-findbugs
$(findbugs_html) : $(findbugs_xml)
@mkdir -p $(dir $@)
@echo ConvertXmlToText: $@
@@ -597,20 +545,6 @@ $(LOCAL_MODULE)-findbugs : $(findbugs_html)
endif # full_classes_jar is defined
-ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
- my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
- my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
-else
- my_default_app_target_sdk := $(DEFAULT_APP_TARGET_SDK)
- my_sdk_version := $(PLATFORM_SDK_VERSION)
-endif
-
-ifdef LOCAL_MIN_SDK_VERSION
- my_min_sdk_version := $(LOCAL_MIN_SDK_VERSION)
-else
- my_min_sdk_version := $(call codename-or-sdk-to-sdk,$(my_default_app_target_sdk))
-endif
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(my_default_app_target_sdk)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(my_sdk_version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(my_min_sdk_version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(call module-sdk-version)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(call codename-or-sdk-to-sdk,$(call module-min-sdk-version))
diff --git a/core/java_common.mk b/core/java_common.mk
index 8f4611eed5..f5da120703 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -6,6 +6,10 @@ ifneq ($(filter ../%,$(LOCAL_SRC_FILES)),)
my_soong_problems += dotdot_srcs
endif
+ifneq (,$(LOCAL_JNI_SHARED_LIBRARIES))
+my_soong_problems += jni_libs
+endif
+
###########################################################
## Java version
###########################################################
@@ -25,7 +29,7 @@ ifeq (,$(LOCAL_JAVA_LANGUAGE_VERSION))
LOCAL_JAVA_LANGUAGE_VERSION := 1.7
else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT)))
LOCAL_JAVA_LANGUAGE_VERSION := 1.8
- else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS))
+ else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
# TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
LOCAL_JAVA_LANGUAGE_VERSION := 1.8
else
@@ -76,6 +80,7 @@ proto_java_sources_dir := $(proto_java_intemediate_dir)/src
$(proto_java_sources_file_stamp): PRIVATE_PROTO_INCLUDES := $(TOP)
$(proto_java_sources_file_stamp): PRIVATE_PROTO_SRC_FILES := $(proto_sources_fullpath)
$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_DIR := $(proto_java_sources_dir)
+$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javamicro_out
else
@@ -84,13 +89,13 @@ $(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javanano
else
ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),stream)
$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javastream_out
+$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS += --plugin=$(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
$(proto_java_sources_file_stamp): $(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
else
$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --java_out
endif
endif
endif
-$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(if $(filter lite,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite$(if $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS),:,),)$(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
$(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
$(call transform-proto-to-java)
@@ -193,16 +198,25 @@ ifdef need_compile_java
annotation_processor_flags :=
annotation_processor_deps :=
+annotation_processor_jars :=
+
+# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
+ifeq ($(RUN_ERROR_PRONE),true)
+annotation_processor_jars += $(ERROR_PRONE_JARS)
+LOCAL_JAVACFLAGS += $(ERROR_PRONE_FLAGS)
+LOCAL_JAVACFLAGS += '-Xplugin:ErrorProne $(ERROR_PRONE_CHECKS) $(LOCAL_ERROR_PRONE_FLAGS)'
+endif
ifdef LOCAL_ANNOTATION_PROCESSORS
- annotation_processor_jars := $(call java-lib-files,$(LOCAL_ANNOTATION_PROCESSORS),true)
- annotation_processor_flags += -processorpath $(call normalize-path-list,$(annotation_processor_jars))
- annotation_processor_deps += $(annotation_processor_jars)
+ annotation_processor_jars += $(call java-lib-files,$(LOCAL_ANNOTATION_PROCESSORS),true)
# b/25860419: annotation processors must be explicitly specified for grok
annotation_processor_flags += $(foreach class,$(LOCAL_ANNOTATION_PROCESSOR_CLASSES),-processor $(class))
+endif
- annotation_processor_jars :=
+ifneq (,$(strip $(annotation_processor_jars)))
+annotation_processor_flags += -processorpath $(call normalize-path-list,$(annotation_processor_jars))
+annotation_processor_deps += $(annotation_processor_jars)
endif
full_static_java_libs := $(call java-lib-files,$(LOCAL_STATIC_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
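For reference, a standalone sketch of how the -processorpath argument ends up looking; normalize-path-list (defined in the build system) is re-implemented here under the assumption that it joins a space-separated list with ':', and the jar paths are hypothetical:

empty :=
space := $(empty) $(empty)
normalize-path-list = $(subst $(space),:,$(strip $(1)))
annotation_processor_jars := out/host/common/error_prone.jar out/host/common/my_processor.jar
annotation_processor_flags := -processorpath $(call normalize-path-list,$(annotation_processor_jars))
$(info $(annotation_processor_flags))
# Prints: -processorpath out/host/common/error_prone.jar:out/host/common/my_processor.jar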
@@ -225,11 +239,29 @@ $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCE_LIST := $(java_source_list_fi
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
+# Sanity check class path vars.
+disallowed_deps := $(foreach sdk,$(TARGET_AVAILABLE_SDK_VERSIONS),$(call resolve-prebuilt-sdk-module,$(sdk)))
+disallowed_deps += $(foreach sdk,$(TARGET_AVAILABLE_SDK_VERSIONS),\
+ $(foreach sdk_lib,$(JAVA_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,$(sdk),$(sdk_lib))))
+bad_deps := $(filter $(disallowed_deps),$(LOCAL_JAVA_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES))
+ifneq (,$(bad_deps))
+ $(call pretty-error,SDK modules should not be depended on directly. Please use LOCAL_SDK_VERSION for $(bad_deps))
+endif
+
full_java_bootclasspath_libs :=
empty_bootclasspath :=
my_system_modules :=
+exported_sdk_libs_files :=
+my_exported_sdk_libs_file :=
ifndef LOCAL_IS_HOST_MODULE
+ sdk_libs :=
+
+ # When an sdk lib name is listed in LOCAL_JAVA_LIBRARIES, move it to LOCAL_SDK_LIBRARIES, so that
+ # it is correctly redirected to the stubs library.
+ LOCAL_SDK_LIBRARIES += $(filter $(JAVA_SDK_LIBRARIES),$(LOCAL_JAVA_LIBRARIES))
+ LOCAL_JAVA_LIBRARIES := $(filter-out $(JAVA_SDK_LIBRARIES),$(LOCAL_JAVA_LIBRARIES))
+
ifeq ($(LOCAL_SDK_VERSION),)
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
# No bootclasspath. But we still need "" to prevent javac from using default host bootclasspath.
@@ -243,6 +275,13 @@ ifndef LOCAL_IS_HOST_MODULE
LOCAL_JAVA_LIBRARIES := $(filter-out $(TARGET_DEFAULT_BOOTCLASSPATH_LIBRARIES) $(TARGET_DEFAULT_JAVA_LIBRARIES),$(LOCAL_JAVA_LIBRARIES))
my_system_modules := $(DEFAULT_SYSTEM_MODULES)
endif # LOCAL_NO_STANDARD_LIBRARIES
+
+ ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
+ sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,system_current,$(lib_name)))
+ else
+    # When SDK libraries are referenced from modules built without an SDK, provide all the APIs to them
+ sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name))
+ endif
else
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
$(call pretty-error,Must not define both LOCAL_NO_STANDARD_LIBRARIES and LOCAL_SDK_VERSION)
@@ -251,22 +290,30 @@ ifndef LOCAL_IS_HOST_MODULE
$(call pretty-error,Invalid LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)' \
Choices are: $(TARGET_AVAILABLE_SDK_VERSIONS))
endif
- ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
- # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
- full_java_bootclasspath_libs := $(call java-lib-header-files,android_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
- full_java_bootclasspath_libs := $(call java-lib-header-files,android_system_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
- full_java_bootclasspath_libs := $(call java-lib-header-files,android_test_stubs_current)
- else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
- full_java_bootclasspath_libs := $(call java-lib-header-files,core.current.stubs)
+
+ ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)$(filter-out %current,$(LOCAL_SDK_VERSION)))
+ # TARGET_BUILD_APPS mode or numbered SDK. Use prebuilt modules.
+ sdk_module := $(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION))
+ sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION),$(lib_name)))
else
- # core_<ver> is subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
- # use the stub for <ver> when building for apps.
- _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
- full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(_version))
- _version :=
- endif # current, system_current, system_${VER}, test_current or core_current
+ # Note: the lib naming scheme must be kept in sync with build/soong/java/sdk_library.go.
+ sdk_lib_suffix = $(call pretty-error,sdk_lib_suffix was not set correctly)
+ ifeq (current,$(LOCAL_SDK_VERSION))
+ sdk_module := android_stubs_current
+ sdk_lib_suffix := .stubs
+ else ifeq (system_current,$(LOCAL_SDK_VERSION))
+ sdk_module := android_system_stubs_current
+ sdk_lib_suffix := .stubs.system
+ else ifeq (test_current,$(LOCAL_SDK_VERSION))
+ sdk_module := android_test_stubs_current
+ sdk_lib_suffix := .stubs.test
+ else ifeq (core_current,$(LOCAL_SDK_VERSION))
+ sdk_module := core.current.stubs
+ sdk_lib_suffix = $(call pretty-error,LOCAL_SDK_LIBRARIES not supported for LOCAL_SDK_VERSION = core_current)
+ endif
+ sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name)$(sdk_lib_suffix))
+ endif
+ full_java_bootclasspath_libs := $(call java-lib-header-files,$(sdk_module))
endif # LOCAL_SDK_VERSION
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
@@ -286,15 +333,22 @@ ifndef LOCAL_IS_HOST_MODULE
# related classes to be present. This change adds stubs needed for
# javac to compile lambdas.
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
- ifdef TARGET_BUILD_APPS
+ ifdef TARGET_BUILD_APPS_USE_PREBUILT_SDK
full_java_bootclasspath_libs += $(call java-lib-header-files,sdk-core-lambda-stubs)
else
full_java_bootclasspath_libs += $(call java-lib-header-files,core-lambda-stubs)
endif
endif
+ full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES) $(sdk_libs),$(LOCAL_IS_HOST_MODULE))
+ full_shared_java_header_libs := $(call java-lib-header-files,$(LOCAL_JAVA_LIBRARIES) $(sdk_libs),$(LOCAL_IS_HOST_MODULE))
+ sdk_libs :=
- full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
- full_shared_java_header_libs := $(call java-lib-header-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+  # Files that contain the names of SDK libraries exported from dependencies. These will be re-exported.
+ # Note: No need to consider LOCAL_*_ANDROID_LIBRARIES and LOCAL_STATIC_JAVA_AAR_LIBRARIES. They are all appended to
+ # LOCAL_*_JAVA_LIBRARIES in java.mk
+ exported_sdk_libs_files := $(call exported-sdk-libs-files,$(LOCAL_JAVA_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES))
+ # The file that contains the names of all SDK libraries that this module exports and re-exports
+ my_exported_sdk_libs_file := $(call local-intermediates-dir,COMMON)/exported-sdk-libs
else # LOCAL_IS_HOST_MODULE
@@ -332,6 +386,23 @@ else # LOCAL_IS_HOST_MODULE
endif # USE_CORE_LIB_BOOTCLASSPATH
endif # !LOCAL_IS_HOST_MODULE
+ALL_DEPS.$(LOCAL_MODULE).ALL_DEPS := $(ALL_DEPS.$(LOCAL_MODULE).ALL_DEPS) $(full_java_bootclasspath_libs)
+
+# Export the SDK libs. The SDK library names listed in LOCAL_SDK_LIBRARIES are exported first.
+# Then the SDK library names exported from dependencies are all re-exported.
+$(my_exported_sdk_libs_file): PRIVATE_EXPORTED_SDK_LIBS_FILES := $(exported_sdk_libs_files)
+$(my_exported_sdk_libs_file): PRIVATE_SDK_LIBS := $(sort $(LOCAL_SDK_LIBRARIES))
+$(my_exported_sdk_libs_file): $(exported_sdk_libs_files)
+ @echo "Export SDK libs $@"
+ $(hide) mkdir -p $(dir $@) && rm -f $@ $@.temp
+ $(if $(PRIVATE_SDK_LIBS),\
+ echo $(PRIVATE_SDK_LIBS) | tr ' ' '\n' > $@.temp,\
+ touch $@.temp)
+ $(if $(PRIVATE_EXPORTED_SDK_LIBS_FILES),\
+ cat $(PRIVATE_EXPORTED_SDK_LIBS_FILES) >> $@.temp)
+ $(hide) cat $@.temp | sort -u > $@
+ $(hide) rm -f $@.temp
+
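As a sketch of the file this rule writes (module and library names are invented): for a module with LOCAL_SDK_LIBRARIES := foo.sdklib whose only Java dependency exports bar.sdklib, the sorted, de-duplicated exported-sdk-libs file would contain:

    bar.sdklib
    foo.sdklib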
ifdef empty_bootclasspath
ifdef full_java_bootclasspath_libs
$(call pretty-error,internal error: empty_bootclasspath and full_java_bootclasspath_libs should not both be set)
@@ -429,7 +500,7 @@ ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR := \
ifeq ($(ONE_SHOT_MAKEFILE),)
installed_static_library_notice_file_targets := \
$(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST,TARGET)-JAVA_LIBRARIES-$(lib))
+ NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-JAVA_LIBRARIES-$(lib))
else
installed_static_library_notice_file_targets :=
endif
@@ -466,7 +537,7 @@ endif
ifdef LOCAL_AAPT2_ONLY
my_link_type += aapt2_only
endif
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
my_allowed_types += aapt2_only
endif
diff --git a/core/java_host_test_config_template.xml b/core/java_host_test_config_template.xml
new file mode 100644
index 0000000000..26c1cafa32
--- /dev/null
+++ b/core/java_host_test_config_template.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-junit" />
+
+ {EXTRA_CONFIGS}
+
+ <test class="com.android.tradefed.testtype.HostTest" >
+ <option name="jar" value="{MODULE}.jar" />
+ </test>
+</configuration>
diff --git a/core/java_library.mk b/core/java_library.mk
index 1b914f5ba3..c706cea724 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -50,6 +50,8 @@ else
LOCAL_EMMA_INSTRUMENT := false
endif # EMMA_INSTRUMENT
+my_dex_jar := $(common_javalib.jar)
+
#################################
include $(BUILD_SYSTEM)/java.mk
#################################
@@ -57,19 +59,22 @@ include $(BUILD_SYSTEM)/java.mk
ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
# There are some dependencies outside the build system that assume classes.jar
# is available as javalib.jar so copy it there too.
-$(eval $(call copy-one-file,$(full_classes_proguard_jar),$(common_javalib.jar)))
+$(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(common_javalib.jar)))
-$(eval $(call copy-one-file,$(full_classes_proguard_jar),$(LOCAL_BUILT_MODULE)))
+$(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(LOCAL_BUILT_MODULE)))
else # !LOCAL_IS_STATIC_JAVA_LIBRARY
$(common_javalib.jar): PRIVATE_DEX_FILE := $(built_dex)
$(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
-$(common_javalib.jar): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
+$(common_javalib.jar): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
$(common_javalib.jar) : $(built_dex) $(java_resource_sources) | $(ZIPTIME) $(ZIPALIGN)
@echo "target Jar: $(PRIVATE_MODULE) ($@)"
- $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@.tmp)
- $(call add-dex-to-package-arg,$@.tmp)
+ rm -rf $@.parts && mkdir -p $@.parts
+ $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+ $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
+ $(MERGE_ZIPS) -j $@.tmp $@.parts/dex.zip $@.parts/res.zip
+ rm -rf $@.parts
$(hide) $(ZIPTIME) $@.tmp
$(call commit-change-for-toc,$@)
ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
@@ -80,22 +85,13 @@ endif # LOCAL_UNCOMPRESS_DEX
.KATI_RESTAT: $(common_javalib.jar)
ifdef LOCAL_DEX_PREOPT
-ifneq ($(dexpreopt_boot_jar_module),) # boot jar
-# boot jar's rules are defined in dex_preopt.mk
-dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
-$(eval $(call copy-one-file,$(dexpreopted_boot_jar),$(LOCAL_BUILT_MODULE)))
-
-# For libart boot jars, we don't have .odex files.
-else # ! boot jar
-$(built_odex): PRIVATE_MODULE := $(LOCAL_MODULE)
-# Use pattern rule - we may have multiple built odex files.
-$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(common_javalib.jar)
- @echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
- $(call dexpreopt-one-file,$<,$@)
-
-$(eval $(call dexpreopt-copy-jar,$(common_javalib.jar),$(LOCAL_BUILT_MODULE),$(LOCAL_DEX_PREOPT)))
-
-endif # ! boot jar
+
+$(LOCAL_BUILT_MODULE): PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
+$(LOCAL_BUILT_MODULE): $(intermediates)/strip.sh
+$(LOCAL_BUILT_MODULE): | $(DEXPREOPT_STRIP_DEPS)
+$(LOCAL_BUILT_MODULE): .KATI_DEPFILE := $(LOCAL_BUILT_MODULE).d
+$(LOCAL_BUILT_MODULE): $(common_javalib.jar)
+ $(PRIVATE_STRIP_SCRIPT) $< $@
else # LOCAL_DEX_PREOPT
$(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index 191b3be6ae..3fe0d0e939 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -75,15 +75,15 @@ $(rs_generated_src_jar): PRIVATE_RS_CC := $(LOCAL_RENDERSCRIPT_CC)
$(rs_generated_src_jar): PRIVATE_RS_FLAGS := $(renderscript_flags)
$(rs_generated_src_jar): PRIVATE_RS_SOURCE_FILES := $(renderscript_sources_fullpath)
$(rs_generated_src_jar): PRIVATE_RS_OUTPUT_DIR := $(renderscript_intermediate.COMMON)
-$(rs_generated_src_jar): PRIVATE_RS_TARGET_API := $(renderscript_target_api)
+$(rs_generated_src_jar): PRIVATE_RS_TARGET_API := $(patsubst current,0,$(renderscript_target_api))
$(rs_generated_src_jar): PRIVATE_DEP_FILES := $(bc_dep_files)
$(rs_generated_src_jar): PRIVATE_RS_OUTPUT_RES_ZIP := $(rs_generated_res_zip)
$(rs_generated_src_jar): .KATI_IMPLICIT_OUTPUTS := $(rs_generated_res_zip)
$(rs_generated_src_jar): $(renderscript_sources_fullpath) $(LOCAL_RENDERSCRIPT_CC) $(SOONG_ZIP)
$(transform-renderscripts-to-java-and-bc)
-# include the dependency files (.d/.P) generated by llvm-rs-cc.
-$(call include-depfile,$(rs_generated_src_jar).P,$(rs_generated_src_jar))
+# include the dependency files (.d) generated by llvm-rs-cc.
+$(call include-depfile,$(rs_generated_src_jar).d,$(rs_generated_src_jar))
ifneq ($(LOCAL_RENDERSCRIPT_COMPATIBILITY),)
@@ -107,7 +107,7 @@ renderscript_intermediate := $(intermediates)/renderscript
# Prevent these from showing up on the device
# One exception is librsjni.so, which is needed for
# both native path and compat path.
-rs_jni_lib := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/librsjni.so
+rs_jni_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,librsjni.so)/librsjni.so
LOCAL_JNI_SHARED_LIBRARIES += librsjni
ifneq (,$(TARGET_BUILD_APPS)$(FORCE_BUILD_RS_COMPAT))
@@ -118,13 +118,13 @@ rs_compatibility_jni_libs := $(addprefix \
$(rs_generated_src_jar): .KATI_IMPLICIT_OUTPUTS += $(rs_generated_bc)
-rs_support_lib := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/libRSSupport.so
+rs_support_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupport)/libRSSupport.so
LOCAL_JNI_SHARED_LIBRARIES += libRSSupport
rs_support_io_lib :=
# check if the target api level support USAGE_IO
ifeq ($(filter $(RSCOMPAT_NO_USAGEIO_API_LEVELS),$(renderscript_target_api)),)
-rs_support_io_lib := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/libRSSupportIO.so
+rs_support_io_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupportIO)/libRSSupportIO.so
LOCAL_JNI_SHARED_LIBRARIES += libRSSupportIO
endif
@@ -138,7 +138,8 @@ endif
$(rs_compatibility_jni_libs): $(RS_PREBUILT_CLCORE) \
$(rs_support_lib) $(rs_support_io_lib) $(rs_jni_lib) $(rs_compiler_rt)
$(rs_compatibility_jni_libs): $(BCC_COMPAT)
-$(rs_compatibility_jni_libs): PRIVATE_CXX := $(CXX_WRAPPER) $(TARGET_CXX)
+$(rs_compatibility_jni_libs): PRIVATE_CXX := $(CXX_WRAPPER) $(CLANG_CXX)
+$(rs_compatibility_jni_libs): PRIVATE_CXX_LINK := $(CLANG_CXX)
$(rs_compatibility_jni_libs): PRIVATE_SDK_VERSION := $(my_min_sdk_version)
$(rs_compatibility_jni_libs): $(renderscript_intermediate)/librs.%.so: \
$(renderscript_intermediate.bc_folder)%.bc \
diff --git a/core/java_test_config_template.xml b/core/java_test_config_template.xml
new file mode 100644
index 0000000000..811cf93540
--- /dev/null
+++ b/core/java_test_config_template.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}.">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-junit" />
+ <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
+ <option name="cleanup" value="true" />
+ <option name="push" value="cts-dalvik-device-test-runner.jar->/data/local/tmp/{MODULE}/cts-dalvik-device-test-runner.jar" />
+ <option name="push" value="{MODULE}.jar->/data/local/tmp/{MODULE}/{MODULE}.jar" />
+ </target_preparer>
+
+ {EXTRA_CONFIGS}
+
+ <test class="com.android.compatibility.testtype.DalvikTest" >
+ <option name="run-name" value="{MODULE}" />
+ <option name="classpath" value="/data/local/tmp/{MODULE}/{MODULE}.jar" />
+ <option name="classpath" value="/data/local/tmp/{MODULE}/cts-dalvik-device-test-runner.jar" />
+ </test>
+</configuration>
diff --git a/core/jetifier.mk b/core/jetifier.mk
index 33a4624a31..fff4230860 100644
--- a/core/jetifier.mk
+++ b/core/jetifier.mk
@@ -24,7 +24,7 @@ ifeq ($(strip $(LOCAL_JETIFIER_ENABLED)),true)
$(my_jetifier_output_path) : $(my_jetifier_input_path) $(JETIFIER)
rm -rf $@
- $(JETIFIER) -outputfile $@ -i $<
+ $(JETIFIER) -l error -o $@ -i $<
LOCAL_JETIFIER_OUTPUT_FILE := $(my_jetifier_output_path)
LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_JETIFIER_OUTPUT_FILE)
diff --git a/core/local_systemsdk.mk b/core/local_systemsdk.mk
index 49085fd0d8..6c022f2daf 100644
--- a/core/local_systemsdk.mk
+++ b/core/local_systemsdk.mk
@@ -25,9 +25,8 @@ ifdef BOARD_SYSTEMSDK_VERSIONS
ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
ifndef LOCAL_SDK_VERSION
ifeq ($(_is_vendor_app),true)
- ifeq (,$(findstring __auto_generated_rro,$(LOCAL_MODULE)))
- # Runtime resource overlay for framework-res is exempted from building
- # against System SDK.
+ ifeq (,$(filter %__auto_generated_rro_vendor,$(LOCAL_MODULE)))
+ # Runtime resource overlays are exempted from building against System SDK.
# TODO(b/35859726): remove this exception
LOCAL_SDK_VERSION := system_current
endif
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
index 3677d40a15..198e3615bd 100644
--- a/core/local_vndk.mk
+++ b/core/local_vndk.mk
@@ -1,5 +1,5 @@
-#Set LOCAL_USE_VNDK for modules going into vendor partition, except for host modules
+#Set LOCAL_USE_VNDK for modules going into vendor or odm partition, except for host modules
#If LOCAL_SDK_VERSION is set, that's a more restrictive set, so they don't need LOCAL_USE_VNDK
ifndef LOCAL_IS_HOST_MODULE
ifndef LOCAL_SDK_VERSION
diff --git a/core/main.mk b/core/main.mk
index 1946edb78f..a13404a996 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -26,6 +26,8 @@ $(sort $(MAKECMDGOALS)) : run_soong_ui
else # KATI
+$(info [1/1] initializing build system ...)
+
# Absolute path of the present working directory.
# This overrides the shell variable $PWD, which does not necessarily point to
# the top of the source tree, for example when "make -C" is used in m/mm/mmm.
@@ -34,8 +36,6 @@ PWD := $(shell pwd)
TOP := .
TOPDIR :=
-BUILD_SYSTEM := $(TOPDIR)build/make/core
-
# This is the default target. It must be the first declared target.
.PHONY: droid
DEFAULT_GOAL := droid
@@ -46,7 +46,7 @@ droid_targets:
# Set up various standard variables based on configuration
# and host information.
-include $(BUILD_SYSTEM)/config.mk
+include build/make/core/config.mk
ifneq ($(filter $(dont_bother_goals), $(MAKECMDGOALS)),)
dont_bother := true
@@ -66,21 +66,25 @@ include $(BUILD_SYSTEM)/clang/config.mk
$(shell mkdir -p $(OUT_DIR) && \
echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+.KATI_READONLY := BUILD_NUMBER_FILE
+$(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
ifeq ($(HOST_OS),darwin)
DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
else
DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
endif
+.KATI_READONLY := DATE_FROM_FILE
# Pick a reasonable string to use to identify files.
ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
# BUILD_NUMBER has a timestamp in it, which means that
# it will change every time. Pick a stable value.
- FILE_NAME_TAG := eng.$(USER)
+ FILE_NAME_TAG := eng.$(BUILD_USERNAME)
else
FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
endif
+.KATI_READONLY := FILE_NAME_TAG
# Make an empty directory, which can be used to make empty jars
EMPTY_DIRECTORY := $(OUT_DIR)/empty
@@ -98,6 +102,8 @@ $(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*)
-include test/sts/tools/sts-tradefed/build/config.mk
# CTS-Instant-specific config
-include test/suite_harness/tools/cts-instant-tradefed/build/config.mk
+# MTS-specific config.
+-include test/mts/tools/build/config.mk
# Clean rules
.PHONY: clean-dex-files
@@ -148,6 +154,15 @@ ADDITIONAL_BUILD_PROPERTIES :=
#
# -----------------------------------------------------------------
+# Validate ADDITIONAL_PRODUCT_PROPERTIES.
+ifneq ($(ADDITIONAL_PRODUCT_PROPERTIES),)
+$(error ADDITIONAL_PRODUCT_PROPERTIES must not be set before here: $(ADDITIONAL_PRODUCT_PROPERTIES))
+endif
+
+ADDITIONAL_PRODUCT_PROPERTIES :=
+
+#
+# -----------------------------------------------------------------
# Add the product-defined properties to the build properties.
ifdef PRODUCT_SHIPPING_API_LEVEL
ADDITIONAL_BUILD_PROPERTIES += \
@@ -205,16 +220,13 @@ include build/make/core/pdk_config.mk
#
# -----------------------------------------------------------------
-# Enable dynamic linker and hidden API developer warnings for
-# userdebug, eng and non-REL builds
+# Enable dynamic linker warnings for userdebug, eng and non-REL builds
ifneq ($(TARGET_BUILD_VARIANT),user)
- ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
- ro.art.hiddenapi.warning=1
+ ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
else
# Enable it for user builds as long as they are not final.
ifneq ($(PLATFORM_VERSION_CODENAME),REL)
- ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
- ro.art.hiddenapi.warning=1
+ ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
endif
endif
@@ -233,6 +245,25 @@ else
ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
endif
+# Add the system server compiler filter if they are specified for the product.
+ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
+ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
+endif
+
+# Enable core platform API violation warnings on userdebug and eng builds.
+ifneq ($(TARGET_BUILD_VARIANT),user)
+ADDITIONAL_BUILD_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
+endif
+
+# Sets the default value of ro.postinstall.fstab.prefix to /system.
+# Device board config should override the value to /product when needed by:
+#
+# PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
+#
+# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
+# mount system_other partition.
+ADDITIONAL_DEFAULT_PROPERTIES += ro.postinstall.fstab.prefix=/system
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
@@ -245,26 +276,6 @@ ifneq ($(filter sdk win_sdk sdk_addon,$(MAKECMDGOALS)),)
is_sdk_build := true
endif
-# Add build properties for ART. These define system properties used by installd
-# to pass flags to dex2oat.
-ADDITIONAL_BUILD_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT)
-ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
- ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
-endif
-
-ifdef TARGET_2ND_ARCH
- ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).variant=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT)
- ifneq ($($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
- ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).features=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
- endif
-endif
-
-# Add the system server compiler filter if they are specified for the product.
-ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-endif
-
## user/userdebug ##
user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
@@ -304,8 +315,6 @@ ifeq (true,$(strip $(enable_target_debugging)))
ADDITIONAL_DEFAULT_PROPERTIES += ro.debuggable=1
# Enable Dalvik lock contention logging.
ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.lockprof.threshold=500
- # Include the debugging/testing OTA keys in this build.
- INCLUDE_TEST_OTA_KEYS := true
else # !enable_target_debugging
# Target is less debuggable and adbd is off by default
ADDITIONAL_DEFAULT_PROPERTIES += ro.debuggable=0
@@ -327,6 +336,24 @@ ifndef is_sdk_build
endif
endif
+## asan ##
+
+# Install some additional tools on ASAN builds IFF we are also installing debug tools
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+ifneq (,$(filter debug,$(tags_to_install)))
+ tags_to_install += asan
+endif
+endif
+
+## java coverage ##
+# Install additional tools on java coverage builds
+ifeq (true,$(EMMA_INSTRUMENT))
+ifneq (,$(filter debug,$(tags_to_install)))
+ tags_to_install += java_coverage
+endif
+endif
+
+
## sdk ##
ifdef is_sdk_build
@@ -354,10 +381,6 @@ BUILD_WITHOUT_PV := true
ADDITIONAL_BUILD_PROPERTIES += net.bt.name=Android
-# Sets the location that the runtime dumps stack traces to when signalled
-# with SIGQUIT. Stack trace dumping is turned on for all android builds.
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-dir=/data/anr
-
# ------------------------------------------------------------
# Define a function that, given a list of module tags, returns
# non-empty if that module should be installed in /system.
@@ -388,20 +411,35 @@ endif
# Typical build; include any Android.mk files we can find.
#
-FULL_BUILD := true
-
-# Before we go and include all of the module makefiles, mark the PRODUCT_*
-# and ADDITIONAL*PROPERTIES values readonly so that they won't be modified.
-$(call readonly-product-vars)
+# Strip and readonly a few more variables so they won't be modified.
+$(readonly-final-product-vars)
ADDITIONAL_DEFAULT_PROPERTIES := $(strip $(ADDITIONAL_DEFAULT_PROPERTIES))
.KATI_READONLY := ADDITIONAL_DEFAULT_PROPERTIES
ADDITIONAL_BUILD_PROPERTIES := $(strip $(ADDITIONAL_BUILD_PROPERTIES))
.KATI_READONLY := ADDITIONAL_BUILD_PROPERTIES
+ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
+.KATI_READONLY := ADDITIONAL_PRODUCT_PROPERTIES
ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
ENFORCE_RRO_SOURCES :=
endif
+# Color-coded warnings including current module info
+# $(1): message to print
+define pretty-warning
+$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+endef
+
+# Color-coded errors including current module info
+# $(1): message to print
+define pretty-error
+$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+$(error done)
+endef
+
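A small usage sketch for these helpers; the message text is illustrative only:

    # Aborts the build, prefixing the message with the offending makefile and module name:
    #   $(call pretty-error,unsupported combination of LOCAL_ settings)
    # pretty-warning prints the same colorized prefix but lets the build continue.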
+subdir_makefiles_inc := .
+FULL_BUILD :=
+
ifneq ($(ONE_SHOT_MAKEFILE),)
# We've probably been invoked by the "mm" shell function
# with a subdirectory's makefile.
@@ -412,11 +450,6 @@ include $(SOONG_ANDROID_MK) $(wildcard $(ONE_SHOT_MAKEFILE))
# so that the modules will be installed in the same place they
# would have been with a normal make.
CUSTOM_MODULES := $(sort $(call get-tagged-modules,$(ALL_MODULE_TAGS)))
-FULL_BUILD :=
-# Stub out the notice targets, which probably aren't defined
-# when using ONE_SHOT_MAKEFILE.
-NOTICE-HOST-%: ;
-NOTICE-TARGET-%: ;
# A helper goal printing out install paths
define register_module_install_path
@@ -445,12 +478,13 @@ UNIQUE_ALL_MODULES :=
else # ONE_SHOT_MAKEFILE
ifneq ($(dont_bother),true)
+FULL_BUILD := true
#
# Include all of the makefiles in the system
#
subdir_makefiles := $(SOONG_ANDROID_MK) $(file <$(OUT_DIR)/.module_paths/Android.mk.list)
-subdir_makefiles_total := $(words $(subdir_makefiles))
+subdir_makefiles_total := $(words int $(subdir_makefiles) post finish)
.KATI_READONLY := subdir_makefiles_total
$(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
@@ -466,11 +500,33 @@ endif # dont_bother
endif # ONE_SHOT_MAKEFILE
+ifndef subdir_makefiles_total
+subdir_makefiles_total := $(words init post finish)
+endif
+
+droid_targets: no_vendor_variant_vndk_check
+.PHONY: no_vendor_variant_vndk_check
+no_vendor_variant_vndk_check:
+
+$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] finishing build rules ...)
+
# -------------------------------------------------------------------
# All module makefiles have been included at this point.
# -------------------------------------------------------------------
# -------------------------------------------------------------------
+# Use basic warning/error messages now that LOCAL_MODULE_MAKEFILE
+# and LOCAL_MODULE aren't useful anymore.
+# -------------------------------------------------------------------
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
+
+# -------------------------------------------------------------------
# Enforce to generate all RRO packages for modules having resource
# overlays.
# -------------------------------------------------------------------
@@ -504,19 +560,15 @@ ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
define get-32-bit-modules
$(sort $(foreach m,$(1),\
$(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),\
- $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX))\
- $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
- $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX))\
- ))
+ $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX))))
endef
# Get a list of corresponding 32-bit module names, if one exists;
# otherwise return the original module name
define get-32-bit-modules-if-we-can
$(sort $(foreach m,$(1),\
- $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS)$(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
- $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX)) \
- $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX)),\
- $(m))))
+ $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+ $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX), \
+ $(m))))
endef
else # TARGET_TRANSLATE_2ND_ARCH
# For binary translation config, by default only install the first arch.
@@ -528,18 +580,32 @@ $(strip $(1))
endef
endif # TARGET_TRANSLATE_2ND_ARCH
+# TODO: we can probably check to see if these modules are actually host
+# modules
+define get-host-32-bit-modules
+$(sort $(foreach m,$(1),\
+ $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+ $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX))))
+endef
+# Get a list of corresponding 32-bit module names, if one exists;
+# otherwise return the original module name
+define get-host-32-bit-modules-if-we-can
+$(sort $(foreach m,$(1),\
+ $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+ $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX),\
+ $(m))))
+endef
+
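A usage sketch, assuming HOST_2ND_ARCH_MODULE_SUFFIX is _32 and that, of the two invented modules below, only the first has a registered 32-bit host variant:

    #   $(call get-host-32-bit-modules-if-we-can,somehostlib somehosttool)
    # expands to the sorted list "somehostlib_32 somehosttool": the module with a
    # 32-bit host variant is swapped for that variant, the other passes through unchanged.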
# If a module is for a cross host os, the required modules must be for
# that OS too.
# If a module is built for 32-bit, the required modules must be 32-bit too;
# Otherwise if the module is an executable or shared library,
# the required modules must be 64-bit;
# otherwise we require both 64-bit and 32-bit variant, if one exists.
-define select-bitness-of-required-modules
+define target-select-bitness-of-required-modules
$(foreach m,$(ALL_MODULES),\
- $(eval r := $(ALL_MODULES.$(m).REQUIRED))\
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_TARGET))\
$(if $(r),\
- $(if $(ALL_MODULES.$(m).FOR_HOST_CROSS),\
- $(eval r := $(addprefix host_cross_,$(r))))\
$(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),\
$(eval r_r := $(call get-32-bit-modules-if-we-can,$(r))),\
$(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(ALL_MODULES.$(m).CLASS)),\
@@ -547,11 +613,47 @@ $(foreach m,$(ALL_MODULES),\
$(eval r_r := $(r) $(call get-32-bit-modules,$(r)))\
)\
)\
- $(eval ALL_MODULES.$(m).REQUIRED := $(strip $(r_r)))\
+ $(eval ALL_MODULES.$(m).REQUIRED_FROM_TARGET := $(strip $(r_r)))\
+ )\
+)
+endef
+$(call target-select-bitness-of-required-modules)
+
+define host-select-bitness-of-required-modules
+$(foreach m,$(ALL_MODULES),\
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_HOST))\
+ $(if $(r),\
+ $(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),\
+ $(eval r_r := $(call get-host-32-bit-modules-if-we-can,$(r))),\
+ $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(ALL_MODULES.$(m).CLASS)),\
+ $(eval r_r := $(r)),\
+ $(eval r_r := $(r) $(call get-host-32-bit-modules,$(r)))\
+ )\
+ )\
+ $(eval ALL_MODULES.$(m).REQUIRED_FROM_HOST := $(strip $(r_r)))\
)\
)
endef
-$(call select-bitness-of-required-modules)
+$(call host-select-bitness-of-required-modules)
+
+define host-cross-select-bitness-of-required-modules
+$(foreach m,$(ALL_MODULES),\
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_HOST_CROSS))\
+ $(if $(r),\
+ $(if $(ALL_MODULES.$(m).FOR_HOST_CROSS),,$(error Only expected REQUIRED_FROM_HOST_CROSS on FOR_HOST_CROSS modules - $(m)))\
+ $(eval r := $(addprefix host_cross_,$(r)))\
+ $(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),\
+ $(eval r_r := $(call get-host-32-bit-modules-if-we-can,$(r))),\
+ $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(ALL_MODULES.$(m).CLASS)),\
+ $(eval r_r := $(r)),\
+ $(eval r_r := $(r) $(call get-host-32-bit-modules,$(r)))\
+ )\
+ )\
+ $(eval ALL_MODULES.$(m).REQUIRED_FROM_HOST_CROSS := $(strip $(r_r)))\
+ )\
+)
+endef
+$(call host-cross-select-bitness-of-required-modules)
r_r :=
define add-required-deps
@@ -567,30 +669,42 @@ $(1): $(2)
endef
# Sets up dependencies such that whenever a host module is installed,
-# any other host modules listed in $(ALL_MODULES.$(m).REQUIRED) will also be installed
+# any other host modules listed in $(ALL_MODULES.$(m).REQUIRED_FROM_HOST) will also be installed
define add-all-host-to-host-required-modules-deps
$(foreach m,$(ALL_MODULES), \
- $(eval r := $(ALL_MODULES.$(m).REQUIRED)) \
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_HOST)) \
$(if $(r), \
$(eval r := $(call module-installed-files,$(r))) \
$(eval h_m := $(filter $(HOST_OUT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
- $(eval hc_m := $(filter $(HOST_CROSS_OUT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
$(eval h_r := $(filter $(HOST_OUT)/%, $(r))) \
- $(eval hc_r := $(filter $(HOST_CROSS_OUT)/%, $(r))) \
$(eval h_m := $(filter-out $(h_r), $(h_m))) \
- $(eval hc_m := $(filter-out $(hc_r), $(hc_m))) \
$(if $(h_m), $(eval $(call add-required-deps, $(h_m),$(h_r)))) \
- $(if $(hc_m), $(eval $(call add-required-deps, $(hc_m),$(hc_r)))) \
) \
)
endef
$(call add-all-host-to-host-required-modules-deps)
+# Sets up dependencies such that whenever a host cross module is installed,
+# any other host cross modules listed in $(ALL_MODULES.$(m).REQUIRED_FROM_HOST_CROSS) will also be installed
+define add-all-host-cross-to-host-cross-required-modules-deps
+$(foreach m,$(ALL_MODULES), \
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_HOST_CROSS)) \
+ $(if $(r), \
+ $(eval r := $(call module-installed-files,$(r))) \
+ $(eval hc_m := $(filter $(HOST_CROSS_OUT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
+ $(eval hc_r := $(filter $(HOST_CROSS_OUT)/%, $(r))) \
+ $(eval hc_m := $(filter-out $(hc_r), $(hc_m))) \
+ $(if $(hc_m), $(eval $(call add-required-deps, $(hc_m),$(hc_r)))) \
+ ) \
+)
+endef
+$(call add-all-host-cross-to-host-cross-required-modules-deps)
+
# Sets up dependencies such that whenever a target module is installed,
-# any other target modules listed in $(ALL_MODULES.$(m).REQUIRED) will also be installed
+# any other target modules listed in $(ALL_MODULES.$(m).REQUIRED_FROM_TARGET) will also be installed
define add-all-target-to-target-required-modules-deps
$(foreach m,$(ALL_MODULES), \
- $(eval r := $(ALL_MODULES.$(m).REQUIRED)) \
+ $(eval r := $(ALL_MODULES.$(m).REQUIRED_FROM_TARGET)) \
$(if $(r), \
$(eval r := $(call module-installed-files,$(r))) \
$(eval t_m := $(filter $(TARGET_OUT_ROOT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
@@ -603,10 +717,10 @@ endef
$(call add-all-target-to-target-required-modules-deps)
# Sets up dependencies such that whenever a host module is installed,
-# any target modules listed in $(ALL_MODULES.$(m).TARGET_REQUIRED) will also be installed
+# any target modules listed in $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST) will also be installed
define add-all-host-to-target-required-modules-deps
$(foreach m,$(ALL_MODULES), \
- $(eval req_mods := $(ALL_MODULES.$(m).TARGET_REQUIRED))\
+ $(eval req_mods := $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))\
$(if $(req_mods), \
$(eval req_files := )\
$(foreach req_mod,$(req_mods), \
@@ -629,10 +743,10 @@ endef
$(call add-all-host-to-target-required-modules-deps)
# Sets up dependencies such that whenever a target module is installed,
-# any host modules listed in $(ALL_MODULES.$(m).HOST_REQUIRED) will also be installed
+# any host modules listed in $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET) will also be installed
define add-all-target-to-host-required-modules-deps
$(foreach m,$(ALL_MODULES), \
- $(eval req_mods := $(ALL_MODULES.$(m).HOST_REQUIRED))\
+ $(eval req_mods := $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))\
$(if $(req_mods), \
$(eval req_files := )\
$(foreach req_mod,$(req_mods), \
@@ -662,7 +776,7 @@ h_r :=
hc_r :=
# Establish the dependencies on the shared libraries.
-# It also adds the shared library module names to ALL_MODULES.$(m).REQUIRED,
+# It also adds the shared library module names to ALL_MODULES.$(m).REQUIRED_FROM_(TARGET|HOST|HOST_CROSS),
# so they can be expanded to product_MODULES later.
# $(1): TARGET_ or HOST_ or HOST_CROSS_.
# $(2): non-empty for 2nd arch.
@@ -678,9 +792,42 @@ $(foreach m,$($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))$(1)DEPENDENCIES_ON_SHARED_LI
$(eval r := $(filter $($(root))/%,$(call module-installed-files,\
$(deps))))\
$(if $(filter $(1),HOST_),\
+ $(eval ALL_MODULES.$(mod).HOST_SHARED_LIBRARY_FILES := $$(ALL_MODULES.$(mod).HOST_SHARED_LIBRARY_FILES) $(word 2,$(p)) $(r))\
+ $(eval ALL_MODULES.$(mod).HOST_SHARED_LIBRARIES := $$(ALL_MODULES.$(mod).HOST_SHARED_LIBRARIES) $(deps))\
$(eval $(call add-required-host-so-deps,$(word 2,$(p)),$(r))),\
$(eval $(call add-required-deps,$(word 2,$(p)),$(r))))\
- $(eval ALL_MODULES.$(mod).REQUIRED += $(deps)))
+ $(eval ALL_MODULES.$(mod).REQUIRED_FROM_$(patsubst %_,%,$(1)) += $(deps)))
+endef
+
+# Recursively resolve host shared library dependency for a given module.
+# $(1): module name
+# Returns all dependencies of shared library.
+define get-all-shared-libs-deps
+$(if $(_all_deps_for_$(1)_set_),$(_all_deps_for_$(1)_),\
+ $(eval _all_deps_for_$(1)_ :=) \
+ $(foreach dep,$(ALL_MODULES.$(1).HOST_SHARED_LIBRARIES),\
+ $(foreach m,$(call get-all-shared-libs-deps,$(dep)),\
+ $(eval _all_deps_for_$(1)_ := $$(_all_deps_for_$(1)_) $(m))\
+ $(eval _all_deps_for_$(1)_ := $(sort $(_all_deps_for_$(1)_))))\
+ $(eval _all_deps_for_$(1)_ := $$(_all_deps_for_$(1)_) $(dep))\
+ $(eval _all_deps_for_$(1)_ := $(sort $(_all_deps_for_$(1)_) $(dep)))\
+ $(eval _all_deps_for_$(1)_set_ := true))\
+$(_all_deps_for_$(1)_))
+endef
+
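A hedged walk-through with made-up module names: if ALL_MODULES.SomeHostTest.HOST_SHARED_LIBRARIES is libbase and ALL_MODULES.libbase.HOST_SHARED_LIBRARIES is liblog, then:

    #   $(call get-all-shared-libs-deps,SomeHostTest)
    # returns "libbase liblog" and caches the result in _all_deps_for_SomeHostTest_,
    # so later calls for the same module skip the recursive walk.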
+# Scan all modules in the general-tests and device-tests suites and flatten their
+# shared library dependencies.
+define update-host-shared-libs-deps-for-suites
+$(foreach suite,general-tests device-tests,\
+ $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\
+ $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
+ $(foreach dep,$(my_deps),\
+ $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
+ $(eval target := $(HOST_OUT_TESTCASES)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+ $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
+ $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
+ $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
+ $(sort $(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES)))))))
endef
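A sketch of the copy entries this produces, using a hypothetical installed path: for a host library out/host/linux-x86/lib64/libbase.so, the destination keeps only the last directory component plus the file name, so the suite variable gains an entry along the lines of:

    #   COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES += \
    #     out/host/linux-x86/lib64/libbase.so:$(HOST_OUT_TESTCASES)/lib64/libbase.so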
$(call resolve-shared-libs-depes,TARGET_)
@@ -691,13 +838,50 @@ $(call resolve-shared-libs-depes,HOST_)
ifdef HOST_2ND_ARCH
$(call resolve-shared-libs-depes,HOST_,true)
endif
+# Update host-side shared library dependencies for tests in the device-tests and general-tests suites.
+# This should be called after calling resolve-shared-libs-depes for HOST_2ND_ARCH.
+$(call update-host-shared-libs-deps-for-suites)
ifdef HOST_CROSS_OS
$(call resolve-shared-libs-depes,HOST_CROSS_,,true)
endif
+# Pass the shared libraries dependencies to prebuilt ELF file check.
+define add-elf-file-check-shared-lib
+$(1): PRIVATE_SHARED_LIBRARY_FILES += $(2)
+$(1): $(2)
+endef
+
+define resolve-shared-libs-for-elf-file-check
+$(foreach m,$($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))$(1)DEPENDENCIES_ON_SHARED_LIBRARIES),\
+ $(eval p := $(subst :,$(space),$(m)))\
+ $(eval mod := $(firstword $(p)))\
+ \
+ $(eval deps := $(subst $(comma),$(space),$(lastword $(p))))\
+ $(if $(2),$(eval deps := $(addsuffix $($(1)2ND_ARCH_MODULE_SUFFIX),$(deps))))\
+ $(eval root := $(1)OUT$(if $(call streq,$(1),TARGET_),_ROOT))\
+ $(eval deps := $(filter $($(root))/%$($(1)SHLIB_SUFFIX),$(call module-built-files,$(deps))))\
+ \
+ $(eval r := $(firstword $(filter \
+ $($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))TARGET_OUT_INTERMEDIATES)/EXECUTABLES/%\
+ $($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))TARGET_OUT_INTERMEDIATES)/NATIVE_TESTS/%\
+ $($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))TARGET_OUT_INTERMEDIATES)/SHARED_LIBRARIES/%,\
+ $(call module-built-files,$(mod)))))\
+ \
+ $(if $(r),\
+ $(eval stamp := $(dir $(r))check_elf_files.timestamp)\
+ $(eval $(call add-elf-file-check-shared-lib,$(stamp),$(deps)))\
+ ))
+endef
+
+$(call resolve-shared-libs-for-elf-file-check,TARGET_)
+ifdef TARGET_2ND_ARCH
+$(call resolve-shared-libs-for-elf-file-check,TARGET_,true)
+endif
+
m :=
r :=
p :=
+stamp :=
deps :=
add-required-deps :=
@@ -878,97 +1062,408 @@ $(foreach lt,$(ALL_LINK_TYPES),\
# Of the modules defined by the component makefiles,
# determine what we actually want to build.
+
+# Expand a list of modules to the modules that they override (if any)
+# $(1): The list of modules.
+define module-overrides
+$(foreach m,$(1),$(PACKAGES.$(m).OVERRIDES) $(EXECUTABLES.$(m).OVERRIDES) $(SHARED_LIBRARIES.$(m).OVERRIDES))
+endef
+
###########################################################
## Expand a module name list with REQUIRED modules
###########################################################
# $(1): The variable name that holds the initial module name list.
# the variable will be modified to hold the expanded results.
# $(2): The initial module name list.
+# $(3): The list of overridden modules.
# Returns empty string (maybe with some whitespaces).
define expand-required-modules
-$(eval _erm_new_modules := $(sort $(filter-out $($(1)),\
- $(foreach m,$(2),$(ALL_MODULES.$(m).REQUIRED)))))\
-$(if $(_erm_new_modules),$(eval $(1) += $(_erm_new_modules))\
- $(call expand-required-modules,$(1),$(_erm_new_modules)))
+$(eval _erm_req := $(foreach m,$(2),$(ALL_MODULES.$(m).REQUIRED_FROM_TARGET))) \
+$(eval _erm_new_modules := $(sort $(filter-out $($(1)),$(_erm_req)))) \
+$(eval _erm_new_overrides := $(call module-overrides,$(_erm_new_modules))) \
+$(eval _erm_all_overrides := $(3) $(_erm_new_overrides)) \
+$(eval _erm_new_modules := $(filter-out $(_erm_all_overrides), $(_erm_new_modules))) \
+$(eval $(1) := $(filter-out $(_erm_new_overrides),$($(1)))) \
+$(eval $(1) += $(_erm_new_modules)) \
+$(if $(_erm_new_modules),\
+ $(call expand-required-modules,$(1),$(_erm_new_modules),$(_erm_all_overrides)))
endef
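A short worked example with invented modules: suppose my_modules starts as A, ALL_MODULES.A.REQUIRED_FROM_TARGET is B, ALL_MODULES.B.REQUIRED_FROM_TARGET is C, and PACKAGES.C.OVERRIDES is B:

    #   $(call expand-required-modules,my_modules,$(my_modules),)
    # Pass 1 adds B, pass 2 adds C and drops B (now overridden by C),
    # pass 3 finds nothing new and stops, leaving my_modules as "A C".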
-ifdef FULL_BUILD
- # The base list of modules to build for this product is specified
- # by the appropriate product definition file, which was included
- # by product_config.mk.
- product_MODULES := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES)
-ifdef BOARD_VNDK_VERSION
- product_MODULES += vndk_package
+# Same as expand-required-modules above, but does not handle module overrides, as
+# we don't intend to support them on the host.
+# $(1): The variable name that holds the initial module name list.
+# the variable will be modified to hold the expanded results.
+# $(2): The initial module name list.
+# $(3): HOST or HOST_CROSS depending on whether we're expanding host or host cross modules
+# Returns empty string (maybe with some whitespaces).
+define expand-required-host-modules
+$(eval _erm_req := $(foreach m,$(2),$(ALL_MODULES.$(m).REQUIRED_FROM_$(3)))) \
+$(eval _erm_new_modules := $(sort $(filter-out $($(1)),$(_erm_req)))) \
+$(eval $(1) += $(_erm_new_modules)) \
+$(if $(_erm_new_modules),\
+ $(call expand-required-host-modules,$(1),$(_erm_new_modules),$(3)))
+endef
+
+# Transforms paths relative to PRODUCT_OUT to absolute paths.
+# $(1): list of relative paths
+# $(2): optional suffix to append to paths
+define resolve-product-relative-paths
+ $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),\
+ $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),\
+ $(subst $(_product_services_path_placeholder),$(TARGET_COPY_OUT_PRODUCT_SERVICES),\
+ $(subst $(_odm_path_placeholder),$(TARGET_COPY_OUT_ODM),\
+ $(foreach p,$(1),$(call append-path,$(PRODUCT_OUT),$(p)$(2)))))))
+endef
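An illustrative expansion, assuming append-path joins its arguments with '/' and TARGET_COPY_OUT_VENDOR is 'vendor'; the file names are made up:

    #   $(call resolve-product-relative-paths,system/etc/foo.conf $(_vendor_path_placeholder)/etc/bar.conf)
    # roughly expands to:
    #   $(PRODUCT_OUT)/system/etc/foo.conf $(PRODUCT_OUT)/vendor/etc/bar.conf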
+
+# Returns modules included automatically as a result of certain BoardConfig
+# variables being set.
+define auto-included-modules
+ $(if $(BOARD_VNDK_VERSION),vndk_package) \
+ $(if $(DEVICE_MANIFEST_FILE),device_manifest.xml) \
+ $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
+ $(if $(ODM_MANIFEST_SKUS),$(foreach sku, $(ODM_MANIFEST_SKUS),odm_manifest_$(sku).xml)) \
+
+endef
+
+# Lists most of the files a particular product installs, including:
+# - PRODUCT_PACKAGES, and their LOCAL_REQUIRED_MODULES
+# - PRODUCT_COPY_FILES
+# The base list of modules to build for this product is specified
+# by the appropriate product definition file, which was included
+# by product_config.mk.
+# Name resolution for PRODUCT_PACKAGES:
+# foo:32 resolves to foo_32;
+# foo:64 resolves to foo;
+# foo resolves to both foo and foo_32 (if foo_32 is defined).
+#
+# Name resolution for LOCAL_REQUIRED_MODULES:
+# If a module is built for 2nd arch, its required module resolves to
+#   32-bit variant, if it exists. See the select-bitness-of-required-modules definition.
+# $(1): product makefile
+define product-installed-files
+ $(eval _mk := $(strip $(1))) \
+ $(eval _pif_modules := \
+ $(PRODUCTS.$(_mk).PRODUCT_PACKAGES) \
+ $(if $(filter eng,$(tags_to_install)),$(PRODUCTS.$(_mk).PRODUCT_PACKAGES_ENG)) \
+ $(if $(filter debug,$(tags_to_install)),$(PRODUCTS.$(_mk).PRODUCT_PACKAGES_DEBUG)) \
+ $(if $(filter tests,$(tags_to_install)),$(PRODUCTS.$(_mk).PRODUCT_PACKAGES_TESTS)) \
+ $(if $(filter asan,$(tags_to_install)),$(PRODUCTS.$(_mk).PRODUCT_PACKAGES_DEBUG_ASAN)) \
+ $(if $(filter java_coverage,$(tags_to_install)),$(PRODUCTS.$(_mk).PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+ $(call auto-included-modules) \
+ ) \
+ $(eval ### Filter out the overridden packages and executables before doing expansion) \
+ $(eval _pif_overrides := $(call module-overrides,$(_pif_modules))) \
+ $(eval _pif_modules := $(filter-out $(_pif_overrides), $(_pif_modules))) \
+ $(eval ### Resolve the :32 :64 module name) \
+ $(eval _pif_modules_32 := $(patsubst %:32,%,$(filter %:32, $(_pif_modules)))) \
+ $(eval _pif_modules_64 := $(patsubst %:64,%,$(filter %:64, $(_pif_modules)))) \
+ $(eval _pif_modules_rest := $(filter-out %:32 %:64,$(_pif_modules))) \
+ $(eval ### Note for 32-bit product, 32 and 64 will be added as their original module names.) \
+ $(eval _pif_modules := $(call get-32-bit-modules-if-we-can, $(_pif_modules_32))) \
+ $(eval _pif_modules += $(_pif_modules_64)) \
+ $(eval ### For the rest we add both) \
+ $(eval _pif_modules += $(call get-32-bit-modules, $(_pif_modules_rest))) \
+ $(eval _pif_modules += $(_pif_modules_rest)) \
+ $(call expand-required-modules,_pif_modules,$(_pif_modules),$(_pif_overrides)) \
+ $(filter-out $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(_pif_modules))) \
+ $(call resolve-product-relative-paths,\
+ $(foreach cf,$(PRODUCTS.$(_mk).PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
+endef
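A worked example of the :32/:64 resolution described above, assuming TARGET_2ND_ARCH_MODULE_SUFFIX is _32 and that the invented modules foo and baz both have 32-bit variants while bar does not:

    #   PRODUCT_PACKAGES := foo:32 bar:64 baz
    # resolves to the module list:
    #   foo_32 bar baz_32 baz
    # before the REQUIRED_FROM_TARGET expansion and override filtering are applied.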
+
+# Similar to product-installed-files above, but handles PRODUCT_HOST_PACKAGES instead
+# This does support the :32 / :64 syntax, but does not support module overrides.
+define host-installed-files
+ $(eval _hif_modules := $(PRODUCTS.$(strip $(1)).PRODUCT_HOST_PACKAGES)) \
+ $(eval ### Resolve the :32 :64 module name) \
+ $(eval _hif_modules_32 := $(patsubst %:32,%,$(filter %:32, $(_hif_modules)))) \
+ $(eval _hif_modules_64 := $(patsubst %:64,%,$(filter %:64, $(_hif_modules)))) \
+ $(eval _hif_modules_rest := $(filter-out %:32 %:64,$(_hif_modules))) \
+ $(eval _hif_modules := $(call get-host-32-bit-modules-if-we-can, $(_hif_modules_32))) \
+ $(eval _hif_modules += $(_hif_modules_64)) \
+ $(eval ### For the rest we add both) \
+ $(eval _hif_modules += $(call get-host-32-bit-modules, $(_hif_modules_rest))) \
+ $(eval _hif_modules += $(_hif_modules_rest)) \
+ $(eval ### Split host vs host cross modules) \
+ $(eval _hcif_modules := $(filter host_cross_%,$(_hif_modules))) \
+ $(eval _hif_modules := $(filter-out host_cross_%,$(_hif_modules))) \
+ $(call expand-required-host-modules,_hif_modules,$(_hif_modules),HOST) \
+ $(call expand-required-host-modules,_hcif_modules,$(_hcif_modules),HOST_CROSS) \
+ $(filter $(HOST_OUT)/%,$(call module-installed-files, $(_hif_modules))) \
+ $(filter $(HOST_CROSS_OUT)/%,$(call module-installed-files, $(_hcif_modules)))
+endef
+
+# Fails the build if the given list is non-empty, and prints its entries (stripping PRODUCT_OUT).
+# $(1): list of files to print
+# $(2): heading to print on failure
+define maybe-print-list-and-error
+$(if $(strip $(1)), \
+ $(warning $(2)) \
+ $(info Offending entries:) \
+ $(foreach e,$(sort $(1)),$(info $(patsubst $(PRODUCT_OUT)/%,%,$(e)))) \
+ $(error Build failed) \
+)
+endef
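A usage sketch (the variable and heading are illustrative): the helper turns any unexpected file list into a readable build failure:

    #   $(call maybe-print-list-and-error,$(unexpected_files),Unexpected files found in the system image)
    # prints the heading, lists each offending path with the $(PRODUCT_OUT)/ prefix stripped,
    # then fails the build; with an empty list it is a no-op.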
+
+# Check that libraries that should only be in APEXes don't end up in the system
+# image. For the Runtime APEX this complements the checks in
+# art/build/apex/art_apex_test.py.
+# TODO(b/128708192): Implement this restriction in Soong instead.
+
+# Runtime APEX libraries
+APEX_MODULE_LIBS := \
+ libadbconnection.so \
+ libadbconnectiond.so \
+ libandroidicu.so \
+ libandroidio.so \
+ libart-compiler.so \
+ libart-dexlayout.so \
+ libart-disassembler.so \
+ libart.so \
+ libartbase.so \
+ libartbased.so \
+ libartd-compiler.so \
+ libartd-dexlayout.so \
+ libartd.so \
+ libartpalette.so \
+ libc.so \
+ libdexfile.so \
+ libdexfile_external.so \
+ libdexfiled.so \
+ libdexfiled_external.so \
+ libdl.so \
+ libdt_fd_forward.so \
+ libdt_socket.so \
+ libicui18n.so \
+ libicuuc.so \
+ libjavacore.so \
+ libjdwp.so \
+ libm.so \
+ libnativebridge.so \
+ libnativehelper.so \
+ libnativeloader.so \
+ libnpt.so \
+ libopenjdk.so \
+ libopenjdkjvm.so \
+ libopenjdkjvmd.so \
+ libopenjdkjvmti.so \
+ libopenjdkjvmtid.so \
+ libpac.so \
+ libprofile.so \
+ libprofiled.so \
+ libsigchain.so \
+
+# Conscrypt APEX libraries
+APEX_MODULE_LIBS += \
+ libjavacrypto.so \
+
+# An option to disable the check below, for local use since some build targets
+# still may create these libraries in /system (b/129006418).
+DISABLE_APEX_LIBS_ABSENCE_CHECK ?=
+
+# Bionic should not be in /system, except for the bootstrap instance.
+APEX_LIBS_ABSENCE_CHECK_EXCLUDE := lib/bootstrap lib64/bootstrap
+
+# Exclude lib/arm and lib/arm64 which contain the native bridge proxy libs. They
+# are compiled for the guest architecture and used with an entirely different
+# linker config. The native libs are then linked to as usual via exported
+# interfaces, so the proxy libs do not violate the interface boundaries on the
+# native architecture.
+# TODO(b/130630776): Introduce a make variable for the appropriate directory
+# when native bridge is active.
+APEX_LIBS_ABSENCE_CHECK_EXCLUDE += lib/arm lib/arm64
+
+# Exclude vndk-* subdirectories which contain prebuilts from older releases.
+APEX_LIBS_ABSENCE_CHECK_EXCLUDE += lib/vndk-% lib64/vndk-%
+
+ifdef DISABLE_APEX_LIBS_ABSENCE_CHECK
+ check-apex-libs-absence :=
+ check-apex-libs-absence-on-disk :=
+else
+ # If the check below fails, some library has ended up in system/lib or
+ # system/lib64 that is intended to only go into some APEX package. The likely
+ # cause is that a library or binary in /system has grown a dependency that
+ # directly or indirectly pulls in the prohibited library.
+ #
+  # To resolve this, look for the APEX package that the library belongs to -
+ # search for it in 'native_shared_lib' properties in 'apex' build modules (see
+ # art/build/apex/Android.bp for an example). Then check if there is an
+ # exported library in that APEX package that should be used instead, i.e. one
+ # listed in its 'native_shared_lib' property for which the corresponding
+ # 'cc_library' module has a 'stubs' clause (like libdexfile_external in
+ # art/libdexfile/Android.bp).
+ #
+ # If you cannot find an APEX exported library that fits your needs, or you
+ # think that the library you want to depend on should be allowed in /system,
+ # then please contact the owners of the APEX package containing the library.
+ #
+ # If you get this error for a library that is exported in an APEX, then the
+ # APEX might be misconfigured or something is wrong in the build system.
+ # Please reach out to the APEX package owners and/or soong-team@, or
+ # android-building@googlegroups.com externally.
+ define check-apex-libs-absence
+ $(call maybe-print-list-and-error, \
+ $(filter $(foreach lib,$(APEX_MODULE_LIBS),%/$(lib)), \
+ $(filter-out $(foreach dir,$(APEX_LIBS_ABSENCE_CHECK_EXCLUDE), \
+ $(TARGET_OUT)/$(if $(findstring %,$(dir)),$(dir),$(dir)/%)), \
+ $(filter $(TARGET_OUT)/lib/% $(TARGET_OUT)/lib64/%,$(1)))), \
+ APEX libraries found in system image (see comment for check-apex-libs-absence in \
+ build/make/core/main.mk for details))
+ endef
+
+ # TODO(b/129006418): The check above catches libraries through product
+ # dependencies visible to make, but as long as they have install rules in
+ # /system they may still be created there through other make targets. To catch
+ # that we also do a check on disk just before the system image is built.
+ define check-apex-libs-absence-on-disk
+ $(hide) ( \
+ cd $(TARGET_OUT) && \
+ findres=$$(find lib* \
+ $(foreach dir,$(APEX_LIBS_ABSENCE_CHECK_EXCLUDE),-path "$(subst %,*,$(dir))" -prune -o) \
+ -type f \( -false $(foreach lib,$(APEX_MODULE_LIBS),-o -name $(lib)) \) \
+ -print) && \
+ if [ -n "$$findres" ]; then \
+ echo "APEX libraries found in system image (see comment for check-apex-libs-absence" 1>&2; \
+ echo "in build/make/core/main.mk for details):" 1>&2; \
+ echo "$$findres" | sort 1>&2; \
+ false; \
+ fi; \
+ )
+ endef
endif
- # Filter out the overridden packages before doing expansion
- product_MODULES := $(filter-out $(foreach p, $(product_MODULES), \
- $(PACKAGES.$(p).OVERRIDES)), $(product_MODULES))
- # Filter out executables as well
- product_MODULES := $(filter-out $(foreach m, $(product_MODULES), \
- $(EXECUTABLES.$(m).OVERRIDES)), $(product_MODULES))
-
- # Resolve the :32 :64 module name
- modules_32 := $(patsubst %:32,%,$(filter %:32, $(product_MODULES)))
- modules_64 := $(patsubst %:64,%,$(filter %:64, $(product_MODULES)))
- modules_rest := $(filter-out %:32 %:64,$(product_MODULES))
- # Note for 32-bit product, $(modules_32) and $(modules_64) will be
- # added as their original module names.
- product_MODULES := $(call get-32-bit-modules-if-we-can, $(modules_32))
- product_MODULES += $(modules_64)
- # For the rest we add both
- product_MODULES += $(call get-32-bit-modules, $(modules_rest))
- product_MODULES += $(modules_rest)
-
- $(call expand-required-modules,product_MODULES,$(product_MODULES))
-
- product_FILES := $(call module-installed-files, $(product_MODULES))
- ifeq (0,1)
- $(info product_FILES for $(TARGET_DEVICE) ($(INTERNAL_PRODUCT)):)
- $(foreach p,$(product_FILES),$(info : $(p)))
- $(error done)
+
+ifdef FULL_BUILD
+ ifneq (true,$(ALLOW_MISSING_DEPENDENCIES))
+ # Check to ensure that all modules in PRODUCT_PACKAGES exist (opt in per product)
+ ifeq (true,$(PRODUCT_ENFORCE_PACKAGES_EXIST))
+ _whitelist := $(PRODUCT_ENFORCE_PACKAGES_EXIST_WHITELIST)
+ _modules := $(PRODUCT_PACKAGES)
+    # Sanity-check that all modules in PRODUCT_PACKAGES exist. We check for the
+    # existence of either <module> or the <module>_32 variant.
+ _nonexistant_modules := $(filter-out $(ALL_MODULES),$(_modules))
+ _nonexistant_modules := $(foreach m,$(_nonexistant_modules),\
+ $(if $(call get-32-bit-modules,$(m)),,$(m)))
+ $(call maybe-print-list-and-error,$(filter-out $(_whitelist),$(_nonexistant_modules)),\
+ $(INTERNAL_PRODUCT) includes non-existant modules in PRODUCT_PACKAGES)
+ $(call maybe-print-list-and-error,$(filter-out $(_nonexistant_modules),$(_whitelist)),\
+ $(INTERNAL_PRODUCT) includes redundant whitelist entries for nonexistant PRODUCT_PACKAGES)
+ endif
+
+ # Check to ensure that all modules in PRODUCT_HOST_PACKAGES exist
+ #
+ # Many host modules are Linux-only, so skip this check on Mac. If we ever have Mac-only modules,
+ # maybe it would make sense to have PRODUCT_HOST_PACKAGES_LINUX/_DARWIN?
+ ifneq ($(HOST_OS),darwin)
+ _modules := $(PRODUCT_HOST_PACKAGES)
+ _nonexistant_modules := $(foreach m,$(_modules),\
+ $(if $(ALL_MODULES.$(m).REQUIRED_FROM_HOST)$(filter $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,$(m)))
+ $(call maybe-print-list-and-error,$(_nonexistant_modules),\
+ $(INTERNAL_PRODUCT) includes non-existant modules in PRODUCT_HOST_PACKAGES)
+ endif
+ endif
+
+ # Some modules produce only host installed files when building with TARGET_BUILD_APPS
+ ifeq ($(TARGET_BUILD_APPS),)
+ _modules := $(foreach m,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES) \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_DEBUG) \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_DEBUG_ASAN) \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_ENG) \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_TESTS),\
+ $(if $(ALL_MODULES.$(m).INSTALLED),\
+ $(if $(filter-out $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,\
+ $(m))))
+ $(call maybe-print-list-and-error,$(sort $(_modules)),\
+ Host modules should be in PRODUCT_HOST_PACKAGES$(comma) not PRODUCT_PACKAGES)
+ endif
+
+ product_host_FILES := $(call host-installed-files,$(INTERNAL_PRODUCT))
+ product_target_FILES := $(call product-installed-files, $(INTERNAL_PRODUCT))
+ # WARNING: The product_MODULES variable is depended on by external files.
+ product_MODULES := $(_pif_modules)
+
+ # Verify the artifact path requirements made by included products.
+ is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
+ ifneq (true,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
+ # Fakes don't get installed, host files are irrelevant, and NDK stubs aren't installed to device.
+ static_whitelist_patterns := $(TARGET_OUT_FAKE)/% $(HOST_OUT)/% $(SOONG_OUT_DIR)/ndk/%
+ # RROs become REQUIRED by the source module, but are always placed on the vendor partition.
+ static_whitelist_patterns += %__auto_generated_rro_product.apk
+ static_whitelist_patterns += %__auto_generated_rro_vendor.apk
+ # Auto-included targets are not considered
+ static_whitelist_patterns += $(call module-installed-files,$(call auto-included-modules))
+ # $(PRODUCT_OUT)/apex is where shared libraries in APEXes get installed.
+    # The path can be considered a fake path, as the shared libraries
+    # are installed there only so that their symbol files end up under
+    # $(PRODUCT_OUT)/symbols/apex for debugging purposes. The /apex directory
+ # is never compiled into a filesystem image.
+ static_whitelist_patterns += $(PRODUCT_OUT)/apex/%
+ ifeq (true,$(BOARD_USES_SYSTEM_OTHER_ODEX))
+ # Allow system_other odex space optimization.
+ static_whitelist_patterns += \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.odex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.vdex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.art
endif
+
+CERTIFICATE_VIOLATION_MODULES_FILENAME := $(PRODUCT_OUT)/certificate_violation_modules.txt
+$(CERTIFICATE_VIOLATION_MODULES_FILENAME):
+ rm -f $@
+ $(foreach m,$(sort $(CERTIFICATE_VIOLATION_MODULES)), echo $(m) >> $@;)
+$(call dist-for-goals,droidcore,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
+
+ all_offending_files :=
+ $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
+ $(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
+ $(eval ### Verify that the product only produces files inside its path requirements.) \
+ $(eval whitelist := $(PRODUCTS.$(makefile).ARTIFACT_PATH_WHITELIST)) \
+ $(eval path_patterns := $(call resolve-product-relative-paths,$(requirements),%)) \
+ $(eval whitelist_patterns := $(call resolve-product-relative-paths,$(whitelist))) \
+ $(eval files := $(call product-installed-files, $(makefile))) \
+ $(eval offending_files := $(filter-out $(path_patterns) $(whitelist_patterns) $(static_whitelist_patterns),$(files))) \
+ $(call maybe-print-list-and-error,$(offending_files),$(makefile) produces files outside its artifact path requirement.) \
+ $(eval unused_whitelist := $(filter-out $(files),$(whitelist_patterns))) \
+ $(call maybe-print-list-and-error,$(unused_whitelist),$(makefile) includes redundant whitelist entries in its artifact path requirement.) \
+ $(eval ### Optionally verify that nothing else produces files inside this artifact path requirement.) \
+ $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_target_FILES))) \
+ $(eval files_in_requirement := $(filter $(path_patterns),$(extra_files))) \
+ $(eval all_offending_files += $(files_in_requirement)) \
+ $(eval whitelist := $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST)) \
+ $(eval whitelist_patterns := $(call resolve-product-relative-paths,$(whitelist))) \
+ $(eval offending_files := $(filter-out $(whitelist_patterns),$(files_in_requirement))) \
+ $(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
+ $(if $(enforcement),\
+ $(call maybe-print-list-and-error,$(offending_files),\
+          $(INTERNAL_PRODUCT) produces files inside $(makefile)'s artifact path requirement. \
+ $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
+ $(eval unused_whitelist := $(if $(filter true strict,$(enforcement)),\
+ $(foreach p,$(whitelist_patterns),$(if $(filter $(p),$(extra_files)),,$(p))))) \
+ $(call maybe-print-list-and-error,$(unused_whitelist),$(INTERNAL_PRODUCT) includes redundant artifact path requirement whitelist entries.) \
+ ) \
+ )
+$(PRODUCT_OUT)/offending_artifacts.txt:
+ rm -f $@
+ $(foreach f,$(sort $(all_offending_files)),echo $(f) >> $@;)
+ endif
+
+ $(call check-apex-libs-absence,$(product_target_FILES))
else
# We're not doing a full build, and are probably only including
# a subset of the module makefiles. Don't try to build any modules
# requested by the product, because we probably won't have rules
# to build them.
- product_FILES :=
+ product_target_FILES :=
+ product_host_FILES :=
endif
-eng_MODULES := $(sort \
- $(call get-tagged-modules,eng) \
- $(call module-installed-files, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_ENG)) \
- )
-debug_MODULES := $(sort \
- $(call get-tagged-modules,debug) \
- $(call module-installed-files, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_DEBUG)) \
- )
-tests_MODULES := $(sort \
- $(call get-tagged-modules,tests) \
- $(call module-installed-files, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_TESTS)) \
- )
-
# TODO: Remove the 3 places in the tree that use ALL_DEFAULT_INSTALLED_MODULES
# and get rid of it from this list.
modules_to_install := $(sort \
$(ALL_DEFAULT_INSTALLED_MODULES) \
- $(product_FILES) \
- $(foreach tag,$(tags_to_install),$($(tag)_MODULES)) \
+ $(product_target_FILES) \
+ $(product_host_FILES) \
+ $(call get-tagged-modules,$(tags_to_install)) \
$(CUSTOM_MODULES) \
)
-# Some packages may override others using LOCAL_OVERRIDES_PACKAGES.
-# Filter out (do not install) any overridden packages.
-overridden_packages := $(call get-package-overrides,$(modules_to_install))
-ifdef overridden_packages
-# old_modules_to_install := $(modules_to_install)
- modules_to_install := \
- $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk %/$(p).odex %/$(p).vdex), \
- $(modules_to_install))
-endif
-#$(error filtered out
-# $(filter-out $(modules_to_install),$(old_modules_to_install)))
-
# Don't include any GNU General Public License shared objects or static
# libraries in SDK images. GPL executables (not static/dynamic libraries)
# are okay if they don't link against any closed source libraries (directly
@@ -994,19 +1489,19 @@ ifdef is_sdk_build
# Ensure every module listed in PRODUCT_PACKAGES* gets something installed
# TODO: Should we do this for all builds and not just the sdk?
dangling_modules :=
- $(foreach m, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES), \
+ $(foreach m, $(PRODUCT_PACKAGES), \
$(if $(strip $(ALL_MODULES.$(m).INSTALLED) $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).INSTALLED)),,\
$(eval dangling_modules += $(m))))
ifneq ($(dangling_modules),)
$(warning: Modules '$(dangling_modules)' in PRODUCT_PACKAGES have nothing to install!)
endif
- $(foreach m, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_DEBUG), \
+ $(foreach m, $(PRODUCT_PACKAGES_DEBUG), \
$(if $(strip $(ALL_MODULES.$(m).INSTALLED)),,\
$(warning $(ALL_MODULES.$(m).MAKEFILE): Module '$(m)' in PRODUCT_PACKAGES_DEBUG has nothing to install!)))
- $(foreach m, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_ENG), \
+ $(foreach m, $(PRODUCT_PACKAGES_ENG), \
$(if $(strip $(ALL_MODULES.$(m).INSTALLED)),,\
$(warning $(ALL_MODULES.$(m).MAKEFILE): Module '$(m)' in PRODUCT_PACKAGES_ENG has nothing to install!)))
- $(foreach m, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES_TESTS), \
+ $(foreach m, $(PRODUCT_PACKAGES_TESTS), \
$(if $(strip $(ALL_MODULES.$(m).INSTALLED)),,\
$(warning $(ALL_MODULES.$(m).MAKEFILE): Module '$(m)' in PRODUCT_PACKAGES_TESTS has nothing to install!)))
endif
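
The dangling-module warnings above rely on the build's per-module bookkeeping: every registered module exposes ALL_MODULES.<name>.<FIELD> variables that record, among other things, its installed files. A minimal, self-contained GNU Make sketch of that pattern, using made-up module names and paths:

ALL_MODULES := libfoo BadApp
ALL_MODULES.libfoo.INSTALLED := out/target/product/generic/system/lib/libfoo.so
ALL_MODULES.BadApp.INSTALLED :=
dangling :=
# Collect every module whose INSTALLED list is empty, mirroring the check above.
$(foreach m,$(ALL_MODULES),\
  $(if $(strip $(ALL_MODULES.$(m).INSTALLED)),,$(eval dangling += $(m))))
$(info dangling: $(strip $(dangling)))   # prints: dangling: BadApp
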
@@ -1031,6 +1526,9 @@ ifneq ($(filter all,$(MAKECMDGOALS)),)
modules_to_check += $(foreach m,$(ALL_MODULES),$(ALL_MODULES.$(m).BUILT))
endif
+# Build docs as part of checkbuild to catch more breakages.
+modules_to_check += $(ALL_DOCS)
+
# for easier debugging
modules_to_check := $(sort $(modules_to_check))
#$(error modules_to_check $(modules_to_check))
@@ -1064,6 +1562,9 @@ endif
.PHONY: ramdisk
ramdisk: $(INSTALLED_RAMDISK_TARGET)
+.PHONY: ramdisk_debug
+ramdisk_debug: $(INSTALLED_DEBUG_RAMDISK_TARGET)
+
.PHONY: systemtarball
systemtarball: $(INSTALLED_SYSTEMTARBALL_TARGET)
@@ -1092,12 +1593,24 @@ vendorimage: $(INSTALLED_VENDORIMAGE_TARGET)
.PHONY: productimage
productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
+.PHONY: productservicesimage
+productservicesimage: $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET)
+
+.PHONY: odmimage
+odmimage: $(INSTALLED_ODMIMAGE_TARGET)
+
.PHONY: systemotherimage
systemotherimage: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
+.PHONY: superimage_empty
+superimage_empty: $(INSTALLED_SUPERIMAGE_EMPTY_TARGET)
+
.PHONY: bootimage
bootimage: $(INSTALLED_BOOTIMAGE_TARGET)
+.PHONY: bootimage_debug
+bootimage_debug: $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
+
.PHONY: vbmetaimage
vbmetaimage: $(INSTALLED_VBMETAIMAGE_TARGET)
@@ -1106,26 +1619,50 @@ auxiliary: $(INSTALLED_AUX_TARGETS)
# Build files and then package them into the ROM formats
.PHONY: droidcore
-droidcore: files \
- systemimage \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_VBMETAIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_CACHEIMAGE_TARGET) \
- $(INSTALLED_BPTIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
- $(INSTALLED_FILES_FILE) \
- $(INSTALLED_FILES_FILE_VENDOR) \
- $(INSTALLED_FILES_FILE_PRODUCT) \
- $(INSTALLED_FILES_FILE_SYSTEMOTHER) \
- soong_docs
+droidcore: $(filter $(HOST_OUT_ROOT)/%,$(modules_to_install)) \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_RECOVERYIMAGE_TARGET) \
+ $(INSTALLED_VBMETAIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_CACHEIMAGE_TARGET) \
+ $(INSTALLED_BPTIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(INSTALLED_SUPERIMAGE_EMPTY_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
+ $(INSTALLED_FILES_FILE) \
+ $(INSTALLED_FILES_JSON) \
+ $(INSTALLED_FILES_FILE_VENDOR) \
+ $(INSTALLED_FILES_JSON_VENDOR) \
+ $(INSTALLED_FILES_FILE_ODM) \
+ $(INSTALLED_FILES_JSON_ODM) \
+ $(INSTALLED_FILES_FILE_PRODUCT) \
+ $(INSTALLED_FILES_JSON_PRODUCT) \
+ $(INSTALLED_FILES_FILE_PRODUCT_SERVICES) \
+ $(INSTALLED_FILES_JSON_PRODUCT_SERVICES) \
+ $(INSTALLED_FILES_FILE_SYSTEMOTHER) \
+ $(INSTALLED_FILES_JSON_SYSTEMOTHER) \
+ $(INSTALLED_FILES_FILE_RAMDISK) \
+ $(INSTALLED_FILES_JSON_RAMDISK) \
+ $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_ROOT) \
+ $(INSTALLED_FILES_JSON_ROOT) \
+ $(INSTALLED_FILES_FILE_RECOVERY) \
+ $(INSTALLED_FILES_JSON_RECOVERY) \
+ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
+ auxiliary \
+ soong_docs
# dist_files only for putting your library into the dist directory with a full build.
.PHONY: dist_files
+.PHONY: apps_only
ifneq ($(TARGET_BUILD_APPS),)
# If this build is just for apps, only build apps and not the full system by default.
@@ -1140,6 +1677,12 @@ ifneq ($(TARGET_BUILD_APPS),)
# Dist the installed files if they exist.
apps_only_installed_files := $(foreach m,$(unbundled_build_modules),$(ALL_MODULES.$(m).INSTALLED))
$(call dist-for-goals,apps_only, $(apps_only_installed_files))
+
+ # Dist the bundle files if they exist.
+ apps_only_bundle_files := $(foreach m,$(unbundled_build_modules),\
+ $(if $(ALL_MODULES.$(m).BUNDLE),$(ALL_MODULES.$(m).BUNDLE):$(m)-base.zip))
+ $(call dist-for-goals,apps_only, $(apps_only_bundle_files))
+
# For uninstallable modules such as static Java library, we have to dist the built file,
# as <module_name>.<suffix>
apps_only_dist_built_files := $(foreach m,$(unbundled_build_modules),$(if $(ALL_MODULES.$(m).INSTALLED),,\
@@ -1162,7 +1705,6 @@ ifneq ($(TARGET_BUILD_APPS),)
$(COVERAGE_ZIP) : $(apps_only_installed_files)
$(call dist-for-goals,apps_only, $(COVERAGE_ZIP))
-.PHONY: apps_only
apps_only: $(unbundled_build_modules)
droid_targets: apps_only
@@ -1180,13 +1722,26 @@ else # TARGET_BUILD_APPS
$(call dist-for-goals, droidcore, \
$(INTERNAL_UPDATE_PACKAGE_TARGET) \
$(INTERNAL_OTA_PACKAGE_TARGET) \
+ $(INTERNAL_OTA_METADATA) \
+ $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
$(BUILT_OTATOOLS_PACKAGE) \
$(SYMBOLS_ZIP) \
$(COVERAGE_ZIP) \
+ $(APPCOMPAT_ZIP) \
$(INSTALLED_FILES_FILE) \
+ $(INSTALLED_FILES_JSON) \
$(INSTALLED_FILES_FILE_VENDOR) \
+ $(INSTALLED_FILES_JSON_VENDOR) \
+ $(INSTALLED_FILES_FILE_ODM) \
+ $(INSTALLED_FILES_JSON_ODM) \
$(INSTALLED_FILES_FILE_PRODUCT) \
+ $(INSTALLED_FILES_JSON_PRODUCT) \
+ $(INSTALLED_FILES_FILE_PRODUCT_SERVICES) \
+ $(INSTALLED_FILES_JSON_PRODUCT_SERVICES) \
$(INSTALLED_FILES_FILE_SYSTEMOTHER) \
+ $(INSTALLED_FILES_JSON_SYSTEMOTHER) \
+ $(INSTALLED_FILES_FILE_RECOVERY) \
+ $(INSTALLED_FILES_JSON_RECOVERY) \
$(INSTALLED_BUILD_PROP_TARGET) \
$(BUILT_TARGET_FILES_PACKAGE) \
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
@@ -1207,11 +1762,41 @@ else # TARGET_BUILD_APPS
endif
endif
+ $(call dist-for-goals, droidcore, \
+ $(INSTALLED_FILES_FILE_ROOT) \
+ $(INSTALLED_FILES_JSON_ROOT) \
+ )
+
+ ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ $(call dist-for-goals, droidcore, \
+ $(INSTALLED_FILES_FILE_RAMDISK) \
+ $(INSTALLED_FILES_JSON_RAMDISK) \
+ $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+ )
+ endif
+
ifeq ($(EMMA_INSTRUMENT),true)
- $(JACOCO_REPORT_CLASSES_ALL) : $(INSTALLED_SYSTEMIMAGE)
+ $(JACOCO_REPORT_CLASSES_ALL) : $(INSTALLED_SYSTEMIMAGE_TARGET)
$(call dist-for-goals, dist_files, $(JACOCO_REPORT_CLASSES_ALL))
endif
+ # Put XML formatted API files in the dist dir.
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-header-files,android_stubs_current) $(APICHECK)
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-header-files,android_system_stubs_current) $(APICHECK)
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-header-files,android_test_stubs_current) $(APICHECK)
+
+ api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml test-api.xml)
+ $(api_xmls):
+ $(hide) echo "Converting API file to XML: $@"
+ $(hide) mkdir -p $(dir $@)
+ $(hide) $(APICHECK_COMMAND) --input-api-jar $< --api-xml $@
+
+ $(call dist-for-goals, dist_files, $(api_xmls))
+ api_xmls :=
+
# Building a full system-- the default is to build droidcore
droid_targets: droidcore dist_files
@@ -1220,20 +1805,22 @@ endif # TARGET_BUILD_APPS
.PHONY: docs
docs: $(ALL_DOCS)
-.PHONY: sdk
+.PHONY: sdk win_sdk winsdk-tools sdk_addon
ALL_SDK_TARGETS := $(INTERNAL_SDK_TARGET)
sdk: $(ALL_SDK_TARGETS)
$(call dist-for-goals,sdk win_sdk, \
$(ALL_SDK_TARGETS) \
$(SYMBOLS_ZIP) \
$(COVERAGE_ZIP) \
+ $(APPCOMPAT_ZIP) \
$(INSTALLED_BUILD_PROP_TARGET) \
)
# umbrella targets to assist engineers in verifying builds
.PHONY: java native target host java-host java-target native-host native-target \
java-host-tests java-target-tests native-host-tests native-target-tests \
- java-tests native-tests host-tests target-tests tests java-dex
+ java-tests native-tests host-tests target-tests tests java-dex \
+ native-host-cross
# some synonyms
.PHONY: host-java target-java host-native target-native \
target-java-tests target-native-tests
@@ -1267,15 +1854,31 @@ endif # samplecode in $(MAKECMDGOALS)
.PHONY: findbugs
findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
+LSDUMP_PATHS_FILE := $(PRODUCT_OUT)/lsdump_paths.txt
+
.PHONY: findlsdumps
-findlsdumps: $(FIND_LSDUMPS_FILE)
+findlsdumps: $(LSDUMP_PATHS_FILE) $(LSDUMP_PATHS)
+
+$(LSDUMP_PATHS_FILE): PRIVATE_LSDUMP_PATHS := $(LSDUMP_PATHS)
+$(LSDUMP_PATHS_FILE):
+ @echo "Generate $@"
+ @rm -rf $@ && echo "$(PRIVATE_LSDUMP_PATHS)" | sed -e 's/ /\n/g' > $@
+
+.PHONY: check-elf-files
+check-elf-files:
#xxx scrape this from ALL_MODULE_NAME_TAGS
.PHONY: modules
modules:
@echo "Available sub-modules:"
@echo "$(call module-names-for-tag-list,$(ALL_MODULE_TAGS))" | \
- tr -s ' ' '\n' | sort -u | $(COLUMN)
+ tr -s ' ' '\n' | sort -u
+
+.PHONY: dump-files
+dump-files:
+ $(info product_target_FILES for $(TARGET_DEVICE) ($(INTERNAL_PRODUCT)):)
+ $(foreach p,$(sort $(product_target_FILES)),$(info : $(p)))
+ @echo Successfully dumped product file list
.PHONY: nothing
nothing:
@@ -1288,4 +1891,8 @@ tidy_only:
ndk: $(SOONG_OUT_DIR)/ndk.timestamp
.PHONY: ndk
+$(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
+
+$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
+
endif # KATI
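
The lsdump_paths.txt rule above writes a space-separated Make list to a file, one entry per line. Note that sed -e 's/ /\n/g' depends on GNU sed (BSD sed does not interpret \n in the replacement text); tr ' ' '\n' is a portable equivalent. A standalone sketch of the same pattern with illustrative paths (the recipe line must start with a literal tab):

MY_PATHS_SKETCH := vendor/lib64/libfoo.so.lsdump vendor/lib64/libbar.so.lsdump
paths_sketch.txt: PRIVATE_PATHS := $(MY_PATHS_SKETCH)
paths_sketch.txt:
	@rm -f $@ && echo "$(PRIVATE_PATHS)" | tr ' ' '\n' > $@
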
diff --git a/core/math.mk b/core/math.mk
deleted file mode 100644
index 44e03ce826..0000000000
--- a/core/math.mk
+++ /dev/null
@@ -1,152 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-###########################################################
-# Basic math functions for positive integers <= 100
-#
-# (SDK versions for example)
-###########################################################
-__MATH_NUMBERS := 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 \
- 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 \
- 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 \
- 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 \
- 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100
-
-# Returns true if $(1) is a positive integer <= 100, otherwise returns nothing.
-define math_is_number
-$(strip \
- $(if $(1),,$(error Argument missing)) \
- $(if $(word 2,$(1)),$(error Multiple words in a single argument: $(1))) \
- $(if $(filter $(1),$(__MATH_NUMBERS)),true))
-endef
-
-#$(warning true == $(call math_is_number,2))
-#$(warning == $(call math_is_number,foo))
-#$(call math_is_number,1 2)
-#$(call math_is_number,no 2)
-
-define _math_check_valid
-$(if $(call math_is_number,$(1)),,$(error Only positive integers <= 100 are supported (not $(1))))
-endef
-
-# return a list containing integers ranging from [$(1),$(2)]
-define int_range_list
-$(call _math_check_valid,$(1))$(call _math_check_valid,$(2))$(wordlist $(1),$(2),$(__MATH_NUMBERS))
-endef
-
-#$(call _math_check_valid,0)
-#$(call _math_check_valid,1)
-#$(call _math_check_valid,100)
-#$(call _math_check_valid,101)
-#$(call _math_check_valid,)
-#$(call _math_check_valid,1 2)
-
-# Returns the greater of $1 or $2.
-# If $1 or $2 is not a positive integer <= 100, then an error is generated.
-define math_max
-$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
- $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))))
-endef
-
-#$(call math_max)
-#$(call math_max,1)
-#$(call math_max,1 2,3)
-#$(warning 1 == $(call math_max,1,1))
-#$(warning 42 == $(call math_max,5,42))
-#$(warning 42 == $(call math_max,42,5))
-
-define math_gt_or_eq
-$(if $(filter $(1),$(call math_max,$(1),$(2))),true)
-endef
-
-define math_lt
-$(if $(call math_gt_or_eq,$(1),$(2)),,true)
-endef
-
-#$(warning $(call math_gt_or_eq, 2, 1))
-#$(warning $(call math_gt_or_eq, 1, 1))
-#$(warning $(if $(call math_gt_or_eq, 1, 2),false,true))
-
-# $1 is the variable name to increment
-define inc_and_print
-$(strip $(eval $(1) := $($(1)) .)$(words $($(1))))
-endef
-
-# Returns the words in $2 that are numbers and are less than $1
-define numbers_less_than
-$(strip \
- $(foreach n,$2, \
- $(if $(call math_is_number,$(n)), \
- $(if $(call math_lt,$(n),$(1)), \
- $(n)))))
-endef
-
-_INT_LIMIT_WORDS := $(foreach a,x x,$(foreach b,x x x x x x x x x x x x x x x x,\
- $(foreach c,x x x x x x x x x x x x x x x x,x x x x x x x x x x x x x x x x)))
-
-define _int_encode
-$(if $(filter $(words x $(_INT_LIMIT_WORDS)),$(words $(wordlist 1,$(1),x $(_INT_LIMIT_WORDS)))),\
- $(call pretty-error,integer greater than $(words $(_INT_LIMIT_WORDS)) is not supported!),\
- $(wordlist 1,$(1),$(_INT_LIMIT_WORDS)))
-endef
-
-# _int_max returns the maximum of the two arguments
-# input: two (x) lists; output: one (x) list
-# integer cannot be passed in directly. It has to be converted using _int_encode.
-define _int_max
-$(subst xx,x,$(join $(1),$(2)))
-endef
-
-# first argument is greater than second argument
-# output: non-empty if true
-# integer cannot be passed in directly. It has to be converted using _int_encode.
-define _int_greater-than
-$(filter-out $(words $(2)),$(words $(call _int_max,$(1),$(2))))
-endef
-
-# first argument equals to second argument
-# output: non-empty if true
-# integer cannot be passed in directly. It has to be converted using _int_encode.
-define _int_equal
-$(filter $(words $(1)),$(words $(2)))
-endef
-
-# first argument is greater than or equal to second argument
-# output: non-empty if true
-# integer cannot be passed in directly. It has to be converted using _int_encode.
-define _int_greater-or-equal
-$(call _int_greater-than,$(1),$(2))$(call _int_equal,$(1),$(2))
-endef
-
-define int_plus
-$(words $(call _int_encode,$(1)) $(call _int_encode,$(2)))
-endef
-
-define int_subtract
-$(if $(call _int_greater-or-equal,$(call _int_encode,$(1)),$(call _int_encode,$(2))),\
- $(words $(filter-out xx,$(join $(call _int_encode,$(1)),$(call _int_encode,$(2))))),\
- $(call pretty-error,$(1) subtract underflow $(2)))
-endef
-
-define int_multiply
-$(words $(foreach a,$(call _int_encode,$(1)),$(call _int_encode,$(2))))
-endef
-
-define int_divide
-$(if $(filter 0,$(2)),$(call pretty-error,division by zero is not allowed!),$(strip \
- $(if $(call _int_greater-or-equal,$(call _int_encode,$(1)),$(call _int_encode,$(2))), \
- $(call int_plus,$(call int_divide,$(call int_subtract,$(1),$(2)),$(2)),1),0)))
-endef
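
The deleted helpers above implement arithmetic by encoding an integer n as a list of n 'x' words, so addition becomes list concatenation and comparison or maximum becomes word counting. A self-contained sketch of the same idea for small inputs (names here are illustrative, not the original definitions):

__TEN_SKETCH := x x x x x x x x x x
_enc = $(wordlist 1,$(1),$(__TEN_SKETCH))
int_plus_sketch = $(words $(call _enc,$(1)) $(call _enc,$(2)))
int_max_sketch  = $(words $(subst xx,x,$(join $(call _enc,$(1)),$(call _enc,$(2)))))
$(info 3 + 4 = $(call int_plus_sketch,3,4))    # prints 7
$(info max of 3 and 4 = $(call int_max_sketch,3,4))  # prints 4
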
diff --git a/core/module_arch_supported.mk b/core/module_arch_supported.mk
index 62e2643312..7ebc8f957d 100644
--- a/core/module_arch_supported.mk
+++ b/core/module_arch_supported.mk
@@ -62,11 +62,11 @@ endif
ifdef LOCAL_IS_HOST_MODULE
ifneq (,$(LOCAL_MODULE_HOST_OS))
+ ifneq (,$(filter windows,$(LOCAL_MODULE_HOST_OS)))
+ $(call pretty-error,Windows is only supported in Android.bp files)
+ endif
ifeq (,$(filter $($(my_prefix)OS),$(LOCAL_MODULE_HOST_OS)))
my_module_arch_supported := false
endif
-else ifeq ($($(my_prefix)OS),windows)
- # If LOCAL_MODULE_HOST_OS is empty, only linux and darwin are supported
- my_module_arch_supported := false
endif
endif
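
For reference, the check above only consults LOCAL_MODULE_HOST_OS and the OS variable selected by my_prefix. A self-contained sketch with illustrative values (the real build sets HOST_OS and my_prefix elsewhere):

LOCAL_MODULE_HOST_OS := darwin
my_prefix := HOST_
HOST_OS := linux
my_module_arch_supported := true
ifneq (,$(LOCAL_MODULE_HOST_OS))
  ifeq (,$(filter $($(my_prefix)OS),$(LOCAL_MODULE_HOST_OS)))
    my_module_arch_supported := false
  endif
endif
$(info supported on this host: $(my_module_arch_supported))  # prints false
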
diff --git a/core/multi_prebuilt.mk b/core/multi_prebuilt.mk
index 77c57ab4bd..c97d481f58 100644
--- a/core/multi_prebuilt.mk
+++ b/core/multi_prebuilt.mk
@@ -38,7 +38,6 @@ multi_prebuilt_once := true
# $(2): IS_HOST_MODULE
# $(3): MODULE_CLASS
# $(4): MODULE_TAGS
-# $(5): OVERRIDE_BUILT_MODULE_PATH
# $(6): UNINSTALLABLE_MODULE
# $(7): BUILT_MODULE_STEM
# $(8): LOCAL_STRIP_MODULE
@@ -56,7 +55,6 @@ $(foreach t,$(1), \
$(eval LOCAL_IS_HOST_MODULE := $(2)) \
$(eval LOCAL_MODULE_CLASS := $(3)) \
$(eval LOCAL_MODULE_TAGS := $(4)) \
- $(eval OVERRIDE_BUILT_MODULE_PATH := $(5)) \
$(eval LOCAL_UNINSTALLABLE_MODULE := $(6)) \
$(eval tw := $(subst :, ,$(strip $(t)))) \
$(if $(word 3,$(tw)),$(error $(LOCAL_PATH): Bad prebuilt filename '$(t)')) \
@@ -98,7 +96,7 @@ $(call auto-prebuilt-boilerplate, \
$(prebuilt_is_host), \
SHARED_LIBRARIES, \
$(prebuilt_module_tags), \
- $($(if $(prebuilt_is_host),HOST,TARGET)_OUT_INTERMEDIATE_LIBRARIES), \
+ , \
, \
, \
$(prebuilt_strip_module))
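
Dropping the fifth argument of auto-prebuilt-boilerplate does not renumber the later ones: the call site above simply passes an empty slot so $(6) and onward keep their positions. A tiny standalone illustration of that positional-argument convention (names are made up):

define show-args-sketch
$(info 4th=$(4) 5th=[$(5)] 6th=$(6))
endef
$(call show-args-sketch,a,b,c,d,,f)  # prints: 4th=d 5th=[] 6th=f
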
diff --git a/core/native_benchmark.mk b/core/native_benchmark.mk
index e73bcad0c5..073d8dd2e2 100644
--- a/core/native_benchmark.mk
+++ b/core/native_benchmark.mk
@@ -6,9 +6,6 @@ $(call record-module-type,NATIVE_BENCHMARK)
LOCAL_STATIC_LIBRARIES += libgoogle-benchmark
-LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
-LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
-
ifndef LOCAL_MULTILIB
ifndef LOCAL_32_BIT_ONLY
LOCAL_MULTILIB := both
diff --git a/core/native_benchmark_test_config_template.xml b/core/native_benchmark_test_config_template.xml
new file mode 100644
index 0000000000..d1f01996bd
--- /dev/null
+++ b/core/native_benchmark_test_config_template.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}.">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-native-metric" />
+
+ {EXTRA_CONFIGS}
+
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ </target_preparer>
+ <test class="com.android.tradefed.testtype.GoogleBenchmarkTest" >
+ <option name="native-benchmark-device-path" value="/data/local/tmp" />
+ <option name="benchmark-module-name" value="{MODULE}" />
+ </test>
+</configuration>
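
The {MODULE} and {EXTRA_CONFIGS} tokens above are placeholders that the build fills in when it auto-generates a per-module test config. As a hedged sketch only (the actual generation rule lives elsewhere and may differ), the substitution amounts to something like the following, using a made-up module name; the recipe line must start with a literal tab:

my_benchmark.config: native_benchmark_test_config_template.xml
	sed -e 's|{MODULE}|my_benchmark|g' -e 's|{EXTRA_CONFIGS}||g' $< > $@
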
diff --git a/core/native_host_test_config_template.xml b/core/native_host_test_config_template.xml
new file mode 100644
index 0000000000..818b9b9f4c
--- /dev/null
+++ b/core/native_host_test_config_template.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}.">
+ <option name="null-device" value="true" />
+
+ {EXTRA_CONFIGS}
+
+ <test class="com.android.tradefed.testtype.HostGTest" >
+ <option name="module-name" value="{MODULE}" />
+ </test>
+</configuration>
+
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
index a960529ed1..ef1818f776 100644
--- a/core/native_test_config_template.xml
+++ b/core/native_test_config_template.xml
@@ -15,6 +15,11 @@
-->
<!-- This test config file is auto-generated. -->
<configuration description="Runs {MODULE}.">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-native" />
+
+ {EXTRA_CONFIGS}
+
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
<option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dceecec..e9e89c309e 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -7,7 +7,7 @@ endif
KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
# Modifier goals we don't need to pass to Ninja.
-NINJA_EXCLUDE_GOALS := all dist APP-% PRODUCT-%
+NINJA_EXCLUDE_GOALS := all APP-% PRODUCT-%
# A list of goals which affect parsing of makefiles and we need to pass to Kati.
PARSE_TIME_MAKE_GOALS := \
@@ -28,7 +28,6 @@ PARSE_TIME_MAKE_GOALS := \
custom_images \
deps-license \
dicttool_aosp \
- dist \
dump-products \
eng \
fusion \
@@ -51,7 +50,6 @@ PARSE_TIME_MAKE_GOALS := \
user \
userdataimage \
userdebug \
- valgrind-test-art% \
vts \
win_sdk \
winsdk-tools
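
Exclusion lists like NINJA_EXCLUDE_GOALS are typically consumed by filtering them out of the requested goals. A minimal sketch of that filtering with illustrative values (the real consumer lives elsewhere in the build):

NINJA_EXCLUDE_GOALS_SKETCH := all APP-% PRODUCT-%
REQUESTED_GOALS_SKETCH := droid all APP-Settings tests
$(info passed through: $(filter-out $(NINJA_EXCLUDE_GOALS_SKETCH),$(REQUESTED_GOALS_SKETCH)))
# prints: passed through: droid tests
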
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 383d73c21e..c3147341b4 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -14,6 +14,11 @@ ifeq ($(LOCAL_MODULE_CLASS),GYP)
notice_file :=
endif
+ifeq ($(LOCAL_MODULE_CLASS),FAKE)
+ # We ignore NOTICE files for modules of type FAKE.
+ notice_file :=
+endif
+
# Soong generates stub libraries that don't need NOTICE files
ifdef LOCAL_NO_NOTICE_FILE
ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
@@ -33,8 +38,14 @@ else
endif
endif
+installed_notice_file :=
+
ifdef notice_file
+ifdef my_register_name
+ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
+endif
+
# This relies on the name of the directory in PRODUCT_OUT matching where
# it's installed on the target - i.e. system, data, etc. This does
# not work for root and isn't exact, but it's probably good enough for
@@ -62,12 +73,17 @@ else
endif
module_installed_filename := \
$(patsubst $(PRODUCT_OUT)/%,%,$($(my_prefix)OUT_JAVA_LIBRARIES))/$(module_leaf)
+ else ifeq ($(LOCAL_MODULE_CLASS),ETC)
+  # ETC modules may be uninstallable, yet still have a NOTICE file, e.g. APEX components.
+ module_installed_filename :=
else
$(error Cannot determine where to install NOTICE file for $(LOCAL_MODULE))
endif # JAVA_LIBRARIES
endif # STATIC_LIBRARIES
endif
+ifdef module_installed_filename
+
# In case it's actually a host file
module_installed_filename := $(patsubst $(HOST_OUT)/%,%,$(module_installed_filename))
module_installed_filename := $(patsubst $(HOST_CROSS_OUT)/%,%,$(module_installed_filename))
@@ -101,15 +117,13 @@ $(LOCAL_BUILT_MODULE): | $(installed_notice_file)
endif # JAVA_LIBRARIES
endif # TARGET_BUILD_APPS
-else
-# NOTICE file does not exist
-installed_notice_file :=
-endif
+endif # module_installed_filename
+endif # notice_file
# Create a predictable, phony target to build this notice file.
# Define it even if the notice file doesn't exist so that other
# modules can depend on it.
notice_target := NOTICE-$(if \
- $(LOCAL_IS_HOST_MODULE),HOST,TARGET)-$(LOCAL_MODULE_CLASS)-$(LOCAL_MODULE)
+ $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-$(LOCAL_MODULE_CLASS)-$(LOCAL_MODULE)
.PHONY: $(notice_target)
$(notice_target): $(installed_notice_file)
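
The notice_target change above adds a HOST_CROSS leg to the generated phony name. A self-contained sketch showing the name that results for a hypothetical target-side APK module:

LOCAL_IS_HOST_MODULE :=
my_host_cross :=
LOCAL_MODULE_CLASS := APPS
LOCAL_MODULE := ExampleApp
notice_target_sketch := NOTICE-$(if \
    $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-$(LOCAL_MODULE_CLASS)-$(LOCAL_MODULE)
$(info $(notice_target_sketch))  # prints NOTICE-TARGET-APPS-ExampleApp
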
diff --git a/core/pack_dyn_relocs_setup.mk b/core/pack_dyn_relocs_setup.mk
new file mode 100644
index 0000000000..c5564b1eca
--- /dev/null
+++ b/core/pack_dyn_relocs_setup.mk
@@ -0,0 +1,34 @@
+#############################################################
+## Set up my_pack_module_relocations
+## Input variables:
+## DISABLE_RELOCATION_PACKER,
+## LOCAL_PACK_MODULE_RELOCATIONS*,
+## *TARGET_PACK_MODULE_RELOCATIONS,
+## LOCAL_MODULE_CLASS, HOST_OS
+## LOCAL_IS_HOST_MODULE
+## Output variables:
+## my_pack_module_relocations: if false, relocation_packer is skipped
+#############################################################
+
+my_pack_module_relocations := false
+ifneq ($(DISABLE_RELOCATION_PACKER),true)
+ my_pack_module_relocations := $(firstword \
+ $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+ $(LOCAL_PACK_MODULE_RELOCATIONS))
+endif
+
+ifeq ($(my_pack_module_relocations),)
+ my_pack_module_relocations := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_PACK_MODULE_RELOCATIONS)
+endif
+
+# Do not pack relocations for executables, because packing results in a
+# non-zero p_vaddr, which causes the kernel to load executables at a lower
+# address (starting at 0x8000). See http://b/20665974.
+ifeq ($(filter SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+ my_pack_module_relocations := false
+endif
+
+ifdef LOCAL_IS_HOST_MODULE
+ # Do not pack relocations on host modules
+ my_pack_module_relocations := false
+endif
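
The $(firstword ...) above gives a per-arch LOCAL_PACK_MODULE_RELOCATIONS_<arch> setting priority over the generic flag. A standalone sketch with illustrative values (the real build sets my_prefix, LOCAL_2ND_ARCH_VAR_PREFIX and TARGET_ARCH elsewhere):

LOCAL_PACK_MODULE_RELOCATIONS_arm64 := true
LOCAL_PACK_MODULE_RELOCATIONS := false
my_prefix := TARGET_
LOCAL_2ND_ARCH_VAR_PREFIX :=
TARGET_ARCH := arm64
my_pack_module_relocations := $(firstword \
    $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
    $(LOCAL_PACK_MODULE_RELOCATIONS))
$(info pack relocations: $(my_pack_module_relocations))  # prints true
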
diff --git a/core/package.mk b/core/package.mk
index f3713fcf75..854e0093c6 100644
--- a/core/package.mk
+++ b/core/package.mk
@@ -8,7 +8,9 @@ my_prefix := TARGET_
include $(BUILD_SYSTEM)/multilib.mk
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
- ifneq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(my_module_multilib),|64)
+ ifeq ($(TARGET_SUPPORTS_64_BIT_APPS),true)
+ my_module_multilib := first
+ else ifneq ($(my_module_multilib),64)
my_module_multilib := first
endif
endif
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 80d386d4b4..e27f6cecf8 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -87,54 +87,77 @@ else
LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
endif
-package_resource_overlays := $(strip \
- $(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
- $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))) \
- $(wildcard $(foreach dir, $(DEVICE_PACKAGE_OVERLAYS), \
- $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
+# If LOCAL_MODULE matches a rule in PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES,
+# override the manifest package name with the (first) matching rule
+override_manifest_name := $(strip $(word 1,\
+ $(foreach rule,$(PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES),\
+ $(eval _pkg_name_pat := $(call word-colon,1,$(rule)))\
+ $(eval _manifest_name_pat := $(call word-colon,2,$(rule)))\
+ $(if $(filter $(_pkg_name_pat),$(LOCAL_MODULE)),\
+ $(patsubst $(_pkg_name_pat),$(_manifest_name_pat),$(LOCAL_MODULE))\
+ )\
+ )\
+))
-enforce_rro_enabled :=
-ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
- ifneq ($(package_resource_overlays),)
- ifeq ($(PRODUCT_ENFORCE_RRO_TARGETS),*)
- enforce_rro_enabled := true
- else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
- enforce_rro_enabled := true
- endif
- endif
+ifneq (,$(override_manifest_name))
+# Note: this can override LOCAL_MANIFEST_PACKAGE_NAME value set in Android.mk
+LOCAL_MANIFEST_PACKAGE_NAME := $(override_manifest_name)
+endif
+
+include $(BUILD_SYSTEM)/force_aapt2.mk
+
+# Process Support Library dependencies.
+include $(BUILD_SYSTEM)/support_libraries.mk
- ifdef enforce_rro_enabled
- ifeq (,$(LOCAL_MODULE_PATH))
- ifeq (true,$(LOCAL_PROPRIETARY_MODULE))
- enforce_rro_enabled :=
- else ifeq (true,$(LOCAL_OEM_MODULE))
- enforce_rro_enabled :=
- else ifeq (true,$(LOCAL_ODM_MODULE))
- enforce_rro_enabled :=
- else ifeq (true,$(LOCAL_PRODUCT_MODULE))
- enforce_rro_enabled :=
- endif
- else ifeq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
- enforce_rro_enabled :=
- endif
+# Determine whether auto-RRO is enabled for this package.
+enforce_rro_enabled :=
+ifeq ($(PRODUCT_ENFORCE_RRO_TARGETS),*)
+ # * means all system APKs, so enable conditionally based on module path.
+
+ # Note that base_rules.mk has not yet been included, so it's likely that only
+ # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
+ ifeq (,$(LOCAL_MODULE_PATH))
+ non_system_module := $(filter true,\
+ $(LOCAL_ODM_MODULE) \
+ $(LOCAL_OEM_MODULE) \
+ $(LOCAL_PRODUCT_MODULE) \
+ $(LOCAL_PRODUCT_SERVICES_MODULE) \
+ $(LOCAL_PROPRIETARY_MODULE) \
+ $(LOCAL_VENDOR_MODULE))
+ enforce_rro_enabled := $(if $(non_system_module),,true)
+ else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+ enforce_rro_enabled := true
endif
+else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
+ enforce_rro_enabled := true
endif
+product_package_overlays := $(strip \
+ $(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
+ $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
+device_package_overlays := $(strip \
+ $(wildcard $(foreach dir, $(DEVICE_PACKAGE_OVERLAYS), \
+ $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
+
+static_resource_overlays :=
+runtime_resource_overlays_product :=
+runtime_resource_overlays_vendor :=
ifdef enforce_rro_enabled
ifneq ($(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS),)
- static_only_resource_overlays := $(filter $(addsuffix %,$(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS)),$(package_resource_overlays))
- ifneq ($(static_only_resource_overlays),)
- package_resource_overlays := $(filter-out $(static_only_resource_overlays),$(package_resource_overlays))
- LOCAL_RESOURCE_DIR := $(static_only_resource_overlays) $(LOCAL_RESOURCE_DIR)
- ifeq ($(package_resource_overlays),)
- enforce_rro_enabled :=
- endif
- endif
+  # The PRODUCT_ exclusion variable applies to both inclusion variables.
+ static_resource_overlays += $(filter $(addsuffix %,$(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS)),$(product_package_overlays))
+ static_resource_overlays += $(filter $(addsuffix %,$(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS)),$(device_package_overlays))
endif
+ runtime_resource_overlays_product := $(filter-out $(static_resource_overlays),$(product_package_overlays))
+ runtime_resource_overlays_vendor := $(filter-out $(static_resource_overlays),$(device_package_overlays))
else
-LOCAL_RESOURCE_DIR := $(package_resource_overlays) $(LOCAL_RESOURCE_DIR)
+ static_resource_overlays := $(product_package_overlays) $(device_package_overlays)
endif
+# Add the static overlays. Auto-RRO is created later, as it depends on
+# other logic in this file.
+LOCAL_RESOURCE_DIR := $(static_resource_overlays) $(LOCAL_RESOURCE_DIR)
+
all_assets := $(strip \
$(foreach dir, $(LOCAL_ASSET_DIR), \
$(addprefix $(dir)/, \
@@ -153,13 +176,13 @@ LOCAL_USE_AAPT2 := true
endif
my_res_package :=
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
# In aapt2 the last takes precedence.
my_resource_dirs := $(call reverse-list,$(LOCAL_RESOURCE_DIR))
my_res_dir :=
my_overlay_res_dirs :=
-ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
+ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
# If we are using static android libraries, every source file becomes an overlay.
# This is to emulate old AAPT behavior which simulated library support.
my_res_dir :=
@@ -187,6 +210,11 @@ all_resources := $(strip $(my_res_resources) $(my_overlay_resources))
my_res_package := $(intermediates)/package-res.apk
LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
+ifeq ($(LOCAL_USE_AAPT2),true)
+ my_bundle_module := $(intermediates)/base.zip
+ LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
+endif
+
# Always run aapt2, because we need to at least compile the AndroidManifest.xml.
need_compile_res := true
@@ -297,32 +325,18 @@ endif # LOCAL_EMMA_INSTRUMENT
rs_compatibility_jni_libs :=
-ifeq ($(LOCAL_DATA_BINDING),true)
-data_binding_intermediates := $(intermediates.COMMON)/data-binding
-
-LOCAL_JAVACFLAGS += -processorpath $(DATA_BINDING_COMPILER) -s $(data_binding_intermediates)/anno-src
-
-LOCAL_STATIC_JAVA_LIBRARIES += databinding-baselibrary
-LOCAL_STATIC_JAVA_AAR_LIBRARIES += databinding-library databinding-adapters
-
-data_binding_res_in := $(LOCAL_RESOURCE_DIR)
-data_binding_res_out := $(data_binding_intermediates)/res
-
-# Replace with the processed merged res dir.
-LOCAL_RESOURCE_DIR := $(data_binding_res_out)
-
-LOCAL_AAPT_FLAGS += --auto-add-overlay --extra-packages com.android.databinding.library
-endif # LOCAL_DATA_BINDING
-
-# Process Support Library dependencies.
-include $(BUILD_SYSTEM)/support_libraries.mk
-
# If the module is a compressed module, we don't pre-opt it because its final
# installation location will be the data partition.
ifdef LOCAL_COMPRESSED_MODULE
LOCAL_DEX_PREOPT := false
endif
+# Default to using uncompressed native libraries in APKs if minSdkVersion >= Marshmallow (API 23)
+ifndef LOCAL_USE_EMBEDDED_NATIVE_LIBS
+ LOCAL_USE_EMBEDDED_NATIVE_LIBS := $(call math_gt_or_eq, \
+ $(patsubst $(PLATFORM_VERSION_CODENAME),100,$(call module-min-sdk-version)),23)
+endif
+
include $(BUILD_SYSTEM)/android_manifest.mk
resource_export_package :=
@@ -357,9 +371,9 @@ $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOC
###############################
## AAPT/AAPT2
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
- ifneq (,$(renderscript_target_api))
+ ifneq (,$(filter-out current,$(renderscript_target_api)))
ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
my_generated_res_zips := $(rs_generated_res_zip)
endif # renderscript_target_api < 21
@@ -409,21 +423,22 @@ else # LOCAL_USE_AAPT2
resource_export_package := $(intermediates.COMMON)/package-export.apk
$(R_file_stamp): $(resource_export_package)
- # add-assets-to-package looks at PRODUCT_AAPT_CONFIG, but this target
+ # create-assets-package looks at PRODUCT_AAPT_CONFIG, but this target
# can't know anything about PRODUCT. Clear it out just for this target.
$(resource_export_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
$(resource_export_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
$(resource_export_package): PRIVATE_RESOURCE_LIST := $(all_res_assets)
$(resource_export_package): $(all_res_assets) $(full_android_manifest) $(rs_generated_res_zip) $(AAPT)
@echo "target Export Resources: $(PRIVATE_MODULE) ($@)"
- $(create-empty-package)
- $(add-assets-to-package)
+ $(call create-assets-package,$@)
endif
endif # LOCAL_USE_AAPT2
endif # need_compile_res
+my_dex_jar := $(intermediates.COMMON)/dex.jar
+
called_from_package_internal := true
#################################
include $(BUILD_SYSTEM)/java.mk
@@ -450,34 +465,6 @@ endif
$(LOCAL_INTERMEDIATE_TARGETS): \
PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-ifeq ($(LOCAL_DATA_BINDING),true)
-data_binding_stamp := $(data_binding_intermediates)/data-binding.stamp
-$(data_binding_stamp): PRIVATE_INTERMEDIATES := $(data_binding_intermediates)
-$(data_binding_stamp): PRIVATE_MANIFEST := $(full_android_manifest)
-# Generate code into $(LOCAL_INTERMEDIATE_SOURCE_DIR) so that the generated .java files
-# will be automatically picked up by function compile-java.
-$(data_binding_stamp): PRIVATE_SRC_OUT := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/data-binding
-$(data_binding_stamp): PRIVATE_XML_OUT := $(data_binding_intermediates)/xml
-$(data_binding_stamp): PRIVATE_RES_OUT := $(data_binding_res_out)
-$(data_binding_stamp): PRIVATE_RES_IN := $(data_binding_res_in)
-$(data_binding_stamp): PRIVATE_ANNO_SRC_DIR := $(data_binding_intermediates)/anno-src
-
-$(data_binding_stamp) : $(all_res_assets) $(full_android_manifest) \
- $(DATA_BINDING_COMPILER)
- @echo "Data-binding process: $@"
- @rm -rf $(PRIVATE_INTERMEDIATES) $(PRIVATE_SRC_OUT) && \
- mkdir -p $(PRIVATE_INTERMEDIATES) $(PRIVATE_SRC_OUT) \
- $(PRIVATE_XML_OUT) $(PRIVATE_RES_OUT) $(PRIVATE_ANNO_SRC_DIR)
- $(hide) $(JAVA) -classpath $(DATA_BINDING_COMPILER) android.databinding.tool.MakeCopy \
- $(PRIVATE_MANIFEST) $(PRIVATE_SRC_OUT) $(PRIVATE_XML_OUT) $(PRIVATE_RES_OUT) $(PRIVATE_RES_IN)
- $(hide) touch $@
-
-# Make sure the data-binding process happens before javac and generation of R.java.
-$(R_file_stamp): $(data_binding_stamp)
-$(java_source_list_file): $(data_binding_stamp)
-$(full_classes_compiled_jar): $(data_binding_stamp)
-endif # LOCAL_DATA_BINDING
-
framework_res_package_export :=
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
@@ -486,10 +473,10 @@ ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
# resources.
ifeq ($(LOCAL_SDK_RES_VERSION),core_current)
# core_current doesn't contain any framework resources.
-else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
# for released sdk versions, the platform resources were built into android.jar.
framework_res_package_export := \
- $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
+ $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
else # LOCAL_SDK_RES_VERSION
framework_res_package_export := \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
@@ -509,7 +496,7 @@ $(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_
$(LOCAL_INTERMEDIATE_TARGETS): \
PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
$(my_res_package) : $(all_library_res_package_export_deps)
endif
@@ -553,6 +540,7 @@ endif
ifeq ($(dir $(strip $(LOCAL_CERTIFICATE))),./)
LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
endif
+include $(BUILD_SYSTEM)/app_certificate_validate.mk
private_key := $(LOCAL_CERTIFICATE).pk8
certificate := $(LOCAL_CERTIFICATE).x509.pem
additional_certificates := $(foreach c,$(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
@@ -588,12 +576,26 @@ else
endif
endif
-$(LOCAL_BUILT_MODULE): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
+# Run veridex on product, product_services and vendor modules.
+# We skip it for unbundled app builds where we cannot build veridex.
+module_run_appcompat :=
+ifeq (true,$(non_system_module))
+ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
+ module_run_appcompat := true
+endif
+endif
+endif
+
+ifeq ($(module_run_appcompat),true)
+$(LOCAL_BUILT_MODULE) : $(appcompat-files)
+$(LOCAL_BUILT_MODULE): PRIVATE_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+endif
+
$(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/resources
-$(LOCAL_BUILT_MODULE): PRIVATE_FULL_CLASSES_JAR := $(full_classes_jar)
$(LOCAL_BUILT_MODULE) : $(jni_shared_libraries)
-$(LOCAL_BUILT_MODULE) : $(JAR_ARGS)
-ifdef LOCAL_USE_AAPT2
+$(LOCAL_BUILT_MODULE) : $(JAR_ARGS) $(SOONG_ZIP) $(MERGE_ZIPS) $(ZIP2ZIP)
+ifeq ($(LOCAL_USE_AAPT2),true)
$(LOCAL_BUILT_MODULE): PRIVATE_RES_PACKAGE := $(my_res_package)
$(LOCAL_BUILT_MODULE) : $(my_res_package) $(AAPT2) | $(ACP)
else
@@ -603,45 +605,115 @@ endif # LOCAL_USE_AAPT2
ifdef LOCAL_COMPRESSED_MODULE
$(LOCAL_BUILT_MODULE) : $(MINIGZIP)
endif
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+$(LOCAL_BUILT_MODULE) : $(ZIP2ZIP)
+endif
+ifneq ($(BUILD_PLATFORM_ZIP),)
+$(LOCAL_BUILT_MODULE) : .KATI_IMPLICIT_OUTPUTS := $(dir $(LOCAL_BUILT_MODULE))package.dex.apk
+endif
+ifdef LOCAL_DEX_PREOPT
+$(LOCAL_BUILT_MODULE) : PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
+$(LOCAL_BUILT_MODULE) : $(intermediates)/strip.sh
+$(LOCAL_BUILT_MODULE) : | $(DEXPREOPT_STRIP_DEPS)
+$(LOCAL_BUILT_MODULE): .KATI_DEPFILE := $(LOCAL_BUILT_MODULE).d
+endif
+$(LOCAL_BUILT_MODULE): PRIVATE_USE_EMBEDDED_NATIVE_LIBS := $(LOCAL_USE_EMBEDDED_NATIVE_LIBS)
+$(LOCAL_BUILT_MODULE):
@echo "target Package: $(PRIVATE_MODULE) ($@)"
-ifdef LOCAL_USE_AAPT2
- $(call copy-file-to-new-target)
+ rm -rf $@.parts
+ mkdir -p $@.parts
+ifeq ($(LOCAL_USE_AAPT2),true)
+ cp -f $(PRIVATE_RES_PACKAGE) $@.parts/apk.zip
else # ! LOCAL_USE_AAPT2
- $(if $(PRIVATE_SOURCE_ARCHIVE),\
- $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@),\
- $(create-empty-package))
- $(add-assets-to-package)
+ $(call create-assets-package,$@.parts/apk.zip)
endif # LOCAL_USE_AAPT2
ifneq ($(jni_shared_libraries),)
- $(add-jni-shared-libs-to-package)
+ $(call create-jni-shared-libs-package,$@.parts/jni.zip,$(PRIVATE_USE_EMBEDDED_NATIVE_LIBS))
endif
ifeq ($(full_classes_jar),)
# We don't build jar, need to add the Java resources here.
- $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@))
+ $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call create-java-resources-jar,$@.parts/res.zip))
else # full_classes_jar
- $(add-dex-to-package)
-ifdef LOCAL_USE_AAPT2
- $(call add-jar-resources-to-package,$@,$(PRIVATE_FULL_CLASSES_JAR),$(PRIVATE_RESOURCE_INTERMEDIATES_DIR))
-endif
+ $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+ $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
endif # full_classes_jar
+ $(MERGE_ZIPS) $@ $@.parts/*.zip
+ rm -rf $@.parts
ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
@# No need to align, sign-package below will do it.
$(uncompress-dexs)
endif
+# Run appcompat before stripping the classes.dex file.
+ifeq ($(module_run_appcompat),true)
+ifeq ($(LOCAL_USE_AAPT2),true)
+ $(call appcompat-header, aapt2)
+else
+ $(appcompat-header)
+endif
+ $(run-appcompat)
+endif # module_run_appcompat
ifdef LOCAL_DEX_PREOPT
ifneq ($(BUILD_PLATFORM_ZIP),)
@# Keep a copy of apk with classes.dex unstripped
$(hide) cp -f $@ $(dir $@)package.dex.apk
endif # BUILD_PLATFORM_ZIP
-ifneq (nostripping,$(LOCAL_DEX_PREOPT))
- $(call dexpreopt-remove-classes.dex,$@)
-endif
+ mv -f $@ $@.tmp
+ $(PRIVATE_STRIP_SCRIPT) $@.tmp $@
endif # LOCAL_DEX_PREOPT
$(sign-package)
ifdef LOCAL_COMPRESSED_MODULE
$(compress-package)
endif # LOCAL_COMPRESSED_MODULE
+ifeq ($(LOCAL_USE_AAPT2),true)
+ my_package_res_pb := $(intermediates)/package-res.pb.apk
+ $(my_package_res_pb): $(my_res_package) $(AAPT2)
+ $(AAPT2) convert --output-format proto $< -o $@
+
+ $(my_bundle_module): $(my_package_res_pb)
+ $(my_bundle_module): PRIVATE_RES_PACKAGE := $(my_package_res_pb)
+
+ $(my_bundle_module): $(jni_shared_libraries)
+ $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
+ $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
+
+ ifneq ($(full_classes_jar),)
+ $(my_bundle_module): PRIVATE_DEX_FILE := $(built_dex)
+ # Use the jarjar processed archive as the initial package file.
+ $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
+ $(my_bundle_module): $(built_dex)
+ else
+ $(my_bundle_module): PRIVATE_DEX_FILE :=
+ $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
+ endif # full_classes_jar
+
+ $(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
+ @echo "target Bundle: $(PRIVATE_MODULE) ($@)"
+ rm -rf $@.parts
+ mkdir -p $@.parts
+ $(ZIP2ZIP) -i $(PRIVATE_RES_PACKAGE) -o $@.parts/apk.zip AndroidManifest.xml:manifest/AndroidManifest.xml resources.pb "res/**/*" "assets/**/*"
+ ifneq ($(jni_shared_libraries),)
+ $(call create-jni-shared-libs-package,$@.parts/jni.zip)
+ endif
+ ifeq ($(full_classes_jar),)
+ # We don't build jar, need to add the Java resources here.
+ $(if $(PRIVATE_EXTRA_JAR_ARGS),\
+ $(call create-java-resources-jar,$@.parts/res.zip) && \
+ $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/" && \
+ mv -f $@.parts/res.zip.tmp $@.parts/res.zip)
+ else # full_classes_jar
+ $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+ $(ZIP2ZIP) -i $@.parts/dex.zip -o $@.parts/dex.zip.tmp "classes*.dex:dex/"
+ mv -f $@.parts/dex.zip.tmp $@.parts/dex.zip
+ $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
+ $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/"
+ mv -f $@.parts/res.zip.tmp $@.parts/res.zip
+ endif # full_classes_jar
+ $(MERGE_ZIPS) $@ $@.parts/*.zip
+ rm -rf $@.parts
+ ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(my_bundle_module)
+endif
+
###############################
## Build dpi-specific apks, if it's apps_only build.
ifdef TARGET_BUILD_APPS
@@ -653,20 +725,13 @@ endif
endif
###############################
-## Rule to build the odex file
+## Rule to build a jar containing dex files to dexpreopt without waiting for
+## the APK
ifdef LOCAL_DEX_PREOPT
-$(built_odex): PRIVATE_DEX_FILE := $(built_dex)
-# Use pattern rule - we may have multiple built odex files.
-$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(built_dex)
+ $(my_dex_jar): PRIVATE_DEX_FILE := $(built_dex)
+ $(my_dex_jar): $(built_dex)
$(hide) mkdir -p $(dir $@) && rm -f $@
- $(add-dex-to-package)
-ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
- $(uncompress-dexs)
- $(align-package)
-endif
- $(hide) mv $@ $@.input
- $(call dexpreopt-one-file,$@.input,$@)
- $(hide) rm $@.input
+ $(call create-dex-jar,$@,$(PRIVATE_DEX_FILE))
endif
###############################
@@ -705,7 +770,7 @@ ifdef LOCAL_COMPATIBILITY_SUITE
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
$(foreach s,$(my_split_suffixes),\
- $(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+ $(call compat-copy-pair,$(intermediates)/package_$(s).apk,$(dir)/$(LOCAL_MODULE)_$(s).apk)))))
$(call create-suite-dependencies)
@@ -723,7 +788,7 @@ endif # skip_definition
# Reset internal variables.
all_res_assets :=
-ifdef enforce_rro_enabled
+ifneq (,$(runtime_resource_overlays_product)$(runtime_resource_overlays_vendor))
ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
enforce_rro_use_res_lib := true
else
@@ -738,11 +803,24 @@ ifdef enforce_rro_enabled
enforce_rro_manifest_package_info := $(full_android_manifest)
endif
-$(call append_enforce_rro_sources, \
- $(my_register_name), \
- $(enforce_rro_is_manifest_package_name), \
- $(enforce_rro_manifest_package_info), \
- $(enforce_rro_use_res_lib), \
- $(package_resource_overlays) \
+ ifdef runtime_resource_overlays_product
+ $(call append_enforce_rro_sources, \
+ $(my_register_name), \
+ $(enforce_rro_is_manifest_package_name), \
+ $(enforce_rro_manifest_package_info), \
+ $(enforce_rro_use_res_lib), \
+ $(runtime_resource_overlays_product), \
+ product \
)
-endif # enforce_rro_enabled
+ endif
+ ifdef runtime_resource_overlays_vendor
+ $(call append_enforce_rro_sources, \
+ $(my_register_name), \
+ $(enforce_rro_is_manifest_package_name), \
+ $(enforce_rro_manifest_package_info), \
+ $(enforce_rro_use_res_lib), \
+ $(runtime_resource_overlays_vendor), \
+ vendor \
+ )
+ endif
+endif
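
The PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES handling added near the top of this file splits each rule on ':' and rewrites the module name through patsubst. A self-contained sketch with an illustrative rule and a local stand-in for the build's word-colon helper:

word-colon-sketch = $(word $(1),$(subst :, ,$(2)))
PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES := Dialer%:com.example.dialer%
LOCAL_MODULE := Dialer
override_manifest_name_sketch := $(strip $(word 1,\
  $(foreach rule,$(PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES),\
    $(if $(filter $(call word-colon-sketch,1,$(rule)),$(LOCAL_MODULE)),\
      $(patsubst $(call word-colon-sketch,1,$(rule)),$(call word-colon-sketch,2,$(rule)),$(LOCAL_MODULE))))))
$(info manifest package name: $(override_manifest_name_sketch))  # prints com.example.dialer
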
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index 57afa3fd70..ce78ecedcc 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -13,8 +13,6 @@ ifneq (,$(filter platform-java, $(MAKECMDGOALS))$(PDK_FUSION_PLATFORM_ZIP)$(PDK_
# all paths under out dir
PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
target/common/obj/JAVA_LIBRARIES/android.test.runner_intermediates \
- target/common/obj/JAVA_LIBRARIES/android.hidl.base-V1.0-java_intermediates \
- target/common/obj/JAVA_LIBRARIES/android.hidl.manager-V1.0-java_intermediates \
target/common/obj/JAVA_LIBRARIES/android-common_intermediates \
target/common/obj/JAVA_LIBRARIES/android-ex-camera2_intermediates \
target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
@@ -22,8 +20,6 @@ PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
- target/common/obj/JAVA_LIBRARIES/legacy-test_intermediates \
- target/common/obj/JAVA_LIBRARIES/legacy-android-test_intermediates \
target/common/obj/JAVA_LIBRARIES/ext_intermediates \
target/common/obj/JAVA_LIBRARIES/framework_intermediates \
target/common/obj/JAVA_LIBRARIES/hwbinder_intermediates \
@@ -175,11 +171,15 @@ ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_
# files under $(PRODUCT_OUT)/symbols to help debugging.
# Source not included to PDK due to dependency issue, so provide symbols instead.
- # We may not be building all of them.
- # The platform.zip just silently ignores the nonexistent ones.
- PDK_SYMBOL_FILES_LIST := \
- system/bin/app_process32 \
- system/bin/app_process64
+ PDK_SYMBOL_FILES_LIST :=
+ ifeq ($(TARGET_IS_64_BIT),true)
+ PDK_SYMBOL_FILES_LIST += system/bin/app_process64
+ ifdef TARGET_2ND_ARCH
+ PDK_SYMBOL_FILES_LIST += system/bin/app_process32
+ endif
+ else
+ PDK_SYMBOL_FILES_LIST += system/bin/app_process32
+ endif
ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
# symbols should be explicitly pulled for fusion build
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
index 9aabd0f156..235acf9378 100644
--- a/core/pdk_fusion_modules.mk
+++ b/core/pdk_fusion_modules.mk
@@ -37,12 +37,14 @@ include $(BUILD_PREBUILT)
# The source prebuilts are extracted in the rule of _pdk_fusion_stamp.
# Use a touch rule to establish the dependency.
+ifndef PDK_FUSION_PLATFORM_DIR
$(3) $(11) : $(_pdk_fusion_stamp)
$(hide) if [ ! -f $$@ ]; then \
echo 'Error: $$@ does not exist. Check your platform.zip.' 1>&2; \
exit 1; \
fi
$(hide) touch $$@
+endif
endef
# We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
diff --git a/core/prebuilt.mk b/core/prebuilt.mk
index 839e14fa19..9d284fb53c 100644
--- a/core/prebuilt.mk
+++ b/core/prebuilt.mk
@@ -47,7 +47,6 @@ LOCAL_2ND_ARCH_VAR_PREFIX := $($(my_prefix)2ND_ARCH_VAR_PREFIX)
include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# secondary arch is supported
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
@@ -58,39 +57,4 @@ endif # $(my_skip_non_preferred_arch) not true
LOCAL_2ND_ARCH_VAR_PREFIX :=
-ifdef LOCAL_IS_HOST_MODULE
-ifdef HOST_CROSS_OS
-ifneq (,$(filter EXECUTABLES STATIC_LIBRARIES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)))
-my_prefix := HOST_CROSS_
-LOCAL_HOST_PREFIX := $(my_prefix)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-# host cross compilation is supported
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-include $(BUILD_SYSTEM)/prebuilt_internal.mk
-endif
-LOCAL_HOST_PREFIX :=
-endif
-
-ifdef HOST_CROSS_2ND_ARCH
-my_prefix := HOST_CROSS_
-LOCAL_2ND_ARCH_VAR_PREFIX := $($(my_prefix)2ND_ARCH_VAR_PREFIX)
-LOCAL_HOST_PREFIX := $(my_prefix)
-include $(BUILD_SYSTEM)/module_arch_supported.mk
-ifeq ($(my_module_arch_supported),true)
-OVERRIDE_BUILT_MODULE_PATH :=
-LOCAL_BUILT_MODULE :=
-LOCAL_INSTALLED_MODULE :=
-LOCAL_INTERMEDIATE_TARGETS :=
-include $(BUILD_SYSTEM)/prebuilt_internal.mk
-endif
-LOCAL_HOST_PREFIX :=
-LOCAL_2ND_ARCH_VAR_PREFIX :=
-endif
-endif
-endif
-
my_module_arch_supported :=
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 0efda57457..e50594542f 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -6,14 +6,16 @@
##
###########################################################
+include $(BUILD_SYSTEM)/use_lld_setup.mk
+
ifneq ($(LOCAL_PREBUILT_LIBS),)
-$(error dont use LOCAL_PREBUILT_LIBS anymore LOCAL_PATH=$(LOCAL_PATH))
+$(call pretty-error,dont use LOCAL_PREBUILT_LIBS anymore)
endif
ifneq ($(LOCAL_PREBUILT_EXECUTABLES),)
-$(error dont use LOCAL_PREBUILT_EXECUTABLES anymore LOCAL_PATH=$(LOCAL_PATH))
+$(call pretty-error,dont use LOCAL_PREBUILT_EXECUTABLES anymore)
endif
ifneq ($(LOCAL_PREBUILT_JAVA_LIBRARIES),)
-$(error dont use LOCAL_PREBUILT_JAVA_LIBRARIES anymore LOCAL_PATH=$(LOCAL_PATH))
+$(call pretty-error,dont use LOCAL_PREBUILT_JAVA_LIBRARIES anymore)
endif
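
As a rough illustration (hypothetical module and file names, not part of the patch), an Android.mk that still sets one of these deprecated variables now fails with a module-scoped pretty-error message instead of a bare $(error):

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := example_prebuilt          # hypothetical module name
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
LOCAL_PREBUILT_LIBS := libexample.a       # deprecated: trips the check above
include $(BUILD_PREBUILT)                 # -> "dont use LOCAL_PREBUILT_LIBS anymore"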
my_32_64_bit_suffix := $(if $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT),64,32)
@@ -37,39 +39,22 @@ endif
LOCAL_CHECKED_MODULE := $(my_prebuilt_src_file)
+ifeq (APPS,$(LOCAL_MODULE_CLASS))
+include $(BUILD_SYSTEM)/app_prebuilt_internal.mk
+else
+#
+# Non-APPS prebuilt modules are handled from here almost to the end of the file.
+#
+
my_strip_module := $(firstword \
$(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
$(LOCAL_STRIP_MODULE))
-my_pack_module_relocations := $(firstword \
- $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
- $(LOCAL_PACK_MODULE_RELOCATIONS))
ifeq (SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS))
- # LOCAL_COPY_TO_INTERMEDIATE_LIBRARIES indicates that this prebuilt should be
- # installed to the common directory of libraries. This is needed for the NDK
- # shared libraries built by soong, as we build many different versions of each
- # library (one for each API level). Since they all have the same basename,
- # they'd clobber each other (as well as any platform libraries by the same
- # name).
- ifneq ($(LOCAL_COPY_TO_INTERMEDIATE_LIBRARIES),false)
- # Put the built targets of all shared libraries in a common directory
- # to simplify the link line.
- OVERRIDE_BUILT_MODULE_PATH := \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)
- endif
ifeq ($(LOCAL_IS_HOST_MODULE)$(my_strip_module),)
# Strip but not try to add debuglink
my_strip_module := no_debuglink
endif
-
- ifeq ($(LOCAL_IS_HOST_MODULE)$(my_pack_module_relocations),)
- # Do not pack relocations by default
- my_pack_module_relocations := false
- endif
-
- ifeq ($(DISABLE_RELOCATION_PACKER),true)
- my_pack_module_relocations := false
- endif
endif
ifneq ($(filter STATIC_LIBRARIES SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
@@ -78,20 +63,6 @@ else
prebuilt_module_is_a_library :=
endif
-ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- ifeq ($(prebuilt_module_is_a_library),true)
- SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
- endif
-
- ifdef LOCAL_USE_VNDK
- name_without_suffix := $(patsubst %.vendor,%,$(LOCAL_MODULE))
- ifneq ($(name_without_suffix),$(LOCAL_MODULE))
- SPLIT_VENDOR.$(LOCAL_MODULE_CLASS).$(name_without_suffix) := 1
- endif
- name_without_suffix :=
- endif
-endif
-
# Don't install static libraries by default.
ifndef LOCAL_UNINSTALLABLE_MODULE
ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
@@ -106,62 +77,39 @@ else
endif
ifdef LOCAL_COMPRESSED_MODULE
-ifneq (true,$(LOCAL_COMPRESSED_MODULE))
-$(call pretty-error, Unknown value for LOCAL_COMPRESSED_MODULE $(LOCAL_COMPRESSED_MODULE))
-endif
-endif
-
-ifeq ($(LOCAL_MODULE_CLASS),APPS)
-ifdef LOCAL_COMPRESSED_MODULE
-LOCAL_BUILT_MODULE_STEM := package.apk.gz
-else
-LOCAL_BUILT_MODULE_STEM := package.apk
-endif # LOCAL_COMPRESSED_MODULE
-
-ifndef LOCAL_INSTALLED_MODULE_STEM
-ifdef LOCAL_COMPRESSED_MODULE
-PACKAGES.$(LOCAL_MODULE).COMPRESSED := gz
-LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk.gz
-else
-LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
-endif # LOCAL_COMPRESSED_MODULE
-endif # LOCAL_INSTALLED_MODULE_STEM
-
-else # $(LOCAL_MODULE_CLASS) != APPS)
-ifdef LOCAL_COMPRESSED_MODULE
$(error $(LOCAL_MODULE) : LOCAL_COMPRESSED_MODULE can only be defined for module class APPS)
endif # LOCAL_COMPRESSED_MODULE
-endif
-ifneq ($(filter true keep_symbols no_debuglink mini-debug-info,$(my_strip_module) $(my_pack_module_relocations)),)
+my_check_elf_file_shared_lib_files :=
+
+ifneq ($(filter true keep_symbols no_debuglink mini-debug-info,$(my_strip_module)),)
ifdef LOCAL_IS_HOST_MODULE
- $(error Cannot strip/pack host module LOCAL_PATH=$(LOCAL_PATH))
+ $(call pretty-error,Cannot strip/pack host module)
endif
ifeq ($(filter SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
- $(error Can strip/pack only shared libraries or executables LOCAL_PATH=$(LOCAL_PATH))
+ $(call pretty-error,Can strip/pack only shared libraries or executables)
endif
ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
- $(error Cannot strip/pack scripts LOCAL_PATH=$(LOCAL_PATH))
+ $(call pretty-error,Cannot strip/pack scripts)
endif
- # Set the arch-specific variables to set up the strip/pack rules.
+ # Set the arch-specific variables to set up the strip rules
LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_strip_module)
- LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_pack_module_relocations)
include $(BUILD_SYSTEM)/dynamic_binary.mk
built_module := $(linked_module)
-else # my_strip_module and my_pack_module_relocations not true
+ ifneq ($(LOCAL_SDK_VERSION),)
+ # binary.mk filters out NDK_MIGRATED_LIBS from my_shared_libs, thus those NDK libs are not added
+ # to DEPENDENCIES_ON_SHARED_LIBRARIES. Assign $(my_ndk_shared_libraries_fullpath) to
+ # my_check_elf_file_shared_lib_files so that check_elf_file.py can see those NDK stub libs.
+ my_check_elf_file_shared_lib_files := $(my_ndk_shared_libraries_fullpath)
+ endif
+else # my_strip_module not true
include $(BUILD_SYSTEM)/base_rules.mk
built_module := $(LOCAL_BUILT_MODULE)
ifdef prebuilt_module_is_a_library
export_includes := $(intermediates)/export_includes
export_cflags := $(foreach d,$(LOCAL_EXPORT_C_INCLUDE_DIRS),-I $(d))
-# Soong exports cflags instead of include dirs, so that -isystem can be included.
-ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-export_cflags += $(LOCAL_EXPORT_CFLAGS)
-else ifdef LOCAL_EXPORT_CFLAGS
-$(call pretty-error,LOCAL_EXPORT_CFLAGS can only be used by Soong, use LOCAL_EXPORT_C_INCLUDE_DIRS instead)
-endif
$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
$(export_includes): $(LOCAL_EXPORT_C_INCLUDE_DEPS)
@echo Export includes file: $< -- $@
@@ -188,6 +136,8 @@ else ifdef LOCAL_USE_VNDK
else
my_link_type := native:vendor
endif
+else ifneq ($(filter $(TARGET_RECOVERY_OUT)/%,$(LOCAL_MODULE_PATH)),)
+my_link_type := native:recovery
else
my_link_type := native:platform
endif
@@ -203,33 +153,44 @@ endif # prebuilt_module_is_a_library
# The real dependency will be added after all Android.mks are loaded and the install paths
# of the shared libraries are determined.
ifdef LOCAL_INSTALLED_MODULE
-ifdef LOCAL_SHARED_LIBRARIES
-my_shared_libraries := $(LOCAL_SHARED_LIBRARIES)
+ifdef LOCAL_IS_HOST_MODULE
+ ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+ my_system_shared_libraries :=
+ else
+ my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
+ endif
+else
+ ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+ my_system_shared_libraries := libc libm libdl
+ else
+ my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
+ my_system_shared_libraries := $(patsubst libc,libc libdl,$(my_system_shared_libraries))
+ endif
+endif
+
+my_shared_libraries := \
+ $(filter-out $(my_system_shared_libraries),$(LOCAL_SHARED_LIBRARIES)) \
+ $(my_system_shared_libraries)
+
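
A small illustration of the defaulting above for a target (non-host) module, assuming hypothetical values:

#   LOCAL_SYSTEM_SHARED_LIBRARIES := none
#   LOCAL_SHARED_LIBRARIES := libutils libc
# yields
#   my_system_shared_libraries := libc libm libdl
#   my_shared_libraries := libutils libc libm libdl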
+ifdef my_shared_libraries
# Extra shared libraries introduced by LOCAL_CXX_STL.
include $(BUILD_SYSTEM)/cxx_stl_setup.mk
ifdef LOCAL_USE_VNDK
- ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- my_shared_libraries := $(foreach l,$(my_shared_libraries),\
- $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
- endif
+ my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
endif
$(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)DEPENDENCIES_ON_SHARED_LIBRARIES += \
$(my_register_name):$(LOCAL_INSTALLED_MODULE):$(subst $(space),$(comma),$(my_shared_libraries))
-
-# We also need the LOCAL_BUILT_MODULE dependency,
-# since we use -rpath-link which points to the built module's path.
-my_built_shared_libraries := \
- $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
- $(addsuffix $($(my_prefix)SHLIB_SUFFIX), \
- $(my_shared_libraries)))
-$(LOCAL_BUILT_MODULE) : $(my_built_shared_libraries)
-endif
endif
+endif # my_shared_libraries
# We need to enclose the above export_includes and my_built_shared_libraries in
# "my_strip_module not true" because otherwise the rules are defined in dynamic_binary.mk.
endif # my_strip_module not true
+# Check prebuilt ELF binaries.
+include $(BUILD_SYSTEM)/check_elf_file.mk
+
ifeq ($(NATIVE_COVERAGE),true)
ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).gcnodir))
@@ -258,252 +219,40 @@ endif
endif
endif
-ifeq ($(LOCAL_MODULE_CLASS),APPS)
-PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
-
-my_extract_apk := $(strip $(LOCAL_EXTRACT_APK))
-
-# Select dpi-specific source
-ifdef LOCAL_DPI_VARIANTS
-my_dpi := $(firstword $(filter $(LOCAL_DPI_VARIANTS),$(PRODUCT_AAPT_PREF_CONFIG) $(PRODUCT_AAPT_PREBUILT_DPI)))
-ifdef my_dpi
-ifdef LOCAL_DPI_FILE_STEM
-my_prebuilt_dpi_file_stem := $(LOCAL_DPI_FILE_STEM)
-else
-my_prebuilt_dpi_file_stem := $(LOCAL_MODULE)_%.apk
-endif
-my_prebuilt_src_file := $(dir $(my_prebuilt_src_file))$(subst %,$(my_dpi),$(my_prebuilt_dpi_file_stem))
-
-ifneq ($(strip $(LOCAL_EXTRACT_DPI_APK)),)
-my_extract_apk := $(subst %,$(my_dpi),$(LOCAL_EXTRACT_DPI_APK))
-endif # LOCAL_EXTRACT_DPI_APK
-endif # my_dpi
-endif # LOCAL_DPI_VARIANTS
-
-ifdef my_extract_apk
-my_extracted_apk := $(intermediates)/extracted.apk
-
-$(my_extracted_apk): PRIVATE_EXTRACT := $(my_extract_apk)
-$(my_extracted_apk): $(my_prebuilt_src_file)
- @echo Extract APK: $@
- $(hide) mkdir -p $(dir $@) && rm -f $@
- $(hide) unzip -p $< $(PRIVATE_EXTRACT) >$@
-
-my_prebuilt_src_file := $(my_extracted_apk)
-my_extracted_apk :=
-my_extract_apk :=
-ifeq ($(PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK),true)
-# If the product property is set, always preopt for extracted modules to prevent executing out of
-# the APK.
-my_preopt_for_extracted_apk := true
-endif
-endif
-
-dex_preopt_profile_src_file := $(my_prebuilt_src_file)
-
-rs_compatibility_jni_libs :=
-include $(BUILD_SYSTEM)/install_jni_libs.mk
-
-ifeq ($(LOCAL_CERTIFICATE),EXTERNAL)
- # The magic string "EXTERNAL" means this package will be signed with
- # the default dev key throughout the build process, but we expect
- # the final package to be signed with a different key.
- #
- # This can be used for packages where we don't have access to the
- # keys, but want the package to be predexopt'ed.
- LOCAL_CERTIFICATE := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
- PACKAGES.$(LOCAL_MODULE).EXTERNAL_KEY := 1
-
- $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
- $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
- $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-endif
-ifeq ($(LOCAL_CERTIFICATE),)
- # It is now a build error to add a prebuilt .apk without
- # specifying a key for it.
- $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
-else ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
- # The magic string "PRESIGNED" means this package is already checked
- # signed with its release key.
- #
- # By setting .CERTIFICATE but not .PRIVATE_KEY, this package will be
- # mentioned in apkcerts.txt (with certificate set to "PRESIGNED")
- # but the dexpreopt process will not try to re-sign the app.
- PACKAGES.$(LOCAL_MODULE).CERTIFICATE := PRESIGNED
- PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
-else
- # If this is not an absolute certificate, assign it to a generic one.
- ifeq ($(dir $(strip $(LOCAL_CERTIFICATE))),./)
- LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
- endif
-
- PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
- PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
- PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
-
- $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
- $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
- $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-endif
-
-# Disable dex-preopt of prebuilts to save space, if requested.
-ifndef LOCAL_DEX_PREOPT
-ifeq ($(DONT_DEXPREOPT_PREBUILTS),true)
-LOCAL_DEX_PREOPT := false
-endif
-endif
-
-# If the module is a compressed module, we don't pre-opt it because its final
-# installation location will be the data partition.
-ifdef LOCAL_COMPRESSED_MODULE
-LOCAL_DEX_PREOPT := false
-endif
-
-#######################################
-# defines built_odex along with rule to install odex
-include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
-#######################################
-ifneq ($(LOCAL_REPLACE_PREBUILT_APK_INSTALLED),)
-# There is a replacement for the prebuilt .apk we can install without any processing.
-$(built_module) : $(LOCAL_REPLACE_PREBUILT_APK_INSTALLED)
- $(transform-prebuilt-to-target)
-
-else # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
-# Sign and align non-presigned .apks.
-# The embedded prebuilt jni to uncompress.
-ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
-# For PRESIGNED apks we must uncompress every .so file:
-# even if the .so file isn't for the current TARGET_ARCH,
-# we can't strip the file.
-embedded_prebuilt_jni_libs := 'lib/*.so'
-endif
-ifndef embedded_prebuilt_jni_libs
-# No LOCAL_PREBUILT_JNI_LIBS, uncompress all.
-embedded_prebuilt_jni_libs := 'lib/*.so'
-endif
-$(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs)
-
-ifdef LOCAL_COMPRESSED_MODULE
-$(built_module) : $(MINIGZIP)
-endif
-
-$(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(SIGNAPK_JAR)
- $(transform-prebuilt-to-target)
- $(uncompress-shared-libs)
-ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
- $(uncompress-dexs)
-endif # LOCAL_UNCOMPRESS_DEX
-ifdef LOCAL_DEX_PREOPT
-ifneq ($(BUILD_PLATFORM_ZIP),)
- @# Keep a copy of apk with classes.dex unstripped
- $(hide) cp -f $@ $(dir $@)package.dex.apk
-endif # BUILD_PLATFORM_ZIP
-endif # LOCAL_DEX_PREOPT
-ifneq ($(LOCAL_CERTIFICATE),PRESIGNED)
- @# Only strip out files if we can re-sign the package.
-ifdef LOCAL_DEX_PREOPT
-ifneq (nostripping,$(LOCAL_DEX_PREOPT))
- $(call dexpreopt-remove-classes.dex,$@)
-endif # LOCAL_DEX_PREOPT != nostripping
-endif # LOCAL_DEX_PREOPT
- $(sign-package)
- # No need for align-package because sign-package takes care of alignment
-else # LOCAL_CERTIFICATE == PRESIGNED
- $(align-package)
-endif # LOCAL_CERTIFICATE
-ifdef LOCAL_COMPRESSED_MODULE
- $(compress-package)
-endif # LOCAL_COMPRESSED_MODULE
-endif # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
-
-###############################
-## Rule to build the odex file.
-# In case we don't strip the built module, use it, as dexpreopt
-# can do optimizations based on whether the built module only
-# contains uncompressed dex code.
-ifdef LOCAL_DEX_PREOPT
-ifeq (nostripping,$(LOCAL_DEX_PREOPT))
-$(built_odex) : $(built_module)
- $(call dexpreopt-one-file,$<,$@)
-else
-$(built_odex) : $(my_prebuilt_src_file)
- $(call dexpreopt-one-file,$<,$@)
-endif
-endif
-
-###############################
-## Install split apks.
-ifdef LOCAL_PACKAGE_SPLITS
-ifdef LOCAL_COMPRESSED_MODULE
-$(error $(LOCAL_MODULE): LOCAL_COMPRESSED_MODULE is not currently supported for split installs)
-endif # LOCAL_COMPRESSED_MODULE
-
-# LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
-built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
-installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+ifeq ($(prebuilt_module_is_dex_javalib),true)
+my_dex_jar := $(my_prebuilt_src_file)
+# This is a target shared library, i.e. a jar with classes.dex.
-# Rules to sign the split apks.
-my_src_dir := $(sort $(dir $(LOCAL_PACKAGE_SPLITS)))
-ifneq (1,$(words $(my_src_dir)))
-$(error You must put all the split source apks in the same folder: $(LOCAL_PACKAGE_SPLITS))
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)
+ $(call pretty-error,Modules in PRODUCT_BOOT_JARS must be defined in Android.bp files)
endif
-my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
-$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
-$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
- $(copy-file-to-new-target)
- $(sign-package)
-
-# Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
- @echo "Install: $@"
- $(copy-file-to-new-target)
-
-# Register the additional built and installed files.
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
- $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
-
-# Make sure to install the splits when you run "make <module_name>".
-$(my_all_targets): $(installed_apk_splits)
-
-endif # LOCAL_PACKAGE_SPLITS
-
-else ifeq ($(prebuilt_module_is_dex_javalib),true) # ! LOCAL_MODULE_CLASS != APPS
-# This is a target shared library, i.e. a jar with classes.dex.
#######################################
# defines built_odex along with rule to install odex
include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
#######################################
ifdef LOCAL_DEX_PREOPT
-ifneq ($(dexpreopt_boot_jar_module),) # boot jar
-# boot jar's rules are defined in dex_preopt.mk
-dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
-$(built_module) : $(dexpreopted_boot_jar)
- $(call copy-file-to-target)
-# For libart boot jars, we don't have .odex files.
-else # ! boot jar
-$(built_odex): PRIVATE_MODULE := $(LOCAL_MODULE)
-# Use pattern rule - we may have multiple built odex files.
-$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(my_prebuilt_src_file)
- @echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
- $(call dexpreopt-one-file,$<,$@)
+$(built_module): PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
+$(built_module): $(intermediates)/strip.sh
+$(built_module): | $(DEXPREOPT_STRIP_DEPS)
+$(built_module): .KATI_DEPFILE := $(built_module).d
+$(built_module): $(my_prebuilt_src_file)
+ $(PRIVATE_STRIP_SCRIPT) $< $@
-$(eval $(call dexpreopt-copy-jar,$(my_prebuilt_src_file),$(built_module),$(LOCAL_DEX_PREOPT)))
-endif # boot jar
else # ! LOCAL_DEX_PREOPT
$(built_module) : $(my_prebuilt_src_file)
$(call copy-file-to-target)
endif # LOCAL_DEX_PREOPT
else # ! prebuilt_module_is_dex_javalib
+ifneq ($(filter init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init,$(dir $(LOCAL_INSTALLED_MODULE))),)
+ $(eval $(call copy-init-script-file-checked,$(my_prebuilt_src_file),$(built_module)))
+else ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
$(built_module) : $(my_prebuilt_src_file)
-ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
$(transform-prebuilt-to-target-strip-comments)
else
+$(built_module) : $(my_prebuilt_src_file)
$(transform-prebuilt-to-target)
endif
ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
@@ -579,15 +328,24 @@ my_src_aar := $(LOCAL_JETIFIER_OUTPUT_FILE)
my_src_jar := $(intermediates.COMMON)/aar/classes.jar
my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
+my_src_android_manifest := $(intermediates.COMMON)/aar/AndroidManifest.xml
$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS := $(my_src_proguard_options)
+$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS += $(my_src_android_manifest)
$(my_src_jar) : $(my_src_aar)
$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
$(hide) unzip -qo -d $(dir $@) $<
# Make sure the extracted classes.jar has a new timestamp.
$(hide) touch $@
- # Make sure the proguard file exists and has a new timestamp.
+ # Make sure the proguard and AndroidManifest.xml files exist
+ # and have a new timestamp.
$(hide) touch $(dir $@)/proguard.txt
+ $(hide) touch $(dir $@)/AndroidManifest.xml
+
+my_prebuilt_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
+$(eval $(call copy-one-file,$(my_src_android_manifest),$(my_prebuilt_android_manifest)))
+$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_prebuilt_android_manifest))
+
else
# run Jetifier if needed
@@ -611,11 +369,13 @@ $(common_classes_pre_proguard_jar) : $(my_src_jar)
$(common_javalib_jar) : $(common_classes_jar)
$(transform-prebuilt-to-target)
+include $(BUILD_SYSTEM)/force_aapt2.mk
+
ifdef LOCAL_AAPT2_ONLY
LOCAL_USE_AAPT2 := true
endif
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
ifneq ($(my_src_aar),)
$(intermediates.COMMON)/export_proguard_flags : $(my_src_proguard_options)
@@ -631,7 +391,7 @@ framework_res_package_export :=
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
framework_res_package_export := \
- $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
+ $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
else
framework_res_package_export := \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
@@ -643,7 +403,7 @@ my_res_package := $(intermediates.COMMON)/package-res.apk
# We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
$(my_res_package): PRIVATE_AAPT2_CFLAGS :=
$(my_res_package): PRIVATE_AAPT_FLAGS := --static-lib --no-static-lib-packages --auto-add-overlay
-$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(intermediates.COMMON)/aar/AndroidManifest.xml
+$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(my_src_android_manifest)
$(my_res_package): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
$(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
@@ -653,6 +413,7 @@ $(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
$(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
$(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
$(my_res_package) : $(framework_res_package_export)
+$(my_res_package) : $(my_src_android_manifest)
full_android_manifest :=
my_res_resources :=
@@ -671,11 +432,22 @@ endif # LOCAL_USE_AAPT2
# make sure the classes.jar and javalib.jar are built before $(LOCAL_BUILT_MODULE)
$(built_module) : $(common_javalib_jar)
+my_exported_sdk_libs_file := $(intermediates.COMMON)/exported-sdk-libs
+$(my_exported_sdk_libs_file): PRIVATE_EXPORTED_SDK_LIBS := $(LOCAL_EXPORT_SDK_LIBRARIES)
+$(my_exported_sdk_libs_file):
+ @echo "Export SDK libs $@"
+ $(hide) mkdir -p $(dir $@) && rm -f $@
+ $(if $(PRIVATE_EXPORTED_SDK_LIBS),\
+ $(hide) echo $(PRIVATE_EXPORTED_SDK_LIBS) | tr ' ' '\n' > $@,\
+ $(hide) touch $@)
+
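
Illustration of the rule above (hypothetical library names): with LOCAL_EXPORT_SDK_LIBRARIES := foo.sdk bar.sdk the generated exported-sdk-libs file lists one library per line; with nothing to export, an empty file is still created.

# exported-sdk-libs (hypothetical contents)
# foo.sdk
# bar.sdk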
endif # ! prebuilt_module_is_dex_javalib
endif # LOCAL_IS_HOST_MODULE is not set
endif # JAVA_LIBRARIES
+endif # APPS
+
$(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
my_prebuilt_src_file :=
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 576d14ddf7..a114b65c02 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -18,7 +18,7 @@
define gather-all-products
$(sort $(foreach p, \
$(eval _all_products_visited := )
- $(call all-products-inner, $(ALL_PRODUCTS)) \
+ $(call all-products-inner, $(PARENT_PRODUCT_FILES)) \
, $(if $(strip $(p)),$(strip $(p)),)) \
)
endef
@@ -36,8 +36,6 @@ endef
this_makefile := build/make/core/product-graph.mk
-products_svg := $(OUT_DIR)/products.svg
-products_pdf := $(OUT_DIR)/products.pdf
products_graph := $(OUT_DIR)/products.dot
ifeq ($(strip $(ANDROID_PRODUCT_GRAPH)),)
products_list := $(INTERNAL_PRODUCT)
@@ -49,7 +47,7 @@ products_list := $(foreach prod,$(ANDROID_PRODUCT_GRAPH),$(call resolve-short-pr
endif
endif
-really_all_products := $(call gather-all-products)
+all_products := $(call gather-all-products)
open_parethesis := (
close_parenthesis := )
@@ -66,7 +64,7 @@ colorscheme=\"svg\" fontcolor=\"darkblue\" href=\"products/$(1).html\" \
endef
-$(products_graph): PRIVATE_PRODUCTS := $(really_all_products)
+$(products_graph): PRIVATE_PRODUCTS := $(all_products)
$(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list)
$(products_graph): $(this_makefile)
@@ -105,6 +103,8 @@ $(OUT_DIR)/products/$(strip $(1)).txt: $(this_makefile)
$(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@
$(hide) echo 'PRODUCT_SYSTEM_DEFAULT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_SYSTEM_DEFAULT_PROPERTIES)' >> $$@
$(hide) echo 'PRODUCT_PRODUCT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PRODUCT_PROPERTIES)' >> $$@
+ $(hide) echo 'PRODUCT_PRODUCT_SERVICES_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PRODUCT_SERVICES_PROPERTIES)' >> $$@
+ $(hide) echo 'PRODUCT_ODM_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_ODM_PROPERTIES)' >> $$@
$(hide) echo 'PRODUCT_CHARACTERISTICS=$$(PRODUCTS.$(strip $(1)).PRODUCT_CHARACTERISTICS)' >> $$@
$(hide) echo 'PRODUCT_COPY_FILES=$$(PRODUCTS.$(strip $(1)).PRODUCT_COPY_FILES)' >> $$@
$(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_OTA_PUBLIC_KEYS)' >> $$@
@@ -130,17 +130,13 @@ $(call product-debug-filename, $(p)): \
endef
product_debug_files:=
-$(foreach p,$(really_all_products), \
+$(foreach p,$(all_products), \
$(eval $(call transform-product-debug, $(p))) \
$(eval product_debug_files += $(call product-debug-filename, $(p))) \
)
-$(products_pdf): $(products_graph)
- @echo Product graph PDF: $@
- dot -Tpdf -Nshape=box -o $@ $<
-
-$(products_svg): $(products_graph) $(product_debug_files)
- @echo Product graph SVG: $@
- dot -Tsvg -Nshape=box -o $@ $<
-
-product-graph: $(products_pdf) $(products_svg)
+.PHONY: product-graph
+product-graph: $(products_graph)
+ @echo Product graph .dot file: $(products_graph)
+ @echo Command to convert to pdf: dot -Tpdf -Nshape=box -o $(OUT_DIR)/products.pdf $(products_graph)
+ @echo Command to convert to svg: dot -Tsvg -Nshape=box -o $(OUT_DIR)/products.svg $(products_graph)
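
With the pdf/svg rules removed, conversion is now a manual step; a typical sequence (illustrative, assuming the default OUT_DIR=out and that graphviz's dot is installed) is:

#   m product-graph
#   dot -Tpdf -Nshape=box -o out/products.pdf out/products.dot
#   dot -Tsvg -Nshape=box -o out/products.svg out/products.dot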
diff --git a/core/product.mk b/core/product.mk
index 899b806ccf..40485638ce 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -23,39 +23,77 @@
# and the .mk suffix) of the product makefile, "<product_name>:" can be
# omitted.
-# Search for AndroidProducts.mks in the given dir.
-# $(1): the path to the dir
-define _search-android-products-files-in-dir
-$(sort $(shell test -d $(1) && find -L $(1) \
- -maxdepth 6 \
- -name .git -prune \
- -o -name AndroidProducts.mk -print))
-endef
-
#
# Returns the list of all AndroidProducts.mk files.
# $(call ) isn't necessary.
#
define _find-android-products-files
-$(foreach d, device vendor product,$(call _search-android-products-files-in-dir,$(d))) \
+$(file <$(OUT_DIR)/.module_paths/AndroidProducts.mk.list) \
$(SRC_TARGET_DIR)/product/AndroidProducts.mk
endef
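
The AndroidProducts.mk.list file read above is generated earlier in the build under $(OUT_DIR)/.module_paths/ and simply lists AndroidProducts.mk paths, one per line (hypothetical paths shown for illustration):

# device/acme/foo/AndroidProducts.mk
# vendor/acme/products/AndroidProducts.mk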
#
+# For entries returned by get-product-makefiles, decode an entry to a short
+# product name. These either may be in the form of <name>:path/to/file.mk or
+# path/to/<name>.mk
+# $(1): The entry to decode
+#
+# Returns two words:
+# <name> <file>
+#
+define _decode-product-name
+$(strip \
+ $(eval _cpm_words := $(subst :,$(space),$(1))) \
+ $(if $(word 2,$(_cpm_words)), \
+ $(wordlist 1,2,$(_cpm_words)), \
+ $(basename $(notdir $(1))) $(1)))
+endef
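
Two worked examples of the decoding above (hypothetical product name and path):

# $(call _decode-product-name,aosp_foo:device/acme/foo/aosp_foo.mk)
#   -> aosp_foo device/acme/foo/aosp_foo.mk
# $(call _decode-product-name,device/acme/foo/aosp_foo.mk)
#   -> aosp_foo device/acme/foo/aosp_foo.mk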
+
+#
+# Validates the new common lunch choices -- ensures that they're in an
+# appropriate form, and are paired with definitions of their products.
+# $(1): The new list of COMMON_LUNCH_CHOICES
+# $(2): The new list of PRODUCT_MAKEFILES
+#
+define _validate-common-lunch-choices
+$(strip $(foreach choice,$(1),\
+ $(eval _parts := $(subst -,$(space),$(choice))) \
+ $(if $(call math_lt,$(words $(_parts)),2), \
+ $(error $(LOCAL_DIR): $(choice): Invalid lunch choice)) \
+ $(if $(call math_gt_or_eq,$(words $(_parts)),4), \
+ $(error $(LOCAL_DIR): $(choice): Invalid lunch choice)) \
+ $(if $(filter-out eng userdebug user,$(word 2,$(_parts))), \
+ $(error $(LOCAL_DIR): $(choice): Invalid variant: $(word 2,$(_parts)))) \
+ $(if $(filter-out $(foreach p,$(2),$(call _decode-product-name,$(p))),$(word 1,$(_parts))), \
+ $(error $(LOCAL_DIR): $(word 1,$(_parts)): Product not defined in this file)) \
+ ))
+endef
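
A minimal AndroidProducts.mk sketch (hypothetical product) that satisfies this validation: every choice is <product>-<variant> with a valid variant, and the product is declared in the same file.

PRODUCT_MAKEFILES := \
    $(LOCAL_DIR)/aosp_foo.mk

COMMON_LUNCH_CHOICES := \
    aosp_foo-userdebug \
    aosp_foo-eng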
+
+#
# Returns the sorted concatenation of PRODUCT_MAKEFILES
# variables set in the given AndroidProducts.mk files.
# $(1): the list of AndroidProducts.mk files.
#
+# As a side-effect, COMMON_LUNCH_CHOICES will be set to a
+# union of all of the COMMON_LUNCH_CHOICES definitions within
+# each AndroidProducts.mk file.
+#
define get-product-makefiles
$(sort \
+ $(eval _COMMON_LUNCH_CHOICES :=) \
$(foreach f,$(1), \
$(eval PRODUCT_MAKEFILES :=) \
+ $(eval COMMON_LUNCH_CHOICES :=) \
$(eval LOCAL_DIR := $(patsubst %/,%,$(dir $(f)))) \
$(eval include $(f)) \
+ $(call _validate-common-lunch-choices,$(COMMON_LUNCH_CHOICES),$(PRODUCT_MAKEFILES)) \
+ $(eval _COMMON_LUNCH_CHOICES += $(COMMON_LUNCH_CHOICES)) \
$(PRODUCT_MAKEFILES) \
) \
$(eval PRODUCT_MAKEFILES :=) \
$(eval LOCAL_DIR :=) \
+ $(eval COMMON_LUNCH_CHOICES := $(sort $(_COMMON_LUNCH_CHOICES))) \
+ $(eval _COMMON_LUNCH_CHOICES :=) \
)
endef
@@ -68,102 +106,252 @@ define get-all-product-makefiles
$(call get-product-makefiles,$(_find-android-products-files))
endef
-#
-# Functions for including product makefiles
-#
+_product_var_list :=
+_product_var_list += PRODUCT_NAME
+_product_var_list += PRODUCT_MODEL
-_product_var_list := \
- PRODUCT_NAME \
- PRODUCT_MODEL \
- PRODUCT_LOCALES \
- PRODUCT_AAPT_CONFIG \
- PRODUCT_AAPT_PREF_CONFIG \
- PRODUCT_AAPT_PREBUILT_DPI \
- PRODUCT_PACKAGES \
- PRODUCT_PACKAGES_DEBUG \
- PRODUCT_PACKAGES_ENG \
- PRODUCT_PACKAGES_TESTS \
- PRODUCT_DEVICE \
- PRODUCT_MANUFACTURER \
- PRODUCT_BRAND \
- PRODUCT_PROPERTY_OVERRIDES \
- PRODUCT_DEFAULT_PROPERTY_OVERRIDES \
- PRODUCT_PRODUCT_PROPERTIES \
- PRODUCT_CHARACTERISTICS \
- PRODUCT_COPY_FILES \
- PRODUCT_OTA_PUBLIC_KEYS \
- PRODUCT_EXTRA_RECOVERY_KEYS \
- PRODUCT_PACKAGE_OVERLAYS \
- DEVICE_PACKAGE_OVERLAYS \
- PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS \
- PRODUCT_ENFORCE_RRO_TARGETS \
- PRODUCT_SDK_ATREE_FILES \
- PRODUCT_SDK_ADDON_NAME \
- PRODUCT_SDK_ADDON_COPY_FILES \
- PRODUCT_SDK_ADDON_COPY_MODULES \
- PRODUCT_SDK_ADDON_DOC_MODULES \
- PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP \
- PRODUCT_SOONG_NAMESPACES \
- PRODUCT_DEFAULT_WIFI_CHANNELS \
- PRODUCT_DEFAULT_DEV_CERTIFICATE \
- PRODUCT_RESTRICT_VENDOR_FILES \
- PRODUCT_VENDOR_KERNEL_HEADERS \
- PRODUCT_BOOT_JARS \
- PRODUCT_SUPPORTS_BOOT_SIGNER \
- PRODUCT_SUPPORTS_VBOOT \
- PRODUCT_SUPPORTS_VERITY \
- PRODUCT_SUPPORTS_VERITY_FEC \
- PRODUCT_OEM_PROPERTIES \
- PRODUCT_SYSTEM_DEFAULT_PROPERTIES \
- PRODUCT_SYSTEM_PROPERTY_BLACKLIST \
- PRODUCT_VENDOR_PROPERTY_BLACKLIST \
- PRODUCT_SYSTEM_SERVER_APPS \
- PRODUCT_SYSTEM_SERVER_JARS \
- PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK \
- PRODUCT_DEXPREOPT_SPEED_APPS \
- PRODUCT_LOADED_BY_PRIVILEGED_MODULES \
- PRODUCT_VBOOT_SIGNING_KEY \
- PRODUCT_VBOOT_SIGNING_SUBKEY \
- PRODUCT_VERITY_SIGNING_KEY \
- PRODUCT_SYSTEM_VERITY_PARTITION \
- PRODUCT_VENDOR_VERITY_PARTITION \
- PRODUCT_PRODUCT_VERITY_PARTITION \
- PRODUCT_SYSTEM_SERVER_DEBUG_INFO \
- PRODUCT_OTHER_JAVA_DEBUG_INFO \
- PRODUCT_DEX_PREOPT_MODULE_CONFIGS \
- PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER \
- PRODUCT_DEX_PREOPT_DEFAULT_FLAGS \
- PRODUCT_DEX_PREOPT_BOOT_FLAGS \
- PRODUCT_DEX_PREOPT_PROFILE_DIR \
- PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
- PRODUCT_DEX_PREOPT_GENERATE_DM_FILES \
+# The resource configuration options to use for this product.
+_product_var_list += PRODUCT_LOCALES
+_product_var_list += PRODUCT_AAPT_CONFIG
+_product_var_list += PRODUCT_AAPT_PREF_CONFIG
+_product_var_list += PRODUCT_AAPT_PREBUILT_DPI
+_product_var_list += PRODUCT_HOST_PACKAGES
+_product_var_list += PRODUCT_PACKAGES
+_product_var_list += PRODUCT_PACKAGES_DEBUG
+_product_var_list += PRODUCT_PACKAGES_DEBUG_ASAN
+# Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
+_product_var_list += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
+_product_var_list += PRODUCT_PACKAGES_ENG
+_product_var_list += PRODUCT_PACKAGES_TESTS
+
+# The device that this product maps to.
+_product_var_list += PRODUCT_DEVICE
+_product_var_list += PRODUCT_MANUFACTURER
+_product_var_list += PRODUCT_BRAND
+
+# These PRODUCT_SYSTEM_* flags, if defined, are used in place of the
+# corresponding PRODUCT_* flags for the sysprops on /system.
+_product_var_list += \
+ PRODUCT_SYSTEM_NAME \
+ PRODUCT_SYSTEM_MODEL \
+ PRODUCT_SYSTEM_DEVICE \
+ PRODUCT_SYSTEM_BRAND \
+ PRODUCT_SYSTEM_MANUFACTURER \
+
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+_product_var_list += PRODUCT_PROPERTY_OVERRIDES
+
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# used for adding properties to default.prop
+_product_var_list += PRODUCT_DEFAULT_PROPERTY_OVERRIDES
+
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# used for adding properties to build.prop of product partition
+_product_var_list += PRODUCT_PRODUCT_PROPERTIES
+
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# used for adding properties to build.prop of the product_services and odm partitions
+_product_var_list += PRODUCT_PRODUCT_SERVICES_PROPERTIES
+_product_var_list += PRODUCT_ODM_PROPERTIES
+_product_var_list += PRODUCT_CHARACTERISTICS
+
+# A list of words like <source path>:<destination path>[:<owner>].
+# The file at the source path should be copied to the destination path
+# when building this product. <destination path> is relative to
+# $(PRODUCT_OUT), so it should look like, e.g., "system/etc/file.xml".
+# The rules for these copy steps are defined in build/make/core/Makefile.
+# The optional :<owner> is used to indicate the owner of a vendor file.
+_product_var_list += PRODUCT_COPY_FILES
+
+# The OTA key(s) specified by the product config, if any. The names
+# of these keys are stored in the target-files zip so that post-build
+# signing tools can substitute them for the test key embedded by
+# default.
+_product_var_list += PRODUCT_OTA_PUBLIC_KEYS
+_product_var_list += PRODUCT_EXTRA_RECOVERY_KEYS
+
+# Should we use the default resources or add any product specific overlays
+_product_var_list += PRODUCT_PACKAGE_OVERLAYS
+_product_var_list += DEVICE_PACKAGE_OVERLAYS
+
+# Resource overlay list which must be excluded from enforcing RRO.
+_product_var_list += PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS
+
+# Package list to apply enforcing RRO.
+_product_var_list += PRODUCT_ENFORCE_RRO_TARGETS
+
+_product_var_list += PRODUCT_SDK_ATREE_FILES
+_product_var_list += PRODUCT_SDK_ADDON_NAME
+_product_var_list += PRODUCT_SDK_ADDON_COPY_FILES
+_product_var_list += PRODUCT_SDK_ADDON_COPY_MODULES
+_product_var_list += PRODUCT_SDK_ADDON_DOC_MODULES
+_product_var_list += PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP
+
+# which Soong namespaces to export to Make
+_product_var_list += PRODUCT_SOONG_NAMESPACES
+
+_product_var_list += PRODUCT_DEFAULT_WIFI_CHANNELS
+_product_var_list += PRODUCT_DEFAULT_DEV_CERTIFICATE
+_product_var_list += PRODUCT_RESTRICT_VENDOR_FILES
+
+# The list of product-specific kernel header dirs
+_product_var_list += PRODUCT_VENDOR_KERNEL_HEADERS
+
+# A list of module names of BOOTCLASSPATH (jar files)
+_product_var_list += PRODUCT_BOOT_JARS
+_product_var_list += PRODUCT_SUPPORTS_BOOT_SIGNER
+_product_var_list += PRODUCT_SUPPORTS_VBOOT
+_product_var_list += PRODUCT_SUPPORTS_VERITY
+_product_var_list += PRODUCT_SUPPORTS_VERITY_FEC
+_product_var_list += PRODUCT_OEM_PROPERTIES
+
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# used for adding properties to default.prop of system partition
+_product_var_list += PRODUCT_SYSTEM_DEFAULT_PROPERTIES
+
+_product_var_list += PRODUCT_SYSTEM_PROPERTY_BLACKLIST
+_product_var_list += PRODUCT_VENDOR_PROPERTY_BLACKLIST
+_product_var_list += PRODUCT_SYSTEM_SERVER_APPS
+_product_var_list += PRODUCT_SYSTEM_SERVER_JARS
+
+# All of the apps that we force preopt; this overrides WITH_DEXPREOPT.
+_product_var_list += PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK
+_product_var_list += PRODUCT_DEXPREOPT_SPEED_APPS
+_product_var_list += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+_product_var_list += PRODUCT_VBOOT_SIGNING_KEY
+_product_var_list += PRODUCT_VBOOT_SIGNING_SUBKEY
+_product_var_list += PRODUCT_VERITY_SIGNING_KEY
+_product_var_list += PRODUCT_SYSTEM_VERITY_PARTITION
+_product_var_list += PRODUCT_VENDOR_VERITY_PARTITION
+_product_var_list += PRODUCT_PRODUCT_VERITY_PARTITION
+_product_var_list += PRODUCT_PRODUCT_SERVICES_VERITY_PARTITION
+_product_var_list += PRODUCT_ODM_VERITY_PARTITION
+_product_var_list += PRODUCT_SYSTEM_SERVER_DEBUG_INFO
+_product_var_list += PRODUCT_OTHER_JAVA_DEBUG_INFO
+
+# Per-module dex-preopt configs.
+_product_var_list += PRODUCT_DEX_PREOPT_MODULE_CONFIGS
+_product_var_list += PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER
+_product_var_list += PRODUCT_DEX_PREOPT_DEFAULT_FLAGS
+_product_var_list += PRODUCT_DEX_PREOPT_BOOT_FLAGS
+_product_var_list += PRODUCT_DEX_PREOPT_PROFILE_DIR
+_product_var_list += PRODUCT_DEX_PREOPT_GENERATE_DM_FILES
+_product_var_list += PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING
+_product_var_list += PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS
+
+# Boot image options.
+_product_var_list += \
PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
- PRODUCT_SYSTEM_SERVER_COMPILER_FILTER \
- PRODUCT_SANITIZER_MODULE_CONFIGS \
- PRODUCT_SYSTEM_BASE_FS_PATH \
- PRODUCT_VENDOR_BASE_FS_PATH \
- PRODUCT_PRODUCT_BASE_FS_PATH \
- PRODUCT_SHIPPING_API_LEVEL \
- VENDOR_PRODUCT_RESTRICT_VENDOR_FILES \
- VENDOR_EXCEPTION_MODULES \
- VENDOR_EXCEPTION_PATHS \
- PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD \
- PRODUCT_ART_USE_READ_BARRIER \
- PRODUCT_IOT \
- PRODUCT_SYSTEM_HEADROOM \
- PRODUCT_MINIMIZE_JAVA_DEBUG_INFO \
- PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS \
- PRODUCT_ADB_KEYS \
- PRODUCT_CFI_INCLUDE_PATHS \
- PRODUCT_CFI_EXCLUDE_PATHS \
- PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE \
- PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
+ PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
+ PRODUCT_USES_ART \
+
+_product_var_list += PRODUCT_SYSTEM_SERVER_COMPILER_FILTER
+# Per-module sanitizer configs
+_product_var_list += PRODUCT_SANITIZER_MODULE_CONFIGS
+_product_var_list += PRODUCT_SYSTEM_BASE_FS_PATH
+_product_var_list += PRODUCT_VENDOR_BASE_FS_PATH
+_product_var_list += PRODUCT_PRODUCT_BASE_FS_PATH
+_product_var_list += PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH
+_product_var_list += PRODUCT_ODM_BASE_FS_PATH
+_product_var_list += PRODUCT_SHIPPING_API_LEVEL
+_product_var_list += VENDOR_PRODUCT_RESTRICT_VENDOR_FILES
+_product_var_list += VENDOR_EXCEPTION_MODULES
+_product_var_list += VENDOR_EXCEPTION_PATHS
+
+# Whether the product wants to ship libartd. For rules and meaning, see art/Android.mk.
+_product_var_list += PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD
+
+# Make this art variable visible to soong_config.mk.
+_product_var_list += PRODUCT_ART_USE_READ_BARRIER
+
+# Whether the product is an Android Things variant.
+_product_var_list += PRODUCT_IOT
+
+# Add reserved headroom to a system image.
+_product_var_list += PRODUCT_SYSTEM_HEADROOM
+
+# Whether to save disk space by minimizing java debug info
+_product_var_list += PRODUCT_MINIMIZE_JAVA_DEBUG_INFO
+
+# Whether any paths are excluded from sanitization when SANITIZE_TARGET=integer_overflow
+_product_var_list += PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS
+
+_product_var_list += PRODUCT_ADB_KEYS
+
+# Whether any paths should have CFI enabled for components
+_product_var_list += PRODUCT_CFI_INCLUDE_PATHS
+
+# Whether any paths are excluded from sanitization when SANITIZE_TARGET=cfi
+_product_var_list += PRODUCT_CFI_EXCLUDE_PATHS
+
+# Whether the Scudo hardened allocator is disabled platform-wide
+_product_var_list += PRODUCT_DISABLE_SCUDO
+
+# A flag to override PRODUCT_COMPATIBLE_PROPERTY
+_product_var_list += PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE
+
+# Whether the whitelist of actionable compatible properties should be disabled or not
+_product_var_list += PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE
+_product_var_list += PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS
+_product_var_list += PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT
+_product_var_list += PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST
+_product_var_list += PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT
+_product_var_list += PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST
+
+# List of modules that should be forcefully unmarked from being LOCAL_PRODUCT_MODULE, and hence
+# installed on /system directory by default.
+_product_var_list += PRODUCT_FORCE_PRODUCT_MODULES_TO_SYSTEM_PARTITION
+
+# When this is true, dynamic partitions are retrofitted on a device that has
+# already been launched without dynamic partitions. Otherwise, the device
+# is launched with dynamic partitions.
+# This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
+_product_var_list += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+
+# Other dynamic partition feature flags. PRODUCT_USE_DYNAMIC_PARTITION_SIZE and
+# PRODUCT_BUILD_SUPER_PARTITION default to the value of PRODUCT_USE_DYNAMIC_PARTITIONS.
+_product_var_list += \
+ PRODUCT_USE_DYNAMIC_PARTITIONS \
+ PRODUCT_USE_DYNAMIC_PARTITION_SIZE \
+ PRODUCT_BUILD_SUPER_PARTITION \
+
+# If set, kernel configuration requirements are present in the OTA package (and will be enforced
+# during OTA). Otherwise, kernel configuration requirements are enforced in VTS.
+# Devices that check the running kernel (instead of the kernel in the OTA package) should not
+# set this variable, to prevent OTA failures.
+_product_var_list += PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
+
+# Whether any paths are excluded from being set XOM when ENABLE_XOM=true
+_product_var_list += PRODUCT_XOM_EXCLUDE_PATHS
+_product_var_list += PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES
+_product_var_list += PRODUCT_PACKAGE_NAME_OVERRIDES
+_product_var_list += PRODUCT_CERTIFICATE_OVERRIDES
+_product_var_list += PRODUCT_BUILD_SYSTEM_IMAGE
+_product_var_list += PRODUCT_BUILD_SYSTEM_OTHER_IMAGE
+_product_var_list += PRODUCT_BUILD_VENDOR_IMAGE
+_product_var_list += PRODUCT_BUILD_PRODUCT_IMAGE
+_product_var_list += PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE
+_product_var_list += PRODUCT_BUILD_ODM_IMAGE
+_product_var_list += PRODUCT_BUILD_CACHE_IMAGE
+_product_var_list += PRODUCT_BUILD_RAMDISK_IMAGE
+_product_var_list += PRODUCT_BUILD_USERDATA_IMAGE
+_product_var_list += PRODUCT_UPDATABLE_BOOT_MODULES
+_product_var_list += PRODUCT_UPDATABLE_BOOT_LOCATIONS
+
+# Whether the product would like to check prebuilt ELF files.
+_product_var_list += PRODUCT_CHECK_ELF_FILES
+.KATI_READONLY := _product_var_list
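
For illustration, a product makefile could opt in to the prebuilt ELF check with the fragment below (hypothetical; how check_elf_file.mk consumes the flag is defined elsewhere in the tree):

PRODUCT_CHECK_ELF_FILES := true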
define dump-product
-$(info ==== $(1) ====)\
+$(warning ==== $(1) ====)\
$(foreach v,$(_product_var_list),\
-$(info PRODUCTS.$(1).$(v) := $(PRODUCTS.$(1).$(v))))\
-$(info --------)
+$(warning PRODUCTS.$(1).$(v) := $(PRODUCTS.$(1).$(v))))\
+$(warning --------)
endef
define dump-products
@@ -171,12 +359,20 @@ $(foreach p,$(PRODUCTS),$(call dump-product,$(p)))
endef
#
+# Functions for including product makefiles
+#
+
+#
# $(1): product to inherit
#
-# Does three things:
+# To be called from product makefiles, and is later evaluated during the import-nodes
+# call below. It does three things:
# 1. Inherits all of the variables from $1.
# 2. Records the inheritance in the .INHERITS_FROM variable
-# 3. Records that we've visited this node, in ALL_PRODUCTS
+# 3. Records the calling makefile in PARENT_PRODUCT_FILES
+#
+# (2) and (3) can be used together to reconstruct the include hierarchy
+# See e.g. product-graph.mk for an example of this.
#
define inherit-product
$(if $(findstring ../,$(1)),\
@@ -184,13 +380,32 @@ define inherit-product
$(eval np := $(strip $(1))))\
$(foreach v,$(_product_var_list), \
$(eval $(v) := $($(v)) $(INHERIT_TAG)$(np))) \
- $(eval inherit_var := \
- PRODUCTS.$(strip $(word 1,$(_include_stack))).INHERITS_FROM) \
+ $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
+ $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
$(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
- $(eval inherit_var:=) \
- $(eval ALL_PRODUCTS := $(sort $(ALL_PRODUCTS) $(word 1,$(_include_stack))))
+ $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk)))
endef
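
A typical call site in a product makefile (hypothetical device paths); with this change the calling makefile is recorded in PARENT_PRODUCT_FILES instead of ALL_PRODUCTS:

$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
$(call inherit-product-if-exists, vendor/acme/foo/device-vendor.mk)

PRODUCT_NAME := aosp_foo
PRODUCT_DEVICE := foo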
+# Specifies a number of path prefixes, relative to PRODUCT_OUT, where the
+# product makefile hierarchy rooted in the current node places its artifacts.
+# Creating artifacts outside the specified paths will cause a build-time error.
+define require-artifacts-in-path
+ $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
+ $(eval PRODUCTS.$(current_mk).ARTIFACT_PATH_REQUIREMENTS := $(strip $(1))) \
+ $(eval PRODUCTS.$(current_mk).ARTIFACT_PATH_WHITELIST := $(strip $(2))) \
+ $(eval ARTIFACT_PATH_REQUIREMENT_PRODUCTS := \
+ $(sort $(ARTIFACT_PATH_REQUIREMENT_PRODUCTS) $(current_mk)))
+endef
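
A sketch of how a product makefile might use this (hypothetical paths): restrict the product's artifacts to system/ and root/, whitelisting one known exception.

$(call require-artifacts-in-path, system/ root/, system/etc/legacy_exception.conf)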
+
+# Makes including non-existent modules in PRODUCT_PACKAGES an error.
+# $(1): whitelist of non-existent modules to allow.
+define enforce-product-packages-exist
+ $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
+ $(eval PRODUCTS.$(current_mk).PRODUCT_ENFORCE_PACKAGES_EXIST := true) \
+ $(eval PRODUCTS.$(current_mk).PRODUCT_ENFORCE_PACKAGES_EXIST_WHITELIST := $(1)) \
+ $(eval .KATI_READONLY := PRODUCTS.$(current_mk).PRODUCT_ENFORCE_PACKAGES_EXIST) \
+ $(eval .KATI_READONLY := PRODUCTS.$(current_mk).PRODUCT_ENFORCE_PACKAGES_EXIST_WHITELIST)
+endef
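
And a corresponding sketch for the package-existence check (hypothetical whitelist entries):

$(call enforce-product-packages-exist, legacy_module_one legacy_module_two)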
#
# Do inherit-product only if $(1) exists
@@ -269,67 +484,45 @@ define resolve-short-product-name
$(strip $(call _resolve-short-product-name,$(1)))
endef
+# BoardConfig variables that are also inherited in product mks. Should ideally
+# be cleaned up to not be product variables.
+_readonly_late_variables := \
+ DEVICE_PACKAGE_OVERLAYS \
+ WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY \
-_product_stash_var_list := $(_product_var_list) \
- PRODUCT_BOOTCLASSPATH \
- PRODUCT_SYSTEM_SERVER_CLASSPATH \
- TARGET_ARCH \
- TARGET_ARCH_VARIANT \
- TARGET_CPU_VARIANT \
- TARGET_BOARD_PLATFORM \
- TARGET_BOARD_PLATFORM_GPU \
- TARGET_BOARD_KERNEL_HEADERS \
- TARGET_DEVICE_KERNEL_HEADERS \
- TARGET_PRODUCT_KERNEL_HEADERS \
- TARGET_BOOTLOADER_BOARD_NAME \
- TARGET_NO_BOOTLOADER \
- TARGET_NO_KERNEL \
- TARGET_NO_RECOVERY \
- TARGET_NO_RADIOIMAGE \
- TARGET_HARDWARE_3D \
- TARGET_CPU_ABI \
- TARGET_CPU_ABI2 \
-
-
-_product_stash_var_list += \
- BOARD_WPA_SUPPLICANT_DRIVER \
- BOARD_WLAN_DEVICE \
- BOARD_USES_GENERIC_AUDIO \
- BOARD_KERNEL_CMDLINE \
- BOARD_KERNEL_BASE \
- BOARD_HAVE_BLUETOOTH \
- BOARD_VENDOR_USE_AKMD \
- BOARD_EGL_CFG \
- BOARD_BOOTIMAGE_PARTITION_SIZE \
- BOARD_RECOVERYIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_PARTITION_SIZE \
- BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
- BOARD_CACHEIMAGE_PARTITION_SIZE \
- BOARD_FLASH_BLOCK_SIZE \
- BOARD_VENDORIMAGE_PARTITION_SIZE \
- BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
- BOARD_PRODUCTIMAGE_PARTITION_SIZE \
- BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
- BOARD_INSTALLER_CMDLINE \
-
-
-_product_stash_var_list += \
- DEFAULT_SYSTEM_DEV_CERTIFICATE \
- WITH_DEXPREOPT \
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY
+# Modified internally in the build system
+_readonly_late_variables += \
+ PRODUCT_COPY_FILES \
+ PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING \
+ PRODUCT_DEX_PREOPT_BOOT_FLAGS \
+
+_readonly_early_variables := $(filter-out $(_readonly_late_variables),$(_product_var_list))
#
# Mark the variables in _product_stash_var_list as readonly
#
-define readonly-product-vars
-$(foreach v,$(_product_stash_var_list), \
- $(eval $(v) ?=) \
- $(eval .KATI_READONLY := $(v)) \
+define readonly-variables
+$(foreach v,$(1), \
+ $(eval $(v) ?=) \
+ $(eval .KATI_READONLY := $(v)) \
)
endef
+define readonly-product-vars
+$(call readonly-variables,$(_readonly_early_variables))
+endef
+
+define readonly-final-product-vars
+$(call readonly-variables,$(_readonly_late_variables))
+endef
+
+#
+# Strip the variables in _product_var_list
+#
+define strip-product-vars
+$(foreach v,$(_product_var_list), \
+ $(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
+)
+endef
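
The net effect of strip-product-vars is to flatten every registered product variable into its short name, e.g. (illustrative):

#   PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MODEL))
# so later code can read $(PRODUCT_MODEL) instead of the long PRODUCTS.<mk>.PRODUCT_MODEL form.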
define add-to-product-copy-files-if-exists
$(if $(wildcard $(word 1,$(subst :, ,$(1)))),$(1))
diff --git a/core/product_config.mk b/core/product_config.mk
index 9406812aba..9460357ca1 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -175,15 +175,9 @@ include $(BUILD_SYSTEM)/node_fns.mk
include $(BUILD_SYSTEM)/product.mk
include $(BUILD_SYSTEM)/device.mk
-ifneq ($(strip $(TARGET_BUILD_APPS)),)
-# An unbundled app build needs only the core product makefiles.
-all_product_configs := $(call get-product-makefiles,\
- $(SRC_TARGET_DIR)/product/AndroidProducts.mk)
-else
# Read in all of the product definitions specified by the AndroidProducts.mk
# files in the tree.
all_product_configs := $(get-all-product-makefiles)
-endif
all_named_products :=
@@ -195,18 +189,13 @@ all_named_products :=
current_product_makefile :=
all_product_makefiles :=
$(foreach f, $(all_product_configs),\
- $(eval _cpm_words := $(subst :,$(space),$(f)))\
+ $(eval _cpm_words := $(call _decode-product-name,$(f)))\
$(eval _cpm_word1 := $(word 1,$(_cpm_words)))\
$(eval _cpm_word2 := $(word 2,$(_cpm_words)))\
- $(if $(_cpm_word2),\
- $(eval all_product_makefiles += $(_cpm_word2))\
- $(eval all_named_products += $(_cpm_word1))\
- $(if $(filter $(TARGET_PRODUCT),$(_cpm_word1)),\
- $(eval current_product_makefile += $(_cpm_word2)),),\
- $(eval all_product_makefiles += $(f))\
- $(eval all_named_products += $(basename $(notdir $(f))))\
- $(if $(filter $(TARGET_PRODUCT),$(basename $(notdir $(f)))),\
- $(eval current_product_makefile += $(f)),)))
+ $(eval all_product_makefiles += $(_cpm_word2))\
+ $(eval all_named_products += $(_cpm_word1))\
+ $(if $(filter $(TARGET_PRODUCT),$(_cpm_word1)),\
+ $(eval current_product_makefile += $(_cpm_word2)),))
_cpm_words :=
_cpm_word1 :=
_cpm_word2 :=
@@ -239,12 +228,17 @@ endif
$(call import-products, $(current_product_makefile))
endif # Import all or just the current product makefile
+# Import all the products that have made artifact path requirements, so that we can verify
+# the artifacts they produce.
+$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
+ $(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
+)
+
# Sanity check
$(check-all-products)
ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
$(dump-products)
-$(error done)
endif
# Convert a short name like "sooner" into the path to the product
@@ -258,27 +252,19 @@ current_product_makefile :=
all_product_makefiles :=
all_product_configs :=
+############################################################################
+# Strip and assign the PRODUCT_ variables.
+$(call strip-product-vars)
#############################################################################
+# Sanity check and assign default values
-# A list of module names of BOOTCLASSPATH (jar files)
-PRODUCT_BOOT_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BOOT_JARS))
-PRODUCT_SYSTEM_SERVER_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_JARS))
-PRODUCT_SYSTEM_SERVER_APPS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_APPS))
-PRODUCT_DEXPREOPT_SPEED_APPS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEXPREOPT_SPEED_APPS))
-PRODUCT_LOADED_BY_PRIVILEGED_MODULES := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
+TARGET_DEVICE := $(PRODUCT_DEVICE)
-# All of the apps that we force preopt, this overrides WITH_DEXPREOPT.
-PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK))
-
-# Find the device that this product maps to.
-TARGET_DEVICE := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEVICE)
+# TODO: also keep track of things like "port", "land" in product files.
# Figure out which resource configuration options to use for this
# product.
-PRODUCT_LOCALES := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_LOCALES))
-# TODO: also keep track of things like "port", "land" in product files.
-
# If CUSTOM_LOCALES contains any locales not already included
# in PRODUCT_LOCALES, add them to PRODUCT_LOCALES.
extra_locales := $(filter-out $(PRODUCT_LOCALES),$(CUSTOM_LOCALES))
@@ -292,133 +278,56 @@ ifneq (,$(extra_locales))
endif
# Add PRODUCT_LOCALES to PRODUCT_AAPT_CONFIG
-PRODUCT_AAPT_CONFIG := $(strip $(PRODUCT_LOCALES) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_CONFIG))
-PRODUCT_AAPT_PREF_CONFIG := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_PREF_CONFIG))
-PRODUCT_AAPT_PREBUILT_DPI := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_PREBUILT_DPI))
+PRODUCT_AAPT_CONFIG := $(PRODUCT_LOCALES) $(PRODUCT_AAPT_CONFIG)
# Keep a copy of the space-separated config
PRODUCT_AAPT_CONFIG_SP := $(PRODUCT_AAPT_CONFIG)
+PRODUCT_AAPT_CONFIG := $(subst $(space),$(comma),$(PRODUCT_AAPT_CONFIG))
-# Convert spaces to commas.
-PRODUCT_AAPT_CONFIG := \
- $(subst $(space),$(comma),$(strip $(PRODUCT_AAPT_CONFIG)))
-
-PRODUCT_BRAND := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BRAND))
-
-PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MODEL))
+ifndef PRODUCT_SYSTEM_NAME
+ PRODUCT_SYSTEM_NAME := $(PRODUCT_NAME)
+endif
+ifndef PRODUCT_SYSTEM_DEVICE
+ PRODUCT_SYSTEM_DEVICE := $(PRODUCT_DEVICE)
+endif
+ifndef PRODUCT_SYSTEM_BRAND
+ PRODUCT_SYSTEM_BRAND := $(PRODUCT_BRAND)
+endif
ifndef PRODUCT_MODEL
- PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_NAME))
+ PRODUCT_MODEL := $(PRODUCT_NAME)
+endif
+ifndef PRODUCT_SYSTEM_MODEL
+ PRODUCT_SYSTEM_MODEL := $(PRODUCT_MODEL)
endif
-PRODUCT_MANUFACTURER := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MANUFACTURER))
ifndef PRODUCT_MANUFACTURER
PRODUCT_MANUFACTURER := unknown
endif
+ifndef PRODUCT_SYSTEM_MANUFACTURER
+ PRODUCT_SYSTEM_MANUFACTURER := $(PRODUCT_MANUFACTURER)
+endif
-ifeq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CHARACTERISTICS),)
+ifndef PRODUCT_CHARACTERISTICS
TARGET_AAPT_CHARACTERISTICS := default
else
- TARGET_AAPT_CHARACTERISTICS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CHARACTERISTICS))
+ TARGET_AAPT_CHARACTERISTICS := $(PRODUCT_CHARACTERISTICS)
endif
-PRODUCT_DEFAULT_WIFI_CHANNELS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEFAULT_WIFI_CHANNELS))
-
-PRODUCT_DEFAULT_DEV_CERTIFICATE := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEFAULT_DEV_CERTIFICATE))
ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
-ifneq (1,$(words $(PRODUCT_DEFAULT_DEV_CERTIFICATE)))
+ ifneq (1,$(words $(PRODUCT_DEFAULT_DEV_CERTIFICATE)))
$(error PRODUCT_DEFAULT_DEV_CERTIFICATE='$(PRODUCT_DEFAULT_DEV_CERTIFICATE)', \
only 1 certificate is allowed.)
-endif
+ endif
endif
-# A list of words like <source path>:<destination path>[:<owner>].
-# The file at the source path should be copied to the destination path
-# when building this product. <destination path> is relative to
-# $(PRODUCT_OUT), so it should look like, e.g., "system/etc/file.xml".
-# The rules for these copy steps are defined in build/make/core/Makefile.
-# The optional :<owner> is used to indicate the owner of a vendor file.
-PRODUCT_COPY_FILES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COPY_FILES))
-
-# A list of property assignments, like "key = value", with zero or more
-# whitespace characters on either side of the '='.
-PRODUCT_PROPERTY_OVERRIDES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PROPERTY_OVERRIDES))
-.KATI_READONLY := PRODUCT_PROPERTY_OVERRIDES
-
-PRODUCT_SHIPPING_API_LEVEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SHIPPING_API_LEVEL))
-
-# A list of property assignments, like "key = value", with zero or more
-# whitespace characters on either side of the '='.
-# used for adding properties to default.prop
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
-.KATI_READONLY := PRODUCT_DEFAULT_PROPERTY_OVERRIDES
-
-# A list of property assignments, like "key = value", with zero or more
-# whitespace characters on either side of the '='.
-# used for adding properties to default.prop of system partition
-PRODUCT_SYSTEM_DEFAULT_PROPERTIES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
-.KATI_READONLY := PRODUCT_SYSTEM_DEFAULT_PROPERTIES
-
-# A list of property assignments, like "key = value", with zero or more
-# whitespace characters on either side of the '='.
-# used for adding properties to build.prop of product partition
-PRODUCT_PRODUCT_PROPERTIES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_PROPERTIES))
-.KATI_READONLY := PRODUCT_PRODUCT_PROPERTIES
-
-# Should we use the default resources or add any product specific overlays
-PRODUCT_PACKAGE_OVERLAYS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGE_OVERLAYS))
-DEVICE_PACKAGE_OVERLAYS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).DEVICE_PACKAGE_OVERLAYS))
-
-# The list of product-specific kernel header dirs
-PRODUCT_VENDOR_KERNEL_HEADERS := \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_KERNEL_HEADERS)
-
-# The OTA key(s) specified by the product config, if any. The names
-# of these keys are stored in the target-files zip so that post-build
-# signing tools can substitute them for the test key embedded by
-# default.
-PRODUCT_OTA_PUBLIC_KEYS := $(sort \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTA_PUBLIC_KEYS))
-
-PRODUCT_EXTRA_RECOVERY_KEYS := $(sort \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_EXTRA_RECOVERY_KEYS))
-
-PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
-PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
-PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
-PRODUCT_DEX_PREOPT_BOOT_FLAGS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_FLAGS))
-PRODUCT_DEX_PREOPT_PROFILE_DIR := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_PROFILE_DIR))
-
-# Boot image options.
-PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE))
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION))
-
-PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-PRODUCT_SYSTEM_SERVER_DEBUG_INFO := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_DEBUG_INFO))
-PRODUCT_OTHER_JAVA_DEBUG_INFO := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTHER_JAVA_DEBUG_INFO))
+ENFORCE_SYSTEM_CERTIFICATE := $(PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT)
+ENFORCE_SYSTEM_CERTIFICATE_WHITELIST := $(PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST)
+
+PRODUCT_OTA_PUBLIC_KEYS := $(sort $(PRODUCT_OTA_PUBLIC_KEYS))
+PRODUCT_EXTRA_RECOVERY_KEYS := $(sort $(PRODUCT_EXTRA_RECOVERY_KEYS))
# Resolve and setup per-module dex-preopt configs.
-PRODUCT_DEX_PREOPT_MODULE_CONFIGS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_MODULE_CONFIGS))
+DEXPREOPT_DISABLED_MODULES :=
# If a module has multiple setups, the first takes precedence.
_pdpmc_modules :=
$(foreach c,$(PRODUCT_DEX_PREOPT_MODULE_CONFIGS),\
@@ -427,12 +336,13 @@ $(foreach c,$(PRODUCT_DEX_PREOPT_MODULE_CONFIGS),\
$(eval _pdpmc_modules += $(m))\
$(eval cf := $(patsubst $(m)=%,%,$(c)))\
$(eval cf := $(subst $(_PDPMC_SP_PLACE_HOLDER),$(space),$(cf)))\
- $(eval DEXPREOPT.$(TARGET_PRODUCT).$(m).CONFIG := $(cf))))
+ $(if $(filter disable,$(cf)),\
+ $(eval DEXPREOPT_DISABLED_MODULES += $(m)),\
+ $(eval DEXPREOPT.$(TARGET_PRODUCT).$(m).CONFIG := $(cf)))))
_pdpmc_modules :=
+
# Resolve and setup per-module sanitizer configs.
-PRODUCT_SANITIZER_MODULE_CONFIGS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SANITIZER_MODULE_CONFIGS))
# If a module has multiple setups, the first takes precedence.
_psmc_modules :=
$(foreach c,$(PRODUCT_SANITIZER_MODULE_CONFIGS),\
@@ -444,64 +354,69 @@ $(foreach c,$(PRODUCT_SANITIZER_MODULE_CONFIGS),\
$(eval SANITIZER.$(TARGET_PRODUCT).$(m).CONFIG := $(cf))))
_psmc_modules :=
-# Whether the product wants to ship libartd. For rules and meaning, see art/Android.mk.
-PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD))
-
-# Make this art variable visible to soong_config.mk.
-PRODUCT_ART_USE_READ_BARRIER := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ART_USE_READ_BARRIER))
-
-# Whether the product is an Android Things variant.
-PRODUCT_IOT := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_IOT))
-
-# Resource overlay list which must be excluded from enforcing RRO.
-PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
-
-# Package list to apply enforcing RRO.
-PRODUCT_ENFORCE_RRO_TARGETS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_RRO_TARGETS))
-
-# Add reserved headroom to a system image.
-PRODUCT_SYSTEM_HEADROOM := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM))
-
-# Whether to save disk space by minimizing java debug info
-PRODUCT_MINIMIZE_JAVA_DEBUG_INFO := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MINIMIZE_JAVA_DEBUG_INFO))
-
-# Whether any paths are excluded from sanitization when SANITIZE_TARGET=integer_overflow
-PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
-
-# ADB keys for debuggable builds
-PRODUCT_ADB_KEYS :=
-ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
- PRODUCT_ADB_KEYS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ADB_KEYS))
+# Reset ADB keys for non-debuggable builds
+ifeq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT)))
+ PRODUCT_ADB_KEYS :=
endif
ifneq ($(filter-out 0 1,$(words $(PRODUCT_ADB_KEYS))),)
$(error Only one file may be in PRODUCT_ADB_KEYS: $(PRODUCT_ADB_KEYS))
endif
-.KATI_READONLY := PRODUCT_ADB_KEYS
-# Whether any paths are excluded from sanitization when SANITIZE_TARGET=cfi
-PRODUCT_CFI_EXCLUDE_PATHS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CFI_EXCLUDE_PATHS))
+ifndef PRODUCT_USE_DYNAMIC_PARTITIONS
+ PRODUCT_USE_DYNAMIC_PARTITIONS := $(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)
+endif
+
+# All requirements of PRODUCT_USE_DYNAMIC_PARTITIONS fall back to
+# PRODUCT_USE_DYNAMIC_PARTITIONS if not defined.
+ifndef PRODUCT_USE_DYNAMIC_PARTITION_SIZE
+ PRODUCT_USE_DYNAMIC_PARTITION_SIZE := $(PRODUCT_USE_DYNAMIC_PARTITIONS)
+endif
-# Whether any paths should have CFI enabled for components
-PRODUCT_CFI_INCLUDE_PATHS := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CFI_INCLUDE_PATHS))
+ifndef PRODUCT_BUILD_SUPER_PARTITION
+ PRODUCT_BUILD_SUPER_PARTITION := $(PRODUCT_USE_DYNAMIC_PARTITIONS)
+endif
-# which Soong namespaces to export to Make
-PRODUCT_SOONG_NAMESPACES := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SOONG_NAMESPACES))
+ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),)
+ ifdef PRODUCT_SHIPPING_API_LEVEL
+ ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29))
+ PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS := true
+ endif
+ endif
+endif
-# A flag to override PRODUCT_COMPATIBLE_PROPERTY
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE))
+define product-overrides-config
+$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
+  $$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
+    $$(error Rule "$$(rule)" in PRODUCT_$(1)_OVERRIDES is not <module_name>:<new_value>)))
+endef
+
+$(foreach var, \
+ MANIFEST_PACKAGE_NAME \
+ PACKAGE_NAME \
+ CERTIFICATE, \
+ $(eval $(call product-overrides-config,$(var))))
+
+# Macro used below to normalize and validate PRODUCT_BUILD_$(1)_IMAGE. $(1) is the name of the partition.
+define product-build-image-config
+PRODUCT_BUILD_$(1)_IMAGE := $$(firstword $$(PRODUCT_BUILD_$(1)_IMAGE))
+ifneq ($$(filter-out true false,$$(PRODUCT_BUILD_$(1)_IMAGE)),)
+  $$(error Invalid PRODUCT_BUILD_$(1)_IMAGE: $$(PRODUCT_BUILD_$(1)_IMAGE) -- only true, false, and empty are supported)
+endif
+endef
-# Whether the whitelist of actionable compatible properties should be disabled or not
-PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE := \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE))
+# Copy and check the value of each PRODUCT_BUILD_*_IMAGE variable
+$(foreach image, \
+ SYSTEM \
+ SYSTEM_OTHER \
+ VENDOR \
+ PRODUCT \
+ PRODUCT_SERVICES \
+ ODM \
+ CACHE \
+ RAMDISK \
+ USERDATA, \
+ $(eval $(call product-build-image-config,$(image))))
+
+product-build-image-config :=
+
+$(call readonly-product-vars)
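The error checks in the rewritten product handling above only hint at the shapes these variables are expected to take. A minimal sketch of how a product makefile might set them; the module and package names here are hypothetical:

    # Disable dexpreopt for one module; a single-word config avoids the space placeholder.
    PRODUCT_DEX_PREOPT_MODULE_CONFIGS += MyApp=disable
    # Override rules must be <module_name>:<new_value> pairs.
    PRODUCT_PACKAGE_NAME_OVERRIDES := MyApp:com.example.myapp
    # PRODUCT_BUILD_<partition>_IMAGE may only be true, false, or empty.
    PRODUCT_BUILD_CACHE_IMAGE := false
    PRODUCT_BUILD_USERDATA_IMAGE := false
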
diff --git a/core/proguard.flags b/core/proguard.flags
index 6ed1f9b3bc..50049cbdad 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -15,4 +15,35 @@
@**.VisibleForTesting *;
}
+# Understand the @Keep support annotation.
+-keep class android.support.annotation.Keep
+-keep class androidx.annotation.Keep
+
+-keep @android.support.annotation.Keep class * {*;}
+-keep @androidx.annotation.Keep class * {*;}
+
+-keepclasseswithmembers class * {
+ @android.support.annotation.Keep <methods>;
+}
+
+-keepclasseswithmembers class * {
+ @androidx.annotation.Keep <methods>;
+}
+
+-keepclasseswithmembers class * {
+ @android.support.annotation.Keep <fields>;
+}
+
+-keepclasseswithmembers class * {
+ @androidx.annotation.Keep <fields>;
+}
+
+-keepclasseswithmembers class * {
+ @android.support.annotation.Keep <init>(...);
+}
+
+-keepclasseswithmembers class * {
+ @androidx.annotation.Keep <init>(...);
+}
+
-include proguard_basic_keeps.flags
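These @Keep rules only matter for modules that actually run ProGuard/R8. A minimal sketch of an Android.mk fragment that opts a module in, assuming the usual LOCAL_PROGUARD_* hooks; the module name is hypothetical:

    LOCAL_MODULE := MyApp
    # "full" enables shrinking and obfuscation for this module.
    LOCAL_PROGUARD_ENABLED := full
    # Module-local rules, applied on top of the global keep rules above.
    LOCAL_PROGUARD_FLAG_FILES := proguard.flags
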
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 4de52219a1..3c25e895fa 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -1,7 +1,3 @@
-# see http://sourceforge.net/tracker/?func=detail&aid=2787465&group_id=54750&atid=474707
--optimizations !code/simplification/arithmetic
--optimizations !code/simplification/cast
-
# To prevent name conflict in incremental obfuscation.
-useuniqueclassmembernames
diff --git a/core/python_binary_host_test_config_template.xml b/core/python_binary_host_test_config_template.xml
new file mode 100644
index 0000000000..0f63953d9f
--- /dev/null
+++ b/core/python_binary_host_test_config_template.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Config to run {MODULE} unittests">
+ <test class="com.android.tradefed.testtype.python.PythonBinaryHostTest" >
+ <option name="par-file-name" value="{MODULE}" />
+ <option name="test-timeout" value="5m" />
+ </test>
+</configuration>
diff --git a/core/rbe.mk b/core/rbe.mk
new file mode 100644
index 0000000000..e90500cbc1
--- /dev/null
+++ b/core/rbe.mk
@@ -0,0 +1,81 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Notice: this works only with Google's RBE service.
+ifneq ($(filter-out false,$(USE_RBE)),)
+ ifdef RBE_DIR
+ rbe_dir := $(RBE_DIR)
+ else
+ rbe_dir := prebuilts/remoteexecution-client/live/
+ endif
+
+ ifdef RBE_CXX_EXEC_STRATEGY
+ cxx_rbe_exec_strategy := $(RBE_CXX_EXEC_STRATEGY)
+ else
+ cxx_rbe_exec_strategy := local
+ endif
+
+ ifdef RBE_CXX_COMPARE
+ cxx_compare := $(RBE_CXX_COMPARE)
+ else
+ cxx_compare := false
+ endif
+
+ ifdef RBE_JAVAC_EXEC_STRATEGY
+ javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
+ else
+ javac_exec_strategy := remote_local_fallback
+ endif
+
+ ifdef RBE_R8_EXEC_STRATEGY
+ r8_exec_strategy := $(RBE_R8_EXEC_STRATEGY)
+ else
+ r8_exec_strategy := remote_local_fallback
+ endif
+
+ ifdef RBE_D8_EXEC_STRATEGY
+ d8_exec_strategy := $(RBE_D8_EXEC_STRATEGY)
+ else
+ d8_exec_strategy := remote_local_fallback
+ endif
+
+ platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62
+ cxx_platform := $(platform),Pool=default
+ java_r8_d8_platform := $(platform),Pool=java16
+
+ RBE_WRAPPER := $(rbe_dir)/rewrapper
+ RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_allowlist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
+
+ # Append rewrapper to existing *_WRAPPER variables so it's possible to
+ # use both ccache and rewrapper.
+ CC_WRAPPER := $(strip $(CC_WRAPPER) $(RBE_WRAPPER) $(RBE_CXX))
+ CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(RBE_WRAPPER) $(RBE_CXX))
+
+ ifdef RBE_JAVAC
+ JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(RBE_WRAPPER) --labels=type=compile,lang=java,compiler=javac --exec_strategy=$(javac_exec_strategy) --platform=$(java_r8_d8_platform))
+ endif
+
+ ifdef RBE_R8
+ R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk9/linux-x86/bin/java)
+ endif
+
+ ifdef RBE_D8
+ D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk9/linux-x86/bin/java)
+ endif
+
+ rbe_dir :=
+endif
+
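rbe.mk only reads configuration; nothing in the tree sets USE_RBE or the RBE_* variables. A sketch of a buildspec.mk or environment fragment that would exercise it, with the strategy value purely illustrative:

    USE_RBE := true
    # Optional per-tool toggles; without them only the C/C++ wrappers are set.
    RBE_JAVAC := true
    RBE_R8 := true
    RBE_D8 := true
    # Optional strategy override; the defaults are shown in rbe.mk above.
    RBE_CXX_EXEC_STRATEGY := remote_local_fallback
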
diff --git a/core/setup_one_odex.mk b/core/setup_one_odex.mk
deleted file mode 100644
index 51df43e34b..0000000000
--- a/core/setup_one_odex.mk
+++ /dev/null
@@ -1,144 +0,0 @@
-#
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set up variables and dependency for one odex file
-# Input variables: my_2nd_arch_prefix
-# Output(modified) variables: built_odex, installed_odex, built_installed_odex
-
-my_built_odex := $(call get-odex-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(LOCAL_BUILT_MODULE))
-ifdef LOCAL_DEX_PREOPT_IMAGE_LOCATION
-my_dex_preopt_image_location := $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
-else
-my_dex_preopt_image_location := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
-endif
-my_dex_preopt_image_filename := $(call get-image-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(my_dex_preopt_image_location))
-
-# If LOCAL_ENFORCE_USES_LIBRARIES is not set, default to true if either of LOCAL_USES_LIBRARIES or
-# LOCAL_OPTIONAL_USES_LIBRARIES are specified.
-ifeq (,$(LOCAL_ENFORCE_USES_LIBRARIES))
-# Will change the default to true unconditionally in the future.
-ifneq (,$(LOCAL_OPTIONAL_USES_LIBRARIES))
-LOCAL_ENFORCE_USES_LIBRARIES := true
-endif
-ifneq (,$(LOCAL_USES_LIBRARIES))
-LOCAL_ENFORCE_USES_LIBRARIES := true
-endif
-endif
-
-my_uses_libraries := $(LOCAL_USES_LIBRARIES)
-my_optional_uses_libraries := $(LOCAL_OPTIONAL_USES_LIBRARIES)
-my_missing_uses_libraries := $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES)
-
-# If we have either optional or required uses-libraries, set up the class loader context
-# accordingly.
-my_lib_names :=
-my_optional_lib_names :=
-my_filtered_optional_uses_libraries :=
-my_system_dependencies :=
-my_stored_preopt_class_loader_context_libs :=
-my_conditional_uses_libraries_host :=
-my_conditional_uses_libraries_target :=
-
-ifneq (true,$(LOCAL_ENFORCE_USES_LIBRARIES))
- # Pass special class loader context to skip the classpath and collision check.
- # This will get removed once LOCAL_USES_LIBRARIES is enforced.
- # Right now LOCAL_USES_LIBRARIES is opt in, for the case where it's not specified we still default
- # to the &.
- my_dex_preopt_class_loader_context := \&
-else
- # Compute the filtered optional uses libraries by removing ones that are not supposed to exist.
- my_filtered_optional_uses_libraries := \
- $(filter-out $(my_missing_uses_libraries), $(my_optional_uses_libraries))
- my_filtered_uses_libraries := $(my_uses_libraries) $(my_filtered_optional_uses_libraries)
-
- # These are the ones we are verifying in the make rule, use the unfiltered libraries.
- my_lib_names := $(my_uses_libraries)
- my_optional_lib_names := $(my_optional_uses_libraries)
-
- # Calculate system build dependencies based on the filtered libraries.
- my_intermediate_libs := $(foreach lib_name, $(my_lib_names) $(my_filtered_optional_uses_libraries), \
- $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib_name),,COMMON)/javalib.jar)
- my_dex_preopt_system_dependencies := $(my_intermediate_libs)
- my_dex_preopt_class_loader_context := $(call normalize-path-list,$(my_intermediate_libs))
-
- # The class loader context checksums are filled in by dex2oat.
- my_stored_preopt_class_loader_context_libs := $(call normalize-path-list, \
- $(foreach lib_name,$(my_filtered_uses_libraries),/system/framework/$(lib_name).jar))
-
- # Fix up org.apache.http.legacy.boot since it should be org.apache.http.legacy in the manifest.
- my_lib_names := $(patsubst org.apache.http.legacy.boot,org.apache.http.legacy,$(my_lib_names))
- my_optional_lib_names := $(patsubst org.apache.http.legacy.boot,org.apache.http.legacy,$(my_optional_lib_names))
- ifeq (,$(filter org.apache.http.legacy,$(my_lib_names) $(my_optional_lib_names)))
- my_conditional_uses_libraries_host := $(call intermediates-dir-for,JAVA_LIBRARIES,org.apache.http.legacy.boot,,COMMON)/javalib.jar
- my_conditional_uses_libraries_target := /system/framework/org.apache.http.legacy.boot.jar
- endif
-endif
-
-# Always depend on org.apache.http.legacy.boot since it may get used by dex2oat-one-file for apps
-# targetting <SDK 28(P).
-my_always_depend_libraries := $(call intermediates-dir-for,JAVA_LIBRARIES,org.apache.http.legacy.boot,,COMMON)/javalib.jar
-
-$(my_built_odex): $(AAPT)
-$(my_built_odex): $(my_always_depend_libraries)
-$(my_built_odex): $(my_dex_preopt_system_dependencies)
-$(my_built_odex): PRIVATE_ENFORCE_USES_LIBRARIES := $(LOCAL_ENFORCE_USES_LIBRARIES)
-$(my_built_odex): PRIVATE_CONDITIONAL_USES_LIBRARIES_HOST := $(my_conditional_uses_libraries_host)
-$(my_built_odex): PRIVATE_CONDITIONAL_USES_LIBRARIES_TARGET := $(my_conditional_uses_libraries_target)
-$(my_built_odex): PRIVATE_USES_LIBRARY_NAMES := $(my_lib_names)
-$(my_built_odex): PRIVATE_OPTIONAL_USES_LIBRARY_NAMES := $(my_optional_lib_names)
-$(my_built_odex): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
-$(my_built_odex): PRIVATE_DEX_LOCATION := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
-$(my_built_odex): PRIVATE_DEX_PREOPT_IMAGE_LOCATION := $(my_dex_preopt_image_location)
-$(my_built_odex): PRIVATE_DEX2OAT_CLASS_LOADER_CONTEXT := $(my_dex_preopt_class_loader_context)
-$(my_built_odex): PRIVATE_DEX2OAT_STORED_CLASS_LOADER_CONTEXT_LIBS := $(my_stored_preopt_class_loader_context_libs)
-$(my_built_odex) : $($(my_2nd_arch_prefix)DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT) \
- $(DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS) \
- $(my_dex_preopt_image_filename)
-
-my_installed_odex := $(call get-odex-installed-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(LOCAL_INSTALLED_MODULE))
-
-my_built_vdex := $(patsubst %.odex,%.vdex,$(my_built_odex))
-my_installed_vdex := $(patsubst %.odex,%.vdex,$(my_installed_odex))
-my_installed_art := $(patsubst %.odex,%.art,$(my_installed_odex))
-
-ifndef LOCAL_DEX_PREOPT_APP_IMAGE
-# Local override not defined, use the global one.
-ifeq (true,$(WITH_DEX_PREOPT_APP_IMAGE))
- LOCAL_DEX_PREOPT_APP_IMAGE := true
-endif
-endif
-
-ifeq (true,$(LOCAL_DEX_PREOPT_APP_IMAGE))
-my_built_art := $(patsubst %.odex,%.art,$(my_built_odex))
-$(my_built_odex): PRIVATE_ART_FILE_PREOPT_FLAGS := --app-image-file=$(my_built_art) \
- --image-format=lz4
-$(eval $(call copy-one-file,$(my_built_art),$(my_installed_art)))
-built_art += $(my_built_art)
-installed_art += $(my_installed_art)
-built_installed_art += $(my_built_art):$(my_installed_art)
-endif
-
-$(eval $(call copy-one-file,$(my_built_odex),$(my_installed_odex)))
-$(eval $(call copy-one-file,$(my_built_vdex),$(my_installed_vdex)))
-
-built_odex += $(my_built_odex)
-built_vdex += $(my_built_vdex)
-
-installed_odex += $(my_installed_odex)
-installed_vdex += $(my_installed_vdex)
-
-built_installed_odex += $(my_built_odex):$(my_installed_odex)
-built_installed_vdex += $(my_built_vdex):$(my_installed_vdex)
diff --git a/core/shared_library.mk b/core/shared_library.mk
index a15b1a6eaa..2832c179b0 100644
--- a/core/shared_library.mk
+++ b/core/shared_library.mk
@@ -36,7 +36,6 @@ include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# Build for TARGET_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index ab887e0a05..858884a436 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -13,9 +13,6 @@ endif
ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),)
LOCAL_MODULE_SUFFIX := $(TARGET_SHLIB_SUFFIX)
endif
-ifneq ($(strip $(OVERRIDE_BUILT_MODULE_PATH)),)
-$(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
-endif
ifneq ($(strip $(LOCAL_MODULE_STEM)$(LOCAL_BUILT_MODULE_STEM)$(LOCAL_MODULE_STEM_32)$(LOCAL_MODULE_STEM_64)),)
$(error $(LOCAL_PATH): Cannot set module stem for a library)
endif
@@ -34,13 +31,14 @@ endif
ifndef skip_build_from_source
-# Put the built targets of all shared libraries in a common directory
-# to simplify the link line.
-OVERRIDE_BUILT_MODULE_PATH := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)
-
include $(BUILD_SYSTEM)/dynamic_binary.mk
# Define PRIVATE_ variables from global vars
+ifeq ($(LOCAL_NO_LIBCRT_BUILTINS),true)
+my_target_libcrt_builtins :=
+else
+my_target_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+endif
ifeq ($(LOCAL_NO_LIBGCC),true)
my_target_libgcc :=
else
@@ -51,16 +49,17 @@ ifeq ($(LOCAL_NO_CRT),true)
my_target_crtbegin_so_o :=
my_target_crtend_so_o :=
else ifdef LOCAL_USE_VNDK
-my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_so.vendor.o
-my_target_crtend_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_so.vendor.o
+my_target_crtbegin_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_so.vendor)
+my_target_crtend_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_so.vendor)
else
-my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_so.o
-my_target_crtend_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_so.o
+my_target_crtbegin_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_so)
+my_target_crtend_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_so)
endif
ifneq ($(LOCAL_SDK_VERSION),)
my_target_crtbegin_so_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_so.o)
my_target_crtend_so_o := $(wildcard $(my_ndk_sysroot_lib)/crtend_so.o)
endif
+$(linked_module): PRIVATE_TARGET_LIBCRT_BUILTINS := $(my_target_libcrt_builtins)
$(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
$(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_SO_O := $(my_target_crtbegin_so_o)
@@ -71,13 +70,14 @@ $(linked_module): \
$(all_libraries) \
$(my_target_crtbegin_so_o) \
$(my_target_crtend_so_o) \
+ $(my_target_libcrt_builtins) \
$(my_target_libgcc) \
$(my_target_libatomic) \
$(LOCAL_ADDITIONAL_DEPENDENCIES)
$(transform-o-to-shared-lib)
ifeq ($(my_native_coverage),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
built_whole_gcno_libraries := \
$(foreach lib,$(my_whole_static_libraries), \
@@ -99,11 +99,11 @@ endif
GCNO_ARCHIVE := $(basename $(my_installed_module_stem))$(gcno_suffix)
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries)) $(strip $(built_static_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
$(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries) $(built_static_gcno_libraries)
- $(transform-o-to-static-lib)
+ $(package-coverage-files)
$(my_coverage_path)/$(GCNO_ARCHIVE) : $(intermediates)/$(GCNO_ARCHIVE)
$(copy-file-to-target)
diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk
new file mode 100644
index 0000000000..c783b42d70
--- /dev/null
+++ b/core/soong_android_app_set.mk
@@ -0,0 +1,37 @@
+# App prebuilt coming from Soong.
+# Extra inputs:
+# LOCAL_APK_SET_MASTER_FILE
+
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+  $(call pretty-error,soong_android_app_set.mk may only be used from Soong)
+endif
+
+LOCAL_BUILT_MODULE_STEM := $(LOCAL_APK_SET_MASTER_FILE)
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_APK_SET_MASTER_FILE)
+
+#######################################
+include $(BUILD_SYSTEM)/base_rules.mk
+#######################################
+
+## Extract the master APK from an APK set into LOCAL_BUILT_MODULE
+# $(1) APK set
+# $(2) master APK entry (e.g., splits/base-master.apk)
+
+define extract-master-from-apk-set
+$(LOCAL_BUILT_MODULE): $(1)
+ @echo "Extracting $$@"
+ unzip -pq $$< $(2) >$$@
+endef
+
+$(eval $(call extract-master-from-apk-set,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_APK_SET_MASTER_FILE)))
+LOCAL_POST_INSTALL_CMD := unzip -qo -j -d $(dir $(LOCAL_INSTALLED_MODULE)) \
+ $(LOCAL_PREBUILT_MODULE_FILE) -x $(LOCAL_APK_SET_MASTER_FILE)
+$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
+PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
+
+PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
+# We can't know yet exactly which apk files will be output.
+# Let extract_apks generate apkcerts.txt and merge it later.
+PACKAGES.$(LOCAL_MODULE).APKCERTS_FILE := $(LOCAL_APKCERTS_FILE)
+
+SOONG_ALREADY_CONV += $(LOCAL_MODULE)
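For a hypothetical module whose Soong-provided set is MyApp.apks with master entry splits/base-master.apk, the define above expands to roughly the rule below; LOCAL_POST_INSTALL_CMD then unpacks every other entry next to the installed master APK. (Recipe lines are tab-indented in the real makefile.)

    $(LOCAL_BUILT_MODULE): MyApp.apks
        @echo "Extracting $@"
        unzip -pq $< splits/base-master.apk >$@
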
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index ae0d1968ca..8d92b20e07 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -1,6 +1,16 @@
# App prebuilt coming from Soong.
# Extra inputs:
+# LOCAL_SOONG_BUILT_INSTALLED
+# LOCAL_SOONG_BUNDLE
+# LOCAL_SOONG_CLASSES_JAR
+# LOCAL_SOONG_DEX_JAR
+# LOCAL_SOONG_HEADER_JAR
+# LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
+# LOCAL_SOONG_PROGUARD_DICT
# LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+# LOCAL_SOONG_RRO_DIRS
+# LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
+# LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
$(call pretty-error,soong_app_prebuilt.mk may only be used from Soong)
@@ -9,16 +19,52 @@ endif
LOCAL_MODULE_SUFFIX := .apk
LOCAL_BUILT_MODULE_STEM := package.apk
-#######################################
-include $(BUILD_SYSTEM)/base_rules.mk
-#######################################
+intermediates.COMMON := $(call local-intermediates-dir,COMMON)
full_classes_jar := $(intermediates.COMMON)/classes.jar
full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
-$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
-$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
+#######################################
+include $(BUILD_SYSTEM)/base_rules.mk
+#######################################
+
+ifdef LOCAL_SOONG_CLASSES_JAR
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
+ $(eval $(call add-dependency,$(LOCAL_BUILT_MODULE),$(full_classes_jar)))
+
+ ifneq ($(TURBINE_ENABLED),false)
+ ifdef LOCAL_SOONG_HEADER_JAR
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
+ else
+ $(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
+ endif
+ endif # TURBINE_ENABLED != false
+endif
+
+# Run veridex on product, product_services and vendor modules.
+# We skip it for unbundled app builds where we cannot build veridex.
+module_run_appcompat :=
+ifeq (true,$(non_system_module))
+ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
+ module_run_appcompat := true
+endif
+endif
+endif
+
+ifeq ($(module_run_appcompat),true)
+ $(LOCAL_BUILT_MODULE): $(appcompat-files)
+ $(LOCAL_BUILT_MODULE): PRIVATE_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+ $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ @echo "Copy: $@"
+ $(copy-file-to-target)
+ $(call appcompat-header, aapt2)
+ $(run-appcompat)
+else
+ $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
+endif
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
@@ -34,17 +80,6 @@ ifdef LOCAL_SOONG_PROGUARD_DICT
$(intermediates.COMMON)/proguard_dictionary)
endif
-ifneq ($(TURBINE_ENABLED),false)
-ifdef LOCAL_SOONG_HEADER_JAR
-$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
-else
-$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
-endif
-endif # TURBINE_ENABLED != false
-
-
-$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
-
ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
resource_export_package := $(intermediates.COMMON)/package-export.apk
resource_export_stamp := $(intermediates.COMMON)/src/R.stamp
@@ -52,7 +87,7 @@ resource_export_stamp := $(intermediates.COMMON)/src/R.stamp
$(resource_export_package): PRIVATE_STAMP := $(resource_export_stamp)
$(resource_export_package): .KATI_IMPLICIT_OUTPUTS := $(resource_export_stamp)
$(resource_export_package): $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
- @echo "Copy: $$@"
+ @echo "Copy: $@"
$(copy-file-to-target)
touch $(PRIVATE_STAMP)
$(call add-dependency,$(LOCAL_BUILT_MODULE),$(resource_export_package))
@@ -61,19 +96,49 @@ endif # LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
java-dex: $(LOCAL_SOONG_DEX_JAR)
-ifdef LOCAL_DEX_PREOPT
-# defines built_odex along with rule to install odex
-include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
-$(built_odex): $(LOCAL_SOONG_DEX_JAR)
- $(call dexpreopt-one-file,$<,$@)
+ifneq ($(BUILD_PLATFORM_ZIP),)
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(dir $(LOCAL_BUILT_MODULE))package.dex.apk))
endif
+my_built_installed := $(foreach f,$(LOCAL_SOONG_BUILT_INSTALLED),\
+ $(call word-colon,1,$(f)):$(PRODUCT_OUT)$(call word-colon,2,$(f)))
+my_installed := $(call copy-many-files, $(my_built_installed))
+ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_installed)
+$(my_all_targets): $(my_installed)
+
+# Embedded JNI libraries have already been handled by Soong.
+my_embed_jni :=
+my_prebuilt_jni_libs :=
+ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
+ my_2nd_arch_prefix :=
+ LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+ include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+endif
+ifdef TARGET_2ND_ARCH
+ ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
+ my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+ LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+ include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+ endif
+endif
+LOCAL_SHARED_JNI_LIBRARIES :=
+my_embed_jni :=
+my_prebuilt_jni_libs :=
+my_2nd_arch_prefix :=
+
PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
ifdef LOCAL_CERTIFICATE
PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE)
PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(patsubst %.x509.pem,%.pk8,$(LOCAL_CERTIFICATE))
endif
+include $(BUILD_SYSTEM)/app_certificate_validate.mk
+PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
+
+ifdef LOCAL_SOONG_BUNDLE
+ ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(LOCAL_SOONG_BUNDLE)
+endif
ifndef LOCAL_IS_HOST_MODULE
ifeq ($(LOCAL_SDK_VERSION),system_current)
@@ -94,11 +159,26 @@ my_common := COMMON
include $(BUILD_SYSTEM)/link_type.mk
endif # !LOCAL_IS_HOST_MODULE
-ifdef LOCAL_SOONG_RRO_DIRS
+ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
+ $(call append_enforce_rro_sources, \
+ $(my_register_name), \
+ false, \
+ $(LOCAL_FULL_MANIFEST_FILE), \
+ $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+ $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
+ vendor \
+ )
+endif
+
+ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
$(call append_enforce_rro_sources, \
$(my_register_name), \
false, \
$(LOCAL_FULL_MANIFEST_FILE), \
- $(LOCAL_EXPORT_PACKAGE_RESOURCES), \
- $(LOCAL_SOONG_RRO_DIRS))
+ $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+ $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
+ product \
+ )
endif
+
+SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
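soong_app_prebuilt.mk is only ever included from the Soong-generated Android.mk; the fragment that feeds it looks roughly like the sketch below, with a hypothetical module name and purely illustrative paths:

    include $(CLEAR_VARS)
    LOCAL_MODULE := MyApp
    LOCAL_MODULE_CLASS := APPS
    # Outputs already built by Soong (paths illustrative).
    LOCAL_PREBUILT_MODULE_FILE := out/soong/.intermediates/apps/MyApp/android_common/MyApp.apk
    LOCAL_SOONG_CLASSES_JAR := out/soong/.intermediates/apps/MyApp/android_common/combined/MyApp.jar
    LOCAL_SOONG_HEADER_JAR := out/soong/.intermediates/apps/MyApp/android_common/turbine-combined/MyApp.jar
    include $(BUILD_SYSTEM)/soong_app_prebuilt.mk
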
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
new file mode 100644
index 0000000000..2e7d47794c
--- /dev/null
+++ b/core/soong_cc_prebuilt.mk
@@ -0,0 +1,234 @@
+# Native prebuilt coming from Soong.
+# Extra inputs:
+# LOCAL_SOONG_LINK_TYPE
+# LOCAL_SOONG_TOC
+# LOCAL_SOONG_UNSTRIPPED_BINARY
+
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(call pretty-error,soong_cc_prebuilt.mk may only be used from Soong)
+endif
+
+ifdef LOCAL_IS_HOST_MODULE
+ ifneq ($(HOST_OS),$(LOCAL_MODULE_HOST_OS))
+ my_prefix := HOST_CROSS_
+ LOCAL_HOST_PREFIX := $(my_prefix)
+ else
+ my_prefix := HOST_
+ LOCAL_HOST_PREFIX :=
+ endif
+else
+ my_prefix := TARGET_
+endif
+
+ifeq ($($(my_prefix)ARCH),$(LOCAL_MODULE_$(my_prefix)ARCH))
+ # primary arch
+ LOCAL_2ND_ARCH_VAR_PREFIX :=
+else ifeq ($($(my_prefix)2ND_ARCH),$(LOCAL_MODULE_$(my_prefix)ARCH))
+ # secondary arch
+ LOCAL_2ND_ARCH_VAR_PREFIX := $($(my_prefix)2ND_ARCH_VAR_PREFIX)
+else
+ $(call pretty-error,Unsupported LOCAL_MODULE_$(my_prefix)ARCH=$(LOCAL_MODULE_$(my_prefix)ARCH))
+endif
+
+skip_module :=
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ ifndef LOCAL_IS_HOST_MODULE
+ ifdef LOCAL_2ND_ARCH_VAR_PREFIX
+ # Only support shared and static libraries and tests for translated arch
+ ifeq ($(filter SHARED_LIBRARIES STATIC_LIBRARIES HEADER_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ skip_module := true
+ endif
+ endif
+ endif
+endif
+
+ifndef skip_module
+
+# Don't install static libraries by default.
+ifndef LOCAL_UNINSTALLABLE_MODULE
+ ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
+ LOCAL_UNINSTALLABLE_MODULE := true
+ endif
+endif
+
+#######################################
+include $(BUILD_SYSTEM)/base_rules.mk
+#######################################
+
+ifneq ($(filter STATIC_LIBRARIES SHARED_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+ # Soong module is a static or shared library
+ export_includes := $(intermediates)/export_includes
+ $(export_includes): PRIVATE_EXPORT_CFLAGS := $(LOCAL_EXPORT_CFLAGS)
+ $(export_includes): $(LOCAL_EXPORT_C_INCLUDE_DEPS)
+ @echo Export includes file: $< -- $@
+ $(hide) mkdir -p $(dir $@) && rm -f $@
+ ifdef LOCAL_EXPORT_CFLAGS
+ $(hide) echo "$(PRIVATE_EXPORT_CFLAGS)" >$@
+ else
+ $(hide) touch $@
+ endif
+
+ ifdef LOCAL_SOONG_TOC
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_TOC),$(LOCAL_BUILT_MODULE).toc))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE).toc,$(LOCAL_BUILT_MODULE))
+ $(my_all_targets): $(LOCAL_BUILT_MODULE).toc
+ endif
+
+ SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
+
+ my_link_type := $(LOCAL_SOONG_LINK_TYPE)
+ my_warn_types :=
+ my_allowed_types :=
+ my_link_deps :=
+ my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+ my_common :=
+ include $(BUILD_SYSTEM)/link_type.mk
+endif
+
+ifdef LOCAL_USE_VNDK
+  ifneq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
+    name_without_suffix := $(patsubst %.vendor,%,$(LOCAL_MODULE))
+    ifneq ($(name_without_suffix),$(LOCAL_MODULE))
+      SPLIT_VENDOR.$(LOCAL_MODULE_CLASS).$(name_without_suffix) := 1
+    endif
+    name_without_suffix :=
+  endif
+endif
+
+# Check prebuilt ELF binaries.
+ifneq ($(LOCAL_CHECK_ELF_FILES),)
+my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+include $(BUILD_SYSTEM)/check_elf_file.mk
+endif
+
+# The real dependency will be added after all Android.mks are loaded and the install paths
+# of the shared libraries are determined.
+ifdef LOCAL_INSTALLED_MODULE
+ ifdef LOCAL_SHARED_LIBRARIES
+ my_shared_libraries := $(LOCAL_SHARED_LIBRARIES)
+ ifdef LOCAL_USE_VNDK
+ my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ endif
+ $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)DEPENDENCIES_ON_SHARED_LIBRARIES += \
+ $(my_register_name):$(LOCAL_INSTALLED_MODULE):$(subst $(space),$(comma),$(my_shared_libraries))
+ endif
+endif
+
+ifeq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
+ # Add $(LOCAL_BUILT_MODULE) as a dependency to no_vendor_variant_vndk_check so
+ # that the vendor variant will be built and checked against the core variant.
+ no_vendor_variant_vndk_check: $(LOCAL_BUILT_MODULE)
+
+ my_core_register_name := $(subst .vendor,,$(my_register_name))
+ my_core_variant_files := $(call module-target-built-files,$(my_core_register_name))
+ my_core_shared_lib := $(sort $(filter %.so,$(my_core_variant_files)))
+ $(LOCAL_BUILT_MODULE): PRIVATE_CORE_VARIANT := $(my_core_shared_lib)
+
+ # The built vendor variant library needs to depend on the built core variant
+ # so that we can perform identity check against the core variant.
+ $(LOCAL_BUILT_MODULE): $(my_core_shared_lib)
+endif
+
+ifeq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE) $(LIBRARY_IDENTITY_CHECK_SCRIPT)
+ $(call verify-vndk-libs-identical,\
+ $(PRIVATE_CORE_VARIANT),\
+ $<,\
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX))
+ $(copy-file-to-target)
+else
+$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ $(transform-prebuilt-to-target)
+endif
+ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ $(hide) chmod +x $@
+endif
+
+ifndef LOCAL_IS_HOST_MODULE
+ ifdef LOCAL_SOONG_UNSTRIPPED_BINARY
+ ifneq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
+ my_symbol_path := $(if $(LOCAL_SOONG_SYMBOL_PATH),$(LOCAL_SOONG_SYMBOL_PATH),$(my_module_path))
+ # Store a copy with symbols for symbolic debugging
+ my_unstripped_path := $(TARGET_OUT_UNSTRIPPED)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_symbol_path))
+ # drop /root as /root is mounted as /
+ my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
+ symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),$(symbolic_output))
+
+ ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
+ my_breakpad_path := $(TARGET_OUT_BREAKPAD)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_symbol_path))
+ breakpad_output := $(my_breakpad_path)/$(my_installed_module_stem).sym
+ $(breakpad_output) : $(LOCAL_SOONG_UNSTRIPPED_BINARY) | $(BREAKPAD_DUMP_SYMS) $(PRIVATE_READELF)
+ @echo "target breakpad: $(PRIVATE_MODULE) ($@)"
+ @mkdir -p $(dir $@)
+ $(hide) if $(PRIVATE_READELF) -S $< > /dev/null 2>&1 ; then \
+ $(BREAKPAD_DUMP_SYMS) -c $< > $@ ; \
+ else \
+ echo "skipped for non-elf file."; \
+ touch $@; \
+ fi
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),$(breakpad_output))
+ endif
+ endif
+ endif
+endif
+
+ifeq ($(NATIVE_COVERAGE),true)
+ ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
+ $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).zip))
+ ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
+ ifdef LOCAL_IS_HOST_MODULE
+ my_coverage_path := $($(my_prefix)OUT_COVERAGE)/$(patsubst $($(my_prefix)OUT)/%,%,$(my_module_path))
+ else
+ my_coverage_path := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
+ endif
+ my_coverage_path := $(my_coverage_path)/$(patsubst %.so,%,$(my_installed_module_stem)).zip
+ $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(my_coverage_path)))
+ $(LOCAL_BUILT_MODULE): $(my_coverage_path)
+ endif
+ else
+ # Coverage information is needed when static lib is a dependency of another
+ # coverage-enabled module.
+ ifeq (STATIC_LIBRARIES, $(LOCAL_MODULE_CLASS))
+ GCNO_ARCHIVE := $(LOCAL_MODULE).zip
+ $(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
+ $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS :=
+ $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES :=
+ $(intermediates)/$(GCNO_ARCHIVE) :
+ $(package-coverage-files)
+ endif
+ endif
+endif
+
+# A product may be configured to strip everything in some build variants.
+# We do the stripping as a post-install command so that LOCAL_BUILT_MODULE
+# still has its symbols and we don't need to clean (and relink) it when
+# switching build variants.
+ifneq ($(filter $(STRIP_EVERYTHING_BUILD_VARIANTS),$(TARGET_BUILD_VARIANT)),)
+$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) --strip-all $(LOCAL_INSTALLED_MODULE)
+endif
+
+$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)
+
+# We don't care about installed static libraries, since the libraries have
+# already been linked into the module at that point. We do, however, care
+# about the NOTICE files for any static libraries that we use.
+# (see notice_files.mk)
+#
+# Filter out some NDK libraries that are not being exported.
+my_static_libraries := \
+ $(filter-out ndk_libc++_static ndk_libc++abi ndk_libandroid_support ndk_libunwind, \
+ $(LOCAL_STATIC_LIBRARIES))
+installed_static_library_notice_file_targets := \
+ $(foreach lib,$(my_static_libraries) $(LOCAL_WHOLE_STATIC_LIBRARIES), \
+ NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-STATIC_LIBRARIES-$(lib))
+
+$(notice_target): | $(installed_static_library_notice_file_targets)
+$(LOCAL_INSTALLED_MODULE): | $(notice_target)
+
+endif # !skip_module
+
+skip_module :=
diff --git a/core/soong_config.mk b/core/soong_config.mk
index f8cb2fb3b8..cdad2f260a 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -13,32 +13,14 @@ BINDER32BIT := true
endif
endif
-ifeq ($(WRITE_SOONG_VARIABLES),true)
-# Converts a list to a JSON list.
-# $1: List separator.
-# $2: List.
-_json_list = [$(if $(2),"$(subst $(1),"$(comma)",$(2))")]
-
-# Converts a space-separated list to a JSON list.
-json_list = $(call _json_list,$(space),$(1))
-
-# Converts a comma-separated list to a JSON list.
-csv_to_json_list = $(call _json_list,$(comma),$(1))
+include $(BUILD_SYSTEM)/dex_preopt_config.mk
-# 1: Key name
-# 2: Value
-add_json_val = $(eval _contents := $$(_contents) "$$(strip $$(1))":$$(space)$$(strip $$(2))$$(comma)$$(newline))
-add_json_str = $(call add_json_val,$(1),"$(strip $(2))")
-add_json_list = $(call add_json_val,$(1),$(call json_list,$(patsubst %,%,$(2))))
-add_json_csv = $(call add_json_val,$(1),$(call csv_to_json_list,$(strip $(2))))
-add_json_bool = $(call add_json_val,$(1),$(if $(strip $(2)),true,false))
-
-invert_bool = $(if $(strip $(1)),,true)
+ifeq ($(WRITE_SOONG_VARIABLES),true)
# Create soong.variables with copies of makefile settings. Runs every build,
# but only updates soong.variables if it changes
$(shell mkdir -p $(dir $(SOONG_VARIABLES)))
-_contents := {$(newline)
+$(call json_start)
$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT))
@@ -51,9 +33,15 @@ $(call add_json_str, Platform_sdk_codename, $(PLATFORM_VERSION_CODE
$(call add_json_bool, Platform_sdk_final, $(filter REL,$(PLATFORM_VERSION_CODENAME)))
$(call add_json_csv, Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
$(call add_json_csv, Platform_version_future_codenames, $(PLATFORM_VERSION_FUTURE_CODENAMES))
+$(call add_json_str, Platform_security_patch, $(PLATFORM_SECURITY_PATCH))
+$(call add_json_str, Platform_preview_sdk_version, $(PLATFORM_PREVIEW_SDK_VERSION))
+$(call add_json_str, Platform_base_os, $(PLATFORM_BASE_OS))
+
+$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
$(call add_json_bool, Allow_missing_dependencies, $(ALLOW_MISSING_DEPENDENCIES))
$(call add_json_bool, Unbundled_build, $(TARGET_BUILD_APPS))
+$(call add_json_bool, Unbundled_build_sdks_from_source, $(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
$(call add_json_bool, Pdk, $(filter true,$(TARGET_BUILD_PDK)))
$(call add_json_bool, Debuggable, $(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
@@ -78,7 +66,8 @@ $(call add_json_str, CrossHost, $(HOST_CROSS_OS))
$(call add_json_str, CrossHostArch, $(HOST_CROSS_ARCH))
$(call add_json_str, CrossHostSecondaryArch, $(HOST_CROSS_2ND_ARCH))
-$(call add_json_list, ResourceOverlays, $(PRODUCT_PACKAGE_OVERLAYS) $(DEVICE_PACKAGE_OVERLAYS))
+$(call add_json_list, DeviceResourceOverlays, $(DEVICE_PACKAGE_OVERLAYS))
+$(call add_json_list, ProductResourceOverlays, $(PRODUCT_PACKAGE_OVERLAYS))
$(call add_json_list, EnforceRROTargets, $(PRODUCT_ENFORCE_RRO_TARGETS))
$(call add_json_list, EnforceRROExcludedOverlays, $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
@@ -100,8 +89,12 @@ $(call add_json_bool, Safestack, $(filter true,$(USE_SAF
$(call add_json_bool, EnableCFI, $(call invert_bool,$(filter false,$(ENABLE_CFI))))
$(call add_json_list, CFIExcludePaths, $(CFI_EXCLUDE_PATHS) $(PRODUCT_CFI_EXCLUDE_PATHS))
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
+$(call add_json_bool, EnableXOM, $(call invert_bool,$(filter false,$(ENABLE_XOM))))
+$(call add_json_list, XOMExcludePaths, $(XOM_EXCLUDE_PATHS) $(PRODUCT_XOM_EXCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
+$(call add_json_bool, DisableScudo, $(filter true,$(PRODUCT_DISABLE_SCUDO)))
+
$(call add_json_bool, ClangTidy, $(filter 1 true,$(WITH_TIDY)))
$(call add_json_str, TidyChecks, $(WITH_TIDY_CHECKS))
@@ -111,10 +104,9 @@ $(call add_json_list, CoverageExcludePaths, $(COVERAGE_EXCLUDE_PATH
$(call add_json_bool, ArtUseReadBarrier, $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
$(call add_json_bool, Binder32bit, $(BINDER32BIT))
-$(call add_json_bool, Brillo, $(BRILLO))
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
-$(call add_json_bool, Device_uses_hwc2, $(filter true,$(TARGET_USES_HWC2)))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_PROJECT_SYSTEM_INCLUDES))
+$(call add_json_bool, DevicePrefer32BitApps, $(filter true,$(TARGET_PREFER_32_BIT_APPS)))
$(call add_json_bool, DevicePrefer32BitExecutables, $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)))
$(call add_json_str, DeviceVndkVersion, $(BOARD_VNDK_VERSION))
$(call add_json_str, Platform_vndk_version, $(PLATFORM_VNDK_VERSION))
@@ -124,36 +116,72 @@ $(call add_json_list, Platform_systemsdk_versions, $(PLATFORM_SYSTEMSDK_VE
$(call add_json_bool, Malloc_not_svelte, $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
+$(call add_json_bool, UncompressPrivAppDex, $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
+$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
+
+$(call add_json_list, BootJars, $(PRODUCT_BOOT_JARS))
+
+$(call add_json_bool, VndkUseCoreVariant, $(TARGET_VNDK_USE_CORE_VARIANT))
+
+$(call add_json_bool, Product_is_iot, $(filter true,$(PRODUCT_IOT)))
+
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
+$(call add_json_bool, Check_elf_files, $(filter true,$(PRODUCT_CHECK_ELF_FILES)))
+
$(call add_json_bool, Uml, $(filter true,$(TARGET_USER_MODE_LINUX)))
$(call add_json_bool, Use_lmkd_stats_log, $(filter true,$(TARGET_LMKD_STATS_LOG)))
$(call add_json_str, VendorPath, $(TARGET_COPY_OUT_VENDOR))
$(call add_json_str, OdmPath, $(TARGET_COPY_OUT_ODM))
$(call add_json_str, ProductPath, $(TARGET_COPY_OUT_PRODUCT))
+$(call add_json_str, ProductServicesPath, $(TARGET_COPY_OUT_PRODUCT_SERVICES))
$(call add_json_bool, MinimizeJavaDebugInfo, $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
$(call add_json_bool, UseGoma, $(filter-out false,$(USE_GOMA)))
+$(call add_json_bool, UseRBE, $(filter-out false,$(USE_RBE)))
+$(call add_json_bool, UseRBEJAVAC, $(filter-out false,$(RBE_JAVAC)))
+$(call add_json_bool, UseRBER8, $(filter-out false,$(RBE_R8)))
+$(call add_json_bool, UseRBED8, $(filter-out false,$(RBE_D8)))
$(call add_json_bool, Arc, $(filter true,$(TARGET_ARC)))
-$(call add_json_str, DistDir, $(if $(dist_goal), $(DIST_DIR)))
-
$(call add_json_list, NamespacesToExport, $(PRODUCT_SOONG_NAMESPACES))
$(call add_json_list, PgoAdditionalProfileDirs, $(PGO_ADDITIONAL_PROFILE_DIRS))
-_contents := $(_contents) "VendorVars": {$(newline)
+$(call add_json_list, BoardVendorSepolicyDirs, $(BOARD_SEPOLICY_DIRS))
+$(call add_json_list, BoardOdmSepolicyDirs, $(BOARD_ODM_SEPOLICY_DIRS))
+$(call add_json_list, BoardPlatPublicSepolicyDirs, $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
+$(call add_json_list, BoardPlatPrivateSepolicyDirs, $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+
+$(call add_json_bool, FlattenApex, $(filter true,$(TARGET_FLATTEN_APEX)))
+
+$(call add_json_str, DexpreoptGlobalConfig, $(DEX_PREOPT_CONFIG))
+
+$(call add_json_list, ManifestPackageNameOverrides, $(PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES))
+$(call add_json_list, PackageNameOverrides, $(PRODUCT_PACKAGE_NAME_OVERRIDES))
+$(call add_json_list, CertificateOverrides, $(PRODUCT_CERTIFICATE_OVERRIDES))
+
+$(call add_json_bool, EnforceSystemCertificate, $(ENFORCE_SYSTEM_CERTIFICATE))
+$(call add_json_list, EnforceSystemCertificateWhitelist, $(ENFORCE_SYSTEM_CERTIFICATE_WHITELIST))
+
+$(call add_json_list, ProductHiddenAPIStubs, $(PRODUCT_HIDDENAPI_STUBS))
+$(call add_json_list, ProductHiddenAPIStubsSystem, $(PRODUCT_HIDDENAPI_STUBS_SYSTEM))
+$(call add_json_list, ProductHiddenAPIStubsTest, $(PRODUCT_HIDDENAPI_STUBS_TEST))
+
+$(call add_json_list, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN))
+
+$(call add_json_map, VendorVars)
$(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
- $(eval _contents := $$(_contents) "$(namespace)": {$$(newline)) \
+ $(call add_json_map, $(namespace))\
$(foreach key,$(SOONG_CONFIG_$(namespace)),\
- $(eval _contents := $$(_contents) "$(key)": "$(SOONG_CONFIG_$(namespace)_$(key))",$$(newline)))\
- $(eval _contents := $$(_contents)$(if $(strip $(SOONG_CONFIG_$(namespace))),__SV_END) },$$(newline)))
-_contents := $(_contents)$(if $(strip $(SOONG_CONFIG_NAMESPACES)),__SV_END) },$(newline)
+ $(call add_json_str,$(key),$(SOONG_CONFIG_$(namespace)_$(key))))\
+ $(call end_json_map))
+$(call end_json_map)
-_contents := $(subst $(comma)$(newline)__SV_END,$(newline),$(_contents)__SV_END}$(newline))
+$(call json_end)
-$(file >$(SOONG_VARIABLES).tmp,$(_contents))
+$(file >$(SOONG_VARIABLES).tmp,$(json_contents))
$(shell if ! cmp -s $(SOONG_VARIABLES).tmp $(SOONG_VARIABLES); then \
mv $(SOONG_VARIABLES).tmp $(SOONG_VARIABLES); \
@@ -161,15 +189,4 @@ $(shell if ! cmp -s $(SOONG_VARIABLES).tmp $(SOONG_VARIABLES); then \
rm $(SOONG_VARIABLES).tmp; \
fi)
-_json_list :=
-json_list :=
-csv_to_json_list :=
-add_json_val :=
-add_json_str :=
-add_json_list :=
-add_json_csv :=
-add_json_bool :=
-invert_bool :=
-_contents :=
-
endif # CONFIGURE_SOONG
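The VendorVars map above is populated purely from SOONG_CONFIG_* make variables. A hypothetical BoardConfig or product fragment, with namespace, key, and value made up, and the JSON it produces:

    SOONG_CONFIG_NAMESPACES += acme
    SOONG_CONFIG_acme += use_widget
    SOONG_CONFIG_acme_use_widget := true
    # Ends up in soong.variables as:
    #   "VendorVars": {"acme": {"use_widget": "true"}}
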
diff --git a/core/soong_droiddoc_prebuilt.mk b/core/soong_droiddoc_prebuilt.mk
new file mode 100644
index 0000000000..c0467df2be
--- /dev/null
+++ b/core/soong_droiddoc_prebuilt.mk
@@ -0,0 +1,44 @@
+# Droiddoc prebuilt coming from Soong.
+
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(call pretty-error,soong_droiddoc_prebuilt.mk may only be used from Soong)
+endif
+
+ifdef LOCAL_DROIDDOC_STUBS_SRCJAR
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_SRCJAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
+
+.PHONY: $(LOCAL_MODULE)
+$(LOCAL_MODULE) : $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
+endif
+
+ifdef LOCAL_DROIDDOC_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip)
+
+.PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-docs.zip
+$(LOCAL_MODULE) $(LOCAL_MODULE)-docs.zip : $(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip
+endif
+
+ifdef LOCAL_DROIDDOC_ANNOTATIONS_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_ANNOTATIONS_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip))
+endif
+
+ifdef LOCAL_DROIDDOC_API_VERSIONS_XML
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_API_VERSIONS_XML),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml))
+endif
+
+ifdef LOCAL_DROIDDOC_JDIFF_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_JDIFF_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip))
+$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip)
+
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip
+
+.PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff
+$(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff : $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip
+endif
+
+ifdef LOCAL_DROIDDOC_METADATA_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_METADATA_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip))
+endif
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 13b5f71f02..9692a99f46 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -1,5 +1,7 @@
# Java prebuilt coming from Soong.
# Extra inputs:
+# LOCAL_SOONG_BUILT_INSTALLED
+# LOCAL_SOONG_CLASSES_JAR
# LOCAL_SOONG_HEADER_JAR
# LOCAL_SOONG_DEX_JAR
# LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
@@ -11,27 +13,37 @@ endif
LOCAL_MODULE_SUFFIX := .jar
LOCAL_BUILT_MODULE_STEM := javalib.jar
-#######################################
-include $(BUILD_SYSTEM)/base_rules.mk
-#######################################
+intermediates.COMMON := $(call local-intermediates-dir,COMMON)
full_classes_jar := $(intermediates.COMMON)/classes.jar
full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
common_javalib.jar := $(intermediates.COMMON)/javalib.jar
-$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
-$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
-
-ifdef LOCAL_DROIDDOC_STUBS_JAR
-$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_JAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
-ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
+ifdef LOCAL_SOONG_AAR
+ LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_AAR)
endif
-ifdef LOCAL_DROIDDOC_DOC_ZIP
-$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+#######################################
+include $(BUILD_SYSTEM)/base_rules.mk
+#######################################
+
+ifdef LOCAL_SOONG_CLASSES_JAR
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
+ $(eval $(call add-dependency,$(LOCAL_BUILT_MODULE),$(full_classes_jar)))
+
+ ifneq ($(TURBINE_ENABLED),false)
+ ifdef LOCAL_SOONG_HEADER_JAR
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
+ else
+ $(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
+ endif
+ endif # TURBINE_ENABLED != false
endif
+$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
+
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
$(intermediates.COMMON)/jacoco-report-classes.jar))
@@ -63,63 +75,61 @@ ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
my_static_library_extra_packages := $(intermediates.COMMON)/extra_packages
$(eval $(call copy-one-file,$(LOCAL_SOONG_STATIC_LIBRARY_EXTRA_PACKAGES),$(my_static_library_extra_packages)))
$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_static_library_extra_packages))
+
+ my_static_library_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
+ $(eval $(call copy-one-file,$(LOCAL_FULL_MANIFEST_FILE),$(my_static_library_android_manifest)))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_static_library_android_manifest))
endif # LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
-ifneq ($(TURBINE_ENABLED),false)
-ifdef LOCAL_SOONG_HEADER_JAR
-$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
-else
-$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
-endif
-endif # TURBINE_ENABLED != false
ifdef LOCAL_SOONG_DEX_JAR
ifndef LOCAL_IS_HOST_MODULE
ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
- $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
- else # !is_boot_jar
- $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+ ifeq (true,$(WITH_DEXPREOPT))
+ # For libart, the boot jars' odex files are replaced by $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE).
+ # We use this installed_odex trick to get boot.art installed.
+ installed_odex := $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)
+ # Append the odex for the 2nd arch if we have one.
+ installed_odex += $($(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
+ # Make sure to install the .odex and .vdex when you run "make <module_name>"
+ $(my_all_targets): $(installed_odex)
+ # Copy $(LOCAL_BUILT_MODULE) and its dependencies when installing boot.art
+ $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(LOCAL_BUILT_MODULE)
+ endif
endif # is_boot_jar
- $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar) $(full_classes_header_jar)))
-
- dex_preopt_profile_src_file := $(common_javalib.jar)
-
- # defines built_odex along with rule to install odex
- include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
-
- dex_preopt_profile_src_file :=
-
- ifdef LOCAL_DEX_PREOPT
- ifneq ($(dexpreopt_boot_jar_module),) # boot jar
- # boot jar's rules are defined in dex_preopt.mk
- dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
- $(eval $(call copy-one-file,$(dexpreopted_boot_jar),$(LOCAL_BUILT_MODULE)))
-
- # For libart boot jars, we don't have .odex files.
- else # ! boot jar
- $(built_odex): PRIVATE_MODULE := $(LOCAL_MODULE)
- # Use pattern rule - we may have multiple built odex files.
-$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(common_javalib.jar)
- @echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
- $(call dexpreopt-one-file,$<,$@)
-
- $(eval $(call dexpreopt-copy-jar,$(common_javalib.jar),$(LOCAL_BUILT_MODULE),$(LOCAL_DEX_PREOPT)))
- endif # ! boot jar
- else # LOCAL_DEX_PREOPT
- $(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
- endif # LOCAL_DEX_PREOPT
- else # LOCAL_IS_HOST_MODULE
- $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(LOCAL_BUILT_MODULE)))
- $(eval $(call add-dependency,$(LOCAL_BUILT_MODULE),$(full_classes_jar) $(full_classes_header_jar)))
+
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+ $(eval $(call add-dependency,$(LOCAL_BUILT_MODULE),$(common_javalib.jar)))
+ $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar)))
+ ifneq ($(TURBINE_ENABLED),false)
+ $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_header_jar)))
+ endif
endif
java-dex : $(LOCAL_BUILT_MODULE)
-else
- $(eval $(call copy-one-file,$(full_classes_jar),$(LOCAL_BUILT_MODULE)))
+else # LOCAL_SOONG_DEX_JAR
+ ifndef LOCAL_UNINSTALLABLE_MODULE
+ ifndef LOCAL_IS_HOST_MODULE
+ $(call pretty-error,Installable device module must have LOCAL_SOONG_DEX_JAR set)
+ endif
+ endif
+endif # LOCAL_SOONG_DEX_JAR
+
+my_built_installed := $(foreach f,$(LOCAL_SOONG_BUILT_INSTALLED),\
+ $(call word-colon,1,$(f)):$(PRODUCT_OUT)$(call word-colon,2,$(f)))
+my_installed := $(call copy-many-files, $(my_built_installed))
+ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_installed)
+$(my_register_name): $(my_installed)
+
+ifdef LOCAL_SOONG_AAR
+ ALL_MODULES.$(LOCAL_MODULE).AAR := $(LOCAL_SOONG_AAR)
endif
javac-check : $(full_classes_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_jar)
+.PHONY: javac-check-$(LOCAL_MODULE)
ifndef LOCAL_IS_HOST_MODULE
ifeq ($(LOCAL_SDK_VERSION),system_current)
@@ -143,3 +153,15 @@ my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
my_common := COMMON
include $(BUILD_SYSTEM)/link_type.mk
endif # !LOCAL_IS_HOST_MODULE
+
+# LOCAL_EXPORT_SDK_LIBRARIES set by Soong is written to the exported-sdk-libs file
+my_exported_sdk_libs_file := $(intermediates.COMMON)/exported-sdk-libs
+$(my_exported_sdk_libs_file): PRIVATE_EXPORTED_SDK_LIBS := $(LOCAL_EXPORT_SDK_LIBRARIES)
+$(my_exported_sdk_libs_file):
+ @echo "Export SDK libs $@"
+ $(hide) mkdir -p $(dir $@) && rm -f $@
+ $(if $(PRIVATE_EXPORTED_SDK_LIBS),\
+ $(hide) echo $(PRIVATE_EXPORTED_SDK_LIBS) | tr ' ' '\n' > $@,\
+ $(hide) touch $@)
+
+SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
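Editorial note: LOCAL_SOONG_BUILT_INSTALLED handled above is a list of built:installed pairs; word-colon picks one side of each pair and copy-many-files turns the rewritten list into copy rules, returning the destination paths. A rough sketch of the same colon-pair convention, with an invented pair and a local my-word-colon helper standing in for the real build-system macro:

empty :=
space := $(empty) $(empty)

# Pick word $(1) out of the colon-separated pair $(2).
my-word-colon = $(word $(1),$(subst :,$(space),$(2)))

# Example pair: built artifact on the left, install location on the right.
pair := out/soong/.intermediates/foo/foo.odex:/system/framework/oat/arm64/foo.odex
built     := $(call my-word-colon,1,$(pair))
installed := $(PRODUCT_OUT)$(call my-word-colon,2,$(pair))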
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 0429dd2d22..ee759b9d8e 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -28,13 +28,15 @@ intermediates.COMMON := $(call local-intermediates-dir,COMMON)
my_res_package :=
+# Process Support Library dependencies.
+include $(BUILD_SYSTEM)/support_libraries.mk
+
+include $(BUILD_SYSTEM)/force_aapt2.mk
+
ifdef LOCAL_AAPT2_ONLY
LOCAL_USE_AAPT2 := true
endif
-# Process Support Library dependencies.
-include $(BUILD_SYSTEM)/support_libraries.mk
-
# Hack to build static Java library with Android resource
# See bug 5714516
all_resources :=
@@ -44,8 +46,8 @@ ifdef LOCAL_RESOURCE_DIR
need_compile_res := true
LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
endif
-ifdef LOCAL_USE_AAPT2
-ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
+ifeq ($(LOCAL_USE_AAPT2),true)
+ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
need_compile_res := true
endif
endif
@@ -82,15 +84,23 @@ LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_
R_file_stamp := $(intermediates.COMMON)/src/R.stamp
LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
-ifdef LOCAL_USE_AAPT2
-# For library we treat all the resource equal with no overlay.
-my_res_resources := $(all_resources)
-my_overlay_resources :=
-# For libraries put everything in the COMMON intermediate directory.
-my_res_package := $(intermediates.COMMON)/package-res.apk
-
-LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
+ifeq ($(LOCAL_USE_AAPT2),true)
+ ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
+ # If we are using static android libraries, every source file becomes an overlay.
+ # This is to emulate old AAPT behavior which simulated library support.
+ my_res_resources :=
+ my_overlay_resources := $(all_resources)
+ else
+    # Otherwise, for a library we treat all the resources equally with no overlay.
+ my_res_resources := $(all_resources)
+ my_overlay_resources :=
+ endif
+ # For libraries put everything in the COMMON intermediate directory.
+ my_res_package := $(intermediates.COMMON)/package-res.apk
+
+ LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
endif # LOCAL_USE_AAPT2
+
endif # need_compile_res
all_res_assets := $(all_resources)
@@ -108,17 +118,17 @@ endif
framework_res_package_export :=
# Please refer to package.mk
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
framework_res_package_export := \
- $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
+ $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
else
framework_res_package_export := \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
endif
endif
-ifdef LOCAL_USE_AAPT2
-import_proguard_flag_files := $(strip $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+ifeq ($(LOCAL_USE_AAPT2),true)
+import_proguard_flag_files := $(strip $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags))
$(intermediates.COMMON)/export_proguard_flags: $(import_proguard_flag_files) $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
@echo "Export proguard flags: $@"
@@ -140,7 +150,7 @@ $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_M
# add --non-constant-id to prevent inlining constants.
# AAR needs text symbol file R.txt.
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --static-lib --output-text-symbols $(intermediates.COMMON)/R.txt
ifndef LOCAL_AAPT_NAMESPACES
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
@@ -168,10 +178,10 @@ $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_optio
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME :=
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR :=
-ifdef LOCAL_USE_AAPT2
+ifeq ($(LOCAL_USE_AAPT2),true)
# One more level with name res so we can zip up the flat resources that can be linked by apps.
my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res/res
- ifneq (,$(renderscript_target_api))
+ ifneq (,$(filter-out current,$(renderscript_target_api)))
ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
my_generated_res_zips := $(rs_generated_res_zip)
endif # renderscript_target_api < 21
diff --git a/core/static_library.mk b/core/static_library.mk
index 25e527964e..8002e5c27c 100644
--- a/core/static_library.mk
+++ b/core/static_library.mk
@@ -21,7 +21,6 @@ include $(BUILD_SYSTEM)/module_arch_supported.mk
ifeq ($(my_module_arch_supported),true)
# Build for TARGET_2ND_ARCH
-OVERRIDE_BUILT_MODULE_PATH :=
LOCAL_BUILT_MODULE :=
LOCAL_INSTALLED_MODULE :=
LOCAL_INTERMEDIATE_TARGETS :=
diff --git a/core/static_library_internal.mk b/core/static_library_internal.mk
index 6b4d22f129..f82e50162c 100644
--- a/core/static_library_internal.mk
+++ b/core/static_library_internal.mk
@@ -25,7 +25,7 @@ $(LOCAL_BUILT_MODULE) : $(all_objects)
$(transform-o-to-static-lib)
ifeq ($(NATIVE_COVERAGE),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
built_whole_gcno_libraries := \
$(foreach lib,$(my_whole_static_libraries), \
@@ -35,11 +35,9 @@ built_whole_gcno_libraries := \
GCNO_ARCHIVE := $(LOCAL_MODULE)$(gcno_suffix)
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_PREFIX := $(my_prefix)
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
$(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries)
- $(transform-o-to-static-lib)
+ $(package-coverage-files)
endif
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index b5c3a7c34d..40b2ba87cc 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -29,6 +29,13 @@ ifdef LOCAL_MODULE_PATH_64
$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
endif
+use_testcase_folder := false
+ifneq ($(LOCAL_MODULE),$(filter $(LOCAL_MODULE),$(DEFAULT_DATA_OUT_MODULES)))
+ use_testcase_folder := true
+endif
+
+ifneq ($(use_testcase_folder),true)
ifndef LOCAL_MODULE_RELATIVE_PATH
LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
endif
+endif
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
deleted file mode 100644
index f4aee3f6c7..0000000000
--- a/core/tasks/apicheck.mk
+++ /dev/null
@@ -1,165 +0,0 @@
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# Rules for running apicheck to confirm that you haven't broken
-# api compatibility or added apis illegally.
-#
-
-# skip api check for PDK buid
-ifeq (,$(filter true, $(WITHOUT_CHECK_API) $(TARGET_BUILD_PDK)))
-
-.PHONY: checkapi
-
-# Run the checkapi rules by default.
-droidcore: checkapi
-
-last_released_sdk_version := $(lastword $(call numerically_sort, \
- $(filter-out current, \
- $(patsubst $(SRC_API_DIR)/%.txt,%, $(wildcard $(SRC_API_DIR)/*.txt)) \
- )\
- ))
-
-.PHONY: check-public-api
-checkapi : check-public-api
-
-.PHONY: update-api
-
-# INTERNAL_PLATFORM_API_FILE is the one build by droiddoc.
-# Note that since INTERNAL_PLATFORM_API_FILE is the byproduct of api-stubs module,
-# (See frameworks/base/Android.mk)
-# we need to add api-stubs as additional dependency of the api check.
-
-# Check that the API we're building hasn't broken the last-released
-# SDK version.
-$(eval $(call check-api, \
- checkpublicapi-last, \
- $(SRC_API_DIR)/$(last_released_sdk_version).txt, \
- $(INTERNAL_PLATFORM_API_FILE), \
- frameworks/base/api/removed.txt, \
- $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
- -hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
- -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
- -error 16 -error 17 -error 18 -error 31, \
- cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
- check-public-api, \
- $(call doc-timestamp-for,api-stubs) \
- ))
-
-# Check that the API we're building hasn't changed from the not-yet-released
-# SDK version.
-$(eval $(call check-api, \
- checkpublicapi-current, \
- frameworks/base/api/current.txt, \
- $(INTERNAL_PLATFORM_API_FILE), \
- frameworks/base/api/removed.txt, \
- $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
- -error 2 -error 3 -error 4 -error 5 -error 6 \
- -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
- -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
- -error 25 -error 26 -error 27, \
- cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
- check-public-api, \
- $(call doc-timestamp-for,api-stubs) \
- ))
-
-.PHONY: update-public-api
-update-public-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP)
- @echo Copying current.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_API_FILE) frameworks/base/api/current.txt
- @echo Copying removed.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_REMOVED_API_FILE) frameworks/base/api/removed.txt
-
-update-api : update-public-api
-
-#####################Check System API#####################
-.PHONY: check-system-api
-checkapi : check-system-api
-
-# Check that the System API we're building hasn't broken the last-released
-# SDK version.
-$(eval $(call check-api, \
- checksystemapi-last, \
- $(SRC_SYSTEM_API_DIR)/$(last_released_sdk_version).txt, \
- $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
- frameworks/base/api/system-removed.txt, \
- $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
- -hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
- -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
- -error 16 -error 17 -error 18 -error 31, \
- cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
- check-system-api, \
- $(call doc-timestamp-for,system-api-stubs) \
- ))
-
-# Check that the System API we're building hasn't changed from the not-yet-released
-# SDK version.
-$(eval $(call check-api, \
- checksystemapi-current, \
- frameworks/base/api/system-current.txt, \
- $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
- frameworks/base/api/system-removed.txt, \
- $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
- -error 2 -error 3 -error 4 -error 5 -error 6 \
- -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
- -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
- -error 25 -error 26 -error 27, \
- cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
- check-system-api, \
- $(call doc-timestamp-for,system-api-stubs) \
- ))
-
-.PHONY: update-system-api
-update-api : update-system-api
-
-update-system-api: $(INTERNAL_PLATFORM_SYSTEM_API_FILE) | $(ACP)
- @echo Copying system-current.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_API_FILE) frameworks/base/api/system-current.txt
- @echo Copying system-removed.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE) frameworks/base/api/system-removed.txt
-
-#####################Check Test API#####################
-.PHONY: check-test-api
-checkapi : check-test-api
-
-# Check that the Test API we're building hasn't changed from the not-yet-released
-# SDK version. Note that we don't check that we haven't broken the previous
-# SDK's API because the test API is meant only for CTS which is always
-# associated with the current release.
-$(eval $(call check-api, \
- checktestapi-current, \
- frameworks/base/api/test-current.txt, \
- $(INTERNAL_PLATFORM_TEST_API_FILE), \
- frameworks/base/api/test-removed.txt, \
- $(INTERNAL_PLATFORM_TEST_REMOVED_API_FILE), \
- -error 2 -error 3 -error 4 -error 5 -error 6 \
- -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
- -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
- -error 25 -error 26 -error 27, \
- cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
- check-test-api, \
- $(call doc-timestamp-for,test-api-stubs) \
- ))
-
-.PHONY: update-test-api
-update-api : update-test-api
-
-update-test-api: $(INTERNAL_PLATFORM_TEST_API_FILE) | $(ACP)
- @echo Copying test-current.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_TEST_API_FILE) frameworks/base/api/test-current.txt
- @echo Copying test-removed.txt
- $(hide) $(ACP) $(INTERNAL_PLATFORM_TEST_REMOVED_API_FILE) frameworks/base/api/test-removed.txt
-
-
-endif
diff --git a/core/tasks/apidiff.mk b/core/tasks/apidiff.mk
index 4eb59afbb9..76e4749366 100644
--- a/core/tasks/apidiff.mk
+++ b/core/tasks/apidiff.mk
@@ -18,4 +18,4 @@
.PHONY: api-diff
-api-diff: offline-sdk-referenceonly-diff
+api-diff: api-stubs-docs-jdiff
diff --git a/core/tasks/check_boot_jars/check_boot_jars.py b/core/tasks/check_boot_jars/check_boot_jars.py
index 1b4540cb8d..9d71553c81 100755
--- a/core/tasks/check_boot_jars/check_boot_jars.py
+++ b/core/tasks/check_boot_jars/check_boot_jars.py
@@ -39,7 +39,7 @@ def LoadWhitelist(filename):
return True
-def CheckJar(jar):
+def CheckJar(whitelist_path, jar):
"""Check a jar file.
"""
# Get the list of files inside the jar file.
@@ -55,8 +55,9 @@ def CheckJar(jar):
package_name = package_name.replace('/', '.')
# Skip class without a package name
if package_name and not whitelist_re.match(package_name):
- print >> sys.stderr, ('Error: %s contains class file %s, which is not in the whitelist'
- % (jar, f))
+ print >> sys.stderr, ('Error: %s contains class file %s, whose package name %s is not '
+ 'in the whitelist %s of packages allowed on the bootclasspath.'
+ % (jar, f, package_name, whitelist_path))
return False
return True
@@ -65,13 +66,14 @@ def main(argv):
if len(argv) < 2:
print __doc__
return 1
+ whitelist_path = argv[0]
- if not LoadWhitelist(argv[0]):
+ if not LoadWhitelist(whitelist_path):
return 1
passed = True
for jar in argv[1:]:
- if not CheckJar(jar):
+ if not CheckJar(whitelist_path, jar):
passed = False
if not passed:
return 1
diff --git a/core/tasks/check_boot_jars/package_whitelist.txt b/core/tasks/check_boot_jars/package_whitelist.txt
index b0becba82f..38f2be57af 100644
--- a/core/tasks/check_boot_jars/package_whitelist.txt
+++ b/core/tasks/check_boot_jars/package_whitelist.txt
@@ -241,3 +241,4 @@ com\.google\.vr\.platform.*
###################################################
# Packages used for Android in Chrome OS
org\.chromium\.arc
+org\.chromium\.arc\..*
diff --git a/core/tasks/check_emu_boot.mk b/core/tasks/check_emu_boot.mk
deleted file mode 100644
index 487067738e..0000000000
--- a/core/tasks/check_emu_boot.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-check_emu_boot0 := $(DIST_DIR)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-emulator-boot-test-result.txt
-$(check_emu_boot0) : PRIVATE_PREFIX := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-$(check_emu_boot0) : PRIVATE_EMULATOR_BOOT_TEST_SH := device/generic/goldfish/tools/emulator_boot_test.sh
-$(check_emu_boot0) : PRIVATE_BOOT_COMPLETE_STRING := "emulator: INFO: boot completed"
-$(check_emu_boot0) : PRIVATE_BOOT_FAIL_STRING := "emulator: ERROR: fail to boot after"
-$(check_emu_boot0) : PRIVATE_SUCCESS_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-SUCCESS.txt
-$(check_emu_boot0) : PRIVATE_FAIL_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-FAIL.txt
-$(check_emu_boot0) : $(INSTALLED_QEMU_SYSTEMIMAGE) $(INSTALLED_QEMU_VENDORIMAGE) \
- $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(PRODUCT_OUT)/userdata.img) \
- $(PRODUCT_OUT)/ramdisk.img device/generic/goldfish/tools/emulator_boot_test.sh
- @mkdir -p $(dir $(check_emu_boot0))
- $(hide) rm -f $(check_emu_boot0)
- $(hide) rm -f $(PRIVATE_SUCCESS_FILE)
- $(hide) rm -f $(PRIVATE_FAIL_FILE)
- (export ANDROID_PRODUCT_OUT=$$(cd $(PRODUCT_OUT);pwd);\
- export ANDROID_BUILD_TOP=$$(pwd);\
- $(PRIVATE_EMULATOR_BOOT_TEST_SH) > $(check_emu_boot0))
- (if grep -q $(PRIVATE_BOOT_COMPLETE_STRING) $(check_emu_boot0);\
- then echo boot_succeeded > $(PRIVATE_SUCCESS_FILE); fi)
- (if grep -q $(PRIVATE_BOOT_FAIL_STRING) $(check_emu_boot0);\
- then echo boot_failed > $(PRIVATE_FAIL_FILE); fi)
-.PHONY: check_emu_boot
-check_emu_boot: $(check_emu_boot0)
diff --git a/core/tasks/collect_gpl_sources.mk b/core/tasks/collect_gpl_sources.mk
index 70f0afe557..acbe9bef89 100644
--- a/core/tasks/collect_gpl_sources.mk
+++ b/core/tasks/collect_gpl_sources.mk
@@ -12,22 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-ifdef dist_goal
-
 # The rule below doesn't have dependencies on the files that it copies,
-# so manually generate directly into the DIST_DIR directory that is always
-# wiped between dist builds.
-gpl_source_tgz := $(DIST_DIR)/gpl_source.tgz
+# so manually generate into a PACKAGING intermediate dir, which is wiped
+# in installclean between incremental builds on build servers.
+gpl_source_tgz := $(call intermediates-dir-for,PACKAGING,gpl_source)/gpl_source.tgz
# FORCE since we can't know whether any of the sources changed
$(gpl_source_tgz): PRIVATE_PATHS := $(sort $(patsubst %/, %, $(dir $(ALL_GPL_MODULE_LICENSE_FILES))))
$(gpl_source_tgz) : $(ALL_GPL_MODULE_LICENSE_FILES)
- @echo Package gpl sources: $@
+ @echo Package GPL sources: $@
$(hide) tar cfz $@ --exclude ".git*" $(PRIVATE_PATHS)
# Dist the tgz only if we are doing a full build
-ifeq (,$(TARGET_BUILD_APPS))
-droidcore: $(gpl_source_tgz)
-endif
-
-endif # dist_goal
+$(call dist-for-goals,droidcore,$(gpl_source_tgz))
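Editorial note: the rewrite above drops the hand-rolled DIST_DIR rule in favor of dist-for-goals, which registers a file to be copied into the dist directory whenever the named goal is built during a `make dist` run. The shape of the pattern, with a hypothetical artifact name; the intermediates-dir-for and dist-for-goals calls mirror the ones in this hunk:

# Build the artifact under the PACKAGING intermediates, not directly in DIST_DIR.
my_report := $(call intermediates-dir-for,PACKAGING,my_report)/my_report.txt

$(my_report):
	@mkdir -p $(dir $@)
	@echo "example report" > $@

# Publish it when droidcore is built as part of a dist run.
$(call dist-for-goals,droidcore,$(my_report))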
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 33c3a836f0..f3b4368775 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -14,7 +14,6 @@
test_suite_name := cts
test_suite_tradefed := cts-tradefed
-# TODO: Fix the following two lines after harness is moved to its own repo
test_suite_dynamic_config := test/suite_harness/tools/cts-tradefed/DynamicConfig.xml
test_suite_readme := test/suite_harness/tools/cts-tradefed/README
diff --git a/core/tasks/deps_licenses.mk b/core/tasks/deps_licenses.mk
index bb20fa042e..daf986f606 100644
--- a/core/tasks/deps_licenses.mk
+++ b/core/tasks/deps_licenses.mk
@@ -40,7 +40,7 @@ $(if $(_gmad_new),$(eval $(1) += $(_gmad_new))\
endef
define print-deps-license
-$(foreach m, $(ALL_DEPS.MODULES),\
+$(foreach m, $(sort $(ALL_DEPS.MODULES)),\
$(eval m_p := $(sort $(ALL_MODULES.$(m).PATH) $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).PATH)))\
$(if $(filter $(PROJ_PATH),$(m_p)),\
$(eval deps :=)\
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index a2b626e96a..0028ce4d4f 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -18,12 +18,16 @@
device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
# Create an artifact to include a list of test config files in device-tests.
device-tests-list-zip := $(PRODUCT_OUT)/device-tests_list.zip
+my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip)
$(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
-
-$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
+$(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
+$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+ $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+ echo $$shared_lib >> $@-host.list; \
+ done
grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
rm -f $(PRIVATE_device_tests_list)
diff --git a/core/tasks/find-shareduid-violation.mk b/core/tasks/find-shareduid-violation.mk
new file mode 100644
index 0000000000..45fd9374ed
--- /dev/null
+++ b/core/tasks/find-shareduid-violation.mk
@@ -0,0 +1,32 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+shareduid_violation_modules_filename := $(PRODUCT_OUT)/shareduid_violation_modules.json
+
+find_shareduid_script := $(BUILD_SYSTEM)/tasks/find-shareduid-violation.py
+
+$(shareduid_violation_modules_filename): $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET)
+
+$(shareduid_violation_modules_filename): $(find_shareduid_script)
+$(shareduid_violation_modules_filename): $(AAPT2)
+ $(find_shareduid_script) $(PRODUCT_OUT) $(AAPT2) > $@
+$(call dist-for-goals,droidcore,$(shareduid_violation_modules_filename))
diff --git a/core/tasks/find-shareduid-violation.py b/core/tasks/find-shareduid-violation.py
new file mode 100755
index 0000000000..1f8e4df5e0
--- /dev/null
+++ b/core/tasks/find-shareduid-violation.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import subprocess
+from glob import glob
+from collections import defaultdict
+import sys
+import json
+
+if len(sys.argv) < 3:
+ product_out = os.environ["PRODUCT_OUT"]
+ aapt = "aapt2"
+else:
+ product_out = sys.argv[1]
+ aapt = sys.argv[2]
+
+def execute(cmd):
+ p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = map(lambda b: b.decode('utf-8'), p.communicate())
+ return p.returncode == 0, out, err
+
+def make_aapt_cmds(file):
+ return [aapt + ' dump ' + file + ' --file AndroidManifest.xml',
+ aapt + ' dump xmltree ' + file + ' --file AndroidManifest.xml']
+
+def extract_shared_uid(file):
+ for cmd in make_aapt_cmds(file):
+ success, manifest, error_msg = execute(cmd)
+ if success:
+ break
+ else:
+ print(error_msg, file=sys.stderr)
+ sys.exit()
+ return None
+
+ for l in manifest.split('\n'):
+ if "sharedUserId" in l:
+ return l.split('"')[-2]
+ return None
+
+
+partitions = ["system", "vendor", "product"]
+
+shareduid_app_dict = defaultdict(list)
+
+for p in partitions:
+ for f in glob(os.path.join(product_out, p, "*", "*", "*.apk")):
+ apk_file = os.path.basename(f)
+ shared_uid = extract_shared_uid(f)
+
+ if shared_uid is None:
+ continue
+ shareduid_app_dict[shared_uid].append((p, apk_file))
+
+
+output = defaultdict(lambda: defaultdict(list))
+
+for uid, app_infos in shareduid_app_dict.items():
+ partitions = {p for p, _ in app_infos}
+ if len(partitions) > 1:
+ for part in partitions:
+ output[uid][part].extend([a for p, a in app_infos if p == part])
+
+print(json.dumps(output, indent=2, sort_keys=True))
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index c7f1dc9977..36ab1016a3 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -14,22 +14,54 @@
.PHONY: general-tests
-general-tests-zip := $(PRODUCT_OUT)/general-tests.zip
+general_tests_tools := \
+ $(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar \
+ $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
+
+intermediates_dir := $(call intermediates-dir-for,PACKAGING,general-tests)
+general_tests_zip := $(PRODUCT_OUT)/general-tests.zip
# Create an artifact to include a list of test config files in general-tests.
-general-tests-list-zip := $(PRODUCT_OUT)/general-tests_list.zip
-$(general-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(general-tests-list-zip)
-$(general-tests-zip) : PRIVATE_general_tests_list := $(PRODUCT_OUT)/general-tests_list
-
-$(general-tests-zip) : $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
- echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $@.list
- grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
- grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
- $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
- rm -f $(PRIVATE_general_tests_list)
- $(hide) grep -e .*.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_general_tests_list)
- $(hide) grep -e .*.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_general_tests_list)
- $(hide) $(SOONG_ZIP) -d -o $(general-tests-list-zip) -C $(dir $@) -f $(PRIVATE_general_tests_list)
- rm -f $@.list $@-host.list $@-target.list $(PRIVATE_general_tests_list)
-
-general-tests: $(general-tests-zip)
-$(call dist-for-goals, general-tests, $(general-tests-zip) $(general-tests-list-zip))
+general_tests_list_zip := $(PRODUCT_OUT)/general-tests_list.zip
+
+# Filter shared entries between general-tests' and device-tests' HOST_SHARED_LIBRARY.FILES,
+# to avoid warning about overriding commands.
+my_host_shared_lib_for_general_tests := \
+ $(foreach m,$(filter $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\
+ $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)),$(call word-colon,2,$(m)))
+my_general_tests_shared_lib_files := \
+ $(filter-out $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\
+ $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES))
+
+my_host_shared_lib_for_general_tests += $(call copy-many-files,$(my_general_tests_shared_lib_files))
+
+$(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
+$(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip)
+$(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
+$(general_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
+$(general_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_general_tests)
+$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(my_host_shared_lib_for_general_tests) $(SOONG_ZIP)
+ rm -rf $(PRIVATE_INTERMEDIATES_DIR)
+ rm -f $@ $(PRIVATE_general_tests_list_zip)
+ mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
+ echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+ grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
+ grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+ $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+ echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/host.list; \
+ done
+ cp -fp $(PRIVATE_TOOLS) $(PRIVATE_INTERMEDIATES_DIR)/tools/
+ $(SOONG_ZIP) -d -o $@ \
+ -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \
+ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
+ -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list
+ grep -e .*.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
+ grep -e .*.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
+ $(SOONG_ZIP) -d -o $(PRIVATE_general_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
+
+general-tests: $(general_tests_zip)
+$(call dist-for-goals, general-tests, $(general_tests_zip) $(general_tests_list_zip))
+
+intermediates_dir :=
+general_tests_tools :=
+general_tests_zip :=
+general_tests_list_zip :=
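Editorial note: the host-shared-library handling at the top of this hunk avoids duplicate copy rules by splitting the general-tests list into entries that device-tests already copies and entries unique to general-tests, using plain GNU Make filter/filter-out. A small illustration with invented src:dest entries:

device_libs  := out/host/a/libfoo.so:host/testcases/libfoo.so out/host/a/libbar.so:host/testcases/libbar.so
general_libs := out/host/a/libbar.so:host/testcases/libbar.so out/host/a/libbaz.so:host/testcases/libbaz.so

# Entries device-tests already handles: reuse their destinations as-is.
already_copied := $(filter $(device_libs),$(general_libs))
# Entries only general-tests needs: these still require copy rules of their own.
still_needed   := $(filter-out $(device_libs),$(general_libs))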
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index b45526f4d9..2c56162d54 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -13,6 +13,10 @@ $(MODULE_INFO_JSON):
'"installed": [$(foreach w,$(sort $(ALL_MODULES.$(m).INSTALLED)),"$(w)", )], ' \
'"compatibility_suites": [$(foreach w,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)", )], ' \
'"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)], ' \
+ '"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)", ' \
+ '"test_config": [$(if $(ALL_MODULES.$(m).TEST_CONFIG),"$(ALL_MODULES.$(m).TEST_CONFIG)")], ' \
+ '"dependencies": [$(foreach w,$(sort $(ALL_DEPS.$(m).ALL_DEPS)),"$(w)", )], ' \
+ '"srcs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)", )], ' \
'},\n' \
) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
$(hide) echo '}' >> $@
@@ -21,6 +25,7 @@ $(MODULE_INFO_JSON):
# If ONE_SHOT_MAKEFILE is set, our view of the world is smaller, so don't
 # rewrite the file in that case.
ifndef ONE_SHOT_MAKEFILE
-files: $(MODULE_INFO_JSON)
+droidcore: $(MODULE_INFO_JSON)
endif
+$(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
diff --git a/target/Android.mk b/core/tasks/mts.mk
index 9929b0096f..e800505e56 100644
--- a/target/Android.mk
+++ b/core/tasks/mts.mk
@@ -1,5 +1,4 @@
-#
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,12 +11,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
-LOCAL_PATH := $(call my-dir)
+ifneq ($(wildcard test/mts/README.md),)
+test_suite_name := mts
+test_suite_tradefed := mts-tradefed
+test_suite_readme := test/mts/README.md
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
-# Only if this Android.mk was included not by a symlink should it be used.
-# This facilitates the transition away from symlinks: b/64397960
-ifeq ($(LOCAL_PATH),build/make/target)
-include $(call first-makefiles-under,$(LOCAL_PATH))
+.PHONY: mts
+mts: $(compatibility_zip)
+$(call dist-for-goals, mts, $(compatibility_zip))
endif
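Editorial note: mts.mk follows the same recipe as the sts and cts tasks touched elsewhere in this change: set the suite variables, include tasks/tools/compatibility.mk (which defines compatibility_zip), then declare the phony goal and dist the zip. A hypothetical additional suite would be wired up the same way; the name xts and its paths are invented for illustration:

ifneq ($(wildcard test/xts/README.md),)
test_suite_name := xts
test_suite_tradefed := xts-tradefed
test_suite_readme := test/xts/README.md

include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk

.PHONY: xts
xts: $(compatibility_zip)
$(call dist-for-goals, xts, $(compatibility_zip))
endif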
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index 66eec22b42..e9c506a279 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -33,7 +33,7 @@ $(INSTALLED_OEMIMAGE_TARGET) : $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_OEMIMAGE_F
$(call pretty,"Target oem fs image: $@")
@mkdir -p $(TARGET_OUT_OEM)
@mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
- $(call generate-userimage-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt, skip_fsck=true)
+ $(call generate-image-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt,oem,skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@ $(TARGET_OUT)
diff --git a/core/tasks/owners.mk b/core/tasks/owners.mk
new file mode 100644
index 0000000000..6f32aaf1d0
--- /dev/null
+++ b/core/tasks/owners.mk
@@ -0,0 +1,33 @@
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Create an artifact to include OWNERS files in the source tree.
+
+.PHONY: owners
+
+intermediates := $(call intermediates-dir-for,PACKAGING,owners)
+owners_zip := $(intermediates)/owners.zip
+owners_list := $(OUT_DIR)/.module_paths/OWNERS.list
+owners := $(file <$(owners_list))
+$(owners_zip) : PRIVATE_owners := $(subst $(newline),\n,$(owners))
+
+$(owners_zip) : $(owners) $(SOONG_ZIP)
+ @echo "Building artifact to include OWNERS files."
+ rm -rf $@
+ echo -e "$(PRIVATE_owners)" > $@.list
+ $(SOONG_ZIP) -o $@ -C . -l $@.list
+ rm -f $@.list
+
+owners : $(owners_zip)
+
+$(call dist-for-goals, general-tests, $(owners_zip))
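Editorial note: owners.mk leans on two features worth calling out: $(file <path) (GNU Make 4.2+) reads a whole file into a variable at parse time, and the build system's $(newline) variable lets real newlines be folded into literal \n so the list survives being passed on a single echo -e command line. A condensed sketch of that round trip with illustrative names; $(newline) and SOONG_ZIP are assumed to come from the surrounding build environment:

# Read a newline-separated list file into Make and hand it to a recipe.
paths_file := out/.module_paths/EXAMPLE.list   # one path per line
paths := $(file <$(paths_file))                # whole file, read at parse time

example.zip: PRIVATE_paths := $(subst $(newline),\n,$(paths))
example.zip:
	echo -e "$(PRIVATE_paths)" > $@.list
	$(SOONG_ZIP) -o $@ -C . -l $@.list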
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index bc11b4941b..93db1de187 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -12,10 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+.PHONY: sdk_addon
+
+ifndef ONE_SHOT_MAKEFILE
# If they didn't define PRODUCT_SDK_ADDON_NAME, then we won't define
# any of these rules.
-addon_name := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_NAME))
+addon_name := $(PRODUCT_SDK_ADDON_NAME)
ifneq ($(addon_name),)
addon_dir_leaf := $(addon_name)-$(FILE_NAME_TAG)-$(INTERNAL_SDK_HOST_OS_NAME)
@@ -40,8 +43,8 @@ $(call stub-addon-jar-file,$(1)): $(1) | mkstubs
endef
# Files that are built and then copied into the sdk-addon
-ifneq ($(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_MODULES)),)
-$(foreach cf,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_MODULES), \
+ifneq ($(PRODUCT_SDK_ADDON_COPY_MODULES),)
+$(foreach cf,$(PRODUCT_SDK_ADDON_COPY_MODULES), \
$(eval _src := $(call module-stubs-files,$(call word-colon,1,$(cf)))) \
$(eval $(call stub-addon-jar,$(_src))) \
$(eval _src := $(call stub-addon-jar-file,$(_src))) \
@@ -52,8 +55,8 @@ $(foreach cf,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_MODULES), \
endif
# Files that are copied directly into the sdk-addon
-ifneq ($(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_FILES)),)
-$(foreach cf,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_FILES), \
+ifneq ($(PRODUCT_SDK_ADDON_COPY_FILES),)
+$(foreach cf,$(PRODUCT_SDK_ADDON_COPY_FILES), \
$(eval _src := $(call word-colon,1,$(cf))) \
$(eval _dest := $(call word-colon,2,$(cf))) \
$(if $(findstring images/,$(_dest)), $(eval _root := $(addon_dir_img)), $(eval _root := $(addon_dir_leaf))) \
@@ -69,7 +72,13 @@ files_to_copy += \
$(addon_dir_img):$(PRODUCT_OUT)/system/build.prop:images/$(TARGET_CPU_ABI)/build.prop \
$(addon_dir_img):device/generic/goldfish/data/etc/userdata.img:images/$(TARGET_CPU_ABI)/userdata.img \
$(addon_dir_img):$(target_notice_file_txt):images/$(TARGET_CPU_ABI)/NOTICE.txt \
- $(addon_dir_img):$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP):images/source.properties
+ $(addon_dir_img):$(PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP):images/source.properties
+
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+files_to_copy += \
+ $(addon_dir_img):$(QEMU_VERIFIED_BOOT_PARAMS):images/$(TARGET_CPU_ABI)/VerifiedBootParams.textproto
+endif
# Generate rules to copy the requested files
$(foreach cf,$(files_to_copy), \
@@ -84,7 +93,7 @@ $(foreach cf,$(files_to_copy), \
addon_img_source_prop := $(call append-path,$(staging),$(addon_dir_img))/images/$(TARGET_CPU_ABI)/source.properties
sdk_addon_deps += $(addon_img_source_prop)
-$(addon_img_source_prop): $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP)
+$(addon_img_source_prop): $(PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP)
@echo Generate $@
$(hide) mkdir -p $(dir $@)
$(hide) sed \
@@ -99,7 +108,7 @@ $(addon_img_source_prop): $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_I
# We don't know about all of the docs files, so depend on the timestamps for
# them, and record the directories, and the packaging rule will just copy the
# whole thing.
-doc_modules := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_DOC_MODULES))
+doc_modules := $(PRODUCT_SDK_ADDON_DOC_MODULES)
sdk_addon_deps += $(foreach dm, $(doc_modules), $(call doc-timestamp-for, $(dm)))
$(full_target): PRIVATE_DOCS_DIRS := $(addprefix $(OUT_DOCS)/, $(doc_modules))
@@ -122,7 +131,6 @@ $(full_target_img): $(full_target) $(addon_img_source_prop) | $(ACP) $(SOONG_ZIP
$(hide) $(SOONG_ZIP) -o $@ -C $(dir $(PRIVATE_STAGING_DIR)) -D $(PRIVATE_STAGING_DIR)
-.PHONY: sdk_addon
sdk_addon: $(full_target) $(full_target_img)
ifneq ($(sdk_repo_goal),)
@@ -142,3 +150,5 @@ ifneq ($(filter sdk_addon,$(MAKECMDGOALS)),)
$(error Trying to build sdk_addon, but product '$(INTERNAL_PRODUCT)' does not define one)
endif
endif # addon_name
+
+endif # !ONE_SHOT_MAKEFILE
diff --git a/core/tasks/sts.mk b/core/tasks/sts.mk
index b3c3baa462..0c33e1c77b 100644
--- a/core/tasks/sts.mk
+++ b/core/tasks/sts.mk
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+ifneq ($(wildcard test/sts/README.md),)
test_suite_name := sts
test_suite_tradefed := sts-tradefed
test_suite_readme := test/sts/README.md
@@ -21,3 +22,4 @@ include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
.PHONY: sts
sts: $(compatibility_zip)
$(call dist-for-goals, sts, $(compatibility_zip))
+endif
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
index 36275b0d9a..da64cab40d 100644
--- a/core/tasks/test_mapping.mk
+++ b/core/tasks/test_mapping.mk
@@ -12,7 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Create an artifact to include TEST_MAPPING files in source tree.
+# Create an artifact to include TEST_MAPPING files in source tree. Also include
+# a file (out/disabled-presubmit-tests) containing the tests that should be
+# skipped in presubmit check.
.PHONY: test_mapping
@@ -21,13 +23,15 @@ test_mappings_zip := $(intermediates)/test_mappings.zip
test_mapping_list := $(OUT_DIR)/.module_paths/TEST_MAPPING.list
test_mappings := $(file <$(test_mapping_list))
$(test_mappings_zip) : PRIVATE_test_mappings := $(subst $(newline),\n,$(test_mappings))
+$(test_mappings_zip) : PRIVATE_all_disabled_presubmit_tests := $(ALL_DISABLED_PRESUBMIT_TESTS)
$(test_mappings_zip) : $(test_mappings) $(SOONG_ZIP)
- @echo "Building artifact to include TEST_MAPPING files."
- rm -rf $@
+ @echo "Building artifact to include TEST_MAPPING files and tests to skip in presubmit check."
+ rm -rf $@ $(dir $@)/disabled-presubmit-tests
+ echo $(sort $(PRIVATE_all_disabled_presubmit_tests)) | tr " " "\n" > $(dir $@)/disabled-presubmit-tests
echo -e "$(PRIVATE_test_mappings)" > $@.list
- $(SOONG_ZIP) -o $@ -C . -l $@.list
- rm -f $@.list
+ $(SOONG_ZIP) -o $@ -C . -l $@.list -C $(dir $@) -f $(dir $@)/disabled-presubmit-tests
+ rm -f $@.list $(dir $@)/disabled-presubmit-tests
test_mapping : $(test_mappings_zip)
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index a1151e908c..19d2ab5746 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -84,7 +84,7 @@ $(my_built_custom_image): PRIVATE_PICKUP_FILES := $(my_pickup_files)
$(my_built_custom_image): PRIVATE_SELINUX := $(CUSTOM_IMAGE_SELINUX)
$(my_built_custom_image): PRIVATE_SUPPORT_VERITY := $(CUSTOM_IMAGE_SUPPORT_VERITY)
$(my_built_custom_image): PRIVATE_SUPPORT_VERITY_FEC := $(CUSTOM_IMAGE_SUPPORT_VERITY_FEC)
-$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)
+$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCT_VERITY_SIGNING_KEY)
$(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
$(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
$(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 3d1d783e33..57a5cf945e 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -35,7 +35,6 @@ test_tools := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util-tests.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-common-util-tests.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed-tests.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/host-libprotobuf-java-full.jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed).jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed)-tests.jar \
$(HOST_OUT_EXECUTABLES)/$(test_suite_tradefed) \
@@ -52,6 +51,7 @@ $(compatibility_zip): PRIVATE_DYNAMIC_CONFIG := $(test_suite_dynamic_config)
$(compatibility_zip): $(test_artifacts) $(test_tools) $(test_suite_prebuilt_tools) $(test_suite_dynamic_config) $(SOONG_ZIP) | $(ADB) $(ACP)
# Make dir structure
$(hide) mkdir -p $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases
+ $(hide) echo $(BUILD_NUMBER_FROM_FILE) > $(PRIVATE_OUT_DIR)/tools/version.txt
# Copy tools
$(hide) $(ACP) -fp $(PRIVATE_TOOLS) $(PRIVATE_OUT_DIR)/tools
$(if $(PRIVATE_DYNAMIC_CONFIG),$(hide) $(ACP) -fp $(PRIVATE_DYNAMIC_CONFIG) $(PRIVATE_OUT_DIR)/testcases/$(PRIVATE_SUITE_NAME).dynamic)
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 4155a39073..d7b3010beb 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -21,17 +21,16 @@ my_pickup_files :=
my_modules_and_deps := $(my_modules)
$(foreach m,$(my_modules),\
$(eval _explicitly_required := \
- $(strip $(ALL_MODULES.$(m).EXPLICITLY_REQUIRED)\
- $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).EXPLICITLY_REQUIRED)))\
+ $(strip $(ALL_MODULES.$(m).EXPLICITLY_REQUIRED_FROM_TARGET)\
+ $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).EXPLICITLY_REQUIRED_FROM_TARGET)))\
$(eval my_modules_and_deps += $(_explicitly_required))\
)
# Ignore unknown installed files on partial builds
my_missing_files :=
-# These warnings are too noisy, silence them for now.
-#ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
-#my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
-#endif
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+endif
# Iterate over modules' built files and installed files;
# Calculate the dest files in the output zip file.
@@ -41,6 +40,8 @@ $(foreach m,$(my_modules_and_deps),\
$(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).PICKUP_FILES)))\
$(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
$(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
+ $(eval _module_class_folder := $($(strip MODULE_CLASS_$(word 1, $(strip $(ALL_MODULES.$(m).CLASS)\
+ $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS))))))\
$(if $(_pickup_files)$(_built_files),,\
$(call my_missing_files,$(m)))\
$(eval my_pickup_files += $(_pickup_files))\
@@ -50,9 +51,15 @@ $(foreach m,$(my_modules_and_deps),\
$(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\
$(eval bui := $(word 1,$(bui_ins)))\
$(eval my_built_modules += $(bui))\
+ $(if $(filter $(_module_class_folder), nativetest benchmarktest),\
+ $(eval module_class_folder_stem := $(_module_class_folder)$(findstring 64, $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))),\
+ $(eval module_class_folder_stem := $(_module_class_folder)))\
$(eval my_copy_dest := $(patsubst data/%,DATA/%,\
- $(patsubst system/%,DATA/%,\
- $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+ $(patsubst testcases/%,DATA/$(module_class_folder_stem)/%,\
+ $(patsubst testcases/$(m)/$(TARGET_ARCH)/%,DATA/$(module_class_folder_stem)/$(m)/%,\
+ $(patsubst testcases/$(m)/$(TARGET_2ND_ARCH)/%,DATA/$(module_class_folder_stem)/$(m)/%,\
+ $(patsubst system/%,DATA/%,\
+ $(patsubst $(PRODUCT_OUT)/%,%,$(ins))))))))\
$(eval my_copy_pairs += $(bui):$(my_staging_dir)/$(my_copy_dest)))\
))
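Editorial note: the nested patsubst chain above decides where each installed file lands inside the package zip, routing test binaries into an arch-suffixed module-class folder. A worked example of the mapping, assuming a module named mytest whose class maps to nativetest on an arm64 device; all values are invented for illustration:

#   $(ins)  = $(PRODUCT_OUT)/testcases/mytest/arm64/mytest
#   1. strip the $(PRODUCT_OUT)/ prefix              -> testcases/mytest/arm64/mytest
#   2. class folder is nativetest, and "64" is appended
#      because the relative path contains "64"       -> stem = nativetest64
#   3. the testcases/$(m)/$(TARGET_ARCH)/% rule fires -> DATA/nativetest64/mytest/mytest
#   Result: the built file is staged at $(my_staging_dir)/DATA/nativetest64/mytest/mytest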
diff --git a/core/tasks/vendor_module_check.mk b/core/tasks/vendor_module_check.mk
index 6695994e1f..0b8f1e8702 100644
--- a/core/tasks/vendor_module_check.mk
+++ b/core/tasks/vendor_module_check.mk
@@ -48,18 +48,18 @@ _vendor_owner_whitelist := \
widevine
-_restrictions := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_RESTRICT_VENDOR_FILES))
+_restrictions := $(PRODUCT_RESTRICT_VENDOR_FILES)
ifneq (,$(_restrictions))
-ifneq (,$(PRODUCTS.$(INTERNAL_PRODUCT).VENDOR_PRODUCT_RESTRICT_VENDOR_FILES))
+ifneq (,$(VENDOR_PRODUCT_RESTRICT_VENDOR_FILES))
$(error Error: cannot set both PRODUCT_RESTRICT_VENDOR_FILES and VENDOR_PRODUCT_RESTRICT_VENDOR_FILES)
endif
_vendor_exception_path_prefix :=
_vendor_exception_modules :=
else
-_restrictions := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).VENDOR_PRODUCT_RESTRICT_VENDOR_FILES))
-_vendor_exception_path_prefix := $(patsubst %, vendor/%/%, $(PRODUCTS.$(INTERNAL_PRODUCT).VENDOR_EXCEPTION_PATHS))
-_vendor_exception_modules := $(PRODUCTS.$(INTERNAL_PRODUCT).VENDOR_EXCEPTION_MODULES)
+_restrictions := $(VENDOR_PRODUCT_RESTRICT_VENDOR_FILES)
+_vendor_exception_path_prefix := $(patsubst %, vendor/%/%, $(VENDOR_EXCEPTION_PATHS))
+_vendor_exception_modules := $(VENDOR_EXCEPTION_MODULES)
endif
diff --git a/core/tasks/vndk.mk b/core/tasks/vndk.mk
index b9133df02f..942063897f 100644
--- a/core/tasks/vndk.mk
+++ b/core/tasks/vndk.mk
@@ -23,67 +23,41 @@ ifneq (,$(PLATFORM_VNDK_VERSION))
# BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'.
ifneq ($(BOARD_VNDK_RUNTIME_DISABLE),true)
-# Returns arch-specific libclang_rt.ubsan* library name.
-# Because VNDK_CORE_LIBRARIES includes all arch variants for libclang_rt.ubsan*
-# libs, the arch-specific libs are selected separately.
-#
-# Args:
-# $(1): if not empty, evaluates for TARGET_2ND_ARCH
-define clang-ubsan-vndk-core
-$(strip \
- $(eval prefix := $(if $(1),2ND_,)) \
- $(addsuffix .vendor,$($(addprefix $(prefix),UBSAN_RUNTIME_LIBRARY))) \
-)
-endef
-
-# Returns list of file paths of the intermediate objs
+# Returns list of src:dest paths of the intermediate objs
#
# Args:
# $(1): list of module and filename pairs (e.g., ld.config.txt:ld.config.27.txt ...)
-# $(2): target class (e.g., SHARED_LIBRARIES, STATIC_LIBRARIES, ETC)
-# $(3): if not empty, evaluates for TARGET_2ND_ARCH
+# $(2): if not empty, evaluates for TARGET_2ND_ARCH
define paths-of-intermediates
$(strip \
$(foreach pair,$(1), \
- $(eval split_pair := $(subst :,$(space),$(pair))) \
- $(eval module := $(word 1,$(split_pair))) \
- $(eval filename := $(word 2,$(split_pair))) \
- $(eval dir := $(call intermediates-dir-for,$(2),$(module),,,$(3))) \
- $(call append-path,$(dir),$(filename)) \
+ $(eval module := $(call word-colon,1,$(pair))$(if $(2),$(TARGET_2ND_ARCH_MODULE_SUFFIX))) \
+ $(eval built := $(ALL_MODULES.$(module).BUILT_INSTALLED)) \
+ $(eval filename := $(call word-colon,2,$(pair))) \
+ $(if $(wordlist 2,100,$(built)), \
+ $(error Unable to handle multiple built files ($(module)): $(built))) \
+ $(if $(built),$(call word-colon,1,$(built)):$(filename)) \
) \
)
endef
-# Returns paths of notice files under $(TARGET_OUT_NOTICE_FILES)
+# Returns src:dest list of notice files
#
# Args:
# $(1): list of lib names (e.g., libfoo.vendor)
-# $(2): vndk lib type, one of 'vndk' or 'vndk-sp'
define paths-of-notice-files
$(strip \
- $(eval lib_dir := lib$(if $(TARGET_IS_64BIT),64,)) \
- $(eval vndk_dir := $(2)-$(PLATFORM_VNDK_VERSION)) \
$(foreach lib,$(1), \
- $(eval notice_file_name := $(patsubst %.vendor,%.so.txt,$(lib))) \
- $(TARGET_OUT_NOTICE_FILES)/src/system/$(lib_dir)/$(vndk_dir)/$(notice_file_name) \
- ) \
-)
+ $(eval notice := $(sort \
+ $(ALL_MODULES.$(lib).NOTICES) \
+ $(if $(TARGET_2ND_ARCH),
+ $(ALL_MODULES.$(lib)$(TARGET_2ND_ARCH_MODULE_SUFFIX).NOTICES)))) \
+ $(if $(wordlist 2,100,$(notice)), \
+ $(error Unable to handle multiple notice files ($(lib)): $(notice))) \
+ $(if $(notice),$(notice):$(subst .vendor,,$(lib)).so.txt)))
endef
-# If in the future libclang_rt.ubsan* is removed from the VNDK-core list,
-# need to update the related logic in this file.
-ifeq (,$(filter libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
- $(warning libclang_rt.ubsan* is no longer a VNDK-core library. Please update this file.)
- vndk_core_libs := $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES))
-else
- vndk_core_libs := $(addsuffix .vendor,$(filter-out libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
-
- vndk_core_libs += $(call clang-ubsan-vndk-core)
- ifdef TARGET_2ND_ARCH
- vndk_core_libs += $(call clang-ubsan-vndk-core,true)
- endif
-endif
-
+vndk_core_libs := $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES))
vndk_sp_libs := $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
vndk_private_libs := $(addsuffix .vendor,$(VNDK_PRIVATE_LIBRARIES))
@@ -103,34 +77,37 @@ vndk_snapshot_configs_out := $(vndk_snapshot_top)/configs
#######################################
# vndkcore.libraries.txt
vndkcore.libraries.txt := $(vndk_snapshot_configs_out)/vndkcore.libraries.txt
-$(vndkcore.libraries.txt): $(vndk_core_libs)
+$(vndkcore.libraries.txt): PRIVATE_LIBS := $(vndk_core_libs)
+$(vndkcore.libraries.txt):
@echo 'Generating: $@'
@rm -f $@
@mkdir -p $(dir $@)
$(hide) echo -n > $@
- $(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
+ $(hide) $(foreach lib,$(PRIVATE_LIBS),echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
#######################################
# vndkprivate.libraries.txt
vndkprivate.libraries.txt := $(vndk_snapshot_configs_out)/vndkprivate.libraries.txt
-$(vndkprivate.libraries.txt): $(vndk_private_libs)
+$(vndkprivate.libraries.txt): PRIVATE_LIBS := $(vndk_private_libs)
+$(vndkprivate.libraries.txt):
@echo 'Generating: $@'
@rm -f $@
@mkdir -p $(dir $@)
$(hide) echo -n > $@
- $(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
+ $(hide) $(foreach lib,$(PRIVATE_LIBS),echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
#######################################
# module_paths.txt
module_paths.txt := $(vndk_snapshot_configs_out)/module_paths.txt
-$(module_paths.txt): $(vndk_snapshot_libs)
+$(module_paths.txt): PRIVATE_LIBS := $(vndk_snapshot_libs)
+$(module_paths.txt):
@echo 'Generating: $@'
@rm -f $@
@mkdir -p $(dir $@)
$(hide) echo -n > $@
- $(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so $(ALL_MODULES.$(lib).PATH) >> $@;)
+ $(hide) $(foreach lib,$(PRIVATE_LIBS),echo $(patsubst %.vendor,%,$(lib)).so $(ALL_MODULES.$(lib).PATH) >> $@;)
vndk_snapshot_configs := \
@@ -142,8 +119,10 @@ vndk_snapshot_configs := \
# vndk_snapshot_zip
vndk_snapshot_variant := $(vndk_snapshot_out)/$(TARGET_ARCH)
binder :=
-ifneq ($(TARGET_USES_64_BIT_BINDER), true)
- binder := binder32
+ifneq ($(TARGET_IS_64_BIT), true)
+ ifneq ($(TARGET_USES_64_BIT_BINDER), true)
+ binder := binder32
+ endif
endif
vndk_lib_dir := $(subst $(space),/,$(strip $(vndk_snapshot_variant) $(binder) arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)))
vndk_lib_dir_2nd := $(subst $(space),/,$(strip $(vndk_snapshot_variant) $(binder) arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)))
@@ -151,70 +130,76 @@ vndk_snapshot_zip := $(PRODUCT_OUT)/android-vndk-$(TARGET_PRODUCT).zip
$(vndk_snapshot_zip): PRIVATE_VNDK_SNAPSHOT_OUT := $(vndk_snapshot_out)
+deps := $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(subst .vendor,,$(lib)).so))
$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_lib_dir)/shared/vndk-core
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES := \
- $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES := $(deps)
+$(vndk_snapshot_zip): $(foreach d,$(deps),$(call word-colon,1,$(d)))
+deps :=
+deps := $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(subst .vendor,,$(lib)).so))
$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_lib_dir)/shared/vndk-sp
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES := \
- $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES := $(deps)
+$(vndk_snapshot_zip): $(foreach d,$(deps),$(call word-colon,1,$(d)))
+deps :=
+deps := $(call paths-of-intermediates,$(foreach txt,$(vndk_prebuilt_txts), \
+ $(txt):$(patsubst %.txt,%.$(PLATFORM_VNDK_VERSION).txt,$(txt)))) \
+ $(foreach config,$(vndk_snapshot_configs),$(config):$(notdir $(config)))
$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_variant)/configs
-$(vndk_snapshot_zip): PRIVATE_CONFIGS_INTERMEDIATES := \
- $(call paths-of-intermediates,$(foreach txt,$(vndk_prebuilt_txts), \
- $(txt):$(patsubst %.txt,%.$(PLATFORM_VNDK_VERSION).txt,$(txt))),ETC) \
- $(vndk_snapshot_configs)
+$(vndk_snapshot_zip): PRIVATE_CONFIGS_INTERMEDIATES := $(deps)
+$(vndk_snapshot_zip): $(foreach d,$(deps),$(call word-colon,1,$(d)))
+deps :=
+notices := $(call paths-of-notice-files,$(vndk_core_libs) $(vndk_sp_libs))
$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_variant)/NOTICE_FILES
-$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_INTERMEDIATES := \
- $(call paths-of-notice-files,$(vndk_core_libs),vndk) \
- $(call paths-of-notice-files,$(vndk_sp_libs),vndk-sp)
+$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_INTERMEDIATES := $(notices)
+$(vndk_snapshot_zip): $(foreach n,$(notices),$(call word-colon,1,$(n)))
+notices :=
ifdef TARGET_2ND_ARCH
+deps := $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(subst .vendor,,$(lib)).so),true)
$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-core
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
- $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := $(deps)
+$(vndk_snapshot_zip): $(foreach d,$(deps),$(call word-colon,1,$(d)))
+deps :=
+deps := $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(subst .vendor,,$(lib)).so),true)
$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-sp
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
- $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := $(deps)
+$(vndk_snapshot_zip): $(foreach d,$(deps),$(call word-colon,1,$(d)))
+deps :=
endif
# Args
# $(1): destination directory
-# $(2): list of files to copy
-$(vndk_snapshot_zip): private-copy-vndk-intermediates = \
+# $(2): list of files (src:dest) to copy
+$(vndk_snapshot_zip): private-copy-intermediates = \
$(if $(2),$(strip \
- @mkdir -p $(1); \
+ @mkdir -p $(1) && \
$(foreach file,$(2), \
- if [ -e $(file) ]; then \
- cp -p $(file) $(call append-path,$(1),$(subst .vendor,,$(notdir $(file)))); \
- fi; \
+ cp $(call word-colon,1,$(file)) $(call append-path,$(1),$(call word-colon,2,$(file))) && \
) \
+ true \
))
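For a single src:dest entry the macro expands to shell roughly like the following (paths hypothetical); the trailing 'true' terminates the '&&' chain emitted by the foreach so the recipe line stays well-formed:

    @mkdir -p $(PRIVATE_VNDK_CORE_OUT) && \
      cp out/.../libfoo.vendor.so $(PRIVATE_VNDK_CORE_OUT)/libfoo.so && \
      true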
-vndk_snapshot_dependencies := \
- $(vndk_snapshot_libs) \
- $(vndk_prebuilt_txts) \
- $(vndk_snapshot_configs)
-$(vndk_snapshot_zip): $(vndk_snapshot_dependencies) $(SOONG_ZIP)
+$(vndk_snapshot_zip): $(SOONG_ZIP)
@echo 'Generating VNDK snapshot: $@'
@rm -f $@
@rm -rf $(PRIVATE_VNDK_SNAPSHOT_OUT)
@mkdir -p $(PRIVATE_VNDK_SNAPSHOT_OUT)
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_VNDK_CORE_OUT),$(PRIVATE_VNDK_CORE_INTERMEDIATES))
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_VNDK_SP_OUT),$(PRIVATE_VNDK_SP_INTERMEDIATES))
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_CONFIGS_OUT),$(PRIVATE_CONFIGS_INTERMEDIATES))
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_NOTICE_FILES_OUT),$(PRIVATE_NOTICE_FILES_INTERMEDIATES))
ifdef TARGET_2ND_ARCH
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
- $(call private-copy-vndk-intermediates, \
+ $(call private-copy-intermediates, \
$(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
endif
$(hide) $(SOONG_ZIP) -o $@ -C $(PRIVATE_VNDK_SNAPSHOT_OUT) -D $(PRIVATE_VNDK_SNAPSHOT_OUT)
@@ -240,7 +225,6 @@ vndk_snapshot_variant :=
binder :=
vndk_lib_dir :=
vndk_lib_dir_2nd :=
-vndk_snapshot_dependencies :=
else # BOARD_VNDK_RUNTIME_DISABLE is set to 'true'
error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'."
diff --git a/core/use_lld_setup.mk b/core/use_lld_setup.mk
new file mode 100644
index 0000000000..8f47d68c3f
--- /dev/null
+++ b/core/use_lld_setup.mk
@@ -0,0 +1,20 @@
+#############################################################
+## Set up flags based on LOCAL_USE_CLANG_LLD.
+## Input variables: LOCAL_USE_CLANG_LLD
+## Output variables: my_use_clang_lld
+#############################################################
+
+# Use LLD by default.
+# Do not use LLD if LOCAL_USE_CLANG_LLD is false or 0
+my_use_clang_lld := true
+ifneq (,$(LOCAL_USE_CLANG_LLD))
+ ifneq (,$(filter 0 false,$(LOCAL_USE_CLANG_LLD)))
+ my_use_clang_lld := false
+ endif
+endif
+
+# Do not use LLD for Darwin host executables or shared libraries. See
+# https://lld.llvm.org/AtomLLD.html for status of lld for Mach-O.
+ifeq ($($(my_prefix)OS),darwin)
+my_use_clang_lld := false
+endif
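A module can opt out of LLD from its Android.mk; a minimal sketch with a hypothetical module name:

    LOCAL_MODULE := libexample
    LOCAL_USE_CLANG_LLD := false  # or 0; anything else keeps the LLD default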
diff --git a/core/verify_uses_libraries.sh b/core/verify_uses_libraries.sh
index 8135be63ec..dde0447d36 100755
--- a/core/verify_uses_libraries.sh
+++ b/core/verify_uses_libraries.sh
@@ -15,11 +15,13 @@
# limitations under the License.
+# aapt_binary is $(AAPT) in the build.
+
# Parse sdk, targetSdk, and uses-libraries in the APK, then cross-reference against the build-specified ones.
set -e
local_apk=$1
-badging=$(aapt dump badging "${local_apk}")
+badging=$(${aapt_binary} dump badging "${local_apk}")
export sdk_version=$(echo "${badging}" | grep "sdkVersion" | sed -n "s/sdkVersion:'\(.*\)'/\1/p")
# Export target_sdk_version to the caller.
export target_sdk_version=$(echo "${badging}" | grep "targetSdkVersion" | sed -n "s/targetSdkVersion:'\(.*\)'/\1/p")
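For reference, 'aapt dump badging' output contains lines of the form below (values illustrative), which the sed expressions reduce to the bare version strings:

    sdkVersion:'28'
    targetSdkVersion:'28'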
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 21598c4732..eb5db29851 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -39,9 +39,9 @@ ifdef INTERNAL_BUILD_ID_MAKEFILE
include $(INTERNAL_BUILD_ID_MAKEFILE)
endif
-DEFAULT_PLATFORM_VERSION := PPR1
-MIN_PLATFORM_VERSION := PPR1
-MAX_PLATFORM_VERSION := PPR1
+DEFAULT_PLATFORM_VERSION := QP1A
+MIN_PLATFORM_VERSION := QP1A
+MAX_PLATFORM_VERSION := QP1A
ALLOWED_VERSIONS := $(call allowed-platform-versions,\
$(MIN_PLATFORM_VERSION),\
@@ -56,6 +56,13 @@ ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
$(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
$(error $(ALLOWED_VERSIONS))
endif
+ALLOWED_VERSIONS :=
+MIN_PLATFORM_VERSION :=
+MAX_PLATFORM_VERSION :=
+
+.KATI_READONLY := \
+ DEFAULT_PLATFORM_VERSION \
+ TARGET_PLATFORM_VERSION
# Default versions for each TARGET_PLATFORM_VERSION
# TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
@@ -66,20 +73,22 @@ endif
# Update this value when the platform version changes (rather
# than overriding it somewhere else). Can be an arbitrary string.
-# When you add a new PLATFORM_VERSION which will result in a new
-# PLATFORM_SDK_VERSION please ensure you add a corresponding isAtLeast*
-# method in the following java file:
-# frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java
-
# When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
# please add that PLATFORM_VERSION as well as clean up obsolete PLATFORM_VERSION's
# in the following text file:
# cts/tests/tests/os/assets/platform_versions.txt
-PLATFORM_VERSION.PPR1 := 9
+
+# Note that there should be one PLATFORM_VERSION and PLATFORM_VERSION_CODENAME
+# entry for each unreleased API level, regardless of
+# MIN_PLATFORM_VERSION/MAX_PLATFORM_VERSION. PLATFORM_VERSION is used to
+# generate the range of allowed SDK versions, so it must have an entry for every
+# unreleased API level targetable by this branch, not just those that are valid
+# lunch targets for this branch.
+PLATFORM_VERSION.QP1A := 10
# These are the current development codenames, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.PPR1 := REL
+PLATFORM_VERSION_CODENAME.QP1A := REL
ifndef PLATFORM_VERSION
PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
@@ -88,6 +97,7 @@ ifndef PLATFORM_VERSION
PLATFORM_VERSION := $(TARGET_PLATFORM_VERSION)
endif
endif
+.KATI_READONLY := PLATFORM_VERSION
ifndef PLATFORM_SDK_VERSION
# This is the canonical definition of the SDK version, which defines
@@ -99,15 +109,12 @@ ifndef PLATFORM_SDK_VERSION
# SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
# the code-name of the new development work.
- # When you change PLATFORM_SDK_VERSION please ensure you also update the
- # corresponding methods for isAtLeast* in the following java file:
- # frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java
-
# When you increment the PLATFORM_SDK_VERSION please ensure you also
# clear out the following text file of all older PLATFORM_VERSION's:
# cts/tests/tests/os/assets/platform_versions.txt
- PLATFORM_SDK_VERSION := 28
+ PLATFORM_SDK_VERSION := 29
endif
+.KATI_READONLY := PLATFORM_SDK_VERSION
ifndef PLATFORM_VERSION_CODENAME
PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
@@ -153,6 +160,10 @@ ifndef PLATFORM_VERSION_CODENAME
$(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_FUTURE_CODENAMES)))
endif
+.KATI_READONLY := \
+ PLATFORM_VERSION_CODENAME \
+ PLATFORM_VERSION_ALL_CODENAMES \
+ PLATFORM_VERSION_FUTURE_CODENAMES
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
PLATFORM_PREVIEW_SDK_VERSION := 0
@@ -166,10 +177,14 @@ else
# SDK version the package was built for, otherwise it should fall back to
# assuming the device can only support APIs as of the previous official
# public release.
- # This value will always be 0 for release builds.
- PLATFORM_PREVIEW_SDK_VERSION := 0
+ # This value will always be forced to 0 for release builds by the logic
+ # in the "ifeq" block above, so the value below will be used on any
+ # non-release builds, and it should always be at least 1, to indicate that
+ # APIs may have changed since the claimed PLATFORM_SDK_VERSION.
+ PLATFORM_PREVIEW_SDK_VERSION := 1
endif
endif
+.KATI_READONLY := PLATFORM_PREVIEW_SDK_VERSION
ifndef DEFAULT_APP_TARGET_SDK
# This is the default minSdkVersion and targetSdkVersion to use for
@@ -183,6 +198,7 @@ ifndef DEFAULT_APP_TARGET_SDK
DEFAULT_APP_TARGET_SDK := $(PLATFORM_VERSION_CODENAME)
endif
endif
+.KATI_READONLY := DEFAULT_APP_TARGET_SDK
ifndef PLATFORM_VNDK_VERSION
# This is the definition of the VNDK version for the current VNDK libraries.
@@ -201,6 +217,7 @@ ifndef PLATFORM_VNDK_VERSION
PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
endif
endif
+.KATI_READONLY := PLATFORM_VNDK_VERSION
ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
# This is the oldest version of system SDK that the platform supports. Contrary
@@ -209,6 +226,7 @@ ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
# old system APIs are gradually deprecated, removed and then deleted.
PLATFORM_SYSTEMSDK_MIN_VERSION := 28
endif
+.KATI_READONLY := PLATFORM_SYSTEMSDK_MIN_VERSION
# This is the list of system SDK versions that the current platform supports.
PLATFORM_SYSTEMSDK_VERSIONS :=
@@ -224,6 +242,7 @@ else
PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_VERSION_CODENAME)
endif
PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
+.KATI_READONLY := PLATFORM_SYSTEMSDK_VERSIONS
ifndef PLATFORM_SECURITY_PATCH
# Used to indicate the security patch that has been applied to the device.
@@ -233,6 +252,7 @@ ifndef PLATFORM_SECURITY_PATCH
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
PLATFORM_SECURITY_PATCH := 2022-01-01
endif
+.KATI_READONLY := PLATFORM_SECURITY_PATCH
ifndef PLATFORM_SECURITY_PATCH_TIMESTAMP
# Used to indicate the matching timestamp for the security patch string in PLATFORM_SECURITY_PATCH.
@@ -251,6 +271,7 @@ ifndef PLATFORM_BASE_OS
# If there is no $PLATFORM_BASE_OS set, keep it empty.
PLATFORM_BASE_OS :=
endif
+.KATI_READONLY := PLATFORM_BASE_OS
ifndef BUILD_ID
# Used to signify special builds. E.g., branches and/or releases,
@@ -260,6 +281,7 @@ ifndef BUILD_ID
# If there is no BUILD_ID set, make it obvious.
BUILD_ID := UNKNOWN
endif
+.KATI_READONLY := BUILD_ID
ifndef BUILD_DATETIME
# Used to reproduce builds by setting the same time. Must be the number
@@ -272,11 +294,12 @@ DATE := date -r $(BUILD_DATETIME)
else
DATE := date -d @$(BUILD_DATETIME)
endif
+.KATI_READONLY := DATE
# Everything should be using BUILD_DATETIME_FROM_FILE instead.
# BUILD_DATETIME and DATE can be removed once BUILD_NUMBER moves
# to soong_ui.
-BUILD_DATETIME :=
+$(KATI_obsolete_var BUILD_DATETIME,Use BUILD_DATETIME_FROM_FILE)
HAS_BUILD_NUMBER := true
ifndef BUILD_NUMBER
@@ -289,13 +312,15 @@ ifndef BUILD_NUMBER
# If no BUILD_NUMBER is set, create a useful "I am an engineering build
# from this date/time" value. Make it start with a non-digit so that
# anyone trying to parse it as an integer will probably get "0".
- BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+ BUILD_NUMBER := eng.$(shell echo $${BUILD_USERNAME:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
HAS_BUILD_NUMBER := false
endif
+.KATI_READONLY := BUILD_NUMBER HAS_BUILD_NUMBER
ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
# Used to set minimum supported target sdk version. Apps targeting sdk
- # version lower than the set value will fail to install and run on android
- # device.
- PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 17
+ # version lower than the set value will result in a warning being shown
+ # when any activity from the app is started.
+ PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 23
endif
+.KATI_READONLY := PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
diff --git a/envsetup.sh b/envsetup.sh
index cf619505a2..9e381a23e3 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -4,33 +4,38 @@ cat <<EOF
Run "m help" for help with the build system itself.
Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
-- lunch: lunch <product_name>-<build_variant>
- Selects <product_name> as the product to build, and <build_variant> as the variant to
- build, and stores those selections in the environment to be read by subsequent
- invocations of 'm' etc.
-- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
-- croot: Changes directory to the top of the tree.
-- m: Makes from the top of the tree.
-- mm: Builds all of the modules in the current directory, but not their dependencies.
-- mmm: Builds all of the modules in the supplied directories, but not their dependencies.
- To limit the modules being built use the syntax: mmm dir/:target1,target2.
-- mma: Builds all of the modules in the current directory, and their dependencies.
-- mmma: Builds all of the modules in the supplied directories, and their dependencies.
-- provision: Flash device with all required partitions. Options will be passed on to fastboot.
-- cgrep: Greps on all local C/C++ files.
-- ggrep: Greps on all local Gradle files.
-- jgrep: Greps on all local Java files.
-- resgrep: Greps on all local res/*.xml files.
-- mangrep: Greps on all local AndroidManifest.xml files.
-- mgrep: Greps on all local Makefiles files.
-- sepgrep: Greps on all local sepolicy files.
-- sgrep: Greps on all local source files.
-- godir: Go to the directory containing a file.
+- lunch: lunch <product_name>-<build_variant>
+ Selects <product_name> as the product to build, and <build_variant> as the variant to
+ build, and stores those selections in the environment to be read by subsequent
+ invocations of 'm' etc.
+- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
+- croot: Changes directory to the top of the tree, or a subdirectory thereof.
+- m: Makes from the top of the tree.
+- mm: Builds all of the modules in the current directory, but not their dependencies.
+- mmm: Builds all of the modules in the supplied directories, but not their dependencies.
+ To limit the modules being built use the syntax: mmm dir/:target1,target2.
+- mma: Builds all of the modules in the current directory, and their dependencies.
+- mmma: Builds all of the modules in the supplied directories, and their dependencies.
+- provision: Flash device with all required partitions. Options will be passed on to fastboot.
+- cgrep: Greps on all local C/C++ files.
+- ggrep: Greps on all local Gradle files.
+- jgrep: Greps on all local Java files.
+- resgrep: Greps on all local res/*.xml files.
+- mangrep: Greps on all local AndroidManifest.xml files.
+- mgrep: Greps on all local Makefiles files.
+- sepgrep: Greps on all local sepolicy files.
+- sgrep: Greps on all local source files.
+- godir: Go to the directory containing a file.
+- allmod: List all modules.
+- gomod: Go to the directory containing a module.
+- pathmod: Get the directory containing a module.
+- refreshmod: Refresh list of modules for allmod/gomod.
Environment options:
- SANITIZE_HOST: Set to 'true' to use ASAN for all host modules. Note that
ASAN_OPTIONS=detect_leaks=0 will be set by default until the
build is leak-check clean.
+- ANDROID_QUIET_BUILD: set to 'true' to display only the essential messages.
Look at the source to view more functions. The complete list is:
EOF
@@ -48,12 +53,12 @@ function build_build_var_cache()
{
local T=$(gettop)
# Grep out the variable names from the script.
- cached_vars=`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
- cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
+ cached_vars=(`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`)
+ cached_abs_vars=(`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`)
# Call the build system to dump the "<val>=<value>" pairs as a shell script.
build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
- --vars="$cached_vars" \
- --abs-vars="$cached_abs_vars" \
+ --vars="${cached_vars[*]}" \
+ --abs-vars="${cached_abs_vars[*]}" \
--var-prefix=var_cache_ \
--abs-var-prefix=abs_var_cache_`
local ret=$?
@@ -262,8 +267,22 @@ function setpaths()
export ANDROID_EMULATOR_PREBUILTS
fi
+ # Append asuite prebuilts path to ANDROID_BUILD_PATHS.
+ local os_arch=$(get_build_var HOST_PREBUILT_TAG)
+ local ACLOUD_PATH="$T/prebuilts/asuite/acloud/$os_arch:"
+ local AIDEGEN_PATH="$T/prebuilts/asuite/aidegen/$os_arch:"
+ local ATEST_PATH="$T/prebuilts/asuite/atest/$os_arch:"
+ export ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS$ACLOUD_PATH$AIDEGEN_PATH$ATEST_PATH
+
export PATH=$ANDROID_BUILD_PATHS$PATH
- export PYTHONPATH=$T/development/python-packages:$PYTHONPATH
+
+ # out with the duplicate old
+ if [ -n "$ANDROID_PYTHONPATH" ]; then
+ export PYTHONPATH=${PYTHONPATH//$ANDROID_PYTHONPATH/}
+ fi
+ # and in with the new
+ export ANDROID_PYTHONPATH=$T/development/python-packages:
+ export PYTHONPATH=$ANDROID_PYTHONPATH$PYTHONPATH
export ANDROID_JAVA_HOME=$(get_abs_build_var ANDROID_JAVA_HOME)
export JAVA_HOME=$ANDROID_JAVA_HOME
@@ -301,7 +320,6 @@ function printconfig()
function set_stuff_for_environment()
{
- settitle
setpaths
set_sequence_number
@@ -316,49 +334,57 @@ function set_sequence_number()
export BUILD_ENV_SEQUENCE_NUMBER=13
}
-function settitle()
-{
- # This used to be opt-out with STAY_OFF_MY_LAWN, but this breaks folks
- # actually using PROMPT_COMMAND (https://issuetracker.google.com/38402256),
- # and the attempt to set the title doesn't do anything for the default
- # window manager in debian right now, so switch it to opt-in for anyone
- # who actually wants this.
- if [ "$ANDROID_BUILD_SET_WINDOW_TITLE" = "true" ]; then
- local arch=$(gettargetarch)
- local product=$TARGET_PRODUCT
- local variant=$TARGET_BUILD_VARIANT
- local apps=$TARGET_BUILD_APPS
- if [ -z "$apps" ]; then
- export PROMPT_COMMAND="echo -ne \"\033]0;[${arch}-${product}-${variant}] ${USER}@${HOSTNAME}: ${PWD}\007\""
- else
- export PROMPT_COMMAND="echo -ne \"\033]0;[$arch $apps $variant] ${USER}@${HOSTNAME}: ${PWD}\007\""
- fi
- fi
+# Takes a command name and checks whether it is listed in ENVSETUP_NO_COMPLETION.
+function should_add_completion() {
+ local cmd="$(basename $1| sed 's/_completion//' |sed 's/\.\(.*\)*sh$//')"
+ case :"$ENVSETUP_NO_COMPLETION": in
+ *:"$cmd":*)
+ return 1
+ ;;
+ esac
+ return 0
}
function addcompletions()
{
local T dir f
- # Keep us from trying to run in something that isn't bash.
- if [ -z "${BASH_VERSION}" ]; then
+ # Keep us from trying to run in something that's neither bash nor zsh.
+ if [ -z "$BASH_VERSION" -a -z "$ZSH_VERSION" ]; then
return
fi
# Keep us from trying to run in bash that's too old.
- if [ ${BASH_VERSINFO[0]} -lt 3 ]; then
+ if [ -n "$BASH_VERSION" -a ${BASH_VERSINFO[0]} -lt 3 ]; then
return
fi
- dir="sdk/bash_completion"
- if [ -d ${dir} ]; then
- for f in `/bin/ls ${dir}/[a-z]*.bash 2> /dev/null`; do
- echo "including $f"
+ local completion_files=(
+ system/core/adb/adb.bash
+ system/core/fastboot/fastboot.bash
+ tools/asuite/asuite.sh
+ )
+ # Completion can be disabled selectively to allow users to use non-standard completion.
+ # e.g.
+ # ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
+ # ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+ for f in ${completion_files[*]}; do
+ if [ -f "$f" ] && should_add_completion "$f"; then
. $f
- done
+ fi
+ done
+
+ if should_add_completion bit ; then
+ complete -C "bit --tab" bit
fi
+ if [ -z "$ZSH_VERSION" ]; then
+ # Doesn't work in zsh.
+ complete -o nospace -F _croot croot
+ fi
+ complete -F _lunch lunch
- complete -C "bit --tab" bit
+ complete -F _complete_android_module_names gomod
+ complete -F _complete_android_module_names m
}
function choosetype()
@@ -529,29 +555,16 @@ function choosecombo()
destroy_build_var_cache
}
-# Clear this variable. It will be built up again when the vendorsetup.sh
-# files are included at the end of this file.
-unset LUNCH_MENU_CHOICES
function add_lunch_combo()
{
- local new_combo=$1
- local c
- for c in ${LUNCH_MENU_CHOICES[@]} ; do
- if [ "$new_combo" = "$c" ] ; then
- return
- fi
- done
- LUNCH_MENU_CHOICES=(${LUNCH_MENU_CHOICES[@]} $new_combo)
+ if [ -n "$ZSH_VERSION" ]; then
+ echo -n "${funcfiletrace[1]}: "
+ else
+ echo -n "${BASH_SOURCE[1]}:${BASH_LINENO[0]}: "
+ fi
+ echo "add_lunch_combo is obsolete. Use COMMON_LUNCH_CHOICES in your AndroidProducts.mk instead."
}
-# add the default one here
-add_lunch_combo aosp_arm-eng
-add_lunch_combo aosp_arm64-eng
-add_lunch_combo aosp_mips-eng
-add_lunch_combo aosp_mips64-eng
-add_lunch_combo aosp_x86-eng
-add_lunch_combo aosp_x86_64-eng
-
function print_lunch_menu()
{
local uname=$(uname)
@@ -562,7 +575,7 @@ function print_lunch_menu()
local i=1
local choice
- for choice in ${LUNCH_MENU_CHOICES[@]}
+ for choice in $(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES)
do
echo " $i. $choice"
i=$(($i+1))
@@ -590,9 +603,16 @@ function lunch()
selection=aosp_arm-eng
elif (echo -n $answer | grep -q -e "^[0-9][0-9]*$")
then
- if [ $answer -le ${#LUNCH_MENU_CHOICES[@]} ]
+ local choices=($(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES))
+ if [ $answer -le ${#choices[@]} ]
then
- selection=${LUNCH_MENU_CHOICES[$(($answer-1))]}
+ # array in zsh starts from 1 instead of 0.
+ if [ -n "$ZSH_VERSION" ]
+ then
+ selection=${choices[$(($answer))]}
+ else
+ selection=${choices[$(($answer-1))]}
+ fi
fi
else
selection=$answer
@@ -643,6 +663,7 @@ function lunch()
destroy_build_var_cache
}
+unset COMMON_LUNCH_CHOICES_CACHE
# Tab completion for lunch.
function _lunch()
{
@@ -651,10 +672,13 @@ function _lunch()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
- COMPREPLY=( $(compgen -W "${LUNCH_MENU_CHOICES[*]}" -- ${cur}) )
+ if [ -z "$COMMON_LUNCH_CHOICES_CACHE" ]; then
+ COMMON_LUNCH_CHOICES_CACHE=$(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES)
+ fi
+
+ COMPREPLY=( $(compgen -W "${COMMON_LUNCH_CHOICES_CACHE}" -- ${cur}) )
return 0
}
-complete -F _lunch lunch
# Configures the build to build unbundled apps.
# Run tapas with one or more app names (from LOCAL_PACKAGE_NAME)
@@ -756,6 +780,9 @@ function findmakefile()
{
local TOPFILE=build/make/core/envsetup.mk
local HERE=$PWD
+ if [ "$1" ]; then
+ \cd $1
+ fi;
local T=
while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
T=`PWD= /bin/pwd`
@@ -767,6 +794,7 @@ function findmakefile()
\cd ..
done
\cd $HERE
+ return 1
}
function mm()
@@ -835,24 +863,29 @@ function mmm()
# Remove the leading ./ and trailing / if any exists.
DIR=${DIR#./}
DIR=${DIR%/}
- if [ -f $DIR/Android.mk -o -f $DIR/Android.bp ]; then
- local TO_CHOP=`(\cd -P -- $T && pwd -P) | wc -c | tr -d ' '`
- local TO_CHOP=`expr $TO_CHOP + 1`
- local START=`PWD= /bin/pwd`
- local MDIR=`echo $START | cut -c${TO_CHOP}-`
- if [ "$MDIR" = "" ] ; then
- MDIR=$DIR
- else
- MDIR=$MDIR/$DIR
+ local M
+ if [ "$DIR_MODULES" = "" ]; then
+ M=$(findmakefile $DIR)
+ else
+ # Only check the target directory if a module is specified.
+ if [ -f $DIR/Android.mk -o -f $DIR/Android.bp ]; then
+ local HERE=$PWD
+ cd $DIR
+ M=`PWD= /bin/pwd`
+ M=$M/Android.mk
+ cd $HERE
fi
- MDIR=${MDIR%/.}
+ fi
+ if [ "$M" ]; then
+ # Remove the path to top as the makefile path needs to be relative
+ local M=`echo $M|sed 's:'$T'/::'`
if [ "$DIR_MODULES" = "" ]; then
- MODULES_IN_PATHS="$MODULES_IN_PATHS MODULES-IN-$MDIR"
- GET_INSTALL_PATHS="$GET_INSTALL_PATHS GET-INSTALL-PATH-IN-$MDIR"
+ MODULES_IN_PATHS="$MODULES_IN_PATHS MODULES-IN-$(dirname ${M})"
+ GET_INSTALL_PATHS="$GET_INSTALL_PATHS GET-INSTALL-PATH-IN-$(dirname ${M})"
else
MODULES="$MODULES $DIR_MODULES"
fi
- MAKEFILE="$MAKEFILE $MDIR/Android.mk"
+ MAKEFILE="$MAKEFILE $M"
else
case $DIR in
showcommands | snod | dist | *=*) ARGS="$ARGS $DIR";;
@@ -894,7 +927,7 @@ function mma()
echo "Couldn't locate the top of the tree. Try setting TOP."
return 1
fi
- local M=$(findmakefile)
+ local M=$(findmakefile || echo $(realpath $PWD)/Android.mk)
# Remove the path to top as the makefilepath needs to be relative
local M=`echo $M|sed 's:'$T'/::'`
local MODULES_IN_PATHS=MODULES-IN-$(dirname ${M})
@@ -958,6 +991,18 @@ function croot()
fi
}
+function _croot()
+{
+ local T=$(gettop)
+ if [ "$T" ]; then
+ local cur="${COMP_WORDS[COMP_CWORD]}"
+ k=0
+ for c in $(compgen -d ${T}/${cur}); do
+ COMPREPLY[k++]=${c#${T}/}/
+ done
+ fi
+}
+
function cproj()
{
local TOPFILE=build/make/core/envsetup.mk
@@ -999,28 +1044,6 @@ function qpid() {
fi
}
-function pid()
-{
- local prepend=''
- local append=''
- if [ "$1" = "--exact" ]; then
- prepend=' '
- append='$'
- shift
- fi
- local EXE="$1"
- if [ "$EXE" ] ; then
- local PID=`adb shell ps \
- | tr -d '\r' \
- | \grep "$prepend$EXE$append" \
- | sed -e 's/^[^ ]* *\([0-9]*\).*$/\1/'`
- echo "$PID"
- else
- echo "usage: pid [--exact] <process name>"
- return 255
- fi
-}
-
# coredump_setup - enable core dumps globally for any process
# that has the core-file-size limit set correctly
#
@@ -1107,60 +1130,13 @@ function systemstack()
stacks system_server
}
-function stacks()
-{
- if [[ $1 =~ ^[0-9]+$ ]] ; then
- local PID="$1"
- elif [ "$1" ] ; then
- local PIDLIST="$(pid $1)"
- if [[ $PIDLIST =~ ^[0-9]+$ ]] ; then
- local PID="$PIDLIST"
- elif [ "$PIDLIST" ] ; then
- echo "more than one process: $1"
- else
- echo "no such process: $1"
- fi
- else
- echo "usage: stacks [pid|process name]"
- fi
-
- if [ "$PID" ] ; then
- # Determine whether the process is native
- if adb shell ls -l /proc/$PID/exe | grep -q /system/bin/app_process ; then
- # Dump stacks of Dalvik process
- local TRACES=/data/anr/traces.txt
- local ORIG=/data/anr/traces.orig
- local TMP=/data/anr/traces.tmp
-
- # Keep original traces to avoid clobbering
- adb shell mv $TRACES $ORIG
-
- # Make sure we have a usable file
- adb shell touch $TRACES
- adb shell chmod 666 $TRACES
-
- # Dump stacks and wait for dump to finish
- adb shell kill -3 $PID
- adb shell notify $TRACES >/dev/null
-
- # Restore original stacks, and show current output
- adb shell mv $TRACES $TMP
- adb shell mv $ORIG $TRACES
- adb shell cat $TMP
- else
- # Dump stacks of native process
- adb shell debuggerd -b $PID
- fi
- fi
-}
-
# Read the ELF header from /proc/$PID/exe to determine if the process is
# 64-bit.
function is64bit()
{
local PID="$1"
if [ "$PID" ] ; then
- if [[ "$(adb shell cat /proc/$PID/exe | xxd -l 1 -s 4 -ps)" -eq "02" ]] ; then
+ if [[ "$(adb shell cat /proc/$PID/exe | xxd -l 1 -s 4 -p)" -eq "02" ]] ; then
echo "64"
else
echo ""
@@ -1174,7 +1150,7 @@ case `uname -s` in
Darwin)
function sgrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|S|java|xml|sh|mk|aidl|vts)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts)' \
-exec grep --color -n "$@" {} +
}
@@ -1182,7 +1158,7 @@ case `uname -s` in
*)
function sgrep()
{
- find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\)' \
+ find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\)' \
-exec grep --color -n "$@" {} +
}
;;
@@ -1247,7 +1223,7 @@ case `uname -s` in
function treegrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|S|java|xml)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' \
-exec grep --color -n -i "$@" {} +
}
@@ -1261,7 +1237,7 @@ case `uname -s` in
function treegrep()
{
- find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|S|java|xml)' -type f \
+ find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' -type f \
-exec grep --color -n -i "$@" {} +
}
@@ -1528,6 +1504,92 @@ function godir () {
\cd $T/$pathname
}
+# Update module-info.json in out.
+function refreshmod() {
+ if [ ! "$ANDROID_PRODUCT_OUT" ]; then
+ echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+ return 1
+ fi
+
+ echo "Refreshing modules (building module-info.json). Log at $ANDROID_PRODUCT_OUT/module-info.json.build.log." >&2
+
+ # for the output of the next command
+ mkdir -p $ANDROID_PRODUCT_OUT || return 1
+
+ # Note, can't use absolute path because of the way make works.
+ m out/target/product/$(get_build_var TARGET_DEVICE)/module-info.json \
+ > $ANDROID_PRODUCT_OUT/module-info.json.build.log 2>&1
+}
+
+# List all modules for the current device, as cached in module-info.json. If any build change
+# is made that should be reflected in the output, run 'refreshmod' first.
+function allmod() {
+ if [ ! "$ANDROID_PRODUCT_OUT" ]; then
+ echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+ return 1
+ fi
+
+ if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
+ echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
+ refreshmod || return 1
+ fi
+
+ python -c "import json; print '\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys()))"
+}
+
+# Get the path of a specific module in the Android tree, as cached in module-info.json. If any build change
+# is made that should be reflected in the output, run 'refreshmod' first.
+function pathmod() {
+ if [ ! "$ANDROID_PRODUCT_OUT" ]; then
+ echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+ return 1
+ fi
+
+ if [[ $# -ne 1 ]]; then
+ echo "usage: pathmod <module>" >&2
+ return 1
+ fi
+
+ if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
+ echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
+ refreshmod || return 1
+ fi
+
+ local relpath=$(python -c "import json, os
+module = '$1'
+module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
+if module not in module_info:
+ exit(1)
+print module_info[module]['path'][0]" 2>/dev/null)
+
+ if [ -z "$relpath" ]; then
+ echo "Could not find module '$1' (try 'refreshmod' if there have been build changes?)." >&2
+ return 1
+ else
+ echo "$ANDROID_BUILD_TOP/$relpath"
+ fi
+}
+
+# Go to a specific module in the Android tree, as cached in module-info.json. If any build change
+# is made that should be reflected in the output, run 'refreshmod' first.
+function gomod() {
+ if [[ $# -ne 1 ]]; then
+ echo "usage: gomod <module>" >&2
+ return 1
+ fi
+
+ local path="$(pathmod $@)"
+ if [ -z "$path" ]; then
+ return 1
+ fi
+ cd $path
+}
+
+function _complete_android_module_names() {
+ local word=${COMP_WORDS[COMP_CWORD]}
+ COMPREPLY=( $(allmod | grep -E "^$word") )
+}
+
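A possible interactive flow with these helpers (module name hypothetical):

    $ refreshmod                 # rebuild module-info.json after build changes
    $ allmod | grep -i settings  # list matching modules
    $ pathmod Settings           # print the module's source directory
    $ gomod Settings             # cd into it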
# Print colored exit condition
function pez {
"$@"
@@ -1560,6 +1622,10 @@ function get_make_command()
function _wrap_build()
{
+ if [[ "${ANDROID_QUIET_BUILD:-}" == true ]]; then
+ "$@"
+ return $?
+ fi
local start_time=$(date +"%s")
"$@"
local ret=$?
@@ -1630,32 +1696,62 @@ function provision()
"$ANDROID_PRODUCT_OUT/provision-device" "$@"
}
-function atest()
-{
- # TODO (sbasi): Replace this to be a destination in the build out when & if
- # atest is built by the build system. (This will be necessary if it ever
- # depends on external pip projects).
- "$(gettop)"/tools/tradefederation/core/atest/atest.py "$@"
+# Zsh needs bashcompinit called to support bash-style completion.
+function enable_zsh_completion() {
+ # Don't override user's options if bash-style completion is already enabled.
+ if ! declare -f complete >/dev/null; then
+ autoload -U compinit && compinit
+ autoload -U bashcompinit && bashcompinit
+ fi
}
-if [ "x$SHELL" != "x/bin/bash" ]; then
- case `ps -o command -p $$` in
+function validate_current_shell() {
+ local current_sh="$(ps -o command -p $$)"
+ case "$current_sh" in
*bash*)
+ function check_type() { type -t "$1"; }
;;
+ *zsh*)
+ function check_type() { type "$1"; }
+ enable_zsh_completion ;;
*)
- echo "WARNING: Only bash is supported, use of other shell would lead to erroneous results"
+ echo -e "WARNING: Only bash and zsh are supported.\nUse of other shell would lead to erroneous results."
;;
esac
-fi
+}
# Execute the contents of any vendorsetup.sh files we can find.
-for f in `test -d device && find -L device -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` \
- `test -d vendor && find -L vendor -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` \
- `test -d product && find -L product -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort`
-do
- echo "including $f"
- . $f
-done
-unset f
+# Unless we find an allowed-vendorsetup_sh-files file, in which case we'll only
+# load those.
+#
+# This allows loading only approved vendorsetup.sh files
+function source_vendorsetup() {
+ allowed=
+ for f in $(find -L device vendor product -maxdepth 4 -name 'allowed-vendorsetup_sh-files' 2>/dev/null | sort); do
+ if [ -n "$allowed" ]; then
+ echo "More than one 'allowed_vendorsetup_sh-files' file found, not including any vendorsetup.sh files:"
+ echo " $allowed"
+ echo " $f"
+ return
+ fi
+ allowed="$f"
+ done
+
+ allowed_files=
+ [ -n "$allowed" ] && allowed_files=$(cat "$allowed")
+ for dir in device vendor product; do
+ for f in $(test -d $dir && \
+ find -L $dir -maxdepth 4 -name 'vendorsetup.sh' 2>/dev/null | sort); do
+
+ if [[ -z "$allowed" || "$allowed_files" =~ $f ]]; then
+ echo "including $f"; . "$f"
+ else
+ echo "ignoring $f, not in $allowed"
+ fi
+ done
+ done
+}
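A sketch of the allowlist mechanism (path hypothetical): an allowed-vendorsetup_sh-files file containing

    device/acme/roadrunner/vendorsetup.sh

causes only that script to be sourced; every other vendorsetup.sh found under device/, vendor/, or product/ is reported as ignored.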
+validate_current_shell
+source_vendorsetup
addcompletions
diff --git a/help.sh b/help.sh
index c1435429e0..be0734431d 100755
--- a/help.sh
+++ b/help.sh
@@ -40,6 +40,10 @@ Common goals are:
Stands for "Vendor, NO Dependencies"
pnod Quickly rebuild the product image from built packages
Stands for "Product, NO Dependencies"
+ psnod Quickly rebuild the product_services image from built packages
+ Stands for "ProductServices, NO Dependencies"
+ onod Quickly rebuild the odm image from built packages
+ Stands for "ODM, NO Dependencies"
So, for example, you could run:
diff --git a/packaging/distdir.mk b/packaging/distdir.mk
new file mode 100644
index 0000000000..264a8b098c
--- /dev/null
+++ b/packaging/distdir.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# From the Android.mk pass:
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
+include $(KATI_PACKAGE_MK_DIR)/dist.mk
+
+$(foreach pair,$(DIST_GOAL_OUTPUT_PAIRS), \
+ $(eval goal := $(call word-colon,1,$(pair))) \
+ $(eval output := $(call word-colon,2,$(pair))) \
+ $(eval .PHONY: _dist_$$(goal)) \
+ $(if $(call streq,$(DIST),true),\
+ $(eval _dist_$$(goal): $$(DIST_DIR)/$$(output)), \
+ $(eval _dist_$$(goal):)))
+
+define copy-one-dist-file
+$(2): $(1)
+ @echo "Dist: $$@"
+ rm -f $$@
+ cp $$< $$@
+endef
+
+ifeq ($(DIST),true)
+ $(foreach pair,$(DIST_SRC_DST_PAIRS), \
+ $(eval src := $(call word-colon,1,$(pair))) \
+ $(eval dst := $(DIST_DIR)/$(call word-colon,2,$(pair))) \
+ $(eval $(call copy-one-dist-file,$(src),$(dst))))
+endif
+
+copy-one-dist-file :=
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
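As an illustration of the expected input (goal, source, and destination names hypothetical), dist.mk could provide pairs such as

    DIST_GOAL_OUTPUT_PAIRS := droidcore:device-img.zip
    DIST_SRC_DST_PAIRS := out/target/product/generic/device-img.zip:device-img.zip

which, when DIST=true, yields a phony _dist_droidcore goal depending on $(DIST_DIR)/device-img.zip plus a copy rule for that file.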
diff --git a/tools/Android.mk b/packaging/main.mk
index c05d681551..0b746a8f8e 100644
--- a/tools/Android.mk
+++ b/packaging/main.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2010 The Android Open Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,24 @@
# limitations under the License.
#
-LOCAL_PATH := $(call my-dir)
+# Create a default rule. This is unused currently, as the real default rule is
+# still in the Kati build step.
+.PHONY: _packaging_default_rule_
+_packaging_default_rule_:
-# Only if this Android.mk was included not by a symlink should it be used.
-# This facilitates the transition away from symlinks: b/64397960
-ifeq ($(LOCAL_PATH),build/make/tools)
-include $(call all-makefiles-under,$(LOCAL_PATH))
+ifndef KATI
+$(error Only Kati is supported.)
endif
+
+$(info [1/3] initializing packaging system ...)
+
+.KATI_READONLY := KATI_PACKAGE_MK_DIR
+
+include build/make/common/core.mk
+include build/make/common/strings.mk
+
+$(info [2/3] including distdir.mk ...)
+
+include build/make/packaging/distdir.mk
+
+$(info [3/3] writing packaging rules ...)
diff --git a/rbesetup.sh b/rbesetup.sh
new file mode 100644
index 0000000000..da257b9946
--- /dev/null
+++ b/rbesetup.sh
@@ -0,0 +1,75 @@
+function _source_env_setup_script() {
+ local -r ENV_SETUP_SCRIPT="build/make/envsetup.sh"
+ local -r TOP_DIR=$(
+ while [[ ! -f "${ENV_SETUP_SCRIPT}" ]] && [[ "${PWD}" != "/" ]]; do
+ \cd ..
+ done
+ if [[ -f "${ENV_SETUP_SCRIPT}" ]]; then
+ echo "$(PWD= /bin/pwd -P)"
+ fi
+ )
+
+ local -r FULL_PATH_ENV_SETUP_SCRIPT="${TOP_DIR}/${ENV_SETUP_SCRIPT}"
+ if [[ ! -f "${FULL_PATH_ENV_SETUP_SCRIPT}" ]]; then
+ echo "ERROR: Unable to source ${ENV_SETUP_SCRIPT}"
+ return 1
+ fi
+
+ # Need to change directory to the repo root so vendor scripts can be sourced
+ # as well.
+ local -r CUR_DIR=$PWD
+ \cd "${TOP_DIR}"
+ source "${FULL_PATH_ENV_SETUP_SCRIPT}"
+ \cd "${CUR_DIR}"
+}
+
+# This function needs to run first as the remaining defining functions may be
+# using the envsetup.sh defined functions.
+_source_env_setup_script || return
+
+# This function prefixes the given command with appropriate variables needed
+# for the build to be executed with RBE.
+function use_rbe() {
+ local RBE_LOG_DIR="/tmp"
+ local RBE_BINARIES_DIR="prebuilts/remoteexecution-client/latest"
+ local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62"
+ # Do not set an invocation-ID and let reproxy auto-generate one.
+ USE_RBE="true" \
+ FLAG_server_address="unix:///tmp/reproxy_$RANDOM.sock" \
+ FLAG_exec_root="$(gettop)" \
+ FLAG_platform="container-image=docker://${DOCKER_IMAGE}" \
+ RBE_use_application_default_credentials="true" \
+ RBE_log_dir="${RBE_LOG_DIR}" \
+ RBE_reproxy_wait_seconds="20" \
+ RBE_output_dir="${RBE_LOG_DIR}" \
+ RBE_log_path="text://${RBE_LOG_DIR}/reproxy_log.txt" \
+ RBE_CXX_EXEC_STRATEGY="remote_local_fallback" \
+ RBE_cpp_dependency_scanner_plugin="${RBE_BINARIES_DIR}/dependency_scanner_go_plugin.so" \
+ RBE_DIR=${RBE_BINARIES_DIR} \
+ RBE_re_proxy="${RBE_BINARIES_DIR}/reproxy" \
+ $@
+}
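Intended usage is to wrap a single ordinary build invocation, for example:

    use_rbe m

which runs that one command with USE_RBE and the FLAG_*/RBE_* variables above set.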
+# This function detects if the uploader is available and sets the path of it to
+# ANDROID_ENABLE_METRICS_UPLOAD.
+function _export_metrics_uploader() {
+ local uploader_path="$(gettop)/vendor/google/misc/metrics_uploader_prebuilt/metrics_uploader.sh"
+ if [[ -x "${uploader_path}" ]]; then
+ export ANDROID_ENABLE_METRICS_UPLOAD="${uploader_path}"
+ fi
+}
+# This function sets RBE-specific environment variables needed for the build to
+# be executed by RBE. This file should be sourced once per checkout of the Android code.
+function _set_rbe_vars() {
+ unset USE_GOMA
+ export USE_RBE="true"
+ export RBE_CXX_EXEC_STRATEGY="racing"
+ export RBE_JAVAC_EXEC_STRATEGY="racing"
+ export RBE_R8_EXEC_STRATEGY="racing"
+ export RBE_D8_EXEC_STRATEGY="racing"
+ export RBE_use_unified_cas_ops="true"
+ export RBE_JAVAC=1
+ export RBE_R8=1
+ export RBE_D8=1
+}
+_export_metrics_uploader
+_set_rbe_vars
diff --git a/target/OWNERS b/target/OWNERS
new file mode 100644
index 0000000000..feb274262b
--- /dev/null
+++ b/target/OWNERS
@@ -0,0 +1 @@
+hansson@google.com
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 7fe45ebdbf..971a7b2086 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -52,3 +52,59 @@ LOCAL_PREBUILT_MODULE_FILE := $(GEN)
include $(BUILD_PREBUILT)
BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
endif
+
+# ODM manifest
+ifdef ODM_MANIFEST_FILES
+# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
+include $(CLEAR_VARS)
+LOCAL_MODULE := odm_manifest.xml
+LOCAL_MODULE_STEM := manifest.xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := vintf
+LOCAL_ODM_MODULE := true
+
+GEN := $(local-generated-sources-dir)/manifest.xml
+$(GEN): PRIVATE_SRC_FILES := $(ODM_MANIFEST_FILES)
+$(GEN): $(ODM_MANIFEST_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
+ # Set VINTF_IGNORE_TARGET_FCM_VERSION to true because it should only be in device manifest.
+ VINTF_IGNORE_TARGET_FCM_VERSION=true \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
+ -i $(call normalize-path-list,$(PRIVATE_SRC_FILES))
+
+LOCAL_PREBUILT_MODULE_FILE := $(GEN)
+include $(BUILD_PREBUILT)
+endif # ODM_MANIFEST_FILES
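A board opts in from its BoardConfig.mk with something like (path hypothetical):

    ODM_MANIFEST_FILES := device/acme/roadrunner/odm_manifest.xml

and assemble_vintf merges the listed fragments into the installed /odm/etc/vintf/manifest.xml.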
+
+# ODM_MANIFEST_SKUS: a list of SKUS where ODM_MANIFEST_<sku>_FILES are defined.
+ifdef ODM_MANIFEST_SKUS
+
+# Install /odm/etc/vintf/manifest_$(sku).xml
+# $(1): sku
+define _add_odm_sku_manifest
+my_fragment_files_var := ODM_MANIFEST_$$(call to-upper,$(1))_FILES
+ifndef $$(my_fragment_files_var)
+$$(error $(1) is in ODM_MANIFEST_SKUS but $$(my_fragment_files_var) is not defined)
+endif
+my_fragment_files := $$($$(my_fragment_files_var))
+include $$(CLEAR_VARS)
+LOCAL_MODULE := odm_manifest_$(1).xml
+LOCAL_MODULE_STEM := manifest_$(1).xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := vintf
+LOCAL_ODM_MODULE := true
+GEN := $$(local-generated-sources-dir)/manifest_$(1).xml
+$$(GEN): PRIVATE_SRC_FILES := $$(my_fragment_files)
+$$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf
+ VINTF_IGNORE_TARGET_FCM_VERSION=true \
+ $$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \
+ -i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES))
+LOCAL_PREBUILT_MODULE_FILE := $$(GEN)
+include $$(BUILD_PREBUILT)
+my_fragment_files_var :=
+my_fragment_files :=
+endef
+
+$(foreach sku, $(ODM_MANIFEST_SKUS), $(eval $(call _add_odm_sku_manifest,$(sku))))
+_add_odm_sku_manifest :=
+
+endif # ODM_MANIFEST_SKUS
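The per-SKU variant follows the same pattern; a hypothetical two-SKU setup:

    ODM_MANIFEST_SKUS := fish fowl
    ODM_MANIFEST_FISH_FILES := device/acme/odm_manifest_fish.xml
    ODM_MANIFEST_FOWL_FILES := device/acme/odm_manifest_fowl.xml

produces /odm/etc/vintf/manifest_fish.xml and /odm/etc/vintf/manifest_fowl.xml.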
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
new file mode 100644
index 0000000000..3ab5f12d5a
--- /dev/null
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -0,0 +1,57 @@
+# BoardConfigEmuCommon.mk
+#
+# Common compile-time definitions for emulator
+#
+
+HAVE_HTC_AUDIO_DRIVER := true
+BOARD_USES_GENERIC_AUDIO := true
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
+
+# no hardware camera
+USE_CAMERA_STUB := true
+
+NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+
+# Build OpenGLES emulation guest and host libraries
+BUILD_EMULATOR_OPENGL := true
+BUILD_QEMU_IMAGES := true
+
+# Build and enable the OpenGL ES View renderer. When running on the emulator,
+# the GLES renderer disables itself if host GL acceleration isn't available.
+USE_OPENGL_RENDERER := true
+
+# Emulator doesn't support sparse image format.
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+
+ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
+ # emulator is Non-A/B device
+ AB_OTA_UPDATER := false
+
+ # emulator needs super.img
+ BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
+
+ BOARD_EXT4_SHARE_DUP_BLOCKS := true
+
+ # 3G + header
+ BOARD_SUPER_PARTITION_SIZE := 3229614080
+ BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
+ BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
+ system \
+ vendor
+
+ # 3G
+ BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 3221225472
+else ifeq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
+ # Enable dynamic system image size and reserve 64MB in it.
+ BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
+ BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE := 67108864
+else
+ BOARD_SYSTEMIMAGE_PARTITION_SIZE := 3221225472
+ BOARD_VENDORIMAGE_PARTITION_SIZE := 146800640
+endif
+
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_FLASH_BLOCK_SIZE := 512
+DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
new file mode 100644
index 0000000000..702ef3c669
--- /dev/null
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -0,0 +1,61 @@
+# BoardConfigGsiCommon.mk
+#
+# Common compile-time definitions for GSI
+# Builds upon the mainline config.
+#
+
+include build/make/target/board/BoardConfigMainlineCommon.mk
+
+TARGET_NO_KERNEL := true
+
+# This flag is set by mainline but isn't desired for GSI.
+BOARD_USES_SYSTEM_OTHER_ODEX :=
+
+# system.img is always ext4 with sparse option
+# GSI also includes make_f2fs to support a userdata partition in f2fs
+# on some devices.
+TARGET_USERIMAGES_USE_F2FS := true
+
+# Enable dynamic system image size and reserve 64MB in it.
+BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
+
+# GSI forces product packages to /system for now.
+TARGET_COPY_OUT_PRODUCT := system/product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE :=
+
+# Create a metadata partition mount point under root for
+# devices with a metadata partition.
+BOARD_USES_METADATA_PARTITION := true
+
+# Android Verified Boot (AVB):
+# Set the rollback index to zero, to prevent the device bootloader from
+# updating the last seen rollback index in the tamper-evident storage.
+BOARD_AVB_ROLLBACK_INDEX := 0
+
+# Enable chain partition for system.
+BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
+BOARD_AVB_SYSTEM_ALGORITHM := SHA256_RSA2048
+BOARD_AVB_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
+
+# GSI specific System Properties
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+TARGET_SYSTEM_PROP := build/make/target/board/gsi_system.prop
+else
+TARGET_SYSTEM_PROP := build/make/target/board/gsi_system_user.prop
+endif
+
+# Set this to create a /cache mount point for non-A/B devices that mount /cache.
+# The partition size doesn't matter; it just needs to make the build pass.
+BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
+
+# Set up a vendor image so that PRODUCT_PROPERTY_OVERRIDES does not affect GSI.
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+
+# Disable 64 bit mediadrmserver
+TARGET_ENABLE_MEDIADRM_64 :=
+
+# Ordinary (non-flattened) APEX may require kernel changes. For maximum compatibility,
+# use flattened APEX for GSI
+TARGET_FLATTEN_APEX := true
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
new file mode 100644
index 0000000000..6c56671eef
--- /dev/null
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -0,0 +1,46 @@
+# BoardConfigMainlineCommon.mk
+#
+# Common compile-time definitions for mainline images.
+
+# The generic product target doesn't have any hardware-specific pieces.
+TARGET_NO_BOOTLOADER := true
+TARGET_NO_RECOVERY := true
+
+TARGET_USERIMAGES_USE_EXT4 := true
+
+# Mainline devices must have /vendor and /product partitions.
+TARGET_COPY_OUT_VENDOR := vendor
+TARGET_COPY_OUT_PRODUCT := product
+
+BOARD_VNDK_VERSION := current
+
+# Required flag for non-64 bit devices from P.
+TARGET_USES_64_BIT_BINDER := true
+
+# 64 bit mediadrmserver
+TARGET_ENABLE_MEDIADRM_64 := true
+
+# Puts odex files on system_other, as well as causing dex files not to get
+# stripped from APKs.
+BOARD_USES_SYSTEM_OTHER_ODEX := true
+
+# Audio: must use the XML format for Treblized devices
+USE_XML_AUDIO_POLICY_CONF := 1
+
+# Bluetooth defines
+# TODO(b/123695868): Remove the need for this
+BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR := build/make/target/board/mainline_arm64/bluetooth
+
+BOARD_AVB_ENABLE := true
+BOARD_AVB_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+
+BOARD_CHARGER_ENABLE_SUSPEND := true
+
+# Enable A/B update
+AB_OTA_UPDATER := true
+
+# Enable system property split for Treble
+BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
+
+# Generate an APEX image for experiment b/119800099.
+DEXPREOPT_GENERATE_APEX_IMAGE := true
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 6c8284639d..8624ed72b3 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -1,101 +1,38 @@
-# config.mk
+# Copyright (C) 2018 The Android Open Source Project
#
-# Product-specific compile-time definitions.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# arm emulator specific definitions
TARGET_ARCH := arm
-# Note: Before Pi, we built the platform images for ARMv7-A _without_ NEON.
-#
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
-# DO NOT USE
-#
-# This architecture variant should NOT be used for 32 bit arm platform
-# builds. It is the lowest common denominator required to build
-# an unbundled application for all supported 32 platforms.
-# cts for 32 bit arm is built using aosp_arm64 product.
+# Note: Before P, we built the platform images for ARMv7-A _without_ NEON.
+# Note: Before Q, we built the CTS and SDK images for ARMv7-A _without_ NEON.
+# Note: Before Q, we built unbundled apps for ARMv7-A _without_ NEON.
#
-# If you are building a 32 bit platform (and not an application),
-# you should set the following as 2nd arch variant:
-#
-# TARGET_ARCH_VARIANT := armv7-a-neon
-#
-# DO NOT USE
-TARGET_ARCH_VARIANT := armv7-a
-# DO NOT USE
-else
# Starting from Pi, System image of aosp_arm products is the new GSI
# for real devices newly launched for Pi. These devices are usually not
# as performant as the mainstream 64-bit devices and the performance
# provided by NEON is important for them to pass related CTS tests.
TARGET_ARCH_VARIANT := armv7-a-neon
-endif
TARGET_CPU_VARIANT := generic
TARGET_CPU_ABI := armeabi-v7a
TARGET_CPU_ABI2 := armeabi
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
-
-TARGET_USES_64_BIT_BINDER := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
-# Build OpenGLES emulation guest and host libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-# Partition size is default 1.5GB (1536MB) for 64 bits projects
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-TARGET_COPY_OUT_VENDOR := vendor
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-# Android generic system image always create metadata partition
-BOARD_USES_METADATA_PARTITION := true
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-# GSI is always userdebug and needs a couple of properties taking precedence
-# over those set by the vendor.
-TARGET_SYSTEM_PROP := build/make/target/board/gsi_system.prop
-endif
-BOARD_VNDK_VERSION := current
# Wifi.
BOARD_WLAN_DEVICE := emulator
@@ -107,7 +44,3 @@ WPA_SUPPLICANT_VERSION := VER_0_8_X
WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
WIFI_DRIVER_FW_PATH_STA := "/dev/null"
WIFI_DRIVER_FW_PATH_AP := "/dev/null"
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
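
The hunk above reduces target/board/generic/BoardConfig.mk to the arm-specific values plus the two shared includes. A minimal sketch of the resulting file shape, using only values visible in this diff:

  # arm emulator specific definitions
  TARGET_ARCH := arm
  TARGET_ARCH_VARIANT := armv7-a-neon
  TARGET_CPU_VARIANT := generic
  TARGET_CPU_ABI := armeabi-v7a
  TARGET_CPU_ABI2 := armeabi

  include build/make/target/board/BoardConfigGsiCommon.mk
  include build/make/target/board/BoardConfigEmuCommon.mk

  BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800

  # Wifi.
  BOARD_WLAN_DEVICE := emulator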
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index 273ac71455..0a3241511a 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -14,14 +14,6 @@
# limitations under the License.
#
-# minimal configuration for audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration_generic.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/primary_audio_policy_configuration.xml:system/etc/primary_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
-
# NFC:
# Provide a default libnfc-nci.conf file for devices that do not have one in
# vendor/etc because aosp system image (of aosp_$arch products) is going to
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 56f15deec3..f07adb721b 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 The Android Open Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,20 +13,17 @@
# limitations under the License.
#
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# arm64 emulator specific definitions
TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
TARGET_CPU_VARIANT := generic
TARGET_CPU_ABI := arm64-v8a
-TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
TARGET_2ND_ARCH := arm
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
+ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk vts,$(MAKECMDGOALS)),)
# DO NOT USE
# DO NOT USE
#
@@ -44,7 +41,7 @@ ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
#
# DO NOT USE
# DO NOT USE
-TARGET_2ND_ARCH_VARIANT := armv7-a
+TARGET_2ND_ARCH_VARIANT := armv7-a-neon
# DO NOT USE
# DO NOT USE
TARGET_2ND_CPU_VARIANT := generic
@@ -55,69 +52,18 @@ TARGET_2ND_ARCH_VARIANT := armv8-a
TARGET_2ND_CPU_VARIANT := generic
endif
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
-TARGET_USES_64_BIT_BINDER := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 2684354560 # 2.5 GB
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-TARGET_COPY_OUT_VENDOR := vendor
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-# Android generic system image always create metadata partition
-BOARD_USES_METADATA_PARTITION := true
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
-
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-# GSI is always userdebug and needs a couple of properties taking precedence
-# over those set by the vendor.
-TARGET_SYSTEM_PROP := build/make/target/board/gsi_system.prop
-endif
-BOARD_VNDK_VERSION := current
# The emulator system image is going to be used as a GSI, and some vendors still
# haven't cleaned up all device-specific directories under root!
-# TODO(jiyong) These might be SoC specific.
-BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+# TODO(b/111434759, b/111287060) SoC specific hacks
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt:/firmware
# TODO(b/36764215): remove this setting when the generic system image
# no longer has QCOM-specific directories under /.
@@ -133,7 +79,3 @@ WPA_SUPPLICANT_VERSION := VER_0_8_X
WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
WIFI_DRIVER_FW_PATH_STA := "/dev/null"
WIFI_DRIVER_FW_PATH_AP := "/dev/null"
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
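
Each BOARD_ROOT_EXTRA_SYMLINKS entry above is a colon-separated <target>:<link name> pair: the build creates a symlink at the second path pointing at the first, so /dsp ends up pointing at /vendor/lib/dsp. A hedged sketch of how a device BoardConfig.mk might append its own entry; the /factory path is purely illustrative:

  # Illustrative only: create /factory -> /mnt/vendor/factory at the filesystem root.
  BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/factory:/factory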
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 8e26700bcf..e5d8e61d02 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -14,23 +14,6 @@
# limitations under the License.
#
-# minimal configuration for audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration_generic.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/primary_audio_policy_configuration.xml:system/etc/primary_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
-
-# NFC:
-# Provide default libnfc-nci.conf file for devices that does not have one in
-# vendor/etc because aosp system image (of aosp_$arch products) is going to
-# be used as GSI.
-# May need to remove the following for newly launched devices in P since this
-# NFC configuration file should be in vendor/etc, instead of system/etc
-PRODUCT_COPY_FILES += \
- device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
# Adjust the Dalvik heap to be appropriate for a tablet.
$(call inherit-product-if-exists, frameworks/base/build/tablet-dalvik-heap.mk)
$(call inherit-product-if-exists, frameworks/native/build/tablet-dalvik-heap.mk)
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index fc6b58284b..6e54d810bb 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -14,7 +14,7 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_64.mk
+include build/make/target/board/BoardConfigGsiCommon.mk
TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
@@ -28,18 +28,11 @@ TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
TARGET_2ND_CPU_VARIANT := generic
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
-
# TODO(jiyong) These might be SoC specific.
BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
# TODO(b/36764215): remove this setting when the generic system image
# no longer has QCOM-specific directories under /.
diff --git a/target/board/generic_arm64_ab/sepolicy/OWNERS b/target/board/generic_arm64_ab/sepolicy/OWNERS
index 3828988948..ff29677468 100644
--- a/target/board/generic_arm64_ab/sepolicy/OWNERS
+++ b/target/board/generic_arm64_ab/sepolicy/OWNERS
@@ -1,4 +1,8 @@
-jeffv@google.com
-dcashman@google.com
+alanstokes@google.com
+bowgotsai@google.com
jbires@google.com
+jeffv@google.com
+jgalenson@google.com
sspatil@google.com
+tomcherry@google.com
+trong@google.com
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 7c04cd5733..9100094f89 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -14,10 +14,7 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_32.mk
-
-# Overwrite the setting in treble_common_32.mk for non-A/B arm GSI
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 943718400 # 900MB
+include build/make/target/board/BoardConfigGsiCommon.mk
TARGET_ARCH := arm
TARGET_ARCH_VARIANT := armv7-a-neon
@@ -25,18 +22,14 @@ TARGET_CPU_ABI := armeabi-v7a
TARGET_CPU_ABI2 := armeabi
TARGET_CPU_VARIANT := generic
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+# Legacy GSI keeps the 32-bit binder for the 32-bit CPU arch
+TARGET_USES_64_BIT_BINDER := false
# TODO(jiyong) These might be SoC specific.
BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
# TODO(b/36764215): remove this setting when the generic system image
# no longer has QCOM-specific directories under /.
diff --git a/target/board/generic_mips/BoardConfig.mk b/target/board/generic_mips/BoardConfig.mk
deleted file mode 100644
index 523408b541..0000000000
--- a/target/board/generic_mips/BoardConfig.mk
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# BoardConfig.mk
-#
-# Product-specific compile-time definitions.
-#
-
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
-
-TARGET_ARCH := mips
-ifeq (,$(TARGET_ARCH_VARIANT))
-TARGET_ARCH_VARIANT := mips32r2-fp
-endif
-TARGET_CPU_ABI := mips
-
-# Make TARGET_CPU_VARIANT the same as TARGET_ARCH_VARIANT
-TARGET_CPU_VARIANT := $(TARGET_ARCH_VARIANT)
-
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
- ifeq ($(WITH_DEXPREOPT),)
- WITH_DEXPREOPT := true
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY := false
- endif
-endif
-
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation guest and host libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 2147483648 # 2 GB
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 734003200
-TARGET_COPY_OUT_VENDOR := vendor
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_mips/README.txt b/target/board/generic_mips/README.txt
deleted file mode 100644
index b31a8571a1..0000000000
--- a/target/board/generic_mips/README.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-The "generic_mips" product defines a MIPS based non-hardware-specific
-target without a kernel or bootloader.
-
-It can be used to build the entire user-level system, and
-will work with the emulator, though sound will not work
-(see the "emulator" product for that).
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_mips/device.mk b/target/board/generic_mips/device.mk
deleted file mode 100644
index a2633e1876..0000000000
--- a/target/board/generic_mips/device.mk
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_PROPERTY_OVERRIDES := \
- ro.ril.hsxpa=1 \
- ro.ril.gprsclass=10
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
- device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
- hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
-
-PRODUCT_PACKAGES := \
- audio.primary.goldfish \
- power.goldfish \
- vibrator.goldfish
diff --git a/target/board/generic_mips64/BoardConfig.mk b/target/board/generic_mips64/BoardConfig.mk
deleted file mode 100644
index 2052d7bc75..0000000000
--- a/target/board/generic_mips64/BoardConfig.mk
+++ /dev/null
@@ -1,93 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# BoardConfig.mk
-#
-# Product-specific compile-time definitions.
-#
-
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
-
-TARGET_ARCH := mips64
-ifeq (,$(TARGET_ARCH_VARIANT))
-TARGET_ARCH_VARIANT := mips64r6
-endif
-TARGET_CPU_ABI := mips64
-
-TARGET_2ND_ARCH := mips
-ifeq (,$(TARGET_2ND_ARCH_VARIANT))
-ifeq ($(TARGET_ARCH_VARIANT),mips64r6)
-# Imgtec builds use 32r6 arch variant with Imgtec-maintained prebuilts/ndk library:
-# TARGET_2ND_ARCH_VARIANT := mips32r6
-# Aosp builds lack full set of mips32r6 NDK prebuilts, so use 32r2 abi:
-TARGET_2ND_ARCH_VARIANT := mips32r2-fp
-else
-TARGET_2ND_ARCH_VARIANT := mips32r2-fp
-endif
-endif
-TARGET_2ND_CPU_ABI := mips
-
-# Make TARGET_XXX_CPU_VARIANT the same as TARGET_XXX_ARCH_VARIANT
-TARGET_CPU_VARIANT := $(TARGET_ARCH_VARIANT)
-TARGET_2ND_CPU_VARIANT := $(TARGET_2ND_ARCH_VARIANT)
-
-# The emulator (qemu) uses the Goldfish devices
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
- ifeq ($(WITH_DEXPREOPT),)
- WITH_DEXPREOPT := true
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY := false
- endif
-endif
-
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation guest and host libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192 # 1.75 GB
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 1610612736 # 1.5 GB, lots of space for running tests
-TARGET_COPY_OUT_VENDOR := vendor
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
-
-DEX_PREOPT_DEFAULT := nostripping
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_mips64/README.txt b/target/board/generic_mips64/README.txt
deleted file mode 100644
index cd4e05b727..0000000000
--- a/target/board/generic_mips64/README.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-The "generic_mips64" product defines a MIPS64 based non-hardware-specific
-target without a kernel or bootloader.
-
-It can be used to build the entire user-level system, and
-will work with the emulator.
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_mips64/device.mk b/target/board/generic_mips64/device.mk
deleted file mode 100644
index 2ccbcbd185..0000000000
--- a/target/board/generic_mips64/device.mk
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_PROPERTY_OVERRIDES := \
- ro.ril.hsxpa=1 \
- ro.ril.gprsclass=10
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
- device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
- hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
-
-PRODUCT_PACKAGES := \
- audio.primary.goldfish \
- power.goldfish
diff --git a/target/board/generic_mips64/system.prop b/target/board/generic_mips64/system.prop
deleted file mode 100644
index 4da69c092f..0000000000
--- a/target/board/generic_mips64/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic mips64 sdk
-#
-
-rild.libpath=/vendor/lib64/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index 684dfc7ef7..83d7eccc7a 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -1,95 +1,32 @@
-# config.mk
+# Copyright (C) 2018 The Android Open Source Project
#
-# Product-specific compile-time definitions.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# x86 emulator specific definitions
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
-TARGET_PRELINK_MODULE := false
-TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
-
-#emulator now uses 64bit kernel to run 32bit x86 image
-#
-TARGET_USES_64_BIT_BINDER := true
-
-# The IA emulator (qemu) uses the Goldfish devices
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
-WITH_DEXPREOPT ?= true
-WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= false
-endif
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
-
-# Build partitioned system.img and vendor.img (if applicable)
-# for qemu, otherwise, init cannot find PART_NAME
-BUILD_QEMU_IMAGES := true
+TARGET_PRELINK_MODULE := false
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 2684354560
# Resize to 4G to accommodate ASAN and CTS
BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
-TARGET_COPY_OUT_VENDOR := vendor
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-# Android generic system image always create metadata partition
-BOARD_USES_METADATA_PARTITION := true
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-
-BOARD_SEPOLICY_DIRS += \
- device/generic/goldfish/sepolicy/common \
- device/generic/goldfish/sepolicy/x86
-
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-# GSI is always userdebug and needs a couple of properties taking precedence
-# over those set by the vendor.
-TARGET_SYSTEM_PROP := build/make/target/board/gsi_system.prop
-endif
-BOARD_VNDK_VERSION := current
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
# Wifi.
BOARD_WLAN_DEVICE := emulator
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index 273ac71455..bbab2b4ec8 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -14,19 +14,11 @@
# limitations under the License.
#
-# minimal configuration for audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration_generic.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/primary_audio_policy_configuration.xml:system/etc/primary_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
+ifdef NET_ETH0_STARTONBOOT
+ PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
+endif
-# NFC:
-# Provide default libnfc-nci.conf file for devices that does not have one in
-# vendor/etc because aosp system image (of aosp_$arch products) is going to
-# be used as GSI.
-# May need to remove the following for newly launched devices in P since this
-# NFC configuration file should be in vendor/etc, instead of system/etc
-PRODUCT_COPY_FILES += \
- device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
+# Ensure we package the BIOS files too.
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
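
The rewritten device.mk above adds the net.eth0.startonboot property only when NET_ETH0_STARTONBOOT is defined at all (ifdef does not look at the value), and pulls the emulator BIOS images in through PRODUCT_HOST_PACKAGES, which installs host-side modules rather than modules in the device image. A hedged sketch of the same guard pattern for a hypothetical extra property; NET_WLAN0_STARTONBOOT and its property name are illustrative only:

  # Illustrative only: guard an optional property behind a build-time switch.
  ifdef NET_WLAN0_STARTONBOOT
    PRODUCT_PROPERTY_OVERRIDES += net.wlan0.startonboot=1
  endif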
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 5bcb9adf90..07bbc071ac 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -1,89 +1,34 @@
-# config.mk
+# Copyright (C) 2018 The Android Open Source Project
#
-# Product-specific compile-time definitions.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# x86_64 emulator specific definitions
TARGET_CPU_ABI := x86_64
TARGET_ARCH := x86_64
TARGET_ARCH_VARIANT := x86_64
-TARGET_PRELINK_MODULE := false
-TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
TARGET_2ND_CPU_ABI := x86
TARGET_2ND_ARCH := x86
TARGET_2ND_ARCH_VARIANT := x86_64
-TARGET_USES_64_BIT_BINDER := true
-
-# The IA emulator (qemu) uses the Goldfish devices
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
-WITH_DEXPREOPT ?= true
-WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= false
-endif
-
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
+TARGET_PRELINK_MODULE := false
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 2684354560 # 2.5 GB
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-TARGET_COPY_OUT_VENDOR := vendor
-# ~100 MB vendor image. Please adjust system image / vendor image sizes
-# when finalizing them.
-BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
-
-# Android generic system image always create metadata partition
-BOARD_USES_METADATA_PARTITION := true
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-BOARD_SEPOLICY_DIRS += \
- device/generic/goldfish/sepolicy/common \
- device/generic/goldfish/sepolicy/x86
-
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-# GSI is always userdebug and needs a couple of properties taking precedence
-# over those set by the vendor.
-TARGET_SYSTEM_PROP := build/make/target/board/gsi_system.prop
-endif
-BOARD_VNDK_VERSION := current
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
# Wifi.
BOARD_WLAN_DEVICE := emulator
@@ -95,7 +40,3 @@ WPA_SUPPLICANT_VERSION := VER_0_8_X
WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
WIFI_DRIVER_FW_PATH_STA := "/dev/null"
WIFI_DRIVER_FW_PATH_AP := "/dev/null"
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index 273ac71455..bbab2b4ec8 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -14,19 +14,11 @@
# limitations under the License.
#
-# minimal configuration for audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration_generic.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/primary_audio_policy_configuration.xml:system/etc/primary_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
+ifdef NET_ETH0_STARTONBOOT
+ PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
+endif
-# NFC:
-# Provide default libnfc-nci.conf file for devices that does not have one in
-# vendor/etc because aosp system image (of aosp_$arch products) is going to
-# be used as GSI.
-# May need to remove the following for newly launched devices in P since this
-# NFC configuration file should be in vendor/etc, instead of system/etc
-PRODUCT_COPY_FILES += \
- device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
+# Ensure we package the BIOS files too.
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
diff --git a/target/board/generic_x86_64_ab/BoardConfig.mk b/target/board/generic_x86_64_ab/BoardConfig.mk
index a098dfe2a3..1dd5e4898d 100644
--- a/target/board/generic_x86_64_ab/BoardConfig.mk
+++ b/target/board/generic_x86_64_ab/BoardConfig.mk
@@ -14,7 +14,7 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_64.mk
+include build/make/target/board/BoardConfigGsiCommon.mk
TARGET_CPU_ABI := x86_64
TARGET_ARCH := x86_64
@@ -23,12 +23,3 @@ TARGET_ARCH_VARIANT := x86_64
TARGET_2ND_CPU_ABI := x86
TARGET_2ND_ARCH := x86
TARGET_2ND_ARCH_VARIANT := x86_64
-
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_x86_ab/BoardConfig.mk b/target/board/generic_x86_ab/BoardConfig.mk
index db4dacd968..53acffd239 100644
--- a/target/board/generic_x86_ab/BoardConfig.mk
+++ b/target/board/generic_x86_ab/BoardConfig.mk
@@ -14,17 +14,11 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_32.mk
+include build/make/target/board/BoardConfigGsiCommon.mk
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
-# Set this to create /cache mount point for non-A/B devices that mounts /cache.
-# The partition size doesn't matter, just to make build pass.
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
-
-# Enable A/B update
-TARGET_NO_RECOVERY := true
-BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+# Legacy GSI keeps the 32-bit binder for the 32-bit CPU arch
+TARGET_USES_64_BIT_BINDER := false
diff --git a/target/board/generic_x86_arm/BoardConfig.mk b/target/board/generic_x86_arm/BoardConfig.mk
index c66aacc844..6fae4113c1 100644
--- a/target/board/generic_x86_arm/BoardConfig.mk
+++ b/target/board/generic_x86_arm/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2016 The Android Open Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +13,7 @@
# limitations under the License.
#
-# Configuration for generic_x86 + arm libraries needed by binary translation.
-
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# x86 emulator specific definitions
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
@@ -25,40 +21,30 @@ TARGET_ARCH_VARIANT := x86
TARGET_2ND_ARCH := arm
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_ARCH_VARIANT := armv7-a
+TARGET_2ND_ARCH_VARIANT := armv7-a-neon
TARGET_2ND_CPU_VARIANT := generic
-# Tell the build system this isn't a typical 64bit+32bit multilib configuration.
+TARGET_CPU_ABI_LIST := x86 armeabi-v7a armeabi
TARGET_TRANSLATE_2ND_ARCH := true
-# no hardware camera
-USE_CAMERA_STUB := true
-
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
- ifeq ($(WITH_DEXPREOPT),)
- WITH_DEXPREOPT := true
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY := false
- endif
-endif
+BUILD_BROKEN_DUP_RULES := true
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
+# Resize to 4G to accommodate ASAN and CTS
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192 # 1.75 GB
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
+# Wifi.
+BOARD_WLAN_DEVICE := emulator
+BOARD_HOSTAPD_DRIVER := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/generic_x86_arm/README.txt b/target/board/generic_x86_arm/README.txt
new file mode 100644
index 0000000000..05f7ca2d35
--- /dev/null
+++ b/target/board/generic_x86_arm/README.txt
@@ -0,0 +1,10 @@
+The "generic_x86_arm" product defines a non-hardware-specific IA target
+without a kernel or bootloader.
+
+It can be used to build the entire user-level system, and
+will work with the IA version of the emulator.
+
+It is not a product "base class"; no other products inherit
+from it or use it in any way.
+
+A third-party ARM-to-x86 translator has to be installed as well.
diff --git a/target/product/languages_small.mk b/target/board/generic_x86_arm/device.mk
index d695ca8dae..0a3241511a 100644
--- a/target/product/languages_small.mk
+++ b/target/board/generic_x86_arm/device.mk
@@ -14,11 +14,11 @@
# limitations under the License.
#
-# This is a build configuration that just contains a list of languages.
-# It helps in situations where laugnages must come first in the list,
-# mostly because screen densities interfere with the list of locales and
-# the system misbehaves when a density is the first locale.
-
-# This is the list of languages that originally shipped on ADP1
-
-PRODUCT_LOCALES := en_US en_GB fr_FR it_IT de_DE es_ES
+# NFC:
+# Provide a default libnfc-nci.conf file for devices that do not have one in
+# vendor/etc because aosp system image (of aosp_$arch products) is going to
+# be used as GSI.
+# May need to remove the following for newly launched devices in P since this
+# NFC configuration file should be in vendor/etc, instead of system/etc
+PRODUCT_COPY_FILES += \
+ device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
diff --git a/target/board/generic_mips/system.prop b/target/board/generic_x86_arm/system.prop
index 973db2c801..64829f3ce4 100644
--- a/target/board/generic_mips/system.prop
+++ b/target/board/generic_x86_arm/system.prop
@@ -3,4 +3,3 @@
#
rild.libpath=/vendor/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/tools/droiddoc/Android.mk b/target/board/go_defaults.prop
index ff08edc103..93071cd7e2 100644
--- a/tools/droiddoc/Android.mk
+++ b/target/board/go_defaults.prop
@@ -1,4 +1,5 @@
-# Copyright (C) 2008 The Android Open Source Project
+#
+# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,7 +12,4 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-LOCAL_PATH := $(call my-dir)
-
-# Droiddoc is now Doclava -- See external/doclava.
+#
diff --git a/target/product/sdk_mips.mk b/target/board/go_defaults_512.prop
index 366994a310..a8eea9c7d6 100644
--- a/target/product/sdk_mips.mk
+++ b/target/board/go_defaults_512.prop
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2014 The Android Open Source Project
+# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,10 @@
# limitations under the License.
#
-# Don't modify this file - It's just an alias!
+# 512MB specific properties.
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_mips.mk)
+# lmkd can kill more now.
+ro.lmk.medium=700
-PRODUCT_NAME := sdk_mips
+# madvise random in ART to reduce page cache thrashing.
+dalvik.vm.madvise-random=true
diff --git a/target/board/go_defaults_common.prop b/target/board/go_defaults_common.prop
new file mode 100644
index 0000000000..d4989e06d3
--- /dev/null
+++ b/target/board/go_defaults_common.prop
@@ -0,0 +1,42 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Sets Android Go recommended default values for properties.
+
+# Set lowram options
+ro.lmk.critical_upgrade=true
+ro.lmk.upgrade_pressure=40
+ro.lmk.downgrade_pressure=60
+ro.lmk.kill_heaviest_task=false
+ro.statsd.enable=true
+
+# set threshold to filter unused apps
+pm.dexopt.downgrade_after_inactive_days=10
+
+# Set the compiler filter for shared APKs to quicken.
+# Rationale: speed causes a lot of dex code expansion; it uses more RAM and space
+# compared to quicken. Using quicken for shared APKs on Go devices may save RAM.
+# Note that this is a trade-off: here we trade clean pages for dirty pages,
+# extra CPU and battery. That's because the quickened code will be JIT-ed in all
+# the processes that load the shared APK, and the code cache is not shared.
+# Some notable apps that will be affected by this are GMS and Chrome.
+# b/65591595.
+pm.dexopt.shared=quicken
+
+# Default heap sizes. Allow up to 256m for large heaps to make sure a single app
+# doesn't take all of the RAM.
+dalvik.vm.heapgrowthlimit=128m
+dalvik.vm.heapsize=256m
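
The go_defaults*.prop files above are plain system property files; they take effect only once a product pulls them in. A hedged sketch of one plausible hookup, mirroring how gsi_system.prop is referenced via TARGET_SYSTEM_PROP in the board configs touched earlier in this diff (the exact product or board makefile that does this is an assumption, not shown here):

  # Assumed wiring in a Go product/board config, not part of this change:
  TARGET_SYSTEM_PROP += \
      build/make/target/board/go_defaults_common.prop \
      build/make/target/board/go_defaults_512.prop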
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
new file mode 100644
index 0000000000..90ddd0dd50
--- /dev/null
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -0,0 +1,37 @@
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include build/make/target/board/BoardConfigGsiCommon.mk
+
+TARGET_ARCH := arm64
+TARGET_ARCH_VARIANT := armv8-a
+TARGET_CPU_ABI := arm64-v8a
+TARGET_CPU_ABI2 :=
+TARGET_CPU_VARIANT := generic
+
+TARGET_2ND_ARCH := arm
+TARGET_2ND_ARCH_VARIANT := armv8-a
+TARGET_2ND_CPU_ABI := armeabi-v7a
+TARGET_2ND_CPU_ABI2 := armeabi
+TARGET_2ND_CPU_VARIANT := generic
+
+# TODO(b/111434759, b/111287060) SoC specific hacks
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt:/firmware
+
+# TODO(b/36764215): remove this setting when the generic system image
+# no longer has QCOM-specific directories under /.
+BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/gsi_system.prop b/target/board/gsi_system.prop
index 0c04a9529f..dd3227efb2 100644
--- a/target/board/gsi_system.prop
+++ b/target/board/gsi_system.prop
@@ -4,5 +4,16 @@ ro.cp_system_other_odex=0
# GSI always disables adb authentication
ro.adb.secure=0
+# GSI disables non-AOSP nnapi extensions on product partition
+ro.nnapi.extensions.deny_on_product=true
+
+# TODO(b/120679683): disable RescueParty before all problem apps solved
+persist.sys.disable_rescue=true
+
# TODO(b/78105955): disable privapp_permissions checking before the bug solved
ro.control_privapp_permissions=disable
+
+# TODO(b/136212765): the default for LMK
+ro.lmk.kill_heaviest_task=true
+ro.lmk.kill_timeout_ms=100
+ro.lmk.use_minfree_levels=true
diff --git a/target/board/gsi_system_user.prop b/target/board/gsi_system_user.prop
new file mode 100644
index 0000000000..db6d880b7a
--- /dev/null
+++ b/target/board/gsi_system_user.prop
@@ -0,0 +1,16 @@
+# GSI always generates dex pre-opt in the system image
+ro.cp_system_other_odex=0
+
+# GSI disables non-AOSP nnapi extensions on product partition
+ro.nnapi.extensions.deny_on_product=true
+
+# TODO(b/120679683): disable RescueParty before all problem apps solved
+persist.sys.disable_rescue=true
+
+# TODO(b/78105955): disable privapp_permissions checking before the bug solved
+ro.control_privapp_permissions=disable
+
+# TODO(b/136212765): the default for LMK
+ro.lmk.kill_heaviest_task=true
+ro.lmk.kill_timeout_ms=100
+ro.lmk.use_minfree_levels=true
diff --git a/target/board/generic_arm64_a/BoardConfig.mk b/target/board/mainline_arm64/BoardConfig.mk
index 34a8ac0012..8bb6212c1f 100644
--- a/target/board/generic_arm64_a/BoardConfig.mk
+++ b/target/board/mainline_arm64/BoardConfig.mk
@@ -1,5 +1,4 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,16 +13,22 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_64.mk
-
TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
-TARGET_CPU_ABI := arm64-v8a
-TARGET_CPU_ABI2 :=
TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := arm64-v8a
TARGET_2ND_ARCH := arm
TARGET_2ND_ARCH_VARIANT := armv8-a
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
TARGET_2ND_CPU_VARIANT := generic
+
+include build/make/target/board/BoardConfigMainlineCommon.mk
+
+TARGET_NO_KERNEL := true
+
+AB_OTA_PARTITIONS := system
+
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/mainline_arm64/bluetooth/bdroid_buildcfg.h b/target/board/mainline_arm64/bluetooth/bdroid_buildcfg.h
new file mode 100644
index 0000000000..0ea8fc2747
--- /dev/null
+++ b/target/board/mainline_arm64/bluetooth/bdroid_buildcfg.h
@@ -0,0 +1,28 @@
+/*
+ *
+ * Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ * Not a Contribution, Apache license notifications and license are retained
+ * for attribution purposes only.
+ *
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _BDROID_BUILDCFG_H
+#define _BDROID_BUILDCFG_H
+
+// VSC spec support
+#define BLE_VND_INCLUDED TRUE
+
+#endif
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
deleted file mode 100644
index c4e68fefc1..0000000000
--- a/target/board/treble_common.mk
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Common boardconfig settings for generic AOSP products targetting mobile
-# (phone/table) devices.
-
-# VNDK
-BOARD_VNDK_VERSION := current
-
-# Properties
-TARGET_SYSTEM_PROP := build/make/target/board/treble_system.prop
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-
-# Bootloader, kernel and recovery are not part of generic AOSP image
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
-
-# system.img is always ext4 with sparse option
-# GSI also includes make_f2fs to support userdata parition in f2fs
-# for some devices
-TARGET_USERIMAGES_USE_EXT4 := true
-TARGET_USERIMAGES_USE_F2FS := true
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
-TARGET_USES_MKE2FS := true
-
-# Generic AOSP image always requires separate vendor.img
-TARGET_COPY_OUT_VENDOR := vendor
-
-# Android generic system image always create metadata partition
-BOARD_USES_METADATA_PARTITION := true
-
-# Generic AOSP image does NOT support HWC1
-TARGET_USES_HWC2 := true
-# Set emulator framebuffer display device buffer count to 3
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Audio
-USE_XML_AUDIO_POLICY_CONF := 1
-
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
diff --git a/target/board/treble_system.prop b/target/board/treble_system.prop
deleted file mode 100644
index 0c04a9529f..0000000000
--- a/target/board/treble_system.prop
+++ /dev/null
@@ -1,8 +0,0 @@
-# GSI always generate dex pre-opt in system image
-ro.cp_system_other_odex=0
-
-# GSI always disables adb authentication
-ro.adb.secure=0
-
-# TODO(b/78105955): disable privapp_permissions checking before the bug solved
-ro.control_privapp_permissions=disable
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 85330b30a7..cfb8930829 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -34,48 +34,44 @@
# Unbundled apps will be built with the most generic product config.
ifneq ($(TARGET_BUILD_APPS),)
PRODUCT_MAKEFILES := \
+ $(LOCAL_DIR)/aosp_arm64.mk \
$(LOCAL_DIR)/aosp_arm.mk \
- $(LOCAL_DIR)/full.mk \
+ $(LOCAL_DIR)/aosp_x86_64.mk \
$(LOCAL_DIR)/aosp_x86.mk \
+ $(LOCAL_DIR)/full.mk \
$(LOCAL_DIR)/full_x86.mk \
- $(LOCAL_DIR)/aosp_mips.mk \
- $(LOCAL_DIR)/full_mips.mk \
- $(LOCAL_DIR)/aosp_arm64.mk \
- $(LOCAL_DIR)/aosp_mips64.mk \
- $(LOCAL_DIR)/aosp_x86_64.mk
+
else
PRODUCT_MAKEFILES := \
- $(LOCAL_DIR)/core.mk \
- $(LOCAL_DIR)/generic.mk \
- $(LOCAL_DIR)/generic_x86.mk \
- $(LOCAL_DIR)/generic_mips.mk \
- $(LOCAL_DIR)/aosp_arm.mk \
- $(LOCAL_DIR)/aosp_arm_a.mk \
+ $(LOCAL_DIR)/aosp_arm64_ab.mk \
+ $(LOCAL_DIR)/aosp_arm64.mk \
$(LOCAL_DIR)/aosp_arm_ab.mk \
- $(LOCAL_DIR)/full.mk \
- $(LOCAL_DIR)/aosp_x86.mk \
- $(LOCAL_DIR)/aosp_x86_a.mk \
+ $(LOCAL_DIR)/aosp_arm.mk \
+ $(LOCAL_DIR)/aosp_x86_64_ab.mk \
+ $(LOCAL_DIR)/aosp_x86_64.mk \
$(LOCAL_DIR)/aosp_x86_ab.mk \
$(LOCAL_DIR)/aosp_x86_arm.mk \
+ $(LOCAL_DIR)/aosp_x86.mk \
+ $(LOCAL_DIR)/full.mk \
$(LOCAL_DIR)/full_x86.mk \
- $(LOCAL_DIR)/aosp_mips.mk \
- $(LOCAL_DIR)/full_mips.mk \
- $(LOCAL_DIR)/aosp_arm64.mk \
- $(LOCAL_DIR)/aosp_arm64_a.mk \
- $(LOCAL_DIR)/aosp_arm64_ab.mk \
- $(LOCAL_DIR)/aosp_mips64.mk \
- $(LOCAL_DIR)/aosp_x86_64.mk \
- $(LOCAL_DIR)/aosp_x86_64_a.mk \
- $(LOCAL_DIR)/aosp_x86_64_ab.mk \
- $(LOCAL_DIR)/sdk_phone_armv7.mk \
- $(LOCAL_DIR)/sdk_phone_x86.mk \
- $(LOCAL_DIR)/sdk_phone_mips.mk \
+ $(LOCAL_DIR)/generic.mk \
+ $(LOCAL_DIR)/generic_x86.mk \
+ $(LOCAL_DIR)/gsi_arm64.mk \
+ $(LOCAL_DIR)/mainline_arm64.mk \
+ $(LOCAL_DIR)/mainline_system_arm64.mk \
+ $(LOCAL_DIR)/sdk_arm64.mk \
+ $(LOCAL_DIR)/sdk.mk \
$(LOCAL_DIR)/sdk_phone_arm64.mk \
+ $(LOCAL_DIR)/sdk_phone_armv7.mk \
$(LOCAL_DIR)/sdk_phone_x86_64.mk \
- $(LOCAL_DIR)/sdk_phone_mips64.mk \
- $(LOCAL_DIR)/sdk.mk \
+ $(LOCAL_DIR)/sdk_phone_x86.mk \
+ $(LOCAL_DIR)/sdk_x86_64.mk \
$(LOCAL_DIR)/sdk_x86.mk \
- $(LOCAL_DIR)/sdk_mips.mk \
- $(LOCAL_DIR)/sdk_arm64.mk \
- $(LOCAL_DIR)/sdk_x86_64.mk
+
endif
+
+COMMON_LUNCH_CHOICES := \
+ aosp_arm64-eng \
+ aosp_arm-eng \
+ aosp_x86_64-eng \
+ aosp_x86-eng \
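
The reordered PRODUCT_MAKEFILES list and the new COMMON_LUNCH_CHOICES block work together: the first registers every buildable product, the second picks the product-variant combinations shown in the default lunch menu. A hedged sketch of adding another entry; aosp_foo and its makefile are hypothetical:

  # Hypothetical product registration plus a default lunch menu entry.
  PRODUCT_MAKEFILES += \
      $(LOCAL_DIR)/aosp_foo.mk

  COMMON_LUNCH_CHOICES += \
      aosp_foo-userdebug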
diff --git a/target/product/OWNERS b/target/product/OWNERS
new file mode 100644
index 0000000000..1c74859834
--- /dev/null
+++ b/target/product/OWNERS
@@ -0,0 +1 @@
+per-file runtime_libart.mk = agampe@google.com, calin@google.com, mast@google.com, ngeoffray@google.com, oth@google.com, rpl@google.com, sehr@google.com, vmarko@google.com
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index b872f48fc9..0fdd31377c 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -14,15 +14,29 @@
# limitations under the License.
#
--include device/generic/goldfish/arm32-vendor.mk
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
-# TODO(b/78308559): includes vr_hwc into GSI before vr_hwc move to vendor
-PRODUCT_PACKAGES += \
- vr_hwc
+# The system image of aosp_arm-userdebug is a GSI for devices with:
+# - ARM 32 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
-include $(SRC_TARGET_DIR)/product/full.mk
+# GSI for system/product
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_common.mk)
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+# Enable mainline checking for exactly this product name
+ifeq (aosp_arm,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+# Emulator for vendor
+$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
PRODUCT_NAME := aosp_arm
+PRODUCT_DEVICE := generic
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on ARM32
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index 33defe1444..8ef2023e62 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -14,12 +14,37 @@
# limitations under the License.
#
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_arm64-userdebug is a GSI for the devices with:
+# - ARM 64 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
# This is a build configuration for a full-featured build of the
# Open-Source part of the tree. It's geared toward a US-centric
# build quite specifically for the emulator, and might not be
# entirely appropriate to inherit from for on-device configurations.
--include device/generic/goldfish/arm64-vendor.mk
+# GSI for system/product
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_common.mk)
+
+# Emulator for vendor
+$(call inherit-product-if-exists, device/generic/goldfish/arm64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_arm64,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote32_64.rc \
+ root/init.zygote64_32.rc \
# Copy different zygote settings for vendor.img to select by setting property
# ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
@@ -29,19 +54,7 @@
PRODUCT_COPY_FILES += \
system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
-# TODO(b/78308559): includes vr_hwc into GSI before vr_hwc move to vendor
-PRODUCT_PACKAGES += \
- vr_hwc
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
-
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
-
PRODUCT_NAME := aosp_arm64
PRODUCT_DEVICE := generic_arm64
PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on ARM arm64 Emulator
+PRODUCT_MODEL := AOSP on ARM64
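The ifeq (aosp_arm64,$(TARGET_PRODUCT)) guard above matters because product makefiles are routinely inherited: checking TARGET_PRODUCT keeps the artifact-path enforcement scoped to this exact product and out of anything that merely inherits from it. A rough sketch with a hypothetical derived product:

    # device/acme/acme_arm64.mk (hypothetical derived product)
    $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_arm64.mk)
    PRODUCT_NAME := acme_arm64

    # When lunching acme_arm64, TARGET_PRODUCT is "acme_arm64", so the guard in
    # aosp_arm64.mk does not set PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS here.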
diff --git a/target/product/aosp_arm64_a.mk b/target/product/aosp_arm64_a.mk
deleted file mode 100644
index 3c7af334d9..0000000000
--- a/target/product/aosp_arm64_a.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-include build/make/target/product/treble_common_64.mk
-
-PRODUCT_NAME := aosp_arm64_a
-PRODUCT_DEVICE := generic_arm64_a
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on ARM64
diff --git a/target/product/aosp_arm64_ab.mk b/target/product/aosp_arm64_ab.mk
index 6ff11579eb..f707a39dd2 100644
--- a/target/product/aosp_arm64_ab.mk
+++ b/target/product/aosp_arm64_ab.mk
@@ -17,9 +17,32 @@
# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
# /vendor/[build|default].prop when build split is on. In order to have sysprops
# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
+# gsi_system.prop.
-include build/make/target/product/treble_common_64.mk
+# aosp_arm64_ab-userdebug is a Legacy GSI for the devices with:
+# - ARM 64 bits user space
+# - 64 bits binder interface
+# - system-as-root
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_common.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_arm64_ab,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote32_64.rc \
+ root/init.zygote64_32.rc \
+
+# Copy different zygote settings for vendor.img to select by setting property
+# ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
+# 1. 64-bit primary, 32-bit secondary OR
+# 2. 32-bit primary, 64-bit secondary
+# init.zygote64_32.rc is in the core_64_bit.mk inherited above
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
PRODUCT_NAME := aosp_arm64_ab
PRODUCT_DEVICE := generic_arm64_ab
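As the comment above notes, the copied init.zygote*.rc files only define the available zygote layouts; the vendor image picks one at boot by setting the ro.zygote property. A hedged sketch of how a hypothetical vendor makefile would select the 64-bit-primary layout:

    # Hypothetical vendor product fragment -- illustrative only.
    # Selects init.zygote64_32.rc (64-bit primary, 32-bit secondary zygote).
    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
        ro.zygote=zygote64_32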
diff --git a/target/product/aosp_arm_a.mk b/target/product/aosp_arm_a.mk
deleted file mode 100644
index 3060fa920a..0000000000
--- a/target/product/aosp_arm_a.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-include build/make/target/product/treble_common_32.mk
-
-PRODUCT_NAME := aosp_arm_a
-PRODUCT_DEVICE := generic_arm_a
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on ARM32
diff --git a/target/product/aosp_arm_ab.mk b/target/product/aosp_arm_ab.mk
index 9a4688e50d..e760932675 100644
--- a/target/product/aosp_arm_ab.mk
+++ b/target/product/aosp_arm_ab.mk
@@ -17,9 +17,19 @@
# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
# /vendor/[build|default].prop when build split is on. In order to have sysprops
# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
+# gsi_system.prop.
-include build/make/target/product/treble_common_32.mk
+# aosp_arm_ab-userdebug is a Legacy GSI for the devices with:
+# - ARM 32 bits user space
+# - 32 bits binder interface
+# - system-as-root
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_common.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_arm_ab,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
PRODUCT_NAME := aosp_arm_ab
PRODUCT_DEVICE := generic_arm_ab
diff --git a/target/product/aosp_mips.mk b/target/product/aosp_mips.mk
deleted file mode 100644
index 5ee6185551..0000000000
--- a/target/product/aosp_mips.mk
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-# Copyright 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_PROPERTY_OVERRIDES += \
- rild.libpath=/vendor/lib/libreference-ril.so
-
-# Note: the following lines need to stay at the beginning so that it can
-# take priority and override the rules it inherit from other mk files
-# see copy file rules in core/Makefile
-PRODUCT_COPY_FILES += \
- development/sys-img/advancedFeatures.ini.arm:advancedFeatures.ini \
- prebuilts/qemu-kernel/mips/3.18/kernel-qemu2:kernel-ranchu \
- device/generic/goldfish/fstab.ranchu.mips:root/fstab.ranchu \
- device/generic/goldfish/fstab.ranchu.early.arm:root/fstab.ranchu.early
-
-include $(SRC_TARGET_DIR)/product/full_mips.mk
-
-PRODUCT_NAME := aosp_mips
diff --git a/target/product/aosp_mips64.mk b/target/product/aosp_mips64.mk
deleted file mode 100644
index 73d3731de4..0000000000
--- a/target/product/aosp_mips64.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_PROPERTY_OVERRIDES += \
- rild.libpath=/vendor/lib64/libreference-ril.so
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-# Note: the following lines need to stay at the beginning so that it can
-# take priority and override the rules it inherit from other mk files
-# see copy file rules in core/Makefile
-PRODUCT_COPY_FILES += \
- development/sys-img/advancedFeatures.ini.arm:advancedFeatures.ini \
- prebuilts/qemu-kernel/mips64/3.18/kernel-qemu2:kernel-ranchu \
- device/generic/goldfish/fstab.ranchu.mips:root/fstab.ranchu \
- device/generic/goldfish/fstab.ranchu.early.arm:root/fstab.ranchu.early
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_mips64/device.mk)
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-# Overrides
-PRODUCT_NAME := aosp_mips64
-PRODUCT_DEVICE := generic_mips64
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on MIPS64 Emulator
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 9ba9523432..1c7194844a 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -14,15 +14,29 @@
# limitations under the License.
#
--include device/generic/goldfish/x86-vendor.mk
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
-# TODO(b/78308559): includes vr_hwc into GSI before vr_hwc move to vendor
-PRODUCT_PACKAGES += \
- vr_hwc
+# The system image of aosp_x86-userdebug is a GSI for the devices with:
+# - x86 32 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
-include $(SRC_TARGET_DIR)/product/full_x86.mk
+# GSI for system/product
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_common.mk)
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+# Emulator for vendor
+$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_x86,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
PRODUCT_NAME := aosp_x86
+PRODUCT_DEVICE := generic_x86
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 7afc7825d5..9dfa2f4804 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -14,12 +14,37 @@
# limitations under the License.
#
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_x86_64-userdebug is a GSI for the devices with:
+# - x86 64 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
# This is a build configuration for a full-featured build of the
# Open-Source part of the tree. It's geared toward a US-centric
# build quite specifically for the emulator, and might not be
# entirely appropriate to inherit from for on-device configurations.
--include device/generic/goldfish/x86_64-vendor.mk
+# GSI for system/product
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_common.mk)
+
+# Emulator for vendor
+$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_x86_64,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote32_64.rc \
+ root/init.zygote64_32.rc \
# Copy different zygote settings for vendor.img to select by setting property
# ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
@@ -29,29 +54,7 @@
PRODUCT_COPY_FILES += \
system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
-# TODO(b/78308559): includes vr_hwc into GSI before vr_hwc move to vendor
-PRODUCT_PACKAGES += \
- vr_hwc
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
-
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
-
-ifdef NET_ETH0_STARTONBOOT
- PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
-
-# Ensure we package the BIOS files too.
-PRODUCT_PACKAGES += \
- bios.bin \
- vgabios-cirrus.bin \
-
-# Overrides
PRODUCT_NAME := aosp_x86_64
PRODUCT_DEVICE := generic_x86_64
PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on IA x86_64 Emulator
+PRODUCT_MODEL := AOSP on x86_64
diff --git a/target/product/aosp_x86_64_a.mk b/target/product/aosp_x86_64_a.mk
deleted file mode 100644
index a7fb740974..0000000000
--- a/target/product/aosp_x86_64_a.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-include build/make/target/product/treble_common_64.mk
-
-PRODUCT_NAME := aosp_x86_64_a
-PRODUCT_DEVICE := generic_x86_64_a
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on x86_64
diff --git a/target/product/aosp_x86_64_ab.mk b/target/product/aosp_x86_64_ab.mk
index 3524dbc54d..dfb7b49ff5 100644
--- a/target/product/aosp_x86_64_ab.mk
+++ b/target/product/aosp_x86_64_ab.mk
@@ -17,9 +17,32 @@
# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
# /vendor/[build|default].prop when build split is on. In order to have sysprops
# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
+# gsi_system.prop.
-include build/make/target/product/treble_common_64.mk
+# aosp_x86_64_ab-userdebug is a Legacy GSI for the devices with:
+# - x86 64 bits user space
+# - 64 bits binder interface
+# - system-as-root
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_common.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_x86_64_ab,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote32_64.rc \
+ root/init.zygote64_32.rc \
+
+# Copy different zygote settings for vendor.img to select by setting property
+# ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
+# 1. 64-bit primary, 32-bit secondary OR
+# 2. 32-bit primary, 64-bit secondary
+# init.zygote64_32.rc is in the core_64_bit.mk inherited above
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
PRODUCT_NAME := aosp_x86_64_ab
PRODUCT_DEVICE := generic_x86_64_ab
diff --git a/target/product/aosp_x86_a.mk b/target/product/aosp_x86_a.mk
deleted file mode 100644
index 9ed29955e4..0000000000
--- a/target/product/aosp_x86_a.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-include build/make/target/product/treble_common_32.mk
-
-PRODUCT_NAME := aosp_x86_a
-PRODUCT_DEVICE := generic_x86_a
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on x86
diff --git a/target/product/aosp_x86_ab.mk b/target/product/aosp_x86_ab.mk
index e72b38db7f..d07351cca8 100644
--- a/target/product/aosp_x86_ab.mk
+++ b/target/product/aosp_x86_ab.mk
@@ -17,9 +17,19 @@
# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
# /vendor/[build|default].prop when build split is on. In order to have sysprops
# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
+# gsi_system.prop.
-include build/make/target/product/treble_common_32.mk
+# aosp_x86_ab-userdebug is a Legacy GSI for the devices with:
+# - x86 32 bits user space
+# - 32 bits binder interface
+# - system-as-root
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_common.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_x86_ab,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
PRODUCT_NAME := aosp_x86_ab
PRODUCT_DEVICE := generic_x86_ab
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index 19f57e8676..70aa64ee2d 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -14,30 +14,29 @@
# limitations under the License.
#
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
# aosp_x86 with arm libraries needed by binary translation.
+# The system image of aosp_x86-userdebug is a GSI for the devices with:
+# - x86 32 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+-include device/generic/goldfish/x86-vendor.mk
+
include $(SRC_TARGET_DIR)/product/full_x86.mk
-# arm libraries. This is the list of shared libraries included in the NDK.
-# Their dependency libraries will be automatically pulled in.
-PRODUCT_PACKAGES += \
- libandroid_arm \
- libaaudio_arm \
- libc_arm \
- libdl_arm \
- libEGL_arm \
- libGLESv1_CM_arm \
- libGLESv2_arm \
- libGLESv3_arm \
- libjnigraphics_arm \
- liblog_arm \
- libm_arm \
- libmediandk_arm \
- libOpenMAXAL_arm \
- libstdc++_arm \
- libOpenSLES_arm \
- libz_arm \
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
+# Needed by devices newly launched with P to pass VtsTrebleSysProp on GSI
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+
+# Support additional P vendor interface
+PRODUCT_EXTRA_VNDK_VERSIONS := 28
PRODUCT_NAME := aosp_x86_arm
PRODUCT_DEVICE := generic_x86_arm
diff --git a/target/product/base.mk b/target/product/base.mk
index bc3710c61f..804a2ee6ba 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2012 The Android Open Source Project
+# Copyright (C) 2018 The Android Open-Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,175 +14,8 @@
# limitations under the License.
#
-# Base modules (will move elsewhere, previously user tagged)
-PRODUCT_PACKAGES += \
- 20-dns.conf \
- 95-configured \
- org.apache.http.legacy.boot \
- appwidget \
- appops \
- am \
- android.policy \
- android.test.base \
- android.test.mock \
- android.test.runner \
- app_process \
- applypatch \
- audioserver \
- bit \
- blkid \
- bmgr \
- bpfloader \
- bugreport \
- bugreportz \
- cameraserver \
- content \
- dnsmasq \
- dpm \
- framework \
- framework-sysconfig.xml \
- fsck_msdos \
- hid \
- ime \
- incidentd \
- incident \
- incident_helper \
- incident_report \
- input \
- javax.obex \
- libandroid \
- libandroid_runtime \
- libandroid_servers \
- libaudioeffect_jni \
- libaudioflinger \
- libaudiopolicyservice \
- libaudiopolicymanager \
- libbundlewrapper \
- libcamera_client \
- libcameraservice \
- libcamera2ndk \
- libdl \
- libdrmclearkeyplugin \
- libdynproc \
- libclearkeycasplugin \
- libeffectproxy \
- libeffects \
- libinput \
- libinputflinger \
- libiprouteutil \
- libjnigraphics \
- libldnhncr \
- libmedia \
- libmedia_jni \
- libmediaplayerservice \
- libmtp \
- libnetd_client \
- libnetlink \
- libnetutils \
- libpdfium \
- libradio_metadata \
- libreference-ril \
- libreverbwrapper \
- libril \
- librtp_jni \
- libsensorservice \
- libskia \
- libsonic \
- libsonivox \
- libsoundpool \
- libsoundtrigger \
- libsoundtriggerservice \
- libsqlite \
- libstagefright \
- libstagefright_amrnb_common \
- libstagefright_avc_common \
- libstagefright_enc_common \
- libstagefright_foundation \
- libstagefright_omx \
- libstagefright_yuv \
- libusbhost \
- libutils \
- libvisualizer \
- libvorbisidec \
- libmediandk \
- libvulkan \
- libwifi-service \
- locksettings \
- media \
- media_cmd \
- mediadrmserver \
- mediaserver \
- mediametrics \
- mediaextractor \
- monkey \
- mtpd \
- ndc \
- netd \
- perfetto \
- ping \
- ping6 \
- platform.xml \
- privapp-permissions-platform.xml \
- pppd \
- pm \
- racoon \
- run-as \
- schedtest \
- sdcard \
- secdiscard \
- services \
- settings \
- sgdisk \
- sm \
- statsd \
- svc \
- tc \
- telecom \
- traced \
- traced_probes \
- vdc \
- vold \
- wm
-
-# Add the compatibility library that is needed when org.apache.http.legacy
-# is removed from the bootclasspath.
-ifeq ($(REMOVE_OAHL_FROM_BCP),true)
-PRODUCT_PACKAGES += framework-oahl-backward-compatibility
-endif
-
-# Add the compatibility library that is needed when android.test.base
-# is removed from the bootclasspath.
-ifeq ($(REMOVE_ATB_FROM_BCP),true)
-PRODUCT_PACKAGES += framework-atb-backward-compatibility
-endif
-
-# Essential HAL modules
-PRODUCT_PACKAGES += \
- android.hardware.cas@1.0-service \
- android.hardware.media.omx@1.0-service
-
-# XML schema files
-PRODUCT_PACKAGES += \
- media_profiles_V1_0.dtd
-
-# Packages included only for eng or userdebug builds, previously debug tagged
-PRODUCT_PACKAGES_DEBUG := \
- logpersist.start \
- perfprofd \
- sqlite3 \
- strace
-
-PRODUCT_COPY_FILES := $(call add-to-product-copy-files-if-exists,\
- frameworks/base/config/preloaded-classes:system/etc/preloaded-classes)
-
-# Note: it is acceptable to not have a compiled-classes file. In that case, all boot classpath
-# classes will be compiled.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
- frameworks/base/config/compiled-classes:system/etc/compiled-classes)
-
-# Note: it is acceptable to not have a dirty-image-objects file. In that case, the special bin
-# for known dirty objects in the image will be empty.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
- frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/embedded.mk)
+# This makefile can be inherited by products that don't need to be split up
+# by partition.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_product.mk)
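base.mk is now a thin composition layer: inherit-product appends list variables such as PRODUCT_PACKAGES and PRODUCT_COPY_FILES rather than overwriting them, so the three per-partition makefiles can be combined here or pulled in individually. A minimal sketch of a product that only wants the system half (all names are hypothetical):

    # Hypothetical system-only product makefile.
    $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)

    # Appended after the inherited package list, not a replacement for it.
    PRODUCT_PACKAGES += AcmeSystemExtras
    PRODUCT_NAME := acme_system_only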
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
new file mode 100644
index 0000000000..82557bf63b
--- /dev/null
+++ b/target/product/base_product.mk
@@ -0,0 +1,22 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Base modules and settings for the product partition.
+PRODUCT_PACKAGES += \
+ healthd \
+ ModuleMetadata \
+ product_compatibility_matrix.xml \
+ product_manifest.xml \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
new file mode 100644
index 0000000000..ecc217dcd0
--- /dev/null
+++ b/target/product/base_system.mk
@@ -0,0 +1,396 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Base modules and settings for the system partition.
+PRODUCT_PACKAGES += \
+ abb \
+ adbd \
+ am \
+ android.hidl.allocator@1.0-service \
+ android.hidl.base-V1.0-java \
+ android.hidl.manager-V1.0-java \
+ android.hidl.memory@1.0-impl \
+ android.hidl.memory@1.0-impl.vendor \
+ android.system.suspend@1.0-service \
+ android.test.base \
+ android.test.mock \
+ android.test.runner \
+ apexd \
+ applypatch \
+ appops \
+ app_process \
+ appwidget \
+ ashmemd \
+ atrace \
+ audioserver \
+ BackupRestoreConfirmation \
+ bcc \
+ blank_screen \
+ blkid \
+ bmgr \
+ bootanimation \
+ bootstat \
+ bpfloader \
+ bu \
+ bugreport \
+ bugreportz \
+ cgroups.json \
+ charger \
+ cmd \
+ com.android.conscrypt \
+ com.android.location.provider \
+ com.android.media \
+ com.android.media.swcodec \
+ com.android.resolv \
+ com.android.tzdata \
+ ContactsProvider \
+ content \
+ crash_dump \
+ CtsShimPrebuilt \
+ CtsShimPrivPrebuilt \
+ debuggerd\
+ device_config \
+ dmctl \
+ dnsmasq \
+ DownloadProvider \
+ dpm \
+ dumpstate \
+ dumpsys \
+ DynamicSystemInstallationService \
+ e2fsck \
+ ExtServices \
+ ExtShared \
+ flags_health_check \
+ framework \
+ framework-res \
+ framework-sysconfig.xml \
+ fsck_msdos \
+ fs_config_files_system \
+ fs_config_dirs_system \
+ gsid \
+ gsi_tool \
+ heapprofd \
+ heapprofd_client \
+ gatekeeperd \
+ gpuservice \
+ hid \
+ hwservicemanager \
+ idmap \
+ idmap2 \
+ idmap2d \
+ ime \
+ ims-common \
+ incident \
+ incidentd \
+ incident_helper \
+ init.environ.rc \
+ init.rc \
+ init_system \
+ input \
+ installd \
+ iorapd \
+ ip \
+ ip6tables \
+ iptables \
+ ip-up-vpn \
+ javax.obex \
+ keystore \
+ ld.config.txt \
+ ld.mc \
+ libaaudio \
+ libamidi \
+ libandroid \
+ libandroidfw \
+ libandroid_runtime \
+ libandroid_servers \
+ libartpalette-system \
+ libashmemd_client \
+ libaudioeffect_jni \
+ libbinder \
+ libbinder_ndk \
+ libc.bootstrap \
+ libcamera2ndk \
+ libcamera_client \
+ libcameraservice \
+ libc_malloc_debug \
+ libc_malloc_hooks \
+ libcutils \
+ libdl.bootstrap \
+ libdrmframework \
+ libdrmframework_jni \
+ libEGL \
+ libETC1 \
+ libFFTEm \
+ libfilterfw \
+ libgatekeeper \
+ libGLESv1_CM \
+ libGLESv2 \
+ libGLESv3 \
+ libgui \
+ libhardware \
+ libhardware_legacy \
+ libinput \
+ libinputflinger \
+ libiprouteutil \
+ libjnigraphics \
+ libjpeg \
+ liblog \
+ libm.bootstrap \
+ libmdnssd \
+ libmedia \
+ libmedia_jni \
+ libmediandk \
+ libmtp \
+ libnetd_client \
+ libnetlink \
+ libnetutils \
+ libneuralnetworks \
+ libOpenMAXAL \
+ libOpenSLES \
+ libpdfium \
+ libpixelflinger \
+ libpower \
+ libpowermanager \
+ libradio_metadata \
+ librtp_jni \
+ libsensorservice \
+ libsfplugin_ccodec \
+ libskia \
+ libsonic \
+ libsonivox \
+ libsoundpool \
+ libspeexresampler \
+ libsqlite \
+ libstagefright \
+ libstagefright_amrnb_common \
+ libstagefright_enc_common \
+ libstagefright_foundation \
+ libstagefright_omx \
+ libstdc++ \
+ libsurfaceflinger \
+ libsysutils \
+ libui \
+ libusbhost \
+ libutils \
+ libvorbisidec \
+ libvulkan \
+ libwifi-service \
+ libwilhelm \
+ linker \
+ lmkd \
+ LocalTransport \
+ locksettings \
+ logcat \
+ logd \
+ lpdump \
+ lshal \
+ mdnsd \
+ media \
+ mediacodec.policy \
+ mediadrmserver \
+ mediaextractor \
+ mediametrics \
+ media_profiles_V1_0.dtd \
+ MediaProvider \
+ mediaserver \
+ mke2fs \
+ monkey \
+ mtpd \
+ ndc \
+ netd \
+ NetworkStack \
+ org.apache.http.legacy \
+ PackageInstaller \
+ perfetto \
+ PermissionController \
+ ping \
+ ping6 \
+ platform.xml \
+ pm \
+ pppd \
+ privapp-permissions-platform.xml \
+ racoon \
+ recovery-persist \
+ resize2fs \
+ rss_hwm_reset \
+ run-as \
+ schedtest \
+ screencap \
+ sdcard \
+ secdiscard \
+ SecureElement \
+ selinux_policy_system \
+ sensorservice \
+ service \
+ servicemanager \
+ services \
+ settings \
+ SettingsProvider \
+ sgdisk \
+ Shell \
+ shell_and_utilities_system \
+ sm \
+ statsd \
+ storaged \
+ surfaceflinger \
+ svc \
+ task_profiles.json \
+ tc \
+ telecom \
+ telephony-common \
+ tombstoned \
+ traced \
+ traced_probes \
+ tune2fs \
+ tzdatacheck \
+ uiautomator \
+ uncrypt \
+ usbd \
+ vdc \
+ viewcompiler \
+ voip-common \
+ vold \
+ WallpaperBackup \
+ watchdogd \
+ wificond \
+ wifi-service \
+ wm \
+
+# VINTF data for system image
+PRODUCT_PACKAGES += \
+ system_manifest.xml \
+ system_compatibility_matrix.xml \
+
+# Host tools to install
+PRODUCT_HOST_PACKAGES += \
+ BugReport \
+ adb \
+ art-tools \
+ atest \
+ bcc \
+ bit \
+ e2fsck \
+ fastboot \
+ flags_health_check \
+ icu-data_host_runtime_apex \
+ idmap2 \
+ incident_report \
+ ld.mc \
+ lpdump \
+ mdnsd \
+ minigzip \
+ mke2fs \
+ resize2fs \
+ sgdisk \
+ sqlite3 \
+ tinyplay \
+ tune2fs \
+ tzdatacheck \
+ unwind_info \
+ unwind_reg_info \
+ unwind_symbols \
+ viewcompiler \
+ tzdata_host \
+ tzdata_host_runtime_apex \
+ tzlookup.xml_host_runtime_apex \
+ tz_version_host \
+ tz_version_host_runtime_apex \
+
+ifeq ($(TARGET_CORE_JARS),)
+$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
+endif
+
+# The order matters for runtime class lookup performance.
+PRODUCT_BOOT_JARS := \
+ $(TARGET_CORE_JARS) \
+ framework \
+ ext \
+ telephony-common \
+ voip-common \
+ ims-common \
+ updatable-media
+PRODUCT_UPDATABLE_BOOT_MODULES := conscrypt updatable-media
+PRODUCT_UPDATABLE_BOOT_LOCATIONS := \
+ /apex/com.android.conscrypt/javalib/conscrypt.jar \
+ /apex/com.android.media/javalib/updatable-media.jar
+
+
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/init.usb.rc:root/init.usb.rc \
+ system/core/rootdir/init.usb.configfs.rc:root/init.usb.configfs.rc \
+ system/core/rootdir/ueventd.rc:root/ueventd.rc \
+ system/core/rootdir/etc/hosts:system/etc/hosts
+
+# Add the compatibility library that is needed when android.test.base
+# is removed from the bootclasspath.
+ifeq ($(REMOVE_ATB_FROM_BCP),true)
+PRODUCT_PACKAGES += framework-atb-backward-compatibility
+PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
+else
+PRODUCT_BOOT_JARS += android.test.base
+endif
+
+PRODUCT_COPY_FILES += system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += ro.zygote=zygote32
+
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += debug.atrace.tags.enableflags=0
+
+# Packages included only for eng or userdebug builds, previously debug tagged
+PRODUCT_PACKAGES_DEBUG := \
+ adb_keys \
+ arping \
+ gdbserver \
+ init-debug.rc \
+ iotop \
+ iw \
+ logpersist.start \
+ logtagd.rc \
+ procrank \
+ showmap \
+ sqlite3 \
+ ss \
+ strace \
+ su \
+ sanitizer-status \
+ tracepath \
+ tracepath6 \
+ traceroute6 \
+ unwind_info \
+ unwind_reg_info \
+ unwind_symbols \
+
+# The set of packages whose code can be loaded by the system server.
+PRODUCT_SYSTEM_SERVER_APPS += \
+ SettingsProvider \
+ WallpaperBackup
+
+# Packages included only for eng/userdebug builds, when building with SANITIZE_TARGET=address
+PRODUCT_PACKAGES_DEBUG_ASAN := \
+ fuzz \
+ honggfuzz
+
+PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE := \
+ libdumpcoverage
+
+PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
+ frameworks/base/config/preloaded-classes:system/etc/preloaded-classes)
+
+# Note: it is acceptable to not have a dirty-image-objects file. In that case, the special bin
+# for known dirty objects in the image will be empty.
+PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
+ frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
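The android.test.base handling near the end of this file is an either/or switch: the library sits on the boot classpath by default, and a build that opts out via REMOVE_ATB_FROM_BCP gets the standalone compatibility jar instead. A minimal sketch of the opt-in, assuming the flag is set from a product or board config:

    # Hypothetical product config fragment -- illustrative only.
    # With this set, base_system.mk adds framework-atb-backward-compatibility
    # to PRODUCT_PACKAGES and PRODUCT_BOOT_JARS instead of android.test.base.
    REMOVE_ATB_FROM_BCP := true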
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
new file mode 100644
index 0000000000..584327cf20
--- /dev/null
+++ b/target/product/base_vendor.mk
@@ -0,0 +1,71 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Base modules and settings for recovery.
+PRODUCT_PACKAGES += \
+ adbd.recovery \
+ android.hardware.health@2.0-impl-default.recovery \
+ cgroups.recovery.json \
+ charger.recovery \
+ init_second_stage.recovery \
+ ld.config.recovery.txt \
+ linker.recovery \
+ recovery \
+ shell_and_utilities_recovery \
+ watchdogd.recovery \
+
+# These had been pulled in via init_second_stage.recovery, but may not be needed.
+PRODUCT_HOST_PACKAGES += \
+ e2fsdroid \
+ mke2fs \
+ sload_f2fs \
+ make_f2fs \
+
+PRODUCT_HOST_PACKAGES += \
+ icu-data_host_runtime_apex
+
+# Base modules and settings for the vendor partition.
+PRODUCT_PACKAGES += \
+ android.hardware.cas@1.1-service \
+ android.hardware.configstore@1.1-service \
+ android.hardware.media.omx@1.0-service \
+ dumpsys_vendor \
+ fs_config_files_nonsystem \
+ fs_config_dirs_nonsystem \
+ gralloc.default \
+ group \
+ init_vendor \
+ libbundlewrapper \
+ libclearkeycasplugin \
+ libdownmix \
+ libdrmclearkeyplugin \
+ libdynproc \
+ libeffectproxy \
+ libeffects \
+ libldnhncr \
+ libreference-ril \
+ libreverbwrapper \
+ libril \
+ libvisualizer \
+ passwd \
+ selinux_policy_nonsystem \
+ shell_and_utilities_vendor \
+ vndservice \
+ vndservicemanager \
+
+# VINTF data for vendor image
+PRODUCT_PACKAGES += \
+ device_compatibility_matrix.xml \
diff --git a/target/product/core.mk b/target/product/core.mk
deleted file mode 100644
index c72bb2f265..0000000000
--- a/target/product/core.mk
+++ /dev/null
@@ -1,77 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Base configuration for communication-oriented android devices
-# (phones, tablets, etc.). If you want a change to apply to ALMOST ALL
-# devices (including non-phones and non-tablets), modify
-# core_minimal.mk instead. If you care about wearables, you need to modify
-# core_tiny.mk in addition to core_minimal.mk.
-
-PRODUCT_PACKAGES += \
- BasicDreams \
- BlockedNumberProvider \
- BookmarkProvider \
- Browser2 \
- BuiltInPrintService \
- Calendar \
- CalendarProvider \
- CaptivePortalLogin \
- CertInstaller \
- Contacts \
- DeskClock \
- DocumentsUI \
- DownloadProviderUi \
- Email \
- ExactCalculator \
- ExternalStorageProvider \
- FusedLocation \
- InputDevices \
- KeyChain \
- Keyguard \
- LatinIME \
- Launcher3QuickStep \
- ManagedProvisioning \
- MtpDocumentsProvider \
- PacProcessor \
- libpac \
- PrintSpooler \
- PrintRecommendationService \
- ProxyHandler \
- QuickSearchBox \
- SecureElement \
- Settings \
- SettingsIntelligence \
- SharedStorageBackup \
- SimAppDialog \
- StorageManager \
- Telecom \
- TeleService \
- Traceur \
- VpnDialogs \
- vr \
- MmsService
-
-# The set of packages whose code can be loaded by the system server.
-PRODUCT_SYSTEM_SERVER_APPS += \
- FusedLocation \
- InputDevices \
- KeyChain \
- Telecom \
-
-# The set of packages we want to force 'speed' compilation on.
-PRODUCT_DEXPREOPT_SPEED_APPS += \
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_base.mk)
diff --git a/target/product/core_base.mk b/target/product/core_base.mk
deleted file mode 100644
index 7dc0010512..0000000000
--- a/target/product/core_base.mk
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Note that components added here will be also shared in PDK. Components
-# that should not be in PDK should be added in lower level like core.mk.
-
-PRODUCT_PROPERTY_OVERRIDES := \
- ro.config.notification_sound=OnTheHunt.ogg \
- ro.config.alarm_alert=Alarm_Classic.ogg
-
-PRODUCT_PACKAGES += \
- ContactsProvider \
- DefaultContainerService \
- Home \
- TelephonyProvider \
- UserDictionaryProvider \
- atrace \
- libandroidfw \
- libaudiopreprocessing \
- libaudioutils \
- libfilterpack_imageproc \
- libgabi++ \
- libmdnssd \
- libnfc_ndef \
- libpowermanager \
- libspeexresampler \
- libstagefright_soft_aacdec \
- libstagefright_soft_aacenc \
- libstagefright_soft_amrdec \
- libstagefright_soft_amrnbenc \
- libstagefright_soft_amrwbenc \
- libstagefright_soft_avcdec \
- libstagefright_soft_avcenc \
- libstagefright_soft_flacdec \
- libstagefright_soft_flacenc \
- libstagefright_soft_g711dec \
- libstagefright_soft_gsmdec \
- libstagefright_soft_hevcdec \
- libstagefright_soft_mp3dec \
- libstagefright_soft_mpeg2dec \
- libstagefright_soft_mpeg4dec \
- libstagefright_soft_mpeg4enc \
- libstagefright_soft_opusdec \
- libstagefright_soft_rawdec \
- libstagefright_soft_vorbisdec \
- libstagefright_soft_vpxdec \
- libstagefright_soft_vpxenc \
- libvariablespeed \
- libwebrtc_audio_preprocessing \
- mdnsd \
- requestsync \
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index 85646c1af0..9718dc6bfe 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -14,171 +14,17 @@
# limitations under the License.
#
-# Base configuration for most consumer android devices. Do not put
-# things that are specific to communication devices (phones, tables,
-# etc.) here -- for that, use core.mk.
+# This product is the base of a generic media-capable device, which
+# means most Android products, but excludes wearables.
+#
+# Note: Do not add any contents directly to this file. Add them to the
+# appropriate media_<x>.mk for the partition instead; also consider
+# base_<x>.mk or handheld_<x>.mk.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_product.mk)
PRODUCT_BRAND := generic
PRODUCT_DEVICE := generic
PRODUCT_NAME := core
-
-PRODUCT_PACKAGES += \
- BackupRestoreConfirmation \
- CompanionDeviceManager \
- CtsShimPrebuilt \
- CtsShimPrivPrebuilt \
- DownloadProvider \
- ExtShared \
- ExtServices \
- HTMLViewer \
- MediaProvider \
- PackageInstaller \
- SecureElement \
- SettingsProvider \
- Shell \
- StatementService \
- WallpaperBackup \
- android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java \
- bcc \
- bu \
- com.android.future.usb.accessory \
- com.android.location.provider \
- com.android.location.provider.xml \
- com.android.media.remotedisplay \
- com.android.media.remotedisplay.xml \
- com.android.mediadrm.signer \
- com.android.mediadrm.signer.xml \
- drmserver \
- ethernet-service \
- framework-res \
- idmap \
- installd \
- ims-common \
- ip \
- ip-up-vpn \
- ip6tables \
- iptables \
- gatekeeperd \
- keystore \
- ld.config.txt \
- ld.mc \
- libaaudio \
- libOpenMAXAL \
- libOpenSLES \
- libdownmix \
- libdrmframework \
- libdrmframework_jni \
- libfilterfw \
- libkeystore \
- libgatekeeper \
- libneuralnetworks \
- libwebviewchromium_loader \
- libwebviewchromium_plat_support \
- libwilhelm \
- logd \
- mke2fs \
- e2fsck \
- resize2fs \
- tune2fs \
- screencap \
- sensorservice \
- telephony-common \
- uiautomator \
- uncrypt \
- vndk_snapshot_package \
- voip-common \
- webview \
- webview_zygote \
-
-# Wifi modules
-PRODUCT_PACKAGES += \
- wifi-service \
- wificond \
-
-PRODUCT_COPY_FILES += \
- frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
-
-ifneq (REL,$(PLATFORM_VERSION_CODENAME))
-PRODUCT_COPY_FILES += \
- frameworks/native/data/etc/android.software.preview_sdk.xml:system/etc/permissions/android.software.preview_sdk.xml
-endif
-
-ifeq ($(TARGET_CORE_JARS),)
-$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
-endif
-
-# The order of PRODUCT_BOOT_JARS matters.
-PRODUCT_BOOT_JARS := \
- $(TARGET_CORE_JARS) \
- ext \
- framework \
- telephony-common \
- voip-common \
- ims-common \
- android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java
-
-ifeq ($(REMOVE_OAHL_FROM_BCP),true)
-PRODUCT_BOOT_JARS += framework-oahl-backward-compatibility
-else
-PRODUCT_BOOT_JARS += org.apache.http.legacy.boot
-endif
-
-ifeq ($(REMOVE_ATB_FROM_BCP),true)
-PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
-else
-PRODUCT_BOOT_JARS += android.test.base
-endif
-
-# The order of PRODUCT_SYSTEM_SERVER_JARS matters.
-PRODUCT_SYSTEM_SERVER_JARS := \
- services \
- ethernet-service \
- wifi-service \
- com.android.location.provider \
-
-# The set of packages whose code can be loaded by the system server.
-PRODUCT_SYSTEM_SERVER_APPS += \
- SettingsProvider \
- WallpaperBackup
-
-# Adoptable external storage supports both ext4 and f2fs
-PRODUCT_PACKAGES += \
- e2fsck \
- mke2fs \
- fsck.f2fs \
- make_f2fs \
-
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
- ro.zygote=zygote32
-PRODUCT_COPY_FILES += \
- system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
-
-PRODUCT_COPY_FILES += \
- system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
-
-# Enable boot.oat filtering of compiled classes to reduce boot.oat size. b/28026683
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
- frameworks/base/config/compiled-classes-phone:system/etc/compiled-classes)
-
-# Enable dirty image object binning to reduce dirty pages in the image.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
- frameworks/base/dirty-image-objects-phone:system/etc/dirty-image-objects)
-
-# On userdebug builds, collect more tombstones by default.
-ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
-PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
- tombstoned.max_tombstone_count=50
-endif
-
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
- ro.logd.size.stats=64K \
- log.tag.stats_log=I
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
-
-# Enable CFI for security-sensitive components
-$(call inherit-product, $(SRC_TARGET_DIR)/product/cfi-common.mk)
-$(call inherit-product-if-exists, vendor/google/products/cfi-vendor.mk)
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
deleted file mode 100644
index 4ef7b48fa1..0000000000
--- a/target/product/core_tiny.mk
+++ /dev/null
@@ -1,150 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Tiny configuration for small devices such as wearables. Includes base and embedded.
-# No telephony
-
-PRODUCT_PACKAGES := \
- Bluetooth \
- CalendarProvider \
- ContactsProvider \
- CertInstaller \
- FusedLocation \
- InputDevices
-
-PRODUCT_PACKAGES += \
- clatd \
- clatd.conf \
- pppd
-
-PRODUCT_PACKAGES += \
- audio.primary.default \
- local_time.default \
- power.default
-
-PRODUCT_PACKAGES += \
- BackupRestoreConfirmation \
- CtsShimPrebuilt \
- CtsShimPrivPrebuilt \
- DefaultContainerService \
- ExtShared \
- ExtServices \
- SecureElement \
- SettingsProvider \
- Shell \
- WallpaperBackup \
- android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java \
- bcc \
- bu \
- com.android.location.provider \
- com.android.location.provider.xml \
- framework-res \
- installd \
- ims-common \
- ip \
- ip-up-vpn \
- ip6tables \
- iptables \
- gatekeeperd \
- keystore \
- ld.config.txt \
- ld.mc \
- libaaudio \
- libOpenMAXAL \
- libOpenSLES \
- libdownmix \
- libfilterfw \
- libgatekeeper \
- libkeystore \
- libwilhelm \
- libdrmframework_jni \
- libdrmframework \
- mke2fs \
- e2fsck \
- resize2fs \
- tune2fs \
- screencap \
- sensorservice \
- uiautomator \
- uncrypt \
- telephony-common \
- voip-common \
- logd \
-
-# Wifi modules
-PRODUCT_PACKAGES += \
- wifi-service \
- wificond \
-
-ifeq ($(TARGET_CORE_JARS),)
-$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
-endif
-
-# The order matters
-PRODUCT_BOOT_JARS := \
- $(TARGET_CORE_JARS) \
- ext \
- framework \
- telephony-common \
- voip-common \
- ims-common \
- android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java
-
-ifeq ($(REMOVE_OAHL_FROM_BCP),true)
-PRODUCT_BOOT_JARS += framework-oahl-backward-compatibility
-else
-PRODUCT_BOOT_JARS += org.apache.http.legacy.boot
-endif
-
-ifeq ($(REMOVE_ATB_FROM_BCP),true)
-PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
-else
-PRODUCT_BOOT_JARS += android.test.base
-endif
-
-# The order of PRODUCT_SYSTEM_SERVER_JARS matters.
-PRODUCT_SYSTEM_SERVER_JARS := \
- services \
- wifi-service
-
-# The set of packages whose code can be loaded by the system server.
-PRODUCT_SYSTEM_SERVER_APPS += \
- FusedLocation \
- InputDevices \
- SettingsProvider \
- WallpaperBackup \
-
-# The set of packages we want to force 'speed' compilation on.
-PRODUCT_DEXPREOPT_SPEED_APPS := \
-
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
- ro.zygote=zygote32
-PRODUCT_COPY_FILES += \
- system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
-
-PRODUCT_PROPERTY_OVERRIDES += \
- ro.carrier=unknown
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
-$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
-$(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
-
-# Overrides
-PRODUCT_BRAND := tiny
-PRODUCT_DEVICE := tiny
-PRODUCT_NAME := core_tiny
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
deleted file mode 100644
index 69cf10227c..0000000000
--- a/target/product/embedded.mk
+++ /dev/null
@@ -1,122 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a very minimal build of the
-# Open-Source part of the tree.
-
-PRODUCT_PACKAGES += \
- adb \
- adbd \
- usbd \
- android.hardware.configstore@1.1-service \
- android.hidl.allocator@1.0-service \
- android.hidl.memory@1.0-impl \
- android.hidl.memory@1.0-impl.vendor \
- atrace \
- blank_screen \
- bootanimation \
- bootstat \
- charger \
- cmd \
- crash_dump \
- debuggerd\
- dumpstate \
- dumpsys \
- fastboot \
- gralloc.default \
- healthd \
- hwservicemanager \
- init \
- init.environ.rc \
- init.rc \
- libEGL \
- libETC1 \
- libFFTEm \
- libGLESv1_CM \
- libGLESv2 \
- libGLESv3 \
- libbinder \
- libc \
- libc_malloc_debug \
- libc_malloc_hooks \
- libcutils \
- libdl \
- libgui \
- libhardware \
- libhardware_legacy \
- libjpeg \
- liblog \
- libm \
- libpixelflinger \
- libpower \
- libsigchain \
- libstdc++ \
- libsurfaceflinger \
- libsurfaceflinger_ddmconnection \
- libsysutils \
- libui \
- libutils \
- linker \
- lmkd \
- logcat \
- lshal \
- recovery \
- service \
- servicemanager \
- shell_and_utilities \
- storaged \
- surfaceflinger \
- thermalserviced \
- tombstoned \
- tzdatacheck \
- vndservice \
- vndservicemanager \
-
-# VINTF data
-PRODUCT_PACKAGES += \
- device_compatibility_matrix.xml \
- device_manifest.xml \
- framework_manifest.xml \
- framework_compatibility_matrix.xml \
-
-# SELinux packages are added as dependencies of the selinux_policy
-# phony package.
-PRODUCT_PACKAGES += \
- selinux_policy \
-
-# AID Generation for
-# <pwd.h> and <grp.h>
-PRODUCT_PACKAGES += \
- passwd \
- group \
- fs_config_files \
- fs_config_dirs
-
-# If there are product-specific adb keys defined, install them on debuggable
-# builds.
-PRODUCT_PACKAGES_DEBUG += \
- adb_keys
-
-# Ensure that this property is always defined so that bionic_systrace.cpp
-# can rely on it being initially set by init.
-PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
- debug.atrace.tags.enableflags=0
-
-PRODUCT_COPY_FILES += \
- system/core/rootdir/init.usb.rc:root/init.usb.rc \
- system/core/rootdir/init.usb.configfs.rc:root/init.usb.configfs.rc \
- system/core/rootdir/ueventd.rc:root/ueventd.rc \
- system/core/rootdir/etc/hosts:system/etc/hosts
diff --git a/target/product/emulator_vendor.mk b/target/product/emulator_vendor.mk
new file mode 100644
index 0000000000..f0a5354803
--- /dev/null
+++ b/target/product/emulator_vendor.mk
@@ -0,0 +1,70 @@
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# This file is included by other product makefiles to add all the
+# emulator-related modules to PRODUCT_PACKAGES.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+
+# TODO(b/123495142): these files should be cleaned up
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST := \
+ system/bin/vintf \
+ system/etc/permissions/android.software.verified_boot.xml \
+ system/etc/permissions/privapp-permissions-goldfish.xml \
+ system/lib/egl/libGLES_android.so \
+ system/lib64/egl/libGLES_android.so \
+ system/priv-app/SdkSetup/SdkSetup.apk \
+
+# Device modules
+PRODUCT_PACKAGES += \
+ libGLES_android \
+ vintf \
+
+# need this for gles libraries to load properly
+# after moving to /vendor/lib/
+PRODUCT_PACKAGES += \
+ vndk-sp
+
+PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
+
+PRODUCT_CHARACTERISTICS := emulator
+
+PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+# goldfish vendor partition configurations
+$(call inherit-product-if-exists, device/generic/goldfish/vendor.mk)
+
+# Watchdog triggers a reboot because the location service is not
+# responding; disable it for now.
+# Still keep it enabled on internal master, where it is still working.
+# Once it is fixed in AOSP, remove this block of comments.
+#PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+#config.disable_location=true
+
+# Enable Perfetto traced
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ persist.traced.enable=1
+
+# enable Google-specific location features,
+# like NetworkLocationProvider and LocationCollector
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ ro.com.google.locationfeatures=1
+
+# disable setupwizard
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ ro.setupwizard.mode=DISABLED
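The property blocks above are the emulator's build-time system property defaults; each PRODUCT_SYSTEM_DEFAULT_PROPERTIES entry is a key=value pair that ends up among the system image's default properties. A short sketch of adding one more default under the same pattern (the property name is hypothetical):

    # Hypothetical extra default -- illustrative only.
    PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
        debug.acme.example=0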
diff --git a/target/product/full.mk b/target/product/full.mk
index 7c0578c584..b356f9d424 100644
--- a/target/product/full.mk
+++ b/target/product/full.mk
@@ -23,6 +23,9 @@ $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic/device.mk)
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
# Overrides
PRODUCT_NAME := full
PRODUCT_DEVICE := generic
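
PRODUCT_USE_DYNAMIC_PARTITION_SIZE lets the build size each image to its contents rather than to a fixed partition size pinned in the board config. A rough contrast, with an illustrative value:

    # Fixed sizing: a BoardConfig.mk pins the system image size by hand.
    BOARD_SYSTEMIMAGE_PARTITION_SIZE := 3221225472
    # Dynamic sizing: the product opts in and the build computes the size.
    PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
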
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index c390ffd8f8..447576ce15 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -24,13 +24,7 @@ PRODUCT_PACKAGES := \
WAPPushManager
PRODUCT_PACKAGES += \
- Galaxy4 \
- HoloSpiralWallpaper \
- LiveWallpapers \
LiveWallpapersPicker \
- MagicSmokeWallpapers \
- NoiseField \
- PhaseBeam \
PhotoTable
# Bluetooth:
diff --git a/target/product/full_mips.mk b/target/product/full_mips.mk
deleted file mode 100644
index 61734b43cc..0000000000
--- a/target/product/full_mips.mk
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# mips build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_mips/device.mk)
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-# Overrides
-PRODUCT_NAME := full_mips
-PRODUCT_DEVICE := generic_mips
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on MIPS Emulator
diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk
index 6ea2d05dd6..55c450a80f 100644
--- a/target/product/full_x86.mk
+++ b/target/product/full_x86.mk
@@ -32,10 +32,13 @@ ifdef NET_ETH0_STARTONBOOT
endif
# Ensure we package the BIOS files too.
-PRODUCT_PACKAGES += \
+PRODUCT_HOST_PACKAGES += \
bios.bin \
vgabios-cirrus.bin \
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
# Overrides
PRODUCT_NAME := full_x86
PRODUCT_DEVICE := generic_x86
diff --git a/target/product/generic.mk b/target/product/generic.mk
index dd0d663152..6fe4818763 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -24,3 +24,5 @@ $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
PRODUCT_BRAND := generic
PRODUCT_DEVICE := generic
PRODUCT_NAME := generic
+
+$(call enforce-product-packages-exist,)
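
enforce-product-packages-exist makes the build fail when a name in PRODUCT_PACKAGES does not resolve to a real module; as used above, the empty argument means no exemptions. My understanding is that the argument is a whitelist of names allowed to be missing, roughly as in this sketch:

    # Sketch only; SomeOptionalVendorApp is a hypothetical package name.
    $(call enforce-product-packages-exist, SomeOptionalVendorApp)
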
diff --git a/target/product/generic_mips.mk b/target/product/generic_mips.mk
deleted file mode 100644
index 7b53d041af..0000000000
--- a/target/product/generic_mips.mk
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic phone product that isn't specialized for a specific device.
-# It includes the base Android platform.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_no_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
-
-# Overrides
-PRODUCT_BRAND := generic_mips
-PRODUCT_DEVICE := generic_mips
-PRODUCT_NAME := generic_mips
diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk
index 6a84c35f55..324d36f105 100644
--- a/target/product/generic_no_telephony.mk
+++ b/target/product/generic_no_telephony.mk
@@ -14,66 +14,16 @@
# limitations under the License.
#
-# This is a generic phone product that isn't specialized for a specific device.
-# It includes the base Android platform.
-
-PRODUCT_PACKAGES := \
- Bluetooth \
- BluetoothMidiService \
- Camera2 \
- Gallery2 \
- Music \
- MusicFX \
- NfcNci \
- OneTimeInitializer \
- Provision \
- SystemUI \
- SysuiDarkThemeOverlay \
- DisplayCutoutEmulationDoubleOverlay \
- DisplayCutoutEmulationCornerOverlay \
- DisplayCutoutEmulationTallOverlay \
- EasterEgg \
- WallpaperCropper
-
-PRODUCT_PACKAGES += \
- clatd \
- clatd.conf \
- pppd \
- screenrecord
-
-PRODUCT_PACKAGES += \
- librs_jni \
- libvideoeditor_jni \
- libvideoeditor_core \
- libvideoeditor_osal \
- libvideoeditor_videofilters \
- libvideoeditorplayer \
-
-PRODUCT_PACKAGES += \
- audio.primary.default \
- local_time.default \
- vibrator.default \
- power.default
-
-PRODUCT_COPY_FILES := \
- frameworks/av/media/libeffects/data/audio_effects.conf:system/etc/audio_effects.conf
-
-PRODUCT_PROPERTY_OVERRIDES += \
- ro.carrier=unknown
+# This product is a generic phone or tablet that doesn't have telephony.
+#
+# Note: Do not add any contents directly to this file. Choose either
+# handheld_system or handheld_vendor depending on partition (also consider
+# base_<x>.mk or media_<x>.mk).
-$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
-$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/hyphenation-patterns/patterns.mk)
-$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
-$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_product.mk)
-# Overrides
PRODUCT_BRAND := generic
PRODUCT_DEVICE := generic
PRODUCT_NAME := generic_no_telephony
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index faa1852b41..7bb6d91e0d 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -17,3 +17,6 @@
# Inherit common Android Go defaults.
$(call inherit-product, build/target/product/go_defaults_common.mk)
+# Add the system properties.
+TARGET_SYSTEM_PROP += \
+ build/make/target/board/go_defaults.prop
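
TARGET_SYSTEM_PROP collects property files whose contents are folded into the system build.prop at image build time. A board or device config can append its own file through the same hook; the path below is hypothetical:

    # Hypothetical device-specific property file added via TARGET_SYSTEM_PROP.
    TARGET_SYSTEM_PROP += device/acme/rocket/system.prop
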
diff --git a/target/product/go_defaults_512.mk b/target/product/go_defaults_512.mk
index 56ab29b4b2..5542818a06 100644
--- a/target/product/go_defaults_512.mk
+++ b/target/product/go_defaults_512.mk
@@ -17,12 +17,6 @@
# Inherit common Android Go defaults.
$(call inherit-product, build/target/product/go_defaults_common.mk)
-# 512MB specific properties.
-
-# lmkd can kill more now.
-PRODUCT_PROPERTY_OVERRIDES += \
- ro.lmk.medium=700 \
-
-# madvise random in ART to reduce page cache thrashing.
-PRODUCT_PROPERTY_OVERRIDES += \
- dalvik.vm.madvise-random=true
+# Add the system properties.
+TARGET_SYSTEM_PROP += \
+ build/make/target/board/go_defaults_512.prop
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index 18907c1421..aee7f477e2 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -14,21 +14,13 @@
# limitations under the License.
#
-# Sets Android Go recommended default values for propreties.
+# Sets the Android Go recommended default product options.
-# Set lowram options
-PRODUCT_PROPERTY_OVERRIDES += \
- ro.config.low_ram=true \
- ro.lmk.critical_upgrade=true \
- ro.lmk.upgrade_pressure=40 \
- ro.lmk.downgrade_pressure=60 \
- ro.lmk.kill_heaviest_task=false \
- ro.statsd.enable=false
-# set threshold to filter unused apps
+# Set lowram options and enable traced by default
PRODUCT_PROPERTY_OVERRIDES += \
- pm.dexopt.downgrade_after_inactive_days=10
-
+ ro.config.low_ram=true \
+ persist.traced.enable=1 \
# Speed profile services and wifi-service to reduce RAM and storage.
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
@@ -42,26 +34,26 @@ PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK := true
PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true
PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt
-# set the compiler filter for shared apks to quicken.
-# Rationale: speed has a lot of dex code expansion, it uses more ram and space
-# compared to quicken. Using quicken for shared APKs on Go devices may save RAM.
-# Note that this is a trade-off: here we trade clean pages for dirty pages,
-# extra cpu and battery. That's because the quicken files will be jit-ed in all
-# the processes that load of shared apk and the code cache is not shared.
-# Some notable apps that will be affected by this are gms and chrome.
-# b/65591595.
-PRODUCT_PROPERTY_OVERRIDES += \
- pm.dexopt.shared=quicken
-
-# Default heap sizes. Allow up to 256m for large heaps to make sure a single app
-# doesn't take all of the RAM.
-PRODUCT_PROPERTY_OVERRIDES += dalvik.vm.heapgrowthlimit=128m
-PRODUCT_PROPERTY_OVERRIDES += dalvik.vm.heapsize=256m
-
# Do not generate libartd.
PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false
+# Do not spin up a separate process for the network stack on Go devices; use an in-process APK.
+PRODUCT_PACKAGES += InProcessNetworkStack
+
# Strip the local variable table and the local variable type table to reduce
# the size of the system image. This has no bearing on stack traces, but will
# leave less information available via JDWP.
PRODUCT_MINIMIZE_JAVA_DEBUG_INFO := true
+
+# Disable Scudo outside of eng builds to save RAM.
+ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
+ PRODUCT_DISABLE_SCUDO := true
+endif
+
+# Add the system properties.
+TARGET_SYSTEM_PROP += \
+ build/make/target/board/go_defaults_common.prop
+
+# Use the Go-specific handheld_core_hardware.xml from frameworks.
+PRODUCT_COPY_FILES += \
+ frameworks/native/data/etc/go_handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
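
In the Scudo block above, $(filter eng,$(TARGET_BUILD_VARIANT)) expands to a non-empty string exactly when the build variant is eng, so that ifneq branch is taken on eng builds. The same filter idiom generalizes to several variants; a self-contained sketch with a made-up property:

    # Illustrative only: set a hypothetical property on userdebug/eng builds.
    ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
      PRODUCT_PROPERTY_OVERRIDES += ro.example.debuggable_variant=1
    endif
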
diff --git a/target/product/vndk/28.txt b/target/product/gsi/28.txt
index 712e91c587..712e91c587 100644
--- a/target/product/vndk/28.txt
+++ b/target/product/gsi/28.txt
diff --git a/target/product/gsi/29.txt b/target/product/gsi/29.txt
new file mode 100644
index 0000000000..14faba5585
--- /dev/null
+++ b/target/product/gsi/29.txt
@@ -0,0 +1,273 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libbinderthreadstate.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libhidltransport.so
+VNDK-SP: libhwbinder.so
+VNDK-SP: libhwbinder_noltopgo.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.frameworks.cameraservice.common@2.0.so
+VNDK-core: android.frameworks.cameraservice.device@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.0.so
+VNDK-core: android.frameworks.displayservice@1.0.so
+VNDK-core: android.frameworks.schedulerservice@1.0.so
+VNDK-core: android.frameworks.sensorservice@1.0.so
+VNDK-core: android.frameworks.stats@1.0.so
+VNDK-core: android.frameworks.vr.composer@1.0.so
+VNDK-core: android.hardware.atrace@1.0.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.audio.common@4.0.so
+VNDK-core: android.hardware.audio.common@5.0.so
+VNDK-core: android.hardware.audio.effect@2.0.so
+VNDK-core: android.hardware.audio.effect@4.0.so
+VNDK-core: android.hardware.audio.effect@5.0.so
+VNDK-core: android.hardware.audio@2.0.so
+VNDK-core: android.hardware.audio@4.0.so
+VNDK-core: android.hardware.audio@5.0.so
+VNDK-core: android.hardware.authsecret@1.0.so
+VNDK-core: android.hardware.automotive.audiocontrol@1.0.so
+VNDK-core: android.hardware.automotive.evs@1.0.so
+VNDK-core: android.hardware.automotive.vehicle@2.0.so
+VNDK-core: android.hardware.biometrics.face@1.0.so
+VNDK-core: android.hardware.biometrics.fingerprint@2.1.so
+VNDK-core: android.hardware.bluetooth.a2dp@1.0.so
+VNDK-core: android.hardware.bluetooth.audio@2.0.so
+VNDK-core: android.hardware.bluetooth@1.0.so
+VNDK-core: android.hardware.boot@1.0.so
+VNDK-core: android.hardware.broadcastradio@1.0.so
+VNDK-core: android.hardware.broadcastradio@1.1.so
+VNDK-core: android.hardware.broadcastradio@2.0.so
+VNDK-core: android.hardware.camera.common@1.0.so
+VNDK-core: android.hardware.camera.device@1.0.so
+VNDK-core: android.hardware.camera.device@3.2.so
+VNDK-core: android.hardware.camera.device@3.3.so
+VNDK-core: android.hardware.camera.device@3.4.so
+VNDK-core: android.hardware.camera.device@3.5.so
+VNDK-core: android.hardware.camera.metadata@3.2.so
+VNDK-core: android.hardware.camera.metadata@3.3.so
+VNDK-core: android.hardware.camera.metadata@3.4.so
+VNDK-core: android.hardware.camera.provider@2.4.so
+VNDK-core: android.hardware.camera.provider@2.5.so
+VNDK-core: android.hardware.cas.native@1.0.so
+VNDK-core: android.hardware.cas@1.0.so
+VNDK-core: android.hardware.cas@1.1.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.confirmationui@1.0.so
+VNDK-core: android.hardware.contexthub@1.0.so
+VNDK-core: android.hardware.drm@1.0.so
+VNDK-core: android.hardware.drm@1.1.so
+VNDK-core: android.hardware.drm@1.2.so
+VNDK-core: android.hardware.dumpstate@1.0.so
+VNDK-core: android.hardware.fastboot@1.0.so
+VNDK-core: android.hardware.gatekeeper@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.0.so
+VNDK-core: android.hardware.gnss.visibility_control@1.0.so
+VNDK-core: android.hardware.gnss@1.0.so
+VNDK-core: android.hardware.gnss@1.1.so
+VNDK-core: android.hardware.gnss@2.0.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.graphics.composer@2.1.so
+VNDK-core: android.hardware.graphics.composer@2.2.so
+VNDK-core: android.hardware.graphics.composer@2.3.so
+VNDK-core: android.hardware.health.storage@1.0.so
+VNDK-core: android.hardware.health@1.0.so
+VNDK-core: android.hardware.health@2.0.so
+VNDK-core: android.hardware.input.classifier@1.0.so
+VNDK-core: android.hardware.input.common@1.0.so
+VNDK-core: android.hardware.ir@1.0.so
+VNDK-core: android.hardware.keymaster@3.0.so
+VNDK-core: android.hardware.keymaster@4.0.so
+VNDK-core: android.hardware.light@2.0.so
+VNDK-core: android.hardware.media.bufferpool@1.0.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.c2@1.0.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.1.so
+VNDK-core: android.hardware.neuralnetworks@1.2.so
+VNDK-core: android.hardware.nfc@1.0.so
+VNDK-core: android.hardware.nfc@1.1.so
+VNDK-core: android.hardware.nfc@1.2.so
+VNDK-core: android.hardware.oemlock@1.0.so
+VNDK-core: android.hardware.power.stats@1.0.so
+VNDK-core: android.hardware.power@1.0.so
+VNDK-core: android.hardware.power@1.1.so
+VNDK-core: android.hardware.power@1.2.so
+VNDK-core: android.hardware.power@1.3.so
+VNDK-core: android.hardware.radio.config@1.0.so
+VNDK-core: android.hardware.radio.config@1.1.so
+VNDK-core: android.hardware.radio.config@1.2.so
+VNDK-core: android.hardware.radio.deprecated@1.0.so
+VNDK-core: android.hardware.radio@1.0.so
+VNDK-core: android.hardware.radio@1.1.so
+VNDK-core: android.hardware.radio@1.2.so
+VNDK-core: android.hardware.radio@1.3.so
+VNDK-core: android.hardware.radio@1.4.so
+VNDK-core: android.hardware.secure_element@1.0.so
+VNDK-core: android.hardware.secure_element@1.1.so
+VNDK-core: android.hardware.sensors@1.0.so
+VNDK-core: android.hardware.sensors@2.0.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.2.so
+VNDK-core: android.hardware.tetheroffload.config@1.0.so
+VNDK-core: android.hardware.tetheroffload.control@1.0.so
+VNDK-core: android.hardware.thermal@1.0.so
+VNDK-core: android.hardware.thermal@1.1.so
+VNDK-core: android.hardware.thermal@2.0.so
+VNDK-core: android.hardware.tv.cec@1.0.so
+VNDK-core: android.hardware.tv.cec@2.0.so
+VNDK-core: android.hardware.tv.input@1.0.so
+VNDK-core: android.hardware.usb.gadget@1.0.so
+VNDK-core: android.hardware.usb@1.0.so
+VNDK-core: android.hardware.usb@1.1.so
+VNDK-core: android.hardware.usb@1.2.so
+VNDK-core: android.hardware.vibrator@1.0.so
+VNDK-core: android.hardware.vibrator@1.1.so
+VNDK-core: android.hardware.vibrator@1.2.so
+VNDK-core: android.hardware.vibrator@1.3.so
+VNDK-core: android.hardware.vr@1.0.so
+VNDK-core: android.hardware.weaver@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.1.so
+VNDK-core: android.hardware.wifi.offload@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.1.so
+VNDK-core: android.hardware.wifi.supplicant@1.2.so
+VNDK-core: android.hardware.wifi@1.0.so
+VNDK-core: android.hardware.wifi@1.1.so
+VNDK-core: android.hardware.wifi@1.2.so
+VNDK-core: android.hardware.wifi@1.3.so
+VNDK-core: android.hidl.allocator@1.0.so
+VNDK-core: android.hidl.memory.block@1.0.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.system.net.netd@1.0.so
+VNDK-core: android.system.net.netd@1.1.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: android.system.wifi.keystore@1.0.so
+VNDK-core: libadf.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libhidlcache.so
+VNDK-core: libjpeg.so
+VNDK-core: libkeymaster_messages.so
+VNDK-core: libkeymaster_portable.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libprotobuf-cpp-full.so
+VNDK-core: libprotobuf-cpp-lite.so
+VNDK-core: libpuresoftkeymasterdevice.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libselinux.so
+VNDK-core: libsoftkeymasterdevice.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libbinderthreadstate.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
diff --git a/target/product/vndk/Android.mk b/target/product/gsi/Android.mk
index 768cb809aa..eaaa0512f0 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -1,4 +1,3 @@
-ifneq ($(BOARD_VNDK_VERSION),)
LOCAL_PATH:= $(call my-dir)
#####################################################################
@@ -39,7 +38,17 @@ endif
droidcore: check-vndk-list
check-vndk-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-list-timestamp
+
+ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|)
+# TODO(b/110429754): remove this condition when we support 64-bit-only devices.
+check-vndk-list: ;
+else ifeq ($(TARGET_BUILD_PDK),true)
+# b/118634643: don't check VNDK lib list when building PDK. Some libs (libandroid_net.so
+# and some render-script related ones) can't be built in PDK due to missing frameworks/base.
+check-vndk-list: ;
+else
check-vndk-list: $(check-vndk-list-timestamp)
+endif
_vndk_check_failure_message := " error: VNDK library list has been changed.\n"
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
@@ -86,20 +95,32 @@ else
endif
@chmod a+x $@
+ifneq ($(BOARD_VNDK_VERSION),)
+
include $(CLEAR_VARS)
LOCAL_MODULE := vndk_package
LOCAL_REQUIRED_MODULES := \
- $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
- $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
$(LLNDK_LIBRARIES) \
llndk.libraries.txt \
vndksp.libraries.txt
+ifneq ($(TARGET_SKIP_CURRENT_VNDK),true)
+LOCAL_REQUIRED_MODULES += \
+ $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
+ $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
+endif
include $(BUILD_PHONY_PACKAGE)
include $(CLEAR_VARS)
LOCAL_MODULE := vndk_snapshot_package
+_binder32 :=
+ifneq ($(TARGET_USES_64_BIT_BINDER),true)
+ifneq ($(TARGET_IS_64_BIT),true)
+_binder32 := _binder32
+endif
+endif
LOCAL_REQUIRED_MODULES := \
- $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
+ $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH)$(_binder32))
+_binder32 :=
include $(BUILD_PHONY_PACKAGE)
endif # BOARD_VNDK_VERSION is set
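
The combined comparison ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|) joins two variables with a literal separator so a single ifeq can test both at once: the branch is taken only when TARGET_IS_64_BIT is true and TARGET_2ND_ARCH is empty, i.e. a 64-bit-only target. A self-contained illustration of the idiom:

    # Prints the message only when FIRST is "true" and SECOND is empty.
    FIRST := true
    SECOND :=
    ifeq ($(FIRST)|$(SECOND),true|)
      $(info FIRST is true and SECOND is empty)
    endif
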
diff --git a/target/product/gsi/OWNERS b/target/product/gsi/OWNERS
new file mode 100644
index 0000000000..3fdd5af542
--- /dev/null
+++ b/target/product/gsi/OWNERS
@@ -0,0 +1,3 @@
+jiyong@google.com
+justinyun@google.com
+smoreland@google.com
diff --git a/target/product/vndk/current.txt b/target/product/gsi/current.txt
index 712e91c587..14faba5585 100644
--- a/target/product/vndk/current.txt
+++ b/target/product/gsi/current.txt
@@ -5,6 +5,7 @@ LLNDK: libGLESv3.so
LLNDK: libRS.so
LLNDK: libandroid_net.so
LLNDK: libc.so
+LLNDK: libcgrouprc.so
LLNDK: libdl.so
LLNDK: libft2.so
LLNDK: liblog.so
@@ -17,18 +18,22 @@ LLNDK: libvndksupport.so
LLNDK: libvulkan.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
VNDK-SP: android.hardware.graphics.mapper@2.0.so
VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
VNDK-SP: android.hardware.renderscript@1.0.so
VNDK-SP: android.hidl.memory.token@1.0.so
VNDK-SP: android.hidl.memory@1.0.so
VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.safe_union@1.0.so
VNDK-SP: libRSCpuRef.so
VNDK-SP: libRSDriver.so
VNDK-SP: libRS_internal.so
VNDK-SP: libbacktrace.so
VNDK-SP: libbase.so
VNDK-SP: libbcinfo.so
+VNDK-SP: libbinderthreadstate.so
VNDK-SP: libblas.so
VNDK-SP: libc++.so
VNDK-SP: libcompiler_rt.so
@@ -40,31 +45,39 @@ VNDK-SP: libhidltransport.so
VNDK-SP: libhwbinder.so
VNDK-SP: libhwbinder_noltopgo.so
VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
VNDK-SP: liblzma.so
-VNDK-SP: libunwind.so
+VNDK-SP: libprocessgroup.so
VNDK-SP: libunwindstack.so
VNDK-SP: libutils.so
VNDK-SP: libutilscallstack.so
VNDK-SP: libz.so
+VNDK-core: android.frameworks.cameraservice.common@2.0.so
+VNDK-core: android.frameworks.cameraservice.device@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.0.so
VNDK-core: android.frameworks.displayservice@1.0.so
VNDK-core: android.frameworks.schedulerservice@1.0.so
VNDK-core: android.frameworks.sensorservice@1.0.so
+VNDK-core: android.frameworks.stats@1.0.so
VNDK-core: android.frameworks.vr.composer@1.0.so
-VNDK-core: android.hardware.audio.common-util.so
+VNDK-core: android.hardware.atrace@1.0.so
VNDK-core: android.hardware.audio.common@2.0.so
-VNDK-core: android.hardware.audio.common@2.0-util.so
VNDK-core: android.hardware.audio.common@4.0.so
-VNDK-core: android.hardware.audio.common@4.0-util.so
+VNDK-core: android.hardware.audio.common@5.0.so
VNDK-core: android.hardware.audio.effect@2.0.so
VNDK-core: android.hardware.audio.effect@4.0.so
+VNDK-core: android.hardware.audio.effect@5.0.so
VNDK-core: android.hardware.audio@2.0.so
VNDK-core: android.hardware.audio@4.0.so
+VNDK-core: android.hardware.audio@5.0.so
VNDK-core: android.hardware.authsecret@1.0.so
VNDK-core: android.hardware.automotive.audiocontrol@1.0.so
VNDK-core: android.hardware.automotive.evs@1.0.so
VNDK-core: android.hardware.automotive.vehicle@2.0.so
+VNDK-core: android.hardware.biometrics.face@1.0.so
VNDK-core: android.hardware.biometrics.fingerprint@2.1.so
VNDK-core: android.hardware.bluetooth.a2dp@1.0.so
+VNDK-core: android.hardware.bluetooth.audio@2.0.so
VNDK-core: android.hardware.bluetooth@1.0.so
VNDK-core: android.hardware.boot@1.0.so
VNDK-core: android.hardware.broadcastradio@1.0.so
@@ -75,11 +88,15 @@ VNDK-core: android.hardware.camera.device@1.0.so
VNDK-core: android.hardware.camera.device@3.2.so
VNDK-core: android.hardware.camera.device@3.3.so
VNDK-core: android.hardware.camera.device@3.4.so
+VNDK-core: android.hardware.camera.device@3.5.so
VNDK-core: android.hardware.camera.metadata@3.2.so
VNDK-core: android.hardware.camera.metadata@3.3.so
+VNDK-core: android.hardware.camera.metadata@3.4.so
VNDK-core: android.hardware.camera.provider@2.4.so
+VNDK-core: android.hardware.camera.provider@2.5.so
VNDK-core: android.hardware.cas.native@1.0.so
VNDK-core: android.hardware.cas@1.0.so
+VNDK-core: android.hardware.cas@1.1.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
@@ -88,69 +105,101 @@ VNDK-core: android.hardware.confirmationui@1.0.so
VNDK-core: android.hardware.contexthub@1.0.so
VNDK-core: android.hardware.drm@1.0.so
VNDK-core: android.hardware.drm@1.1.so
+VNDK-core: android.hardware.drm@1.2.so
VNDK-core: android.hardware.dumpstate@1.0.so
+VNDK-core: android.hardware.fastboot@1.0.so
VNDK-core: android.hardware.gatekeeper@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.0.so
+VNDK-core: android.hardware.gnss.visibility_control@1.0.so
VNDK-core: android.hardware.gnss@1.0.so
VNDK-core: android.hardware.gnss@1.1.so
+VNDK-core: android.hardware.gnss@2.0.so
VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
VNDK-core: android.hardware.graphics.composer@2.1.so
VNDK-core: android.hardware.graphics.composer@2.2.so
+VNDK-core: android.hardware.graphics.composer@2.3.so
+VNDK-core: android.hardware.health.storage@1.0.so
VNDK-core: android.hardware.health@1.0.so
VNDK-core: android.hardware.health@2.0.so
+VNDK-core: android.hardware.input.classifier@1.0.so
+VNDK-core: android.hardware.input.common@1.0.so
VNDK-core: android.hardware.ir@1.0.so
VNDK-core: android.hardware.keymaster@3.0.so
VNDK-core: android.hardware.keymaster@4.0.so
VNDK-core: android.hardware.light@2.0.so
VNDK-core: android.hardware.media.bufferpool@1.0.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.c2@1.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
VNDK-core: android.hardware.memtrack@1.0.so
VNDK-core: android.hardware.neuralnetworks@1.0.so
VNDK-core: android.hardware.neuralnetworks@1.1.so
+VNDK-core: android.hardware.neuralnetworks@1.2.so
VNDK-core: android.hardware.nfc@1.0.so
VNDK-core: android.hardware.nfc@1.1.so
+VNDK-core: android.hardware.nfc@1.2.so
VNDK-core: android.hardware.oemlock@1.0.so
+VNDK-core: android.hardware.power.stats@1.0.so
VNDK-core: android.hardware.power@1.0.so
VNDK-core: android.hardware.power@1.1.so
VNDK-core: android.hardware.power@1.2.so
+VNDK-core: android.hardware.power@1.3.so
VNDK-core: android.hardware.radio.config@1.0.so
+VNDK-core: android.hardware.radio.config@1.1.so
+VNDK-core: android.hardware.radio.config@1.2.so
VNDK-core: android.hardware.radio.deprecated@1.0.so
VNDK-core: android.hardware.radio@1.0.so
VNDK-core: android.hardware.radio@1.1.so
VNDK-core: android.hardware.radio@1.2.so
+VNDK-core: android.hardware.radio@1.3.so
+VNDK-core: android.hardware.radio@1.4.so
VNDK-core: android.hardware.secure_element@1.0.so
+VNDK-core: android.hardware.secure_element@1.1.so
VNDK-core: android.hardware.sensors@1.0.so
+VNDK-core: android.hardware.sensors@2.0.so
VNDK-core: android.hardware.soundtrigger@2.0.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.2.so
VNDK-core: android.hardware.tetheroffload.config@1.0.so
VNDK-core: android.hardware.tetheroffload.control@1.0.so
VNDK-core: android.hardware.thermal@1.0.so
VNDK-core: android.hardware.thermal@1.1.so
+VNDK-core: android.hardware.thermal@2.0.so
VNDK-core: android.hardware.tv.cec@1.0.so
+VNDK-core: android.hardware.tv.cec@2.0.so
VNDK-core: android.hardware.tv.input@1.0.so
VNDK-core: android.hardware.usb.gadget@1.0.so
VNDK-core: android.hardware.usb@1.0.so
VNDK-core: android.hardware.usb@1.1.so
+VNDK-core: android.hardware.usb@1.2.so
VNDK-core: android.hardware.vibrator@1.0.so
VNDK-core: android.hardware.vibrator@1.1.so
VNDK-core: android.hardware.vibrator@1.2.so
+VNDK-core: android.hardware.vibrator@1.3.so
VNDK-core: android.hardware.vr@1.0.so
VNDK-core: android.hardware.weaver@1.0.so
VNDK-core: android.hardware.wifi.hostapd@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.1.so
VNDK-core: android.hardware.wifi.offload@1.0.so
VNDK-core: android.hardware.wifi.supplicant@1.0.so
VNDK-core: android.hardware.wifi.supplicant@1.1.so
+VNDK-core: android.hardware.wifi.supplicant@1.2.so
VNDK-core: android.hardware.wifi@1.0.so
VNDK-core: android.hardware.wifi@1.1.so
VNDK-core: android.hardware.wifi@1.2.so
+VNDK-core: android.hardware.wifi@1.3.so
VNDK-core: android.hidl.allocator@1.0.so
VNDK-core: android.hidl.memory.block@1.0.so
VNDK-core: android.hidl.token@1.0.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.system.net.netd@1.0.so
VNDK-core: android.system.net.netd@1.1.so
+VNDK-core: android.system.suspend@1.0.so
VNDK-core: android.system.wifi.keystore@1.0.so
VNDK-core: libadf.so
VNDK-core: libaudioroute.so
@@ -159,6 +208,7 @@ VNDK-core: libbinder.so
VNDK-core: libcamera_metadata.so
VNDK-core: libcap.so
VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
VNDK-core: libcrypto.so
VNDK-core: libcrypto_utils.so
VNDK-core: libcurl.so
@@ -186,8 +236,6 @@ VNDK-core: libminijail.so
VNDK-core: libmkbootimg_abi_check.so
VNDK-core: libnetutils.so
VNDK-core: libnl.so
-VNDK-core: libopus.so
-VNDK-core: libpagemap.so
VNDK-core: libpcre2.so
VNDK-core: libpiex.so
VNDK-core: libpng.so
@@ -202,51 +250,24 @@ VNDK-core: libsoftkeymasterdevice.so
VNDK-core: libspeexresampler.so
VNDK-core: libsqlite.so
VNDK-core: libssl.so
-VNDK-core: libstagefright_amrnb_common.so
+VNDK-core: libstagefright_bufferpool@2.0.so
VNDK-core: libstagefright_bufferqueue_helper.so
-VNDK-core: libstagefright_enc_common.so
-VNDK-core: libstagefright_flacdec.so
VNDK-core: libstagefright_foundation.so
VNDK-core: libstagefright_omx.so
VNDK-core: libstagefright_omx_utils.so
-VNDK-core: libstagefright_soft_aacdec.so
-VNDK-core: libstagefright_soft_aacenc.so
-VNDK-core: libstagefright_soft_amrdec.so
-VNDK-core: libstagefright_soft_amrnbenc.so
-VNDK-core: libstagefright_soft_amrwbenc.so
-VNDK-core: libstagefright_soft_avcdec.so
-VNDK-core: libstagefright_soft_avcenc.so
-VNDK-core: libstagefright_soft_flacdec.so
-VNDK-core: libstagefright_soft_flacenc.so
-VNDK-core: libstagefright_soft_g711dec.so
-VNDK-core: libstagefright_soft_gsmdec.so
-VNDK-core: libstagefright_soft_hevcdec.so
-VNDK-core: libstagefright_soft_mp3dec.so
-VNDK-core: libstagefright_soft_mpeg2dec.so
-VNDK-core: libstagefright_soft_mpeg4dec.so
-VNDK-core: libstagefright_soft_mpeg4enc.so
-VNDK-core: libstagefright_soft_opusdec.so
-VNDK-core: libstagefright_soft_rawdec.so
-VNDK-core: libstagefright_soft_vorbisdec.so
-VNDK-core: libstagefright_soft_vpxdec.so
-VNDK-core: libstagefright_soft_vpxenc.so
VNDK-core: libstagefright_xmlparser.so
-VNDK-core: libsuspend.so
VNDK-core: libsysutils.so
VNDK-core: libtinyalsa.so
VNDK-core: libtinyxml2.so
VNDK-core: libui.so
VNDK-core: libusbhost.so
-VNDK-core: libvixl-arm.so
-VNDK-core: libvixl-arm64.so
-VNDK-core: libvorbisidec.so
VNDK-core: libwifi-system-iface.so
VNDK-core: libxml2.so
VNDK-core: libyuv.so
VNDK-core: libziparchive.so
VNDK-private: libbacktrace.so
+VNDK-private: libbinderthreadstate.so
VNDK-private: libblas.so
VNDK-private: libcompiler_rt.so
VNDK-private: libft2.so
VNDK-private: libgui.so
-VNDK-private: libunwind.so
diff --git a/target/product/gsi/init.gsi.rc b/target/product/gsi/init.gsi.rc
new file mode 100644
index 0000000000..c6faba78d9
--- /dev/null
+++ b/target/product/gsi/init.gsi.rc
@@ -0,0 +1,3 @@
+#
+# Android init script for GSI required initialization
+#
diff --git a/target/product/vndk/init.gsi.rc b/target/product/gsi/init.legacy-gsi.rc
index 0150b1a127..00dd576fc1 100644
--- a/target/product/vndk/init.gsi.rc
+++ b/target/product/gsi/init.legacy-gsi.rc
@@ -1,2 +1,3 @@
# If ro.vndk.version is not defined, import init.vndk-27.rc.
import /system/etc/init/gsi/init.vndk-${ro.vndk.version:-27}.rc
+
diff --git a/target/product/vndk/init.vndk-27.rc b/target/product/gsi/init.vndk-27.rc
index d464a2f396..d464a2f396 100644
--- a/target/product/vndk/init.vndk-27.rc
+++ b/target/product/gsi/init.vndk-27.rc
diff --git a/target/product/gsi/skip_mount.cfg b/target/product/gsi/skip_mount.cfg
new file mode 100644
index 0000000000..549767edf2
--- /dev/null
+++ b/target/product/gsi/skip_mount.cfg
@@ -0,0 +1,2 @@
+/product
+/product_services
diff --git a/target/product/treble_common_64.mk b/target/product/gsi_arm64.mk
index fc3c16f8c2..b711d88c08 100644
--- a/target/product/treble_common_64.mk
+++ b/target/product/gsi_arm64.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,24 +14,24 @@
# limitations under the License.
#
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_common.mk)
-include build/make/target/product/treble_common.mk
-
-# For now this will allow 64-bit apps, but still compile all apps with JNI
-# for 32-bit only.
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote32_64.rc \
+ root/init.zygote64_32.rc \
# Copy different zygote settings for vendor.img to select by setting property
# ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
# 1. 64-bit primary, 32-bit secondary OR
# 2. 32-bit primary, 64-bit secondary
-# 3. 64-bit only is currently forbidden (b/64280459#comment6)
+# init.zygote64_32.rc is copied by core_64_bit.mk, inherited above.
PRODUCT_COPY_FILES += \
- system/core/rootdir/init.zygote64_32.rc:root/init.zygote64_32.rc \
system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
-TARGET_SUPPORTS_32_BIT_APPS := true
-TARGET_SUPPORTS_64_BIT_APPS := true
+PRODUCT_NAME := gsi_arm64
+PRODUCT_DEVICE := gsi_arm64
+PRODUCT_BRAND := generic
+PRODUCT_MODEL := GSI on ARM64
diff --git a/target/product/gsi_common.mk b/target/product/gsi_common.mk
new file mode 100644
index 0000000000..7578f92ac7
--- /dev/null
+++ b/target/product/gsi_common.mk
@@ -0,0 +1,96 @@
+#
+# Copyright (C) 2019 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+
+# GSI includes all AOSP product packages, which are placed under /system/product.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_product.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_product.mk)
+
+# Default AOSP sounds
+$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
+
+# GSI doesn't support APEX for now.
+# Properties set in product take precedence over those in vendor.
+PRODUCT_PRODUCT_PROPERTIES += \
+ ro.apex.updatable=false
+
+# Additional settings used in all AOSP builds
+PRODUCT_PRODUCT_PROPERTIES += \
+ ro.config.ringtone=Ring_Synth_04.ogg \
+ ro.config.notification_sound=pixiedust.ogg \
+
+# The mainline checking whitelist; these entries should be cleaned up.
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ system/app/messaging/messaging.apk \
+ system/app/WAPPushManager/WAPPushManager.apk \
+ system/bin/healthd \
+ system/etc/init/healthd.rc \
+ system/etc/seccomp_policy/crash_dump.%.policy \
+ system/etc/seccomp_policy/mediacodec.policy \
+ system/etc/vintf/manifest/manifest_healthd.xml \
+ system/lib/libframesequence.so \
+ system/lib/libgiftranscode.so \
+ system/lib64/libframesequence.so \
+ system/lib64/libgiftranscode.so \
+
+# Some GSI builds enable dexpreopt; whitelist these preopt files.
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += %.odex %.vdex %.art
+
+# Exclude GSI specific files
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ system/etc/init/config/skip_mount.cfg \
+ system/etc/init/init.gsi.rc \
+
+# Exclude all files under system/product and system/product_services
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ system/product/% \
+ system/product_services/%
+
+
+# Split selinux policy
+PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
+# Needed by devices newly launched with P to pass VtsTrebleSysProp on GSI.
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+
+# GSI specific tasks on boot
+PRODUCT_COPY_FILES += \
+ build/make/target/product/gsi/skip_mount.cfg:system/etc/init/config/skip_mount.cfg \
+ build/make/target/product/gsi/init.gsi.rc:system/etc/init/init.gsi.rc \
+
+# Support the additional P vendor interface.
+PRODUCT_EXTRA_VNDK_VERSIONS := 28
+
+# More AOSP packages
+PRODUCT_PACKAGES += \
+ messaging \
+ PhotoTable \
+ WAPPushManager \
+ WallpaperPicker \
+
+# Telephony:
+# Provide an APN configuration to the GSI product.
+PRODUCT_COPY_FILES += \
+ device/sample/etc/apns-full-conf.xml:$(TARGET_COPY_OUT_PRODUCT)/etc/apns-conf.xml
+
+# NFC:
+# Provide a libnfc-nci.conf to the GSI product.
+PRODUCT_COPY_FILES += \
+ device/generic/common/nfc/libnfc-nci.conf:$(TARGET_COPY_OUT_PRODUCT)/etc/libnfc-nci.conf
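
PRODUCT_COPY_FILES entries are <source>:<install path> pairs, and the TARGET_COPY_OUT_* variables select the destination partition, as in the APN and NFC entries above. A hypothetical example along the same lines:

    # Illustrative only; the source path is made up.
    PRODUCT_COPY_FILES += \
        device/acme/rocket/etc/example.conf:$(TARGET_COPY_OUT_PRODUCT)/etc/example.conf
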
diff --git a/target/product/gsi_keys.mk b/target/product/gsi_keys.mk
new file mode 100644
index 0000000000..5a814db108
--- /dev/null
+++ b/target/product/gsi_keys.mk
@@ -0,0 +1,22 @@
+#
+# Copyright (C) 2019 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Include the GSI keys in the first-stage ramdisk so that verified
+# boot can be enabled when booting a GSI.
+PRODUCT_PACKAGES += \
+ q-gsi.avbpubkey \
+ r-gsi.avbpubkey \
+ s-gsi.avbpubkey \
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
new file mode 100644
index 0000000000..0d100b267f
--- /dev/null
+++ b/target/product/handheld_product.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the product partition contents for
+# a generic phone or tablet device. Only add something here if
+# it definitely doesn't belong on other types of devices (if it
+# does, use base_vendor.mk).
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_product.mk)
+
+# /product packages
+PRODUCT_PACKAGES += \
+ Browser2 \
+ Calendar \
+ Camera2 \
+ Contacts \
+ DeskClock \
+ Email \
+ Gallery2 \
+ LatinIME \
+ Launcher3QuickStep \
+ Music \
+ OneTimeInitializer \
+ Provision \
+ QuickSearchBox \
+ Settings \
+ SettingsIntelligence \
+ StorageManager \
+ SystemUI \
+ WallpaperCropper \
+ frameworks-base-overlays
+
+PRODUCT_PACKAGES_DEBUG += \
+ frameworks-base-overlays-debug
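
Modules listed in PRODUCT_PACKAGES_DEBUG are installed only on debuggable (userdebug/eng) builds, which is why the debug overlays are split out above. A hypothetical addition would look the same:

    # ExampleDebugTool is a made-up module name.
    PRODUCT_PACKAGES_DEBUG += \
        ExampleDebugTool
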
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
new file mode 100644
index 0000000000..6463a541b4
--- /dev/null
+++ b/target/product/handheld_system.mk
@@ -0,0 +1,90 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the system partition contents for
+# a generic phone or tablet device. Only add something here if
+# it definitely doesn't belong on other types of devices (if it
+# does, use base_vendor.mk).
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_system.mk)
+$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/source-sans-pro/fonts.mk)
+$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
+$(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
+$(call inherit-product-if-exists, external/hyphenation-patterns/patterns.mk)
+$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
+$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
+
+PRODUCT_PACKAGES += \
+ BasicDreams \
+ BlockedNumberProvider \
+ Bluetooth \
+ BluetoothMidiService \
+ BookmarkProvider \
+ BuiltInPrintService \
+ CalendarProvider \
+ cameraserver \
+ CaptivePortalLogin \
+ CertInstaller \
+ clatd \
+ clatd.conf \
+ DocumentsUI \
+ DownloadProviderUi \
+ EasterEgg \
+ ExternalStorageProvider \
+ FusedLocation \
+ InputDevices \
+ KeyChain \
+ librs_jni \
+ ManagedProvisioning \
+ MmsService \
+ MtpDocumentsProvider \
+ MusicFX \
+ NfcNci \
+ OsuLogin \
+ PacProcessor \
+ PrintRecommendationService \
+ PrintSpooler \
+ ProxyHandler \
+ screenrecord \
+ SecureElement \
+ SharedStorageBackup \
+ SimAppDialog \
+ Telecom \
+ TelephonyProvider \
+ TeleService \
+ Traceur \
+ UserDictionaryProvider \
+ VpnDialogs \
+ vr \
+
+
+PRODUCT_SYSTEM_SERVER_APPS += \
+ FusedLocation \
+ InputDevices \
+ KeyChain \
+ Telecom \
+
+PRODUCT_COPY_FILES += \
+ frameworks/av/media/libeffects/data/audio_effects.conf:system/etc/audio_effects.conf
+
+PRODUCT_PROPERTY_OVERRIDES += \
+ ro.carrier=unknown \
+ ro.config.notification_sound=OnTheHunt.ogg \
+ ro.config.alarm_alert=Alarm_Classic.ogg
diff --git a/target/product/handheld_vendor.mk b/target/product/handheld_vendor.mk
new file mode 100644
index 0000000000..cb7cf741d4
--- /dev/null
+++ b/target/product/handheld_vendor.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the non-system partition contents for
+# a generic phone or tablet device. Only add something here if
+# it definitely doesn't belong on other types of devices (if it
+# does, use base_vendor.mk).
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_vendor.mk)
+
+# /vendor packages
+PRODUCT_PACKAGES += \
+ audio.primary.default \
+ local_time.default \
+ power.default \
+ vibrator.default \
+
diff --git a/target/product/languages_default.mk b/target/product/languages_default.mk
new file mode 100644
index 0000000000..a13a23c569
--- /dev/null
+++ b/target/product/languages_default.mk
@@ -0,0 +1,105 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a build configuration that just contains a list of languages, with
+# en_US set as the default language.
+PRODUCT_LOCALES := \
+ en_US \
+ af_ZA \
+ am_ET \
+ ar_EG \
+ ar_XB \
+ as_IN \
+ az_AZ \
+ be_BY \
+ bg_BG \
+ bn_BD \
+ bs_BA \
+ ca_ES \
+ cs_CZ \
+ da_DK \
+ de_DE \
+ el_GR \
+ en_AU \
+ en_CA \
+ en_GB \
+ en_IN \
+ en_XA \
+ es_ES \
+ es_US \
+ et_EE \
+ eu_ES \
+ fa_IR \
+ fi_FI \
+ fr_CA \
+ fr_FR \
+ gl_ES \
+ gu_IN \
+ hi_IN \
+ hr_HR \
+ hu_HU \
+ hy_AM \
+ in_ID \
+ is_IS \
+ it_IT \
+ iw_IL \
+ ja_JP \
+ ka_GE \
+ kk_KZ \
+ km_KH \
+ kn_IN \
+ ko_KR \
+ ky_KG \
+ lo_LA \
+ lt_LT \
+ lv_LV \
+ mk_MK \
+ ml_IN \
+ mn_MN \
+ mr_IN \
+ ms_MY \
+ my_MM \
+ nb_NO \
+ ne_NP \
+ nl_NL \
+ or_IN \
+ pa_IN \
+ pl_PL \
+ pt_BR \
+ pt_PT \
+ ro_RO \
+ ru_RU \
+ si_LK \
+ sk_SK \
+ sl_SI \
+ sq_AL \
+ sr_Latn_RS \
+ sr_RS \
+ sv_SE \
+ sw_TZ \
+ ta_IN \
+ te_IN \
+ th_TH \
+ tl_PH \
+ tr_TR \
+ uk_UA \
+ ur_PK \
+ uz_UZ \
+ vi_VN \
+ zh_CN \
+ zh_HK \
+ zh_TW \
+ zu_ZA \
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 5f3795f210..43a40a7e94 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -14,94 +14,9 @@
# limitations under the License.
#
-# This is a build configuration that just contains a list of languages.
-#
-# These are all the locales that have translations.
-PRODUCT_LOCALES := \
- en_US \
- af_ZA \
- am_ET \
- ar_EG \
- ar_XB \
- as_IN \
- az_AZ \
- be_BY \
- bg_BG \
- bn_BD \
- bs_BA \
- ca_ES \
- cs_CZ \
- da_DK \
- de_DE \
- el_GR \
- en_AU \
- en_CA \
- en_GB \
- en_IN \
- en_XA \
- en_XC \
- es_ES \
- es_US \
- et_EE \
- eu_ES \
- fa_IR \
- fi_FI \
- fr_CA \
- fr_FR \
- gl_ES \
- gu_IN \
- hi_IN \
- hr_HR \
- hu_HU \
- hy_AM \
- in_ID \
- is_IS \
- it_IT \
- iw_IL \
- ja_JP \
- ka_GE \
- kk_KZ \
- km_KH \
- kn_IN \
- ko_KR \
- ky_KG \
- lo_LA \
- lt_LT \
- lv_LV \
- mk_MK \
- ml_IN \
- mn_MN \
- mr_IN \
- ms_MY \
- my_MM \
- nb_NO \
- ne_NP \
- nl_NL \
- or_IN \
- pa_IN \
- pl_PL \
- pt_BR \
- pt_PT \
- ro_RO \
- ru_RU \
- si_LK \
- sk_SK \
- sl_SI \
- sq_AL \
- sr_Latn_RS \
- sr_RS \
- sv_SE \
- sw_TZ \
- ta_IN \
- te_IN \
- th_TH \
- tl_PH \
- tr_TR \
- uk_UA \
- ur_PK \
- uz_UZ \
- vi_VN \
- zh_CN \
- zh_HK \
- zh_TW \
- zu_ZA
+# This is a build configuration that contains the default list of languages,
+# as well as the en_XC pseudo-locale, which is useful for localization test
+# builds.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
+PRODUCT_LOCALES += en_XC
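
Products pick up one of these locale lists by inheritance, and the first entry of PRODUCT_LOCALES is treated as the default locale (which is why en_US leads the default list). A product that needs a different default would list it first; illustrative only:

    # Hypothetical product with German as the default locale.
    PRODUCT_LOCALES := de_DE en_US
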
diff --git a/target/product/legacy_gsi_common.mk b/target/product/legacy_gsi_common.mk
new file mode 100644
index 0000000000..fdae6eb16a
--- /dev/null
+++ b/target/product/legacy_gsi_common.mk
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2019 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include $(SRC_TARGET_DIR)/product/gsi_common.mk
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ system/etc/init/init.legacy-gsi.rc \
+ system/etc/init/gsi/init.vndk-27.rc \
+ system/etc/ld.config.vndk_lite.txt \
+
+# Legacy GSI supports the additional O-MR1 interface.
+PRODUCT_EXTRA_VNDK_VERSIONS += 27
+
+# Support for O-MR1 devices.
+PRODUCT_COPY_FILES += \
+ build/make/target/product/gsi/init.legacy-gsi.rc:system/etc/init/init.legacy-gsi.rc \
+ build/make/target/product/gsi/init.vndk-27.rc:system/etc/init/gsi/init.vndk-27.rc
+
+# Namespace configuration file for the non-enforcing VNDK.
+PRODUCT_PACKAGES += \
+ ld.config.vndk_lite.txt
+
+# Legacy GSI relaxes the compatible property checking.
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := false
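
PRODUCT_EXTRA_VNDK_VERSIONS names the VNDK snapshot versions packaged into the image so that vendor partitions built against those releases keep working; gsi_common.mk already adds 28 and this file appends 27. A device makefile would use the same variable, with versions chosen to match the vendor images it must support:

    # Versions are illustrative.
    PRODUCT_EXTRA_VNDK_VERSIONS := 27 28
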
diff --git a/target/product/mainline.mk b/target/product/mainline.mk
new file mode 100644
index 0000000000..7900cdfee7
--- /dev/null
+++ b/target/product/mainline.mk
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile is intended to serve as a base for completely AOSP-based
+# mainline devices. It contains the mainline system partition and sensible
+# defaults for the product and vendor partitions.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_product.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_product.mk)
+
+$(call inherit-product, frameworks/base/data/sounds/AllAudio.mk)
+
+PRODUCT_PROPERTY_OVERRIDES += \
+ ro.config.ringtone=Ring_Synth_04.ogg \
+ ro.com.android.dataroaming=true \
+
+PRODUCT_PACKAGES += \
+ PhotoTable \
+ WallpaperPicker \
+
+PRODUCT_COPY_FILES += device/sample/etc/apns-full-conf.xml:$(TARGET_COPY_OUT_PRODUCT)/etc/apns-conf.xml
diff --git a/target/product/mainline_arm64.mk b/target/product/mainline_arm64.mk
new file mode 100644
index 0000000000..f21fa89c08
--- /dev/null
+++ b/target/product/mainline_arm64.mk
@@ -0,0 +1,67 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline.mk)
+$(call enforce-product-packages-exist,)
+
+PRODUCT_NAME := mainline_arm64
+PRODUCT_DEVICE := mainline_arm64
+PRODUCT_BRAND := generic
+PRODUCT_SHIPPING_API_LEVEL := 28
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
+
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ root/init.zygote64_32.rc \
+
+# Modules that are to be moved to /product
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
+ system/app/Browser2/Browser2.apk \
+ system/app/Calendar/Calendar.apk \
+ system/app/Camera2/Camera2.apk \
+ system/app/DeskClock/DeskClock.apk \
+ system/app/DeskClock/oat/arm64/DeskClock.odex \
+ system/app/DeskClock/oat/arm64/DeskClock.vdex \
+ system/app/Email/Email.apk \
+ system/app/Gallery2/Gallery2.apk \
+ system/app/LatinIME/LatinIME.apk \
+ system/app/LatinIME/oat/arm64/LatinIME.odex \
+ system/app/LatinIME/oat/arm64/LatinIME.vdex \
+ system/app/Music/Music.apk \
+ system/app/QuickSearchBox/QuickSearchBox.apk \
+ system/app/webview/webview.apk \
+ system/bin/healthd \
+ system/etc/init/healthd.rc \
+ system/etc/vintf/manifest/manifest_healthd.xml \
+ system/lib64/libjni_eglfence.so \
+ system/lib64/libjni_filtershow_filters.so \
+ system/lib64/libjni_jpegstream.so \
+ system/lib64/libjni_jpegutil.so \
+ system/lib64/libjni_latinime.so \
+ system/lib64/libjni_tinyplanet.so \
+ system/priv-app/CarrierConfig/CarrierConfig.apk \
+ system/priv-app/CarrierConfig/oat/arm64/CarrierConfig.odex \
+ system/priv-app/CarrierConfig/oat/arm64/CarrierConfig.vdex \
+ system/priv-app/Contacts/Contacts.apk \
+ system/priv-app/Dialer/Dialer.apk \
+ system/priv-app/Launcher3QuickStep/Launcher3QuickStep.apk \
+ system/priv-app/OneTimeInitializer/OneTimeInitializer.apk \
+ system/priv-app/Provision/Provision.apk \
+ system/priv-app/SettingsIntelligence/SettingsIntelligence.apk \
+ system/priv-app/StorageManager/StorageManager.apk \
+ system/priv-app/WallpaperCropper/WallpaperCropper.apk \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
new file mode 100644
index 0000000000..3644a221ea
--- /dev/null
+++ b/target/product/mainline_system.mk
@@ -0,0 +1,123 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile is the basis of a generic system image for a handheld device.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
+# Enable updating of APEXes
+$(call inherit-product, $(SRC_TARGET_DIR)/product/updatable_apex.mk)
+# Add adb keys to debuggable AOSP builds (if they exist)
+$(call inherit-product-if-exists, vendor/google/security/adb/vendor_key.mk)
+
+# Shared java libs
+PRODUCT_PACKAGES += \
+ com.android.nfc_extras \
+
+# Applications
+PRODUCT_PACKAGES += \
+ LiveWallpapersPicker \
+ PartnerBookmarksProvider \
+ PresencePolling \
+ RcsService \
+ SafetyRegulatoryInfo \
+ Stk \
+ Tag \
+ TimeZoneUpdater \
+
+# Binaries
+PRODUCT_PACKAGES += llkd
+
+# OTA support
+PRODUCT_PACKAGES += \
+ recovery-refresh \
+ update_engine \
+ update_verifier \
+
+# Wrapped net utils for /vendor access.
+PRODUCT_PACKAGES += netutils-wrapper-1.0
+
+# Charger images
+PRODUCT_PACKAGES += charger_res_images
+
+# system_other support
+PRODUCT_PACKAGES += \
+ cppreopts.sh \
+ otapreopt_script \
+
+# Bluetooth libraries
+PRODUCT_PACKAGES += \
+ audio.a2dp.default \
+ audio.hearing_aid.default \
+
+# For ringtones that rely on forward lock encryption
+PRODUCT_PACKAGES += libfwdlockengine
+
+# System libraries commonly depended on by things on the product partition.
+# This list will be pruned periodically.
+PRODUCT_PACKAGES += \
+ android.hardware.biometrics.fingerprint@2.1 \
+ android.hardware.radio@1.0 \
+ android.hardware.radio@1.1 \
+ android.hardware.radio@1.2 \
+ android.hardware.radio.config@1.0 \
+ android.hardware.radio.deprecated@1.0 \
+ android.hardware.secure_element@1.0 \
+ android.hardware.wifi@1.0 \
+ libaudio-resampler \
+ libdrm \
+ liblogwrap \
+ liblz4 \
+ libminui \
+ libnl \
+ libprotobuf-cpp-full \
+
+# Camera service uses 'libdepthphoto' for adding dynamic depth
+# metadata inside depth jpegs.
+PRODUCT_PACKAGES += \
+ libdepthphoto \
+
+PRODUCT_PACKAGES_DEBUG += \
+ avbctl \
+ bootctl \
+ tinyplay \
+ tinycap \
+ tinymix \
+ tinypcminfo \
+ update_engine_client \
+
+PRODUCT_HOST_PACKAGES += \
+ tinyplay
+
+# Enable stats logging in LMKD
+TARGET_LMKD_STATS_LOG := true
+
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
+PRODUCT_NAME := mainline_system
+PRODUCT_BRAND := generic
+
+_base_mk_whitelist :=
+
+_my_whitelist := $(_base_mk_whitelist)
+
+# For mainline, system.img should be mounted at /, so we include ROOT here.
+_my_paths := \
+ $(TARGET_COPY_OUT_ROOT)/ \
+ $(TARGET_COPY_OUT_SYSTEM)/ \
+
+$(call require-artifacts-in-path, $(_my_paths), $(_my_whitelist))
diff --git a/target/product/mainline_system_arm64.mk b/target/product/mainline_system_arm64.mk
new file mode 100644
index 0000000000..6cadf8448a
--- /dev/null
+++ b/target/product/mainline_system_arm64.mk
@@ -0,0 +1,36 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call enforce-product-packages-exist,)
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_NAME := mainline_system_arm64
+PRODUCT_DEVICE := mainline_arm64
+PRODUCT_BRAND := generic
+PRODUCT_SHIPPING_API_LEVEL := 28
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
diff --git a/target/board/generic_arm_a/BoardConfig.mk b/target/product/media_product.mk
index f0e1a39498..17c24eef1f 100644
--- a/target/board/generic_arm_a/BoardConfig.mk
+++ b/target/product/media_product.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,13 +14,12 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_32.mk
+# This makefile contains the product partition contents for
+# media-capable devices (non-wearables). Only add something here
+# if it definitely doesn't belong on wearables. Otherwise, choose
+# base_product.mk.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_product.mk)
-# Overwrite the setting in treble_common_32.mk for non-A/B arm GSI
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 943718400 # 900MB
-
-TARGET_ARCH := arm
-TARGET_ARCH_VARIANT := armv7-a-neon
-TARGET_CPU_ABI := armeabi-v7a
-TARGET_CPU_ABI2 := armeabi
-TARGET_CPU_VARIANT := generic
+# /product packages
+PRODUCT_PACKAGES += \
+ webview \
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
new file mode 100644
index 0000000000..5c0902dbcf
--- /dev/null
+++ b/target/product/media_system.mk
@@ -0,0 +1,82 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the system partition contents for
+# media-capable devices (non-wearables). Only add something
+# here if it definitely doesn't belong on wearables. Otherwise,
+# choose base_system.mk.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
+
+PRODUCT_PACKAGES += \
+ com.android.future.usb.accessory \
+ com.android.mediadrm.signer \
+ com.android.media.remotedisplay \
+ com.android.media.remotedisplay.xml \
+ CompanionDeviceManager \
+ drmserver \
+ ethernet-service \
+ fsck.f2fs \
+ HTMLViewer \
+ libfilterpack_imageproc \
+ libwebviewchromium_loader \
+ libwebviewchromium_plat_support \
+ make_f2fs \
+ requestsync \
+ StatementService \
+ vndk_snapshot_package \
+
+PRODUCT_HOST_PACKAGES += \
+ fsck.f2fs \
+
+PRODUCT_COPY_FILES += \
+ frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
+
+ifneq (REL,$(PLATFORM_VERSION_CODENAME))
+PRODUCT_COPY_FILES += \
+ frameworks/native/data/etc/android.software.preview_sdk.xml:system/etc/permissions/android.software.preview_sdk.xml
+endif
+
+# The order here is the same order they end up on the classpath, so it matters.
+PRODUCT_SYSTEM_SERVER_JARS := \
+ services \
+ ethernet-service \
+ wifi-service \
+ com.android.location.provider \
+
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
+
+# Enable boot.oat filtering of compiled classes to reduce boot.oat size. b/28026683
+PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
+ frameworks/base/config/compiled-classes-phone:system/etc/compiled-classes)
+
+# Enable dirty image object binning to reduce dirty pages in the image.
+PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
+ frameworks/base/dirty-image-objects-phone:system/etc/dirty-image-objects)
+
+# On userdebug builds, collect more tombstones by default.
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ tombstoned.max_tombstone_count=50
+endif
+
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+ ro.logd.size.stats=64K \
+ log.tag.stats_log=I
+
+# Enable CFI for security-sensitive components
+$(call inherit-product, $(SRC_TARGET_DIR)/product/cfi-common.mk)
+$(call inherit-product-if-exists, vendor/google/products/cfi-vendor.mk)
diff --git a/target/product/media_vendor.mk b/target/product/media_vendor.mk
new file mode 100644
index 0000000000..7d4af64bd6
--- /dev/null
+++ b/target/product/media_vendor.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the non-system partition contents for
+# media-capable devices (non-wearables). Only add something here
+# if it definitely doesn't belong on wearables. Otherwise, choose
+# base_vendor.mk.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_vendor.mk)
+
+# /vendor packages
+PRODUCT_PACKAGES += \
+ libaudiopreprocessing \
+ libwebrtc_audio_preprocessing \
diff --git a/target/product/profile_boot_common.mk b/target/product/profile_boot_common.mk
index f243902605..4147dfafda 100644
--- a/target/product/profile_boot_common.mk
+++ b/target/product/profile_boot_common.mk
@@ -41,5 +41,8 @@ PRODUCT_PROPERTY_OVERRIDES += \
# Use speed compiler filter since system server doesn't have JIT.
PRODUCT_DEX_PREOPT_BOOT_FLAGS += --compiler-filter=speed
+# System server is speed compiled and doesn't have a separate preopt flag,
+# so we enable hotness in compiled code for everything.
+PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := --count-hotness-in-compiled-code
PRODUCT_DIST_BOOT_AND_SYSTEM_JARS := true
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index bda45244d0..a88ba3c8d0 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -33,24 +33,9 @@ PRODUCT_PACKAGES += \
PRODUCT_PACKAGES += \
ext \
-# Why are we pulling in expat, which is used in frameworks, only, it seem?
-PRODUCT_PACKAGES += \
- libexpat \
-
-# Libcore.
-PRODUCT_PACKAGES += \
- libjavacore \
- libopenjdk \
-
-# Libcore ICU. TODO: Try to figure out if/why we need them explicitly.
-PRODUCT_PACKAGES += \
- libicui18n \
- libicuuc \
-
-# ART.
-PRODUCT_PACKAGES += art-runtime
-# ART/dex helpers.
-PRODUCT_PACKAGES += art-tools
+# Android Runtime APEX module.
+PRODUCT_PACKAGES += com.android.runtime
+PRODUCT_HOST_PACKAGES += com.android.runtime
# Certificates.
PRODUCT_PACKAGES += \
@@ -69,7 +54,7 @@ PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
dalvik.vm.dexopt.secondary=true \
dalvik.vm.appimageformat=lz4
-PRODUCT_PROPERTY_OVERRIDES += \
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
ro.dalvik.vm.native.bridge=0
# Different dexopt types for different package update/install times.
@@ -94,6 +79,21 @@ PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
pm.dexopt.inactive=verify \
pm.dexopt.shared=speed
+# Enable resolution of startup const strings.
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ dalvik.vm.dex2oat-resolve-startup-strings=true
+
+# Specify default block size of 512K to enable parallel image decompression.
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ dalvik.vm.dex2oat-max-image-block-size=524288
+
# Enable minidebuginfo generation unless overridden.
PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ dalvik.vm.minidebuginfo=true \
dalvik.vm.dex2oat-minidebuginfo=true
+
+# Disable iorapd by default
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+ ro.iorapd.enable=false
+
+PRODUCT_USES_ART := true
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
deleted file mode 100644
index 1e5ed19281..0000000000
--- a/target/product/sdk_base.mk
+++ /dev/null
@@ -1,174 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_PROPERTY_OVERRIDES :=
-
-PRODUCT_PACKAGES := \
- CellBroadcastReceiver \
- CubeLiveWallpapers \
- CustomLocale \
- Development \
- Dialer \
- Gallery2 \
- Launcher3QuickStep \
- Camera2 \
- librs_jni \
- LiveWallpapersPicker \
- Mms \
- Music \
- Protips \
- rild \
- screenrecord \
- SdkSetup \
- SoftKeyboard \
- sqlite3 \
- SystemUI \
- SysuiDarkThemeOverlay \
- EasterEgg \
- WallpaperPicker \
- WidgetPreview \
-
-# Define the host tools and libs that are parts of the SDK.
--include sdk/build/product_sdk.mk
--include development/build/product_sdk.mk
-
-# audio libraries.
-PRODUCT_PACKAGES += \
- audio.primary.goldfish \
- audio.r_submix.default \
- local_time.default
-
-# CDD mandates following codecs
-PRODUCT_PACKAGES += \
- libstagefright_soft_aacdec \
- libstagefright_soft_aacenc \
- libstagefright_soft_amrdec \
- libstagefright_soft_amrnbenc \
- libstagefright_soft_amrwbenc \
- libstagefright_soft_avcdec \
- libstagefright_soft_avcenc \
- libstagefright_soft_flacenc \
- libstagefright_soft_g711dec \
- libstagefright_soft_gsmdec \
- libstagefright_soft_hevcdec \
- libstagefright_soft_mp3dec \
- libstagefright_soft_mpeg2dec \
- libstagefright_soft_mpeg4dec \
- libstagefright_soft_mpeg4enc \
- libstagefright_soft_opusdec \
- libstagefright_soft_rawdec \
- libstagefright_soft_vorbisdec \
- libstagefright_soft_vpxdec \
- libstagefright_soft_vpxenc
-
-PRODUCT_PACKAGE_OVERLAYS := development/sdk_overlay
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
- device/sample/etc/old-apns-conf.xml:system/etc/old-apns-conf.xml \
- frameworks/base/data/sounds/effects/camera_click.ogg:system/media/audio/ui/camera_click.ogg \
- frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
- frameworks/base/data/sounds/effects/VideoStop.ogg:system/media/audio/ui/VideoStop.ogg \
- device/generic/goldfish/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml \
- device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles_V1_0.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
- device/generic/goldfish/camera/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs.xml \
- device/generic/goldfish/camera/media_codecs_performance.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_performance.xml \
- frameworks/native/data/etc/android.hardware.touchscreen.multitouch.jazzhand.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.touchscreen.multitouch.jazzhand.xml \
- frameworks/native/data/etc/android.hardware.camera.autofocus.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.autofocus.xml \
- frameworks/native/data/etc/android.hardware.camera.full.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.camera.full.xml \
- frameworks/native/data/etc/android.hardware.fingerprint.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.hardware.fingerprint.xml \
- frameworks/native/data/etc/android.software.autofill.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/android.software.autofill.xml \
- frameworks/av/media/libeffects/data/audio_effects.conf:$(TARGET_COPY_OUT_VENDOR)/etc/audio_effects.conf \
- device/generic/goldfish/audio_policy.conf:$(TARGET_COPY_OUT_VENDOR)/etc/audio_policy.conf
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
-$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
-$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
-$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
-$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
-
-# locale. en_US is both first and in alphabetical order to
-# ensure this is the default locale.
-PRODUCT_LOCALES := \
- en_US \
- ar_EG \
- ar_IL \
- bg_BG \
- ca_ES \
- cs_CZ \
- da_DK \
- de_AT \
- de_CH \
- de_DE \
- de_LI \
- el_GR \
- en_AU \
- en_CA \
- en_GB \
- en_IE \
- en_IN \
- en_NZ \
- en_SG \
- en_US \
- en_ZA \
- es_ES \
- es_US \
- fi_FI \
- fr_BE \
- fr_CA \
- fr_CH \
- fr_FR \
- he_IL \
- hi_IN \
- hr_HR \
- hu_HU \
- id_ID \
- it_CH \
- it_IT \
- ja_JP \
- ko_KR \
- lt_LT \
- lv_LV \
- nb_NO \
- nl_BE \
- nl_NL \
- pl_PL \
- pt_BR \
- pt_PT \
- ro_RO \
- ru_RU \
- sk_SK \
- sl_SI \
- sr_RS \
- sv_SE \
- th_TH \
- tl_PH \
- tr_TR \
- uk_UA \
- vi_VN \
- zh_CN \
- zh_TW
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index c6b290f796..96f0bfd469 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -20,6 +20,10 @@ $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_arm64.mk)
$(call inherit-product, sdk/build/product_sdk.mk)
$(call inherit-product, development/build/product_sdk.mk)
+# Keep this APK for SDK targets for now.
+PRODUCT_PACKAGES += \
+ EmulatorSmokeTests
+
# Overrides
PRODUCT_BRAND := Android
PRODUCT_NAME := sdk_phone_arm64
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index f2b51cfd70..04d8d6a18e 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -20,6 +20,10 @@ $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_arm.mk)
$(call inherit-product, sdk/build/product_sdk.mk)
$(call inherit-product, development/build/product_sdk.mk)
+# Keep this APK for SDK targets for now.
+PRODUCT_PACKAGES += \
+ EmulatorSmokeTests
+
# Overrides
PRODUCT_BRAND := Android
diff --git a/target/product/sdk_phone_mips.mk b/target/product/sdk_phone_mips.mk
deleted file mode 100644
index 1cc2fe4b55..0000000000
--- a/target/product/sdk_phone_mips.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
-
-# AOSP emulator images build the AOSP messaging app.
-# Google API images override with the Google API app.
-# See vendor/google/products/sdk_google_phone_*.mk
-PRODUCT_PACKAGES += \
- messaging
-
-# Overrides
-PRODUCT_BRAND := Android
-PRODUCT_NAME := sdk_phone_mips
-PRODUCT_DEVICE := generic_mips
-PRODUCT_MODEL := Android SDK for Mips
diff --git a/target/product/sdk_phone_mips64.mk b/target/product/sdk_phone_mips64.mk
deleted file mode 100644
index e45d71bd89..0000000000
--- a/target/product/sdk_phone_mips64.mk
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
-
-# AOSP emulator images build the AOSP messaging app.
-# Google API images override with the Google API app.
-# See vendor/google/products/sdk_google_phone_*.mk
-PRODUCT_PACKAGES += \
- messaging
-
-# Overrides
-PRODUCT_BRAND := Android
-PRODUCT_NAME := sdk_phone_mips64
-PRODUCT_DEVICE := generic_mips64
-PRODUCT_MODEL := Android SDK built for mips64
diff --git a/target/product/security/networkstack.pk8 b/target/product/security/networkstack.pk8
new file mode 100644
index 0000000000..877f5164ce
--- /dev/null
+++ b/target/product/security/networkstack.pk8
Binary files differ
diff --git a/target/product/security/networkstack.x509.pem b/target/product/security/networkstack.x509.pem
new file mode 100644
index 0000000000..c49b95ad31
--- /dev/null
+++ b/target/product/security/networkstack.x509.pem
@@ -0,0 +1,34 @@
+-----BEGIN CERTIFICATE-----
+MIIF3DCCA8SgAwIBAgIJAPxssNim/dFoMA0GCSqGSIb3DQEBCwUAMIGBMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEhMB8GA1UE
+AwwYY29tLmFuZHJvaWQubmV0d29ya3N0YWNrMCAXDTE5MDIxMjAxNDYyMFoYDzQ3
+NTcwMTA4MDE0NjIwWjCBgTELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3Ju
+aWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxEDAOBgNVBAoMB0FuZHJvaWQxEDAO
+BgNVBAsMB0FuZHJvaWQxITAfBgNVBAMMGGNvbS5hbmRyb2lkLm5ldHdvcmtzdGFj
+azCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALtx9RN/8LLXV6zCyj03
+jg+N4RCQ1crz1J4xTTXCg7d4sC15LY66RANkypcJhUQWYPC8AK+8Y91hGxv1GtKK
+Ht0h4ASPVIuA+L0RPiVoKCL1fauCc6+vEsZNGaDGviOPPmbdx5sQ/ZJpMePuYKe/
+YYZE2jwsT8QoE51F0nvtp/5F4wB1tJPq1uwBzdVdkxwKZX4uWXQspjK23DhCot63
+0iRDyAkpHXpUkgOuauNWWCpMoj8w8FScTshAinUnjpXGnoOQrVKAvO+u9vEwmkG9
+nzv7XRLcp+eexv1oSBk/qatygiSIe0+T6YXsfL9kAbDoY6S5HAXQRvBA/pVABLFk
+WVT8tBFM7h6LZLR9cZoZ70wAHLGD9/PhZuQ/VtaAR8NEDaNP31KdRCdLiy9q+zRQ
+ka2K1Lk71cVdUihqXTwVdGXbjd9i5822sQ+xiIgEav3SY65vISXZBldZx+QvhhCm
+dG7b3FR9QwFhLu7Dw8vRJN7OzI04sg5zsT8k7nyhOpjF9h8MgbB9K1GXSbwry54J
+Sa72wRij6BJearV/zka7CRpmdA4Qsxx0C4kZAMDs2pzGnstPM2mZixdRBt0KT/1w
+JOt+df7dGlsTHQuytAxjSR48+GuJV7IVIbOpbtE3alGmrGl4ZrAlbe4bzZq5oYi/
+TO2AtZpfJMLamlXrew5QIRbjAgMBAAGjUzBRMB0GA1UdDgQWBBSTg8ks+/CZ1cR7
+DDZX2GIqCEty4TAfBgNVHSMEGDAWgBSTg8ks+/CZ1cR7DDZX2GIqCEty4TAPBgNV
+HRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQBqBQE4L94qa49wxgzRuO5P
+eIcYwoixcCWO86liMLZQBWUNakxCpZqXst3sUCQT57Q4+9BgNj10t0ojI4Kn93/T
+2jTjj3n60DWotHLFz/NlgYoBGNh/oeMcx+1L79J2KHYMKQmAw8w7f/DP0Bt1/x/M
+g+mBtbJaVNhbaKgEJKwmAV+zpMdUlppxF0wLwoP2yIGR3O1gniRfWTj/0K15kZji
+0L9jQiIcGwpdMy7S//xmiYLKu8t9O2MP+EduXISsCtN635IkA1IAA5+V7B+pW/g3
+lsDomGE1zuLcrvGQskmFWn5zl9SgvxfqY9l4WJxrSBGKOB//vXkMRNgCM+LjUpKj
+tVM8o/LMFz+Fz5BK3+Lk4hg9weug664HuDmoH/G8kuKSVQlXyFma8h6cBJe5I0zj
+RfP1CLHMhyqlXdtedzxcfdZXe5qLba7SCuH/S4IG/Z9cj1oiuhmAvvAa5vyyZZuX
+rVuYX6gcAZ/+AI3dnIEwwG/GAyshScIgn8Q4p+jDsgzgNlCtMcTuSPFpd3oK4YK3
+LKMbgVQPYfFn2Net9Pa7IzD/XCQDckUADYFywSq11apYkLixLbDw5yliZOtm5/lx
+TDEARkn7S4ZABfnEPIDbP23lL9RNbiA2v+f1gHFW7Vq1kdBv1ruTukM06ic5r4tB
+7SaGRU5gtmbRBzi7e6iAAQ==
+-----END CERTIFICATE-----
diff --git a/target/product/telephony.mk b/target/product/telephony.mk
index 38a8caa8a9..e0eb15925b 100644
--- a/target/product/telephony.mk
+++ b/target/product/telephony.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2007 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,16 +14,7 @@
# limitations under the License.
#
-# This is the list of product-level settings that are specific
-# to products that have telephony hardware.
-
-PRODUCT_PACKAGES := \
- CarrierConfig \
- CarrierDefaultApp \
- Dialer \
- CallLogBackup \
- CellBroadcastReceiver \
- EmergencyInfo \
- rild
-
-PRODUCT_COPY_FILES := \
+# All modules for telephony
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_product.mk)
diff --git a/target/product/telephony_product.mk b/target/product/telephony_product.mk
new file mode 100644
index 0000000000..a4c7e31f54
--- /dev/null
+++ b/target/product/telephony_product.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is the list of modules that are specific to products that have telephony
+# hardware, and install to the product partition.
+
+# /product packages
+PRODUCT_PACKAGES += \
+ CarrierConfig \
+ Dialer \
+ EmergencyInfo \
diff --git a/target/board/treble_common_32.mk b/target/product/telephony_system.mk
index dbe0899488..584cf1ee63 100644
--- a/target/board/treble_common_32.mk
+++ b/target/product/telephony_system.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,13 @@
# limitations under the License.
#
-include build/make/target/board/treble_common.mk
+# This is the list of modules that are specific to products that have telephony
+# hardware, and install on the system partition.
-# Partition size defaults to 1 GB (1024 MB) for 32-bit products. It can
-# be overwritten in specific BoardConfig.mk, if so desired.
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1073741824
+PRODUCT_PACKAGES := \
+ ONS \
+ CarrierDefaultApp \
+ CallLogBackup \
+ CellBroadcastReceiver \
+
+PRODUCT_COPY_FILES := \
diff --git a/target/board/generic_x86_64_a/BoardConfig.mk b/target/product/telephony_vendor.mk
index 2c0260439c..86dbcc98a6 100644
--- a/target/board/generic_x86_64_a/BoardConfig.mk
+++ b/target/product/telephony_vendor.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,12 +14,11 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_64.mk
+# This is the list of modules that are specific to products that have telephony
+# hardware, and install outside the system partition.
-TARGET_CPU_ABI := x86_64
-TARGET_ARCH := x86_64
-TARGET_ARCH_VARIANT := x86_64
+# /vendor packages
+PRODUCT_PACKAGES := \
+ rild \
-TARGET_2ND_CPU_ABI := x86
-TARGET_2ND_ARCH := x86
-TARGET_2ND_ARCH_VARIANT := x86_64
+PRODUCT_COPY_FILES := \
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
deleted file mode 100644
index 6cf66affa0..0000000000
--- a/target/product/treble_common.mk
+++ /dev/null
@@ -1,68 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-# Generic system image inherits from AOSP with telephony
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
-
-# Split selinux policy
-PRODUCT_FULL_TREBLE_OVERRIDE := true
-
-# The Messaging app:
-# Needed for android.telecom.cts.ExtendedInCallServiceTest#testOnCannedTextResponsesLoaded
-PRODUCT_PACKAGES += \
- messaging
-
-# The following policy XML files are used as fallback for
-# vendors/devices not using XML to configure audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration_generic.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/primary_audio_policy_configuration.xml:system/etc/primary_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
-
-# Telephony:
-# Provide a default APN configuration
-PRODUCT_COPY_FILES += \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
-
-# NFC:
-# Provide default libnfc-nci.conf file for devices that does not have one in
-# vendor/etc
-PRODUCT_COPY_FILES += \
- device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
-# Support for the O-MR1 devices
-PRODUCT_COPY_FILES += \
- build/make/target/product/vndk/init.gsi.rc:system/etc/init/init.gsi.rc \
- build/make/target/product/vndk/init.vndk-27.rc:system/etc/init/gsi/init.vndk-27.rc
-
-# Name space configuration file for non-enforcing VNDK
-PRODUCT_PACKAGES += \
- ld.config.vndk_lite.txt
-
-# Support addtional O-MR1 vendor interface
-PRODUCT_EXTRA_VNDK_VERSIONS := 27
-
-# TODO(b/78308559): includes vr_hwc into GSI before vr_hwc move to vendor
-PRODUCT_PACKAGES += \
- vr_hwc
diff --git a/target/product/treble_common_32.mk b/target/product/treble_common_32.mk
deleted file mode 100644
index 0023c3b6e8..0000000000
--- a/target/product/treble_common_32.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# treble_system.prop.
-
-include build/make/target/product/treble_common.mk
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
new file mode 100644
index 0000000000..038f66ee64
--- /dev/null
+++ b/target/product/updatable_apex.mk
@@ -0,0 +1,21 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit this when the target needs to support updating APEXes
+
+PRODUCT_PROPERTY_OVERRIDES := ro.apex.updatable=true
+PRODUCT_PACKAGES := com.android.apex.cts.shim.v1_prebuilt
+TARGET_FLATTEN_APEX := false
diff --git a/tools/OWNERS b/tools/OWNERS
index 7a23adce9b..7d666f1687 100644
--- a/tools/OWNERS
+++ b/tools/OWNERS
@@ -1,2 +1 @@
-per-file warn.py = chh@google.com
-per-file checkowners.py = chh@google.com
+per-file warn.py,checkowners.py = chh@google.com
diff --git a/tools/apicheck/Android.bp b/tools/apicheck/Android.bp
new file mode 100644
index 0000000000..8fe20e91f8
--- /dev/null
+++ b/tools/apicheck/Android.bp
@@ -0,0 +1,22 @@
+// Copyright (C) 2008 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+java_binary_host {
+ name: "apicheck",
+ wrapper: "etc/apicheck",
+ static_libs: [
+ "doclava",
+ "jsilver",
+ ],
+}
diff --git a/tools/apicheck/Android.mk b/tools/apicheck/Android.mk
deleted file mode 100644
index ab3493d9c0..0000000000
--- a/tools/apicheck/Android.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (C) 2007-2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH := $(call my-dir)
-
-# the hat script
-# ============================================================
-include $(CLEAR_VARS)
-LOCAL_IS_HOST_MODULE := true
-LOCAL_MODULE_CLASS := EXECUTABLES
-LOCAL_MODULE := apicheck
-LOCAL_SRC_FILES := etc/apicheck
-LOCAL_REQUIRED_MODULES := doclava
-include $(BUILD_PREBUILT)
-
-# Apicheck is now part of Doclava -- See external/doclava.
diff --git a/tools/atree/files.cpp b/tools/atree/files.cpp
index d5c8a977dd..b90f8b3d28 100644
--- a/tools/atree/files.cpp
+++ b/tools/atree/files.cpp
@@ -81,7 +81,7 @@ split_line(const char* p, vector<string>* out)
state = TEXT;
break;
}
- // otherwise fall-through to TEXT case
+ [[fallthrough]];
case TEXT:
if (state != IN_QUOTE && isspace(*p)) {
if (q != p) {
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
index e68c27fdd2..51a8583f2e 100644
--- a/tools/auto_gen_test_config_test.py
+++ b/tools/auto_gen_test_config_test.py
@@ -34,7 +34,7 @@ MANIFEST_JUNIT_TEST = """<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.android.my.tests.x">
<instrumentation
- android:name="android.support.test.runner.AndroidJUnitRunner"
+ android:name="androidx.test.runner.AndroidJUnitRunner"
android:targetPackage="com.android.my.tests" />
</manifest>
"""
@@ -72,7 +72,7 @@ EXPECTED_JUNIT_TEST_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
<test class="com.android.tradefed.testtype.AndroidJUnitTest" >
<option name="package" value="com.android.my.tests.x" />
- <option name="runner" value="android.support.test.runner.AndroidJUnitRunner" />
+ <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
</test>
</configuration>
"""
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 5a5446293e..24ac663f20 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -8,6 +8,7 @@ echo "ro.build.display.id=$BUILD_DISPLAY_ID"
echo "ro.build.version.incremental=$BUILD_NUMBER"
echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
echo "ro.build.version.preview_sdk=$PLATFORM_PREVIEW_SDK_VERSION"
+echo "ro.build.version.preview_sdk_fingerprint=$PLATFORM_PREVIEW_SDK_FINGERPRINT"
echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
echo "ro.build.version.release=$PLATFORM_VERSION"
@@ -17,8 +18,8 @@ echo "ro.build.version.min_supported_target_sdk=$PLATFORM_MIN_SUPPORTED_TARGET_S
echo "ro.build.date=`$DATE`"
echo "ro.build.date.utc=`$DATE +%s`"
echo "ro.build.type=$TARGET_BUILD_TYPE"
-echo "ro.build.user=$USER"
-echo "ro.build.host=`hostname`"
+echo "ro.build.user=$BUILD_USERNAME"
+echo "ro.build.host=$BUILD_HOSTNAME"
echo "ro.build.tags=$BUILD_VERSION_TAGS"
echo "ro.build.flavor=$TARGET_BUILD_FLAVOR"
if [ -n "$BOARD_BUILD_SYSTEM_ROOT_IMAGE" ] ; then
@@ -27,10 +28,6 @@ fi
if [ -n "$AB_OTA_UPDATER" ] ; then
echo "ro.build.ab_update=$AB_OTA_UPDATER"
fi
-echo "ro.product.model=$PRODUCT_MODEL"
-echo "ro.product.brand=$PRODUCT_BRAND"
-echo "ro.product.name=$PRODUCT_NAME"
-echo "ro.product.device=$TARGET_DEVICE"
# These values are deprecated, use "ro.product.cpu.abilist"
# instead (see below).
@@ -44,7 +41,6 @@ echo "ro.product.cpu.abilist=$TARGET_CPU_ABI_LIST"
echo "ro.product.cpu.abilist32=$TARGET_CPU_ABI_LIST_32_BIT"
echo "ro.product.cpu.abilist64=$TARGET_CPU_ABI_LIST_64_BIT"
-echo "ro.product.manufacturer=$PRODUCT_MANUFACTURER"
if [ -n "$PRODUCT_DEFAULT_LOCALE" ] ; then
echo "ro.product.locale=$PRODUCT_DEFAULT_LOCALE"
fi
@@ -53,12 +49,10 @@ echo "ro.wifi.channels=$PRODUCT_DEFAULT_WIFI_CHANNELS"
echo "# ro.build.product is obsolete; use ro.product.device"
echo "ro.build.product=$TARGET_DEVICE"
-echo "# Do not try to parse description, fingerprint, or thumbprint"
+echo "# Do not try to parse description or thumbprint"
echo "ro.build.description=$PRIVATE_BUILD_DESC"
-echo "ro.build.fingerprint=$BUILD_FINGERPRINT"
if [ -n "$BUILD_THUMBPRINT" ] ; then
echo "ro.build.thumbprint=$BUILD_THUMBPRINT"
fi
-echo "ro.build.characteristics=$TARGET_AAPT_CHARACTERISTICS"
echo "# end build properties"
diff --git a/tools/buildinfo_common.sh b/tools/buildinfo_common.sh
new file mode 100755
index 0000000000..6041d79cdf
--- /dev/null
+++ b/tools/buildinfo_common.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+partition="$1"
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage: $0 <partition>" 1>&2
+ exit 1
+fi
+
+echo "# begin common build properties"
+echo "# autogenerated by $0"
+
+echo "ro.${partition}.build.date=`$DATE`"
+echo "ro.${partition}.build.date.utc=`$DATE +%s`"
+echo "ro.${partition}.build.fingerprint=$BUILD_FINGERPRINT"
+echo "ro.${partition}.build.id=$BUILD_ID"
+echo "ro.${partition}.build.tags=$BUILD_VERSION_TAGS"
+echo "ro.${partition}.build.type=$TARGET_BUILD_TYPE"
+echo "ro.${partition}.build.version.incremental=$BUILD_NUMBER"
+echo "ro.${partition}.build.version.release=$PLATFORM_VERSION"
+echo "ro.${partition}.build.version.sdk=$PLATFORM_SDK_VERSION"
+
+echo "ro.product.${partition}.brand=$PRODUCT_BRAND"
+echo "ro.product.${partition}.device=$PRODUCT_DEVICE"
+echo "ro.product.${partition}.manufacturer=$PRODUCT_MANUFACTURER"
+echo "ro.product.${partition}.model=$PRODUCT_MODEL"
+echo "ro.product.${partition}.name=$PRODUCT_NAME"
+
+echo "# end common build properties"
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
new file mode 100755
index 0000000000..de855c6a7e
--- /dev/null
+++ b/tools/check_elf_file.py
@@ -0,0 +1,545 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""ELF file checker.
+
+This command ensures all undefined symbols in an ELF file can be resolved to
+global (or weak) symbols defined in shared objects specified in DT_NEEDED
+entries.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import os
+import os.path
+import re
+import struct
+import subprocess
+import sys
+
+
+_ELF_MAGIC = b'\x7fELF'
+
+
+# Known machines
+_EM_386 = 3
+_EM_ARM = 40
+_EM_X86_64 = 62
+_EM_AARCH64 = 183
+
+_KNOWN_MACHINES = {_EM_386, _EM_ARM, _EM_X86_64, _EM_AARCH64}
+
+
+# ELF header struct
+_ELF_HEADER_STRUCT = (
+ ('ei_magic', '4s'),
+ ('ei_class', 'B'),
+ ('ei_data', 'B'),
+ ('ei_version', 'B'),
+ ('ei_osabi', 'B'),
+ ('ei_pad', '8s'),
+ ('e_type', 'H'),
+ ('e_machine', 'H'),
+ ('e_version', 'I'),
+)
+
+_ELF_HEADER_STRUCT_FMT = ''.join(_fmt for _, _fmt in _ELF_HEADER_STRUCT)
+
+
+ELFHeader = collections.namedtuple(
+ 'ELFHeader', [_name for _name, _ in _ELF_HEADER_STRUCT])
+
+
+ELF = collections.namedtuple(
+ 'ELF',
+ ('dt_soname', 'dt_needed', 'imported', 'exported', 'header'))
+
+
+def _get_os_name():
+ """Get the host OS name."""
+ if sys.platform == 'linux2':
+ return 'linux'
+ if sys.platform == 'darwin':
+ return 'darwin'
+ raise ValueError(sys.platform + ' is not supported')
+
+
+def _get_build_top():
+ """Find the build top of the source tree ($ANDROID_BUILD_TOP)."""
+ prev_path = None
+ curr_path = os.path.abspath(os.getcwd())
+ while prev_path != curr_path:
+ if os.path.exists(os.path.join(curr_path, '.repo')):
+ return curr_path
+ prev_path = curr_path
+ curr_path = os.path.dirname(curr_path)
+ return None
+
+
+def _select_latest_llvm_version(versions):
+ """Select the latest LLVM prebuilts version from a set of versions."""
+ pattern = re.compile('clang-r([0-9]+)([a-z]?)')
+ found_rev = 0
+ found_ver = None
+ for curr_ver in versions:
+ match = pattern.match(curr_ver)
+ if not match:
+ continue
+ curr_rev = int(match.group(1))
+ if not found_ver or curr_rev > found_rev or (
+ curr_rev == found_rev and curr_ver > found_ver):
+ found_rev = curr_rev
+ found_ver = curr_ver
+ return found_ver
+
+
+def _get_latest_llvm_version(llvm_dir):
+ """Find the latest LLVM prebuilts version from `llvm_dir`."""
+ return _select_latest_llvm_version(os.listdir(llvm_dir))
+
+
+def _get_llvm_dir():
+ """Find the path to LLVM prebuilts."""
+ build_top = _get_build_top()
+
+ llvm_prebuilts_base = os.environ.get('LLVM_PREBUILTS_BASE')
+ if not llvm_prebuilts_base:
+ llvm_prebuilts_base = os.path.join('prebuilts', 'clang', 'host')
+
+ llvm_dir = os.path.join(
+ build_top, llvm_prebuilts_base, _get_os_name() + '-x86')
+
+ if not os.path.exists(llvm_dir):
+ return None
+
+ llvm_prebuilts_version = os.environ.get('LLVM_PREBUILTS_VERSION')
+ if not llvm_prebuilts_version:
+ llvm_prebuilts_version = _get_latest_llvm_version(llvm_dir)
+
+ llvm_dir = os.path.join(llvm_dir, llvm_prebuilts_version)
+
+ if not os.path.exists(llvm_dir):
+ return None
+
+ return llvm_dir
+
+
+def _get_llvm_readobj():
+ """Find the path to llvm-readobj executable."""
+ llvm_dir = _get_llvm_dir()
+ llvm_readobj = os.path.join(llvm_dir, 'bin', 'llvm-readobj')
+ return llvm_readobj if os.path.exists(llvm_readobj) else 'llvm-readobj'
+
+
+class ELFError(ValueError):
+ """Generic ELF parse error"""
+ pass
+
+
+class ELFInvalidMagicError(ELFError):
+ """Invalid ELF magic word error"""
+ def __init__(self):
+ super(ELFInvalidMagicError, self).__init__('bad ELF magic')
+
+
+class ELFParser(object):
+ """ELF file parser"""
+
+ @classmethod
+ def _read_elf_header(cls, elf_file_path):
+ """Read the ELF magic word from the beginning of the file."""
+ with open(elf_file_path, 'rb') as elf_file:
+ buf = elf_file.read(struct.calcsize(_ELF_HEADER_STRUCT_FMT))
+ try:
+ return ELFHeader(*struct.unpack(_ELF_HEADER_STRUCT_FMT, buf))
+ except struct.error:
+ return None
+
+
+ @classmethod
+ def open(cls, elf_file_path, llvm_readobj):
+ """Open and parse the ELF file."""
+ # Parse the ELF header for simple sanity checks.
+ header = cls._read_elf_header(elf_file_path)
+ if not header or header.ei_magic != _ELF_MAGIC:
+ raise ELFInvalidMagicError()
+
+ # Run llvm-readobj and parse the output.
+ return cls._read_llvm_readobj(elf_file_path, header, llvm_readobj)
+
+
+ @classmethod
+ def _find_prefix(cls, pattern, lines_it):
+ """Iterate `lines_it` until finding a string that starts with `pattern`."""
+ for line in lines_it:
+ if line.startswith(pattern):
+ return True
+ return False
+
+
+ @classmethod
+ def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
+ """Run llvm-readobj and parse the output."""
+ proc = subprocess.Popen(
+ [llvm_readobj, '-dynamic-table', '-dyn-symbols', elf_file_path],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, _ = proc.communicate()
+ lines = out.splitlines()
+ return cls._parse_llvm_readobj(elf_file_path, header, lines)
+
+
+ @classmethod
+ def _parse_llvm_readobj(cls, elf_file_path, header, lines):
+ """Parse the output of llvm-readobj."""
+ lines_it = iter(lines)
+ imported, exported = cls._parse_dynamic_symbols(lines_it)
+ dt_soname, dt_needed = cls._parse_dynamic_table(elf_file_path, lines_it)
+ return ELF(dt_soname, dt_needed, imported, exported, header)
+
+
+ _DYNAMIC_SECTION_START_PATTERN = 'DynamicSection ['
+
+ _DYNAMIC_SECTION_NEEDED_PATTERN = re.compile(
+ '^ 0x[0-9a-fA-F]+\\s+NEEDED\\s+Shared library: \\[(.*)\\]$')
+
+ _DYNAMIC_SECTION_SONAME_PATTERN = re.compile(
+ '^ 0x[0-9a-fA-F]+\\s+SONAME\\s+Library soname: \\[(.*)\\]$')
+
+ _DYNAMIC_SECTION_END_PATTERN = ']'
+
+
+ @classmethod
+ def _parse_dynamic_table(cls, elf_file_path, lines_it):
+ """Parse the dynamic table section."""
+ dt_soname = os.path.basename(elf_file_path)
+ dt_needed = []
+
+ dynamic = cls._find_prefix(cls._DYNAMIC_SECTION_START_PATTERN, lines_it)
+ if not dynamic:
+ return (dt_soname, dt_needed)
+
+ for line in lines_it:
+ if line == cls._DYNAMIC_SECTION_END_PATTERN:
+ break
+
+ match = cls._DYNAMIC_SECTION_NEEDED_PATTERN.match(line)
+ if match:
+ dt_needed.append(match.group(1))
+ continue
+
+ match = cls._DYNAMIC_SECTION_SONAME_PATTERN.match(line)
+ if match:
+ dt_soname = match.group(1)
+ continue
+
+ return (dt_soname, dt_needed)
+
+
+ _DYNAMIC_SYMBOLS_START_PATTERN = 'DynamicSymbols ['
+ _DYNAMIC_SYMBOLS_END_PATTERN = ']'
+
+ _SYMBOL_ENTRY_START_PATTERN = ' Symbol {'
+ _SYMBOL_ENTRY_PATTERN = re.compile('^ ([A-Za-z0-9_]+): (.*)$')
+ _SYMBOL_ENTRY_PAREN_PATTERN = re.compile(
+ '\\s+\\((?:(?:\\d+)|(?:0x[0-9a-fA-F]+))\\)$')
+ _SYMBOL_ENTRY_END_PATTERN = ' }'
+
+
+ @staticmethod
+ def _parse_symbol_name(name_with_version):
+ """Split `name_with_version` into name and version. This function may split
+ at last occurrence of `@@` or `@`."""
+ pos = name_with_version.rfind('@')
+ if pos == -1:
+ name = name_with_version
+ version = ''
+ else:
+ if pos > 0 and name_with_version[pos - 1] == '@':
+ name = name_with_version[0:pos - 1]
+ else:
+ name = name_with_version[0:pos]
+ version = name_with_version[pos + 1:]
+ return (name, version)
+
+
+ @classmethod
+ def _parse_dynamic_symbols(cls, lines_it):
+ """Parse dynamic symbol table and collect imported and exported symbols."""
+ imported = collections.defaultdict(set)
+ exported = collections.defaultdict(set)
+
+ for symbol in cls._parse_dynamic_symbols_internal(lines_it):
+ name, version = cls._parse_symbol_name(symbol['Name'])
+ if name:
+ if symbol['Section'] == 'Undefined':
+ if symbol['Binding'] != 'Weak':
+ imported[name].add(version)
+ else:
+ if symbol['Binding'] != 'Local':
+ exported[name].add(version)
+
+ # Freeze the returned imported/exported dict.
+ return (dict(imported), dict(exported))
+
+
+ @classmethod
+ def _parse_dynamic_symbols_internal(cls, lines_it):
+ """Parse symbols entries and yield each symbols."""
+
+ if not cls._find_prefix(cls._DYNAMIC_SYMBOLS_START_PATTERN, lines_it):
+ return
+
+ for line in lines_it:
+ if line == cls._DYNAMIC_SYMBOLS_END_PATTERN:
+ return
+
+ if line == cls._SYMBOL_ENTRY_START_PATTERN:
+ symbol = {}
+ continue
+
+ if line == cls._SYMBOL_ENTRY_END_PATTERN:
+ yield symbol
+ symbol = None
+ continue
+
+ match = cls._SYMBOL_ENTRY_PATTERN.match(line)
+ if match:
+ key = match.group(1)
+ value = cls._SYMBOL_ENTRY_PAREN_PATTERN.sub('', match.group(2))
+ symbol[key] = value
+ continue
+
+
+class Checker(object):
+ """ELF file checker that checks DT_SONAME, DT_NEEDED, and symbols."""
+
+ def __init__(self, llvm_readobj):
+ self._file_path = ''
+ self._file_under_test = None
+ self._shared_libs = []
+
+ self._llvm_readobj = llvm_readobj
+
+
+ if sys.stderr.isatty():
+ _ERROR_TAG = '\033[0;1;31merror:\033[m' # Red error
+ _NOTE_TAG = '\033[0;1;30mnote:\033[m' # Black note
+ else:
+ _ERROR_TAG = 'error:' # Plain error (stderr is not a tty)
+ _NOTE_TAG = 'note:' # Plain note (stderr is not a tty)
+
+
+ def _error(self, *args):
+ """Emit an error to stderr."""
+ print(self._file_path + ': ' + self._ERROR_TAG, *args, file=sys.stderr)
+
+
+ def _note(self, *args):
+ """Emit a note to stderr."""
+ print(self._file_path + ': ' + self._NOTE_TAG, *args, file=sys.stderr)
+
+
+ def _load_elf_file(self, path, skip_bad_elf_magic):
+ """Load an ELF file from the `path`."""
+ try:
+ return ELFParser.open(path, self._llvm_readobj)
+ except (IOError, OSError):
+ self._error('Failed to open "{}".'.format(path))
+ sys.exit(2)
+ except ELFInvalidMagicError:
+ if skip_bad_elf_magic:
+ sys.exit(0)
+ else:
+ self._error('File "{}" must have a valid ELF magic word.'.format(path))
+ sys.exit(2)
+ except:
+ self._error('An unknown error occurred while opening "{}".'.format(path))
+ raise
+
+
+ def load_file_under_test(self, path, skip_bad_elf_magic,
+ skip_unknown_elf_machine):
+ """Load file-under-test (either an executable or a shared lib)."""
+ self._file_path = path
+ self._file_under_test = self._load_elf_file(path, skip_bad_elf_magic)
+
+ if skip_unknown_elf_machine and \
+ self._file_under_test.header.e_machine not in _KNOWN_MACHINES:
+ sys.exit(0)
+
+
+ def load_shared_libs(self, shared_lib_paths):
+ """Load shared libraries."""
+ for path in shared_lib_paths:
+ self._shared_libs.append(self._load_elf_file(path, False))
+
+
+ def check_dt_soname(self, soname):
+ """Check whether DT_SONAME matches installation file name."""
+ if self._file_under_test.dt_soname != soname:
+ self._error('DT_SONAME "{}" must be equal to the file name "{}".'
+ .format(self._file_under_test.dt_soname, soname))
+ sys.exit(2)
+
+
+ def check_dt_needed(self):
+ """Check whether all DT_NEEDED entries are specified in the build
+ system."""
+
+ missing_shared_libs = False
+
+ # Collect the DT_SONAMEs from shared libs specified in the build system.
+ specified_sonames = {lib.dt_soname for lib in self._shared_libs}
+
+ # Check whether all DT_NEEDED entries are specified.
+ for lib in self._file_under_test.dt_needed:
+ if lib not in specified_sonames:
+ self._error('DT_NEEDED "{}" is not specified in shared_libs.'
+ .format(lib.decode('utf-8')))
+ missing_shared_libs = True
+
+ if missing_shared_libs:
+ dt_needed = sorted(set(self._file_under_test.dt_needed))
+ modules = [re.sub('\\.so$', '', lib) for lib in dt_needed]
+
+ self._note()
+ self._note('Fix suggestions:')
+ self._note(
+ ' Android.bp: shared_libs: [' +
+ ', '.join('"' + module + '"' for module in modules) + '],')
+ self._note(
+ ' Android.mk: LOCAL_SHARED_LIBRARIES := ' + ' '.join(modules))
+
+ self._note()
+ self._note('If the fix above doesn\'t work, bypass this check with:')
+ self._note(' Android.bp: check_elf_files: false,')
+ self._note(' Android.mk: LOCAL_CHECK_ELF_FILES := false')
+
+ sys.exit(2)
+
+
+ @staticmethod
+ def _find_symbol(lib, name, version):
+ """Check whether the symbol name and version matches a definition in
+ lib."""
+ try:
+ lib_sym_vers = lib.exported[name]
+ except KeyError:
+ return False
+ if version == '': # Symbol version is not requested
+ return True
+ return version in lib_sym_vers
+
+
+ @classmethod
+ def _find_symbol_from_libs(cls, libs, name, version):
+ """Check whether the symbol name and version is defined in one of the
+ shared libraries in libs."""
+ for lib in libs:
+ if cls._find_symbol(lib, name, version):
+ return lib
+ return None
+
+
+ def check_symbols(self):
+ """Check whether all undefined symbols are resolved to a definition."""
+ all_elf_files = [self._file_under_test] + self._shared_libs
+ missing_symbols = []
+ for sym, imported_vers in self._file_under_test.imported.iteritems():
+ for imported_ver in imported_vers:
+ lib = self._find_symbol_from_libs(all_elf_files, sym, imported_ver)
+ if not lib:
+ missing_symbols.append((sym, imported_ver))
+
+ if missing_symbols:
+ for sym, ver in sorted(missing_symbols):
+ sym = sym.decode('utf-8')
+ if ver:
+ sym += '@' + ver.decode('utf-8')
+ self._error('Unresolved symbol: {}'.format(sym))
+
+ self._note()
+ self._note('Some dependencies might have changed, so the symbol(s) '
+ 'above cannot be resolved.')
+ self._note('Please re-build the prebuilt file: "{}".'
+ .format(self._file_path))
+
+ self._note()
+ self._note('If this is a new prebuilt file and it is designed to have '
+ 'unresolved symbols, add one of the following properties:')
+ self._note(' Android.bp: allow_undefined_symbols: true,')
+ self._note(' Android.mk: LOCAL_ALLOW_UNDEFINED_SYMBOLS := true')
+
+ sys.exit(2)
+
+
+def _parse_args():
+ """Parse command line options."""
+ parser = argparse.ArgumentParser()
+
+ # Input file
+ parser.add_argument('file',
+ help='Path to the input file to be checked')
+ parser.add_argument('--soname',
+ help='Shared object name of the input file')
+
+ # Shared library dependencies
+ parser.add_argument('--shared-lib', action='append', default=[],
+ help='Path to shared library dependencies')
+
+ # Check options
+ parser.add_argument('--skip-bad-elf-magic', action='store_true',
+ help='Ignore the input file without the ELF magic word')
+ parser.add_argument('--skip-unknown-elf-machine', action='store_true',
+ help='Ignore the input file with unknown machine ID')
+ parser.add_argument('--allow-undefined-symbols', action='store_true',
+ help='Ignore unresolved undefined symbols')
+
+ # Other options
+ parser.add_argument('--llvm-readobj',
+ help='Path to the llvm-readobj executable')
+
+ return parser.parse_args()
+
+
+def main():
+ """Main function"""
+ args = _parse_args()
+
+ llvm_readobj = args.llvm_readobj
+ if not llvm_readobj:
+ llvm_readobj = _get_llvm_readobj()
+
+ # Load ELF files
+ checker = Checker(llvm_readobj)
+ checker.load_file_under_test(
+ args.file, args.skip_bad_elf_magic, args.skip_unknown_elf_machine)
+ checker.load_shared_libs(args.shared_lib)
+
+ # Run checks
+ if args.soname:
+ checker.check_dt_soname(args.soname)
+
+ checker.check_dt_needed()
+
+ if not args.allow_undefined_symbols:
+ checker.check_symbols()
+
+
+if __name__ == '__main__':
+ main()
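
At its core, check_symbols() reduces to a versioned symbol lookup across the file under test and its declared shared libraries. A minimal standalone sketch of that lookup, with each library reduced to a plain dict from symbol name to exported versions (the dicts and symbol names below are invented for illustration, not part of the patch):

    def find_symbol(exported, name, version):
        """Return True if name (optionally at version) is defined in exported."""
        versions = exported.get(name)
        if versions is None:
            return False
        if version == '':  # an unversioned reference matches any definition
            return True
        return version in versions

    libc = {'malloc': {'LIBC'}, 'free': {'LIBC'}}
    libfoo = {'foo_init': {'FOO_1.0', 'FOO_1.1'}}
    imported = [('malloc', ''), ('foo_init', 'FOO_1.0'), ('bar', '')]

    missing = [(sym, ver) for sym, ver in imported
               if not any(find_symbol(lib, sym, ver) for lib in (libc, libfoo))]
    print(missing)  # [('bar', '')]
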
diff --git a/tools/check_identical_lib.sh b/tools/check_identical_lib.sh
new file mode 100755
index 0000000000..01007c088b
--- /dev/null
+++ b/tools/check_identical_lib.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+set -e
+
+STRIP_PATH="${1}"
+CORE="${2}"
+VENDOR="${3}"
+
+stripped_core="${CORE}.vndk_lib_check.stripped"
+stripped_vendor="${VENDOR}.vndk_lib_check.stripped"
+
+function cleanup() {
+ rm -f ${stripped_core} ${stripped_vendor}
+}
+trap cleanup EXIT
+
+function strip_lib() {
+ ${STRIP_PATH} \
+ -i ${1} \
+ -o ${2} \
+ -d /dev/null \
+ --remove-build-id
+}
+
+strip_lib ${CORE} ${stripped_core}
+strip_lib ${VENDOR} ${stripped_vendor}
+if ! cmp -s ${stripped_core} ${stripped_vendor}; then
+ echo "VNDK library not in vndkMustUseVendorVariantList but has different core and vendor variant: $(basename ${CORE})"
+ echo "If the two variants need to have different runtime behavior, consider using libvndksupport."
+ exit 1
+fi
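
The script above strips both variants with the build's strip wrapper and byte-compares the results. A hedged Python sketch of the same idea, reusing the wrapper's flags verbatim and treating the strip tool path as an input:

    import filecmp
    import subprocess
    import tempfile

    def identical_after_strip(strip_path, core, vendor):
        """Strip both libraries, then compare them byte for byte."""
        with tempfile.NamedTemporaryFile() as a, tempfile.NamedTemporaryFile() as b:
            for src, dst in ((core, a.name), (vendor, b.name)):
                subprocess.check_call([strip_path, '-i', src, '-o', dst,
                                       '-d', '/dev/null', '--remove-build-id'])
            return filecmp.cmp(a.name, b.name, shallow=False)
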
diff --git a/tools/checkowners.py b/tools/checkowners.py
index 1190d30f2c..d6853d8e87 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -30,28 +30,38 @@ def echo(msg):
def find_address(address):
if address not in checked_addresses:
- request = (gerrit_server + '/accounts/?n=1&o=ALL_EMAILS&q=email:'
+ request = (gerrit_server + '/accounts/?n=1&q=email:'
+ urllib.quote(address))
echo('Checking email address: ' + address)
result = urllib2.urlopen(request).read()
- checked_addresses[address] = (
- result.find('"email":') >= 0 and result.find('"_account_id":') >= 0)
+ checked_addresses[address] = result.find('"_account_id":') >= 0
+ if checked_addresses[address]:
+ echo('Found email address: ' + address)
return checked_addresses[address]
+def check_address(fname, num, address):
+ if find_address(address):
+ return 0
+ print '%s:%d: ERROR: unknown email address: %s' % (fname, num, address)
+ return 1
+
+
def main():
# One regular expression to check all valid lines.
noparent = 'set +noparent'
email = '([^@ ]+@[^ @]+|\\*)'
- directive = '(%s|%s)' % (email, noparent)
+ emails = '(%s( *, *%s)*)' % (email, email)
+ file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
+ directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
- perfile = 'per-file +' + glob + ' *= *' + directive
- pats = '(|%s|%s|%s)$' % (noparent, email, perfile)
+ globs = '(%s( *, *%s)*)' % (glob, glob)
+ perfile = 'per-file +' + globs + ' *= *' + directive
+ include = 'include +([^ :]+ *: *)?[^ ]+'
+ pats = '(|%s|%s|%s|%s|%s)$' % (noparent, email, perfile, include, file_directive)
patterns = re.compile(pats)
-
- # One pattern to capture email address.
- email_address = '.*(@| |=|^)([^@ =]+@[^ @]+)'
- address_pattern = re.compile(email_address)
+ address_pattern = re.compile('([^@ ]+@[^ @]+)')
+ perfile_pattern = re.compile('per-file +.*=(.*)')
error = 0
for fname in args.owners:
@@ -61,17 +71,16 @@ def main():
num += 1
stripped_line = re.sub('#.*$', '', line).strip()
if not patterns.match(stripped_line):
- error = 1
- print('%s:%d: ERROR: unknown line [%s]'
- % (fname, num, line.strip()))
- elif args.check_address and address_pattern.match(stripped_line):
- address = address_pattern.match(stripped_line).group(2)
- if find_address(address):
- echo('Found email address: ' + address)
- else:
- error = 1
- print('%s:%d: ERROR: unknown email address: %s'
- % (fname, num, address))
+ error += 1
+ print '%s:%d: ERROR: unknown line [%s]' % (fname, num, line.strip())
+ elif args.check_address:
+ if perfile_pattern.match(stripped_line):
+ for addr in perfile_pattern.match(stripped_line).group(1).split(','):
+ a = addr.strip()
+ if a and a != '*':
+ error += check_address(fname, num, addr.strip())
+ elif address_pattern.match(stripped_line):
+ error += check_address(fname, num, stripped_line)
sys.exit(error)
if __name__ == '__main__':
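
The widened grammar now accepts per-file lines with multiple globs and multiple addresses, plus include and file: directives. A quick sketch exercising those patterns (the regular expressions are the ones built in main() above; the sample lines are invented):

    import re

    noparent = 'set +noparent'
    email = '([^@ ]+@[^ @]+|\\*)'
    emails = '(%s( *, *%s)*)' % (email, email)
    file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
    directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
    glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
    globs = '(%s( *, *%s)*)' % (glob, glob)
    perfile = 'per-file +' + globs + ' *= *' + directive
    include = 'include +([^ :]+ *: *)?[^ ]+'
    patterns = re.compile('(|%s|%s|%s|%s|%s)$' %
                          (noparent, email, perfile, include, file_directive))

    for line in ('alice@example.com',
                 'per-file *.mk, *.bp = alice@example.com, bob@example.com',
                 'include other/project:/OWNERS',
                 'per-file = broken'):
        print(bool(patterns.match(line)), line)  # last line prints False
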
diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile
index ec65aaf2df..3856ab9b32 100644
--- a/tools/docker/Dockerfile
+++ b/tools/docker/Dockerfile
@@ -11,7 +11,7 @@ RUN curl -o jdk8.tgz https://android.googlesource.com/platform/prebuilts/jdk/jdk
&& rm -rf jdk8.tgz
RUN curl -o /usr/local/bin/repo https://storage.googleapis.com/git-repo-downloads/repo \
- && echo "e147f0392686c40cfd7d5e6f332c6ee74c4eab4d24e2694b3b0a0c037bf51dc5 /usr/local/bin/repo" | sha256sum --strict -c - \
+ && echo "d06f33115aea44e583c8669375b35aad397176a411de3461897444d247b6c220 /usr/local/bin/repo" | sha256sum --strict -c - \
&& chmod a+x /usr/local/bin/repo
RUN groupadd -g $groupid $username \
@@ -21,5 +21,6 @@ RUN groupadd -g $groupid $username \
COPY gitconfig /home/$username/.gitconfig
RUN chown $userid:$groupid /home/$username/.gitconfig
ENV HOME=/home/$username
+ENV USER=$username
ENTRYPOINT chroot --userspec=$(cat /root/username):$(cat /root/username) / /bin/bash -i
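
The Dockerfile pins the repo launcher by its SHA-256 digest before marking it executable. The same verification written out in Python for illustration; the digest is copied from the updated RUN line and the path is the Dockerfile's install location:

    import hashlib

    EXPECTED = 'd06f33115aea44e583c8669375b35aad397176a411de3461897444d247b6c220'

    def verify(path):
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != EXPECTED:
            raise SystemExit('checksum mismatch for ' + path)

    verify('/usr/local/bin/repo')
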
diff --git a/tools/droiddoc/Android.bp b/tools/droiddoc/Android.bp
new file mode 100644
index 0000000000..042806850a
--- /dev/null
+++ b/tools/droiddoc/Android.bp
@@ -0,0 +1,18 @@
+// Copyright (C) 2013 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+droiddoc_exported_dir {
+ name: "droiddoc-templates-pdk",
+ path: "templates-pdk",
+}
diff --git a/tools/droiddoc/test/generics/Android.mk b/tools/droiddoc/test/generics/Android.mk
deleted file mode 100644
index 0c808fda4f..0000000000
--- a/tools/droiddoc/test/generics/Android.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=$(call all-subdir-java-files)
-
-LOCAL_MODULE:=test_generics
-LOCAL_DROIDDOC_OPTIONS:=\
- -stubs __test_generics__
-
-LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:=tools/droiddoc/templates-google
-LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=assets-google
-LOCAL_MODULE_CLASS := JAVA_LIBRARIES
-
-include $(BUILD_DROIDDOC)
diff --git a/tools/droiddoc/test/stubs/Android.mk b/tools/droiddoc/test/stubs/Android.mk
deleted file mode 100644
index fc971e1c84..0000000000
--- a/tools/droiddoc/test/stubs/Android.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=$(call all-java-files-under,src)
-
-LOCAL_MODULE:=test_stubs
-LOCAL_DROIDDOC_OPTIONS:=\
- -stubs $(OUT_DIR)/__test_stubs__
-
-LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:=tools/droiddoc/templates-google
-LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=assets-google
-LOCAL_MODULE_CLASS := JAVA_LIBRARIES
-
-include $(BUILD_DROIDDOC)
-
diff --git a/tools/extract_kernel.py b/tools/extract_kernel.py
new file mode 100755
index 0000000000..16ccb22d47
--- /dev/null
+++ b/tools/extract_kernel.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A tool to extract kernel information from a kernel image.
+"""
+
+import argparse
+import subprocess
+import sys
+import re
+
+CONFIG_PREFIX = b'IKCFG_ST'
+GZIP_HEADER = b'\037\213\010'
+COMPRESSION_ALGO = (
+ (["gzip", "-d"], GZIP_HEADER),
+ (["xz", "-d"], b'\3757zXZ\000'),
+ (["bzip2", "-d"], b'BZh'),
+ (["lz4", "-d", "-l"], b'\002\041\114\030'),
+
+ # These are not supported in the build system yet.
+ # (["unlzma"], b'\135\0\0\0'),
+ # (["lzop", "-d"], b'\211\114\132'),
+)
+
+# "Linux version " UTS_RELEASE " (" LINUX_COMPILE_BY "@"
+# LINUX_COMPILE_HOST ") (" LINUX_COMPILER ") " UTS_VERSION "\n";
+LINUX_BANNER_PREFIX = b'Linux version '
+LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX + \
+ r'([0-9]+[.][0-9]+[.][0-9]+).* \(.*@.*\) \(.*\) .*\n'
+
+
+def get_version(input_bytes, start_idx):
+ null_idx = input_bytes.find('\x00', start_idx)
+ if null_idx < 0:
+ return None
+ linux_banner = input_bytes[start_idx:null_idx].decode()
+ mo = re.match(LINUX_BANNER_REGEX, linux_banner)
+ if mo:
+ return mo.group(1)
+ return None
+
+
+def dump_version(input_bytes):
+ idx = 0
+ while True:
+ idx = input_bytes.find(LINUX_BANNER_PREFIX, idx)
+ if idx < 0:
+ return None
+
+ version = get_version(input_bytes, idx)
+ if version:
+ return version
+
+ idx += len(LINUX_BANNER_PREFIX)
+
+
+def dump_configs(input_bytes):
+ """
+ Dump kernel configuration from input_bytes. This can be done when
+ CONFIG_IKCONFIG is enabled, which is a requirement on Treble devices.
+
+ The kernel configuration is archived in GZip format right after the magic
+ string 'IKCFG_ST' in the built kernel.
+ """
+
+ # Search for magic string + GZip header
+ idx = input_bytes.find(CONFIG_PREFIX + GZIP_HEADER)
+ if idx < 0:
+ return None
+
+ # Seek to the start of the archive
+ idx += len(CONFIG_PREFIX)
+
+ sp = subprocess.Popen(["gzip", "-d", "-c"], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ o, _ = sp.communicate(input=input_bytes[idx:])
+ if sp.returncode == 1: # error
+ return None
+
+ # success or trailing garbage warning
+ assert sp.returncode in (0, 2), sp.returncode
+
+ return o
+
+
+def try_decompress(cmd, search_bytes, input_bytes):
+ idx = input_bytes.find(search_bytes)
+ if idx < 0:
+ return None
+
+ idx = 0
+ sp = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ o, _ = sp.communicate(input=input_bytes[idx:])
+ # ignore errors
+ return o
+
+
+def decompress_dump(func, input_bytes):
+ """
+ Run func(input_bytes) first; if that fails (the returned value evaluates to
+ False), try each decompression algorithm and run func again on the result.
+ """
+ o = func(input_bytes)
+ if o:
+ return o
+ for cmd, search_bytes in COMPRESSION_ALGO:
+ decompressed = try_decompress(cmd, search_bytes, input_bytes)
+ if decompressed:
+ o = func(decompressed)
+ if o:
+ return o
+ # Force decompress the whole file even if header doesn't match
+ decompressed = try_decompress(cmd, b"", input_bytes)
+ if decompressed:
+ o = func(decompressed)
+ if o:
+ return o
+
+def main():
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawTextHelpFormatter,
+ description=__doc__ +
+ "\nThese algorithms are tried when decompressing the image:\n " +
+ " ".join(tup[0][0] for tup in COMPRESSION_ALGO))
+ parser.add_argument('--input',
+ help='Input kernel image. If not specified, use stdin',
+ metavar='FILE',
+ type=argparse.FileType('rb'),
+ default=sys.stdin)
+ parser.add_argument('--output-configs',
+ help='If specified, write configs. Use stdout if no file '
+ 'is specified.',
+ metavar='FILE',
+ nargs='?',
+ type=argparse.FileType('wb'),
+ const=sys.stdout)
+ parser.add_argument('--output-version',
+ help='If specified, write version. Use stdout if no file '
+ 'is specified.',
+ metavar='FILE',
+ nargs='?',
+ type=argparse.FileType('wb'),
+ const=sys.stdout)
+ parser.add_argument('--tools',
+ help='Decompression tools to use. If not specified, PATH '
+ 'is searched.',
+ metavar='ALGORITHM:EXECUTABLE',
+ nargs='*')
+ args = parser.parse_args()
+
+ tools = {pair[0]: pair[1]
+ for pair in (token.split(':') for token in args.tools or [])}
+ for cmd, _ in COMPRESSION_ALGO:
+ if cmd[0] in tools:
+ cmd[0] = tools[cmd[0]]
+
+ input_bytes = args.input.read()
+
+ ret = 0
+ if args.output_configs is not None:
+ o = decompress_dump(dump_configs, input_bytes)
+ if o:
+ args.output_configs.write(o)
+ else:
+ sys.stderr.write(
+ "Cannot extract kernel configs in {}".format(args.input.name))
+ ret = 1
+ if args.output_version is not None:
+ o = decompress_dump(dump_version, input_bytes)
+ if o:
+ args.output_version.write(o)
+ else:
+ sys.stderr.write(
+ "Cannot extract kernel versions in {}".format(args.input.name))
+ ret = 1
+
+ return ret
+
+
+if __name__ == '__main__':
+ exit(main())
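
Once the image is decompressed, the version scan in dump_version() is just a regex over the raw bytes. A tiny illustration with a made-up banner string; re.search here condenses the script's find-then-match loop into one call:

    import re

    BANNER_REGEX = (b'Linux version ([0-9]+[.][0-9]+[.][0-9]+)'
                    b'.* \\(.*@.*\\) \\(.*\\) .*\\n')

    sample = (b'...padding...Linux version 4.14.117 (builder@host) '
              b'(clang version 8.0.6) #1 SMP PREEMPT\n\x00trailing')
    match = re.search(BANNER_REGEX, sample)
    print(match.group(1).decode())  # 4.14.117
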
diff --git a/tools/findleaves.py b/tools/findleaves.py
index f152a87e7f..97302e9535 100755
--- a/tools/findleaves.py
+++ b/tools/findleaves.py
@@ -103,7 +103,7 @@ def main(argv):
prune.append(p)
elif arg.startswith("--dir="):
d = arg[len("--dir="):]
- if len(p) == 0:
+ if len(d) == 0:
usage()
dirlist.append(d)
else:
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 797cfe228b..d9a48d704a 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -12,6 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+bootstrap_go_package {
+ name: "soong-fs_config",
+ pkgPath: "android/soong/fs_config",
+ deps: [
+ "soong-android",
+ "soong-genrule",
+ ],
+ srcs: [
+ "fs_config.go"
+ ],
+ pluginFor: ["soong_build"],
+}
+
cc_binary_host {
name: "fs_config",
srcs: ["fs_config.c"],
@@ -22,42 +35,63 @@ cc_binary_host {
cflags: ["-Werror"],
}
-// -----------------------------------------------------------------------------
-// Unit tests.
-// -----------------------------------------------------------------------------
-
-test_c_flags = [
- "-fstack-protector-all",
- "-g",
- "-Wall",
- "-Wextra",
- "-Werror",
- "-fno-builtin",
- "-DANDROID_FILESYSTEM_CONFIG=\"android_filesystem_config_test_data.h\"",
-]
-
-//#################################
-// test executable
-cc_test_host {
- name: "fs_config_generate_test",
- srcs: ["fs_config_generate.c"],
- shared_libs: ["libcutils"],
- cflags: test_c_flags,
- relative_install_path: "fs_config-unit-tests",
- no_named_install_directory: true,
- gtest: false,
-
-}
-
-//#################################
-// gTest tool
-cc_test_host {
- name: "fs_config-unit-tests",
- cflags: test_c_flags + ["-DHOST"],
- shared_libs: [
- "liblog",
- "libcutils",
- "libbase",
+target_fs_config_gen_filegroup {
+ name: "target_fs_config_gen",
+}
+
+genrule {
+ name: "oemaids_header_gen",
+ tool_files: ["fs_config_generator.py"],
+ cmd: "$(location fs_config_generator.py) oemaid --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ srcs: [
+ ":target_fs_config_gen",
+ ":android_filesystem_config_header",
+ ],
+ out: ["generated_oem_aid.h"],
+}
+
+cc_library_headers {
+ name: "oemaids_headers",
+ generated_headers: ["oemaids_header_gen"],
+ export_generated_headers: ["oemaids_header_gen"],
+}
+
+// Generate the vendor/etc/passwd text file for the target
+// This file may be empty if no AIDs are defined in
+// TARGET_FS_CONFIG_GEN files.
+genrule {
+ name: "passwd_gen",
+ tool_files: ["fs_config_generator.py"],
+ cmd: "$(location fs_config_generator.py) passwd --required-prefix=vendor_ --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ srcs: [
+ ":target_fs_config_gen",
+ ":android_filesystem_config_header",
+ ],
+ out: ["passwd"],
+}
+
+prebuilt_etc {
+ name: "passwd",
+ vendor: true,
+ src: ":passwd_gen",
+}
+
+// Generate the vendor/etc/group text file for the target
+// This file may be empty if no AIDs are defined in
+// TARGET_FS_CONFIG_GEN files.
+genrule {
+ name: "group_gen",
+ tool_files: ["fs_config_generator.py"],
+ cmd: "$(location fs_config_generator.py) group --required-prefix=vendor_ --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ srcs: [
+ ":target_fs_config_gen",
+ ":android_filesystem_config_header",
],
- srcs: ["fs_config_test.cpp"],
+ out: ["group"],
+}
+
+prebuilt_etc {
+ name: "group",
+ vendor: true,
+ src: ":group_gen",
}
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index a01e702347..96db0f39fb 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -14,131 +14,120 @@
LOCAL_PATH := $(call my-dir)
-# One can override the default android_filesystem_config.h file in one of two ways:
-#
-# 1. The old way:
-# To Build the custom target binary for the host to generate the fs_config
-# override files. The executable is hard coded to include the
-# $(TARGET_ANDROID_FILESYSTEM_CONFIG_H) file if it exists.
-# Expectations:
-# device/<vendor>/<device>/android_filesystem_config.h
-# fills in struct fs_path_config android_device_dirs[] and
-# struct fs_path_config android_device_files[]
-# device/<vendor>/<device>/device.mk
-# PRODUCT_PACKAGES += fs_config_dirs fs_config_files
-# If not specified, check if default one to be found
-#
-# 2. The new way:
-# set TARGET_FS_CONFIG_GEN to contain a list of intermediate format files
+# One can override the default android_filesystem_config.h file by using TARGET_FS_CONFIG_GEN.
+# Set TARGET_FS_CONFIG_GEN to contain a list of intermediate format files
# for generating the android_filesystem_config.h file.
#
# More information can be found in the README
-ANDROID_FS_CONFIG_H := android_filesystem_config.h
-ifneq ($(TARGET_ANDROID_FILESYSTEM_CONFIG_H),)
-ifneq ($(TARGET_FS_CONFIG_GEN),)
-$(error Cannot set TARGET_ANDROID_FILESYSTEM_CONFIG_H and TARGET_FS_CONFIG_GEN simultaneously)
+ifneq ($(wildcard $(TARGET_DEVICE_DIR)/android_filesystem_config.h),)
+$(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead)
endif
-# One and only one file can be specified.
-ifneq ($(words $(TARGET_ANDROID_FILESYSTEM_CONFIG_H)),1)
-$(error Multiple fs_config files specified, \
- see "$(TARGET_ANDROID_FILESYSTEM_CONFIG_H)".)
-endif
+system_android_filesystem_config := system/core/include/private/android_filesystem_config.h
+system_capability_header := bionic/libc/kernel/uapi/linux/capability.h
-ifeq ($(filter %/$(ANDROID_FS_CONFIG_H),$(TARGET_ANDROID_FILESYSTEM_CONFIG_H)),)
-$(error TARGET_ANDROID_FILESYSTEM_CONFIG_H file name must be $(ANDROID_FS_CONFIG_H), \
- see "$(notdir $(TARGET_ANDROID_FILESYSTEM_CONFIG_H))".)
-endif
+# List of supported vendor, oem, odm, product and product_services partitions
+fs_config_generate_extra_partition_list := $(strip \
+ $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
+ $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
+ $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm) \
+ $(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),product) \
+ $(if $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE),product_services) \
+)
-my_fs_config_h := $(TARGET_ANDROID_FILESYSTEM_CONFIG_H)
-else ifneq ($(wildcard $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H)),)
+##################################
+# Generate the <p>/etc/fs_config_dirs binary files for each partition.
+# Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable.
+include $(CLEAR_VARS)
-ifneq ($(TARGET_FS_CONFIG_GEN),)
-$(error Cannot provide $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H) and set TARGET_FS_CONFIG_GEN simultaneously)
-endif
-my_fs_config_h := $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H)
+LOCAL_MODULE := fs_config_dirs
+LOCAL_REQUIRED_MODULES := \
+ fs_config_dirs_system \
+ $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+include $(BUILD_PHONY_PACKAGE)
-else
-my_fs_config_h := $(LOCAL_PATH)/default/$(ANDROID_FS_CONFIG_H)
-endif
##################################
+# Generate the <p>/etc/fs_config_files binary files for each partition.
+# Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable.
include $(CLEAR_VARS)
-LOCAL_SRC_FILES := fs_config_generate.c
-LOCAL_MODULE := fs_config_generate_$(TARGET_DEVICE)
-LOCAL_MODULE_CLASS := EXECUTABLES
-LOCAL_SHARED_LIBRARIES := libcutils
-LOCAL_CFLAGS := -Werror -Wno-error=\#warnings
-ifneq ($(TARGET_FS_CONFIG_GEN),)
-system_android_filesystem_config := system/core/include/private/android_filesystem_config.h
+LOCAL_MODULE := fs_config_files
+LOCAL_REQUIRED_MODULES := \
+ fs_config_files_system \
+ $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+include $(BUILD_PHONY_PACKAGE)
-# Generate the "generated_oem_aid.h" file
-oem := $(local-generated-sources-dir)/generated_oem_aid.h
-$(oem): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-$(oem): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(oem): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(oem): PRIVATE_CUSTOM_TOOL = $(PRIVATE_LOCAL_PATH)/fs_config_generator.py oemaid --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@
-$(oem): $(TARGET_FS_CONFIG_GEN) $(LOCAL_PATH)/fs_config_generator.py
- $(transform-generated-source)
-
-# Generate the fs_config header
-gen := $(local-generated-sources-dir)/$(ANDROID_FS_CONFIG_H)
-$(gen): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-$(gen): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(gen): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(gen): PRIVATE_CUSTOM_TOOL = $(PRIVATE_LOCAL_PATH)/fs_config_generator.py fsconfig --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@
-$(gen): $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(LOCAL_PATH)/fs_config_generator.py
- $(transform-generated-source)
-
-LOCAL_GENERATED_SOURCES := $(oem) $(gen)
-
-my_fs_config_h := $(gen)
-my_gen_oem_aid := $(oem)
-gen :=
-oem :=
-endif
+##################################
+# Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions
+# excluding /system. Add fs_config_dirs_nonsystem to PRODUCT_PACKAGES in the
+# device make file to enable.
+include $(CLEAR_VARS)
-LOCAL_C_INCLUDES := $(dir $(my_fs_config_h)) $(dir $(my_gen_oem_aid))
+LOCAL_MODULE := fs_config_dirs_nonsystem
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_dirs_$(t))
+include $(BUILD_PHONY_PACKAGE)
-include $(BUILD_HOST_EXECUTABLE)
-fs_config_generate_bin := $(LOCAL_INSTALLED_MODULE)
-# List of all supported vendor, oem and odm Partitions
-fs_config_generate_extra_partition_list := $(strip \
- $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
- $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
- $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm))
+##################################
+# Generate the <p>/etc/fs_config_files binary files for all enabled partitions
+# excluding /system. Add fs_config_files_nonsystem to PRODUCT_PACKAGES in the
+# device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_nonsystem
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_files_$(t))
+include $(BUILD_PHONY_PACKAGE)
##################################
# Generate the system/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable
+# Add fs_config_dirs or fs_config_dirs_system to PRODUCT_PACKAGES in
+# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs
+LOCAL_MODULE := fs_config_dirs_system
LOCAL_MODULE_CLASS := ETC
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_PARTITION_LIST := $(fs_config_generate_extra_partition_list)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -D $(if $(fs_config_generate_extra_partition_list), \
- -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
- -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition system \
+ --all-partitions $(subst $(space),$(comma),$(PRIVATE_PARTITION_LIST)) \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
##################################
# Generate the system/etc/fs_config_files binary file for the target
-# Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable
+# Add fs_config_files or fs_config_files_system to PRODUCT_PACKAGES in
+# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files
+LOCAL_MODULE := fs_config_files_system
LOCAL_MODULE_CLASS := ETC
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_PARTITION_LIST := $(fs_config_generate_extra_partition_list)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -F $(if $(fs_config_generate_extra_partition_list), \
- -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
- -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition system \
+ --all-partitions $(subst $(space),$(comma),$(PRIVATE_PARTITION_LIST)) \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
##################################
@@ -152,9 +141,18 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -D -P vendor -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition vendor \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
##################################
# Generate the vendor/etc/fs_config_files binary file for the target
@@ -167,9 +165,18 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -F -P vendor -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition vendor \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
endif
@@ -185,9 +192,18 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -D -P oem -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition oem \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
##################################
# Generate the oem/etc/fs_config_files binary file for the target
@@ -200,9 +216,18 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -F -P oem -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition oem \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
endif
@@ -218,9 +243,18 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -D -P odm -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition odm \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
##################################
# Generate the odm/etc/fs_config_files binary file for the target
@@ -233,69 +267,121 @@ LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $< -F -P odm -o $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition odm \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
endif
-# The newer passwd/group targets are only generated if you
-# use the new TARGET_FS_CONFIG_GEN method.
-ifneq ($(TARGET_FS_CONFIG_GEN),)
-
+ifneq ($(filter product,$(fs_config_generate_extra_partition_list)),)
##################################
-# Build the oemaid header library when fs config files are present.
-# Intentionally break build if you require generated AIDs
-# header file, but are not using any fs config files.
+# Generate the product/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_product to PRODUCT_PACKAGES in
+# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := oemaids_headers
-LOCAL_EXPORT_C_INCLUDE_DIRS := $(dir $(my_gen_oem_aid))
-LOCAL_EXPORT_C_INCLUDE_DEPS := $(my_gen_oem_aid)
-include $(BUILD_HEADER_LIBRARY)
+
+LOCAL_MODULE := fs_config_dirs_product
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+ @mkdir -p $(dir $@)
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition product \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
##################################
-# Generate the vendor/etc/passwd text file for the target
-# This file may be empty if no AIDs are defined in
-# TARGET_FS_CONFIG_GEN files.
+# Generate the product/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_product to PRODUCT_PACKAGES in
+# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := passwd
+LOCAL_MODULE := fs_config_files_product
LOCAL_MODULE_CLASS := ETC
-LOCAL_VENDOR_MODULE := true
-
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-
-$(LOCAL_BUILT_MODULE): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $(hide) $< passwd --required-prefix=vendor_ --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition product \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
+endif
+ifneq ($(filter product_services,$(fs_config_generate_extra_partition_list)),)
##################################
-# Generate the vendor/etc/group text file for the target
-# This file may be empty if no AIDs are defined in
-# TARGET_FS_CONFIG_GEN files.
+# Generate the product_services/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_product_services to PRODUCT_PACKAGES in
+# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := group
+LOCAL_MODULE := fs_config_dirs_product_services
LOCAL_MODULE_CLASS := ETC
-LOCAL_VENDOR_MODULE := true
-
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT_SERVICES)/etc
include $(BUILD_SYSTEM)/base_rules.mk
-
-$(LOCAL_BUILT_MODULE): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
@mkdir -p $(dir $@)
- $(hide) $< group --required-prefix=vendor_ --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition product_services \
+ --dirs \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-system_android_filesystem_config :=
+##################################
+# Generate the product_services/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_product_services to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_product_services
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT_SERVICES)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+ @mkdir -p $(dir $@)
+ $< fsconfig \
+ --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+ --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+ --partition product_services \
+ --files \
+ --out_file $@ \
+ $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
endif
-ANDROID_FS_CONFIG_H :=
-my_fs_config_h :=
-fs_config_generate_bin :=
-my_gen_oem_aid :=
+system_android_filesystem_config :=
+system_capability_header :=
fs_config_generate_extra_partition_list :=
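
Every rule above runs the same fs_config_generator.py fsconfig command, varying only the partition and the --dirs/--files switch. A hedged helper mirroring that invocation shape; the flag names are taken from the recipes, while the wrapper itself and its argument names are illustrative:

    import subprocess

    def generate_fs_config(generator, aid_header, cap_header, partition,
                           kind, out_file, config_files, all_partitions=None):
        """Run the fsconfig sub-command the way the make rules above do."""
        cmd = [generator, 'fsconfig',
               '--aid-header', aid_header,
               '--capability-header', cap_header,
               '--partition', partition]
        if all_partitions:
            cmd += ['--all-partitions', ','.join(all_partitions)]
        cmd += ['--dirs' if kind == 'dirs' else '--files',
                '--out_file', out_file]
        cmd += list(config_files) or ['/dev/null']
        subprocess.check_call(cmd)
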
diff --git a/tools/fs_config/OWNERS b/tools/fs_config/OWNERS
new file mode 100644
index 0000000000..55996449cf
--- /dev/null
+++ b/tools/fs_config/OWNERS
@@ -0,0 +1,2 @@
+tomcherry@google.com
+salyzyn@google.com
diff --git a/tools/fs_config/README b/tools/fs_config/README
index cc2a68ff87..f7d4deb74f 100644
--- a/tools/fs_config/README
+++ b/tools/fs_config/README
@@ -5,25 +5,9 @@
Generating the android_filesystem_config.h:
-To generate the android_filesystem_config.h file, one can choose from
-one of two methods. The first method, is to declare
-TARGET_ANDROID_FILESYSTEM_CONFIG_H in the device BoardConfig.mk file. This
-variable can only have one item in it, and it is used directly as the
-android_filesystem_config.h header when building
-fs_config_generate_$(TARGET_DEVICE) which is used to generate fs_config_files
-and fs_config_dirs target executable.
-
-The limitation with this, is that it can only be set once, thus if the device
-has a make hierarchy, then each device needs its own file, and cannot share
-from a common source or that common source needs to include everything from
-both devices.
-
-The other way is to set TARGET_FS_CONFIG_GEN, which can be a list of
-intermediate fs configuration files. It is a build error on any one
-these conditions:
- * Specify TARGET_FS_CONFIG_GEN and TARGET_ANDROID_FILESYSTEM_CONFIG_H
- * Specify TARGET_FS_CONFIG_GEN and provide
- $(TARGET_DEVICE_DIR)/android_filesystem_config.h
+To generate the android_filesystem_config.h file, one can set
+TARGET_FS_CONFIG_GEN, which can be a list of intermediate fs configuration
+files.
The parsing of the config file follows the Python ConfigParser specification,
with the sections and fields as defined below. There are two types of sections,
diff --git a/tools/fs_config/android_filesystem_config_test_data.h b/tools/fs_config/android_filesystem_config_test_data.h
deleted file mode 100644
index 07bc8e5efb..0000000000
--- a/tools/fs_config/android_filesystem_config_test_data.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <private/android_filesystem_config.h>
-
-/* Test Data */
-
-#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
-
-static const struct fs_path_config android_device_dirs[] = {
- {00555, AID_ROOT, AID_SYSTEM, 0, "system/etc"},
- {00555, AID_ROOT, AID_SYSTEM, 0, "vendor/etc"},
- {00555, AID_ROOT, AID_SYSTEM, 0, "oem/etc"},
- {00555, AID_ROOT, AID_SYSTEM, 0, "odm/etc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "data/misc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "oem/data/misc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "odm/data/misc"},
- {00755, AID_SYSTEM, AID_ROOT, 0, "vendor/data/misc"},
- {00555, AID_SYSTEM, AID_ROOT, 0, "etc"},
-};
-
-static const struct fs_path_config android_device_files[] = {
- {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_dirs"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_dirs"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_dirs"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_dirs"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_files"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_files"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_files"},
- {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_files"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_dirs"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_dirs"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_dirs"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_files"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_files"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_files"},
- {00644, AID_SYSTEM, AID_ROOT, 0, "etc/fs_config_files"},
- {00666, AID_ROOT, AID_SYSTEM, 0, "data/misc/oem"},
-};
diff --git a/tools/fs_config/default/android_filesystem_config.h b/tools/fs_config/default/android_filesystem_config.h
deleted file mode 100644
index b7d936a7a2..0000000000
--- a/tools/fs_config/default/android_filesystem_config.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* This file is used to enhance the properties of the filesystem
-** images generated by build tools (mkbootfs and mkyaffs2image) and
-** by the device side of adb.
-*/
-
-/* Rules for directories.
-** These rules are applied based on "first match", so they
-** should start with the most specific path and work their
-** way up to the root.
-*/
-
-#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS 1 /* opt out of specifying */
-
-/* Rules for files.
-** These rules are applied based on "first match", so they
-** should start with the most specific path and work their
-** way up to the root. Prefixes ending in * denotes wildcard
-** and will allow partial matches.
-*/
-
-#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES 1 /* opt out of specifying */
diff --git a/tools/fs_config/end_to_end_test/config.fs b/tools/fs_config/end_to_end_test/config.fs
new file mode 100644
index 0000000000..339e5aef85
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/config.fs
@@ -0,0 +1,108 @@
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file is used to define the properties of the filesystem
+# images generated by build tools (mkbootfs and mkyaffs2image) and
+# by the device side of adb.
+
+[AID_VENDOR_NEW_SERVICE]
+value: 2900
+
+[AID_VENDOR_NEW_SERVICE_TWO]
+value:2902
+
+[vendor/bin/service1]
+mode: 0755
+user: AID_SYSTEM
+group: AID_VENDOR_NEW_SERVICE
+caps: CHOWN DAC_OVERRIDE
+
+[vendor/bin/service2]
+mode: 0755
+user: AID_VENDOR_NEW_SERVICE_TWO
+group: AID_SYSTEM
+caps: AUDIT_READ CHOWN SYS_ADMIN
+
+[system/vendor/bin/service3]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: AUDIT_READ CHOWN SYS_ADMIN
+
+[vendor/dir/]
+mode: 0755
+user: AID_VENDOR_NEW_SERVICE_TWO
+group: AID_SYSTEM
+caps: 0
+
+[system/vendor/dir2/]
+mode: 0755
+user: AID_VENDOR_NEW_SERVICE_TWO
+group: AID_SYSTEM
+caps: 0
+
+[product/bin/service1]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: 0x34
+
+[product/bin/service2]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: NET_BIND_SERVICE WAKE_ALARM
+
+[system/product/bin/service3]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: NET_BIND_SERVICE WAKE_ALARM
+
+[product/dir/]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: 0
+
+[system/product/dir/]
+mode: 0755
+user: AID_SYSTEM
+group: AID_SYSTEM
+caps: 0
+
+[system/bin/service]
+mode: 0755
+user: AID_SYSTEM
+group: AID_RADIO
+caps: NET_BIND_SERVICE
+
+[system/dir/]
+mode: 0755
+user: AID_SYSTEM
+group: AID_RADIO
+caps: 0
+
+[root_file]
+mode: 0755
+user: AID_SYSTEM
+group: AID_RADIO
+caps: 0
+
+[root_dir/]
+mode: 0755
+user: AID_SYSTEM
+group: AID_RADIO
+caps: 0
diff --git a/tools/fs_config/end_to_end_test/product_fs_config_dirs b/tools/fs_config/end_to_end_test/product_fs_config_dirs
new file mode 100644
index 0000000000..e69ad65d52
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/product_fs_config_dirs
Binary files differ
diff --git a/tools/fs_config/end_to_end_test/product_fs_config_files b/tools/fs_config/end_to_end_test/product_fs_config_files
new file mode 100644
index 0000000000..376a2a6b81
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/product_fs_config_files
Binary files differ
diff --git a/tools/fs_config/end_to_end_test/run_test.sh b/tools/fs_config/end_to_end_test/run_test.sh
new file mode 100755
index 0000000000..74022766c6
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/run_test.sh
@@ -0,0 +1,76 @@
+cd $ANDROID_BUILD_TOP/build/make/tools/fs_config/end_to_end_test
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition system \
+ --all-partitions vendor,product \
+ --files \
+ --out_file result_system_fs_config_files \
+ ./config.fs
+
+diff system_fs_config_files result_system_fs_config_files 1>/dev/null && echo 'Success system_fs_config_files' ||
+ echo 'Fail: Mismatch between system_fs_config_files and result_system_fs_config_files'
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition system \
+ --all-partitions vendor,product \
+ --dirs \
+ --out_file result_system_fs_config_dirs \
+ ./config.fs
+
+diff system_fs_config_dirs result_system_fs_config_dirs 1>/dev/null && echo 'Success system_fs_config_dirs' ||
+ echo 'Fail: Mismatch between system_fs_config_dirs and result_system_fs_config_dirs'
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition vendor \
+ --files \
+ --out_file result_vendor_fs_config_files \
+ ./config.fs
+
+diff vendor_fs_config_files result_vendor_fs_config_files 1>/dev/null && echo 'Success vendor_fs_config_files' ||
+ echo 'Fail: Mismatch between vendor_fs_config_files and result_vendor_fs_config_files'
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition vendor \
+ --dirs \
+ --out_file result_vendor_fs_config_dirs \
+ ./config.fs
+
+diff vendor_fs_config_dirs result_vendor_fs_config_dirs 1>/dev/null && echo 'Success vendor_fs_config_dirs' ||
+ echo 'Fail: Mismatch between vendor_fs_config_dirs and result_vendor_fs_config_dirs'
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition product \
+ --files \
+ --out_file result_product_fs_config_files \
+ ./config.fs
+
+diff product_fs_config_files result_product_fs_config_files 1>/dev/null && echo 'Success product_fs_config_files' ||
+ echo 'Fail: Mismatch between product_fs_config_files and result_product_fs_config_files'
+
+$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
+ --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
+ --partition product \
+ --dirs \
+ --out_file result_product_fs_config_dirs \
+ ./config.fs
+
+diff product_fs_config_dirs result_product_fs_config_dirs 1>/dev/null && echo 'Success product_fs_config_dirs' ||
+ echo 'Fail: Mismatch between product_fs_config_dirs and result_product_fs_config_dirs'
+
+rm result_system_fs_config_files
+rm result_system_fs_config_dirs
+rm result_vendor_fs_config_files
+rm result_vendor_fs_config_dirs
+rm result_product_fs_config_files
+rm result_product_fs_config_dirs
diff --git a/tools/fs_config/end_to_end_test/system_fs_config_dirs b/tools/fs_config/end_to_end_test/system_fs_config_dirs
new file mode 100644
index 0000000000..3a95e40f49
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/system_fs_config_dirs
Binary files differ
diff --git a/tools/fs_config/end_to_end_test/system_fs_config_files b/tools/fs_config/end_to_end_test/system_fs_config_files
new file mode 100644
index 0000000000..578091cdc7
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/system_fs_config_files
Binary files differ
diff --git a/tools/fs_config/end_to_end_test/vendor_fs_config_dirs b/tools/fs_config/end_to_end_test/vendor_fs_config_dirs
new file mode 100644
index 0000000000..02dded79e4
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/vendor_fs_config_dirs
Binary files differ
diff --git a/tools/fs_config/end_to_end_test/vendor_fs_config_files b/tools/fs_config/end_to_end_test/vendor_fs_config_files
new file mode 100644
index 0000000000..90bedc9e85
--- /dev/null
+++ b/tools/fs_config/end_to_end_test/vendor_fs_config_files
Binary files differ
diff --git a/tools/fs_config/fs_config.go b/tools/fs_config/fs_config.go
new file mode 100644
index 0000000000..16bcefa11b
--- /dev/null
+++ b/tools/fs_config/fs_config.go
@@ -0,0 +1,60 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package fs_config
+
+import (
+ "android/soong/android"
+)
+
+var pctx = android.NewPackageContext("android/soong/fs_config")
+
+func init() {
+ android.RegisterModuleType("target_fs_config_gen_filegroup", targetFSConfigGenFactory)
+}
+
+// target_fs_config_gen_filegroup is used to expose the files pointed to by TARGET_FS_CONFIG_GEN to
+// genrules in Soong. If TARGET_FS_CONFIG_GEN is empty, it will export an empty file instead.
+func targetFSConfigGenFactory() android.Module {
+ module := &targetFSConfigGen{}
+ android.InitAndroidModule(module)
+ return module
+}
+
+var _ android.SourceFileProducer = (*targetFSConfigGen)(nil)
+
+type targetFSConfigGen struct {
+ android.ModuleBase
+ paths android.Paths
+}
+
+func (targetFSConfigGen) DepsMutator(ctx android.BottomUpMutatorContext) {}
+
+func (t *targetFSConfigGen) GenerateAndroidBuildActions(ctx android.ModuleContext) {
+ if ret := ctx.DeviceConfig().TargetFSConfigGen(); len(ret) != 0 {
+ t.paths = android.PathsForSource(ctx, ret)
+ } else {
+ path := android.PathForModuleGen(ctx, "empty")
+ t.paths = android.Paths{path}
+
+ rule := android.NewRuleBuilder()
+ rule.Command().Text("rm -rf").Output(path)
+ rule.Command().Text("touch").Output(path)
+ rule.Build(pctx, ctx, "fs_config_empty", "create empty file")
+ }
+}
+
+func (t *targetFSConfigGen) Srcs() android.Paths {
+ return t.paths
+}
diff --git a/tools/fs_config/fs_config_generate.c b/tools/fs_config/fs_config_generate.c
deleted file mode 100644
index cb7ff9da7e..0000000000
--- a/tools/fs_config/fs_config_generate.c
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <ctype.h>
-#include <stdbool.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <unistd.h>
-
-#include <private/android_filesystem_config.h>
-
-/*
- * This program expects android_device_dirs and android_device_files
- * to be defined in the supplied android_filesystem_config.h file in
- * the device/<vendor>/<product> $(TARGET_DEVICE_DIR). Then generates
- * the binary format used in the /system/etc/fs_config_dirs and
- * the /system/etc/fs_config_files to be used by the runtimes.
- */
-#ifdef ANDROID_FILESYSTEM_CONFIG
-#include ANDROID_FILESYSTEM_CONFIG
-#else
-#include "android_filesystem_config.h"
-#endif
-
-#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-static const struct fs_path_config android_device_dirs[] = { };
-#endif
-
-#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
-static const struct fs_path_config android_device_files[] = {
-#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
- {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs"},
- {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_dirs"},
- {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_dirs"},
- {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_dirs"},
-#endif
- {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files"},
- {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_files"},
- {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_files"},
- {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_files"},
-};
-#endif
-
-static void usage() {
- fprintf(stderr,
- "Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)\n"
- "from device-specific android_filesystem_config.h override. Filter based\n"
- "on a comma separated partition list (-P) whitelist or prefixed by a\n"
- "minus blacklist. Partitions are identified as path references to\n"
- "<partition>/ or system/<partition>/\n\n"
- "Usage: fs_config_generate -D|-F [-P list] [-o output-file]\n");
-}
-
-/* If tool switches to C++, use android-base/macros.h array_size() */
-#ifndef ARRAY_SIZE /* popular macro */
-#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
-#endif
-
-int main(int argc, char** argv) {
- const struct fs_path_config* pc;
- const struct fs_path_config* end;
- bool dir = false, file = false;
- const char* partitions = NULL;
- FILE* fp = stdout;
- int opt;
- static const char optstring[] = "DFP:ho:";
-
- while ((opt = getopt(argc, argv, optstring)) != -1) {
- switch (opt) {
- case 'D':
- if (file) {
- fprintf(stderr, "Must specify only -D or -F\n");
- usage();
- exit(EXIT_FAILURE);
- }
- dir = true;
- break;
- case 'F':
- if (dir) {
- fprintf(stderr, "Must specify only -F or -D\n");
- usage();
- exit(EXIT_FAILURE);
- }
- file = true;
- break;
- case 'P':
- if (partitions) {
- fprintf(stderr, "Specify only one partition list\n");
- usage();
- exit(EXIT_FAILURE);
- }
- while (*optarg && isspace(*optarg)) ++optarg;
- if (!optarg[0]) {
- fprintf(stderr, "Partition list empty\n");
- usage();
- exit(EXIT_FAILURE);
- }
- if (!optarg[1]) {
- fprintf(stderr, "Partition list too short \"%s\"\n", optarg);
- usage();
- exit(EXIT_FAILURE);
- }
- if ((optarg[0] == '-') && strchr(optstring, optarg[1]) && !optarg[2]) {
- fprintf(stderr, "Partition list is a flag \"%s\"\n", optarg);
- usage();
- exit(EXIT_FAILURE);
- }
- partitions = optarg;
- break;
- case 'o':
- if (fp != stdout) {
- fprintf(stderr, "Specify only one output file\n");
- usage();
- exit(EXIT_FAILURE);
- }
- fp = fopen(optarg, "wb");
- if (fp == NULL) {
- fprintf(stderr, "Can not open \"%s\"\n", optarg);
- exit(EXIT_FAILURE);
- }
- break;
- case 'h':
- usage();
- exit(EXIT_SUCCESS);
- default:
- usage();
- exit(EXIT_FAILURE);
- }
- }
-
- if (optind < argc) {
- fprintf(stderr, "Unknown non-argument \"%s\"\n", argv[optind]);
- usage();
- exit(EXIT_FAILURE);
- }
-
- if (!file && !dir) {
- fprintf(stderr, "Must specify either -F or -D\n");
- usage();
- exit(EXIT_FAILURE);
- }
-
- if (dir) {
- pc = android_device_dirs;
- end = &android_device_dirs[ARRAY_SIZE(android_device_dirs)];
- } else {
- pc = android_device_files;
- end = &android_device_files[ARRAY_SIZE(android_device_files)];
- }
- for (; (pc < end) && pc->prefix; pc++) {
- bool submit;
- char buffer[512];
- ssize_t len = fs_config_generate(buffer, sizeof(buffer), pc);
- if (len < 0) {
- fprintf(stderr, "Entry too large\n");
- exit(EXIT_FAILURE);
- }
- submit = true;
- if (partitions) {
- char* partitions_copy = strdup(partitions);
- char* arg = partitions_copy;
- char* sv = NULL; /* Do not leave uninitialized, NULL is known safe. */
- /* Deal with case all iterated partitions are blacklists with no match */
- bool all_blacklist_but_no_match = true;
- submit = false;
-
- if (!partitions_copy) {
- fprintf(stderr, "Failed to allocate a copy of %s\n", partitions);
- exit(EXIT_FAILURE);
- }
- /* iterate through (officially) comma separated list of partitions */
- while (!!(arg = strtok_r(arg, ",:; \t\n\r\f", &sv))) {
- static const char system[] = "system/";
- size_t plen;
- bool blacklist = false;
- if (*arg == '-') {
- blacklist = true;
- ++arg;
- } else {
- all_blacklist_but_no_match = false;
- }
- plen = strlen(arg);
- /* deal with evil callers */
- while (arg[plen - 1] == '/') {
- --plen;
- }
- /* check if we have <partition>/ or /system/<partition>/ */
- if ((!strncmp(pc->prefix, arg, plen) && (pc->prefix[plen] == '/')) ||
- (!strncmp(pc->prefix, system, strlen(system)) &&
- !strncmp(pc->prefix + strlen(system), arg, plen) &&
- (pc->prefix[strlen(system) + plen] == '/'))) {
- all_blacklist_but_no_match = false;
- /* we have a match !!! */
- if (!blacklist) submit = true;
- break;
- }
- arg = NULL;
- }
- free(partitions_copy);
- if (all_blacklist_but_no_match) submit = true;
- }
- if (submit && (fwrite(buffer, 1, len, fp) != (size_t)len)) {
- fprintf(stderr, "Write failure\n");
- exit(EXIT_FAILURE);
- }
- }
- fclose(fp);
-
- return 0;
-}
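Where the deleted tool filtered entries with a -P whitelist/blacklist, the rewritten Python generator (diffed next) selects entries per partition by path prefix: an entry belongs to --partition when its path starts with '<partition>/' or 'system/<partition>/', and the system output additionally skips paths owned by any partition listed in --all-partitions. A minimal sketch of that selection idea, not the exact _emit_entry implementation; the paths below are hypothetical:

def emit_for_partition(path, partition, all_partitions=None):
    # Sketch of the per-partition selection replacing the old -P filtering.
    if partition == 'system':
        # For system, drop entries owned by any of the other partitions.
        for skip in (all_partitions or '').split(','):
            if skip and (path.startswith(skip + '/') or
                         path.startswith('system/' + skip + '/')):
                return False
        return True
    return (path.startswith(partition + '/') or
            path.startswith('system/' + partition + '/'))

assert emit_for_partition('vendor/bin/toybox_vendor', 'vendor')
assert not emit_for_partition('vendor/bin/toybox_vendor', 'product')
assert not emit_for_partition('vendor/bin/toybox_vendor', 'system',
                              all_partitions='vendor,oem,odm,product')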
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 4839578dd3..dccff928f9 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -12,6 +12,7 @@ Further documentation can be found in the README.
import argparse
import ConfigParser
+import ctypes
import re
import sys
import textwrap
@@ -112,7 +113,8 @@ class Utils(object):
'Cannot specify delimiter character ":" in uid: "%s"' % uid)
if ':' in logon:
raise ValueError(
- 'Cannot specify delimiter character ":" in logon: "%s"' % logon)
+ 'Cannot specify delimiter character ":" in logon: "%s"' %
+ logon)
return logon, uid
@@ -138,13 +140,13 @@ class AID(object):
'media_codec': 'mediacodec'
}
- def __init__(self, identifier, value, found):
+ def __init__(self, identifier, value, found, login_shell):
"""
Args:
identifier: The identifier name for a #define <identifier>.
value: The value of the AID, aka the uid.
found (str): The file found in, not required to be specified.
-
+ login_shell (str): The login shell, as in the shell field of passwd(5).
Raises:
ValueError: if the friendly name is longer than 31 characters as
that is bionic's internal buffer size for name.
@@ -154,24 +156,28 @@ class AID(object):
self.identifier = identifier
self.value = value
self.found = found
+ self.login_shell = login_shell
+
try:
self.normalized_value = str(int(value, 0))
- except ValueException:
- raise ValueError('Invalid "value", not aid number, got: \"%s\"' % value)
+ except ValueError:
+ raise ValueError(
+ 'Invalid "value", not aid number, got: \"%s\"' % value)
# Where we calculate the friendly name
friendly = identifier[len(AID.PREFIX):].lower()
self.friendly = AID._fixup_friendly(friendly)
if len(self.friendly) > 31:
- raise ValueError('AID names must be under 32 characters "%s"' % self.friendly)
-
+ raise ValueError(
+ 'AID names must be under 32 characters "%s"' % self.friendly)
def __eq__(self, other):
return self.identifier == other.identifier \
and self.value == other.value and self.found == other.found \
- and self.normalized_value == other.normalized_value
+ and self.normalized_value == other.normalized_value \
+ and self.login_shell == other.login_shell
@staticmethod
def is_friendly(name):
@@ -214,6 +220,7 @@ class FSConfig(object):
user (str): The uid or #define identifier (AID_SYSTEM)
group (str): The gid or #define identifier (AID_SYSTEM)
caps (str): The capability set.
+ path (str): The path of the file or directory.
filename (str): The file it was found in.
"""
@@ -224,6 +231,7 @@ class FSConfig(object):
user (str): The uid or #define identifier (AID_SYSTEM)
group (str): The gid or #define identifier (AID_SYSTEM)
caps (str): The capability set as a list.
+ path (str): The path of the file or directory.
filename (str): The file it was found in.
"""
self.mode = mode
@@ -239,6 +247,51 @@ class FSConfig(object):
and self.group == other.group and self.caps == other.caps \
and self.path == other.path and self.filename == other.filename
+ def __repr__(self):
+ return 'FSConfig(%r, %r, %r, %r, %r, %r)' % (self.mode, self.user,
+ self.group, self.caps,
+ self.path, self.filename)
+
+
+class CapabilityHeaderParser(object):
+ """Parses capability.h file
+
+ Parses a C header file and extracts lines starting with #define CAP_<name>.
+ """
+
+ _CAP_DEFINE = re.compile(r'\s*#define\s+(CAP_\S+)\s+(\S+)')
+ _SKIP_CAPS = ['CAP_LAST_CAP', 'CAP_TO_INDEX(x)', 'CAP_TO_MASK(x)']
+
+ def __init__(self, capability_header):
+ """
+ Args:
+ capability_header (str): file name of the header file containing capability defines.
+ """
+
+ self.caps = {}
+ with open(capability_header) as open_file:
+ self._parse(open_file)
+
+ def _parse(self, capability_file):
+ """Parses a capability header file. Internal use only.
+
+ Args:
+ capability_file (file): The open capability header file to parse.
+ """
+
+ for line in capability_file:
+ match = CapabilityHeaderParser._CAP_DEFINE.match(line)
+ if match:
+ cap = match.group(1)
+ value = match.group(2)
+
+ if not cap in self._SKIP_CAPS:
+ try:
+ self.caps[cap] = int(value, 0)
+ except ValueError:
+ sys.exit('Could not parse capability define "%s":"%s"'
+ % (cap, value))
+
class AIDHeaderParser(object):
"""Parses an android_filesystem_config.h file.
@@ -253,10 +306,10 @@ class AIDHeaderParser(object):
work.
"""
-
_SKIP_AIDS = [
re.compile(r'%sUNUSED[0-9].*' % AID.PREFIX),
- re.compile(r'%sAPP' % AID.PREFIX), re.compile(r'%sUSER' % AID.PREFIX)
+ re.compile(r'%sAPP' % AID.PREFIX),
+ re.compile(r'%sUSER' % AID.PREFIX)
]
_AID_DEFINE = re.compile(r'\s*#define\s+%s.*' % AID.PREFIX)
_OEM_START_KW = 'START'
@@ -307,7 +360,9 @@ class AIDHeaderParser(object):
identifier = chunks[1]
value = chunks[2]
- if any(x.match(identifier) for x in AIDHeaderParser._SKIP_AIDS):
+ if any(
+ x.match(identifier)
+ for x in AIDHeaderParser._SKIP_AIDS):
continue
try:
@@ -319,8 +374,8 @@ class AIDHeaderParser(object):
self._handle_aid(identifier, value)
except ValueError as exception:
sys.exit(
- error_message('{} for "{}"'.format(exception,
- identifier)))
+ error_message('{} for "{}"'.format(
+ exception, identifier)))
def _handle_aid(self, identifier, value):
"""Handle an AID C #define.
@@ -336,15 +391,15 @@ class AIDHeaderParser(object):
ValueError: With message set to indicate the error.
"""
- aid = AID(identifier, value, self._aid_header)
+ aid = AID(identifier, value, self._aid_header, '/system/bin/sh')
# duplicate name
if aid.friendly in self._aid_name_to_value:
raise ValueError('Duplicate aid "%s"' % identifier)
if value in self._aid_value_to_name and aid.identifier not in AIDHeaderParser._COLLISION_OK:
- raise ValueError('Duplicate aid value "%s" for %s' % (value,
- identifier))
+ raise ValueError(
+ 'Duplicate aid value "%s" for %s' % (value, identifier))
self._aid_name_to_value[aid.friendly] = aid
self._aid_value_to_name[value] = aid.friendly
@@ -397,11 +452,11 @@ class AIDHeaderParser(object):
if tmp == int_value:
raise ValueError('START and END values equal %u' % int_value)
elif is_start and tmp < int_value:
- raise ValueError('END value %u less than START value %u' %
- (tmp, int_value))
+ raise ValueError(
+ 'END value %u less than START value %u' % (tmp, int_value))
elif not is_start and tmp > int_value:
- raise ValueError('END value %u less than START value %u' %
- (int_value, tmp))
+ raise ValueError(
+ 'END value %u less than START value %u' % (int_value, tmp))
# Add START values to the head of the list and END values at the end.
# Thus, the list is ordered with index 0 as START and index 1 as END.
@@ -530,7 +585,7 @@ class FSConfigFileParser(object):
# list of handler to required options, used to identify the
# parsing section
- _SECTIONS = [('_handle_aid', ('value',)),
+ _SECTIONS = [('_handle_aid', ('value', )),
('_handle_path', ('mode', 'user', 'group', 'caps'))]
def __init__(self, config_files, oem_ranges):
@@ -593,8 +648,8 @@ class FSConfigFileParser(object):
break
if not found:
- sys.exit('Invalid section "%s" in file: "%s"' %
- (section, file_name))
+ sys.exit('Invalid section "%s" in file: "%s"' % (section,
+ file_name))
# sort entries:
# * specified path before prefix match
@@ -647,7 +702,7 @@ class FSConfigFileParser(object):
sys.exit(error_message('Found specified but unset "value"'))
try:
- aid = AID(section_name, value, file_name)
+ aid = AID(section_name, value, file_name, '/vendor/bin/sh')
except ValueError as exception:
sys.exit(error_message(exception))
@@ -714,9 +769,9 @@ class FSConfigFileParser(object):
try:
# test if string is int, if it is, use as is.
int(cap, 0)
- tmp.append('(' + cap + ')')
+ tmp.append(cap)
except ValueError:
- tmp.append('CAP_MASK_LONG(CAP_' + cap.upper() + ')')
+ tmp.append('CAP_' + cap.upper())
caps = tmp
@@ -731,7 +786,7 @@ class FSConfigFileParser(object):
if len(mode) != 4:
sys.exit('Mode must be 3 or 4 characters, got: "%s"' % mode)
- caps_str = '|'.join(caps)
+ caps_str = ','.join(caps)
entry = FSConfig(mode, user, group, caps_str, section_name, file_name)
if section_name[-1] == '/':
@@ -889,57 +944,20 @@ class FSConfigGen(BaseGenerator):
Output is used in generating fs_config_files and fs_config_dirs.
"""
- _GENERATED = textwrap.dedent("""\
- /*
- * THIS IS AN AUTOGENERATED FILE! DO NOT MODIFY
- */
- """)
-
- _INCLUDES = [
- '<private/android_filesystem_config.h>', '"generated_oem_aid.h"'
- ]
-
- _DEFINE_NO_DIRS = '#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS'
- _DEFINE_NO_FILES = '#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES'
-
- _DEFAULT_WARNING = (
- '#warning No device-supplied android_filesystem_config.h,'
- ' using empty default.')
-
- # Long names.
- # pylint: disable=invalid-name
- _NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS_ENTRY = (
- '{ 00000, AID_ROOT, AID_ROOT, 0,'
- '"system/etc/fs_config_dirs" },')
-
- _NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES_ENTRY = (
- '{ 00000, AID_ROOT, AID_ROOT, 0,'
- '"system/etc/fs_config_files" },')
-
- _IFDEF_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS = (
- '#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS')
- # pylint: enable=invalid-name
-
- _ENDIF = '#endif'
-
- _OPEN_FILE_STRUCT = (
- 'static const struct fs_path_config android_device_files[] = {')
-
- _OPEN_DIR_STRUCT = (
- 'static const struct fs_path_config android_device_dirs[] = {')
-
- _CLOSE_FILE_STRUCT = '};'
-
- _GENERIC_DEFINE = "#define %s\t%s"
-
- _FILE_COMMENT = '// Defined in file: \"%s\"'
-
def __init__(self, *args, **kwargs):
BaseGenerator.__init__(args, kwargs)
self._oem_parser = None
self._base_parser = None
self._friendly_to_aid = None
+ self._id_to_aid = None
+ self._capability_parser = None
+
+ self._partition = None
+ self._all_partitions = None
+ self._out_file = None
+ self._generate_files = False
+ self._generate_dirs = False
def add_opts(self, opt_group):
@@ -952,11 +970,56 @@ class FSConfigGen(BaseGenerator):
help='An android_filesystem_config.h file'
' to parse AIDs and OEM Ranges from')
+ opt_group.add_argument(
+ '--capability-header',
+ required=True,
+ help='A capability.h file to parse capability defines from')
+
+ opt_group.add_argument(
+ '--partition',
+ required=True,
+ help='Partition to generate contents for')
+
+ opt_group.add_argument(
+ '--all-partitions',
+ help='Comma separated list of all possible partitions, used to'
+ ' ignore these partitions when generating the output for the system partition'
+ )
+
+ opt_group.add_argument(
+ '--files', action='store_true', help='Output fs_config_files')
+
+ opt_group.add_argument(
+ '--dirs', action='store_true', help='Output fs_config_dirs')
+
+ opt_group.add_argument('--out_file', required=True, help='Output file')
+
def __call__(self, args):
+ self._capability_parser = CapabilityHeaderParser(
+ args['capability_header'])
self._base_parser = AIDHeaderParser(args['aid_header'])
self._oem_parser = FSConfigFileParser(args['fsconfig'],
self._base_parser.oem_ranges)
+
+ self._partition = args['partition']
+ self._all_partitions = args['all_partitions']
+ if self._partition == 'system' and self._all_partitions is None:
+ sys.exit(
+ 'All other partitions must be provided if generating output'
+ ' for the system partition')
+
+ self._out_file = args['out_file']
+
+ self._generate_files = args['files']
+ self._generate_dirs = args['dirs']
+
+ if self._generate_files and self._generate_dirs:
+ sys.exit('Only one of --files or --dirs can be provided')
+
+ if not self._generate_files and not self._generate_dirs:
+ sys.exit('One of --files or --dirs must be provided')
+
base_aids = self._base_parser.aids
oem_aids = self._oem_parser.aids
@@ -972,7 +1035,7 @@ class FSConfigGen(BaseGenerator):
common = base_set & oem_set
- if len(common) > 0:
+ if common:
emsg = 'Following AID Collisions detected for: \n'
for friendly in common:
base = base_friendly[friendly]
@@ -986,53 +1049,105 @@ class FSConfigGen(BaseGenerator):
self._friendly_to_aid = oem_friendly
self._friendly_to_aid.update(base_friendly)
+ self._id_to_aid = {aid.identifier: aid for aid in base_aids}
+ self._id_to_aid.update({aid.identifier: aid for aid in oem_aids})
+
self._generate()
- def _to_fs_entry(self, fs_config):
+ def _to_fs_entry(self, fs_config, out_file):
"""Converts an FSConfig entry to an fs entry.
- Prints '{ mode, user, group, caps, "path" },'.
+ Writes the fs_config contents to the output file.
Calls sys.exit() on error.
Args:
- fs_config (FSConfig): The entry to convert to
- a valid C array entry.
+ fs_config (FSConfig): The entry to convert and write to the file.
+ out_file (File): The file to write to.
"""
# Get some short names
mode = fs_config.mode
user = fs_config.user
group = fs_config.group
- fname = fs_config.filename
caps = fs_config.caps
path = fs_config.path
- emsg = 'Cannot convert friendly name "%s" to identifier!'
+ emsg = 'Cannot convert "%s" to identifier!'
+
+ # convert mode from octal string to integer
+ mode = int(mode, 8)
- # remap friendly names to identifier names
+ # remap names to values
if AID.is_friendly(user):
if user not in self._friendly_to_aid:
sys.exit(emsg % user)
- user = self._friendly_to_aid[user].identifier
+ user = self._friendly_to_aid[user].value
+ else:
+ if user not in self._id_to_aid:
+ sys.exit(emsg % user)
+ user = self._id_to_aid[user].value
if AID.is_friendly(group):
if group not in self._friendly_to_aid:
sys.exit(emsg % group)
- group = self._friendly_to_aid[group].identifier
+ group = self._friendly_to_aid[group].value
+ else:
+ if group not in self._id_to_aid:
+ sys.exit(emsg % group)
+ group = self._id_to_aid[group].value
- fmt = '{ %s, %s, %s, %s, "%s" },'
+ caps_dict = self._capability_parser.caps
- expanded = fmt % (mode, user, group, caps, path)
+ caps_value = 0
- print FSConfigGen._FILE_COMMENT % fname
- print ' ' + expanded
+ try:
+ # test if caps is an int
+ caps_value = int(caps, 0)
+ except ValueError:
+ caps_split = caps.split(',')
+ for cap in caps_split:
+ if cap not in caps_dict:
+ sys.exit('Unknown cap "%s" found!' % cap)
+ caps_value += 1 << caps_dict[cap]
+
+ path_length_with_null = len(path) + 1
+ path_length_aligned_64 = (path_length_with_null + 7) & ~7
+ # 16 bytes of header plus the path length with alignment
+ length = 16 + path_length_aligned_64
+
+ length_binary = bytearray(ctypes.c_uint16(length))
+ mode_binary = bytearray(ctypes.c_uint16(mode))
+ user_binary = bytearray(ctypes.c_uint16(int(user, 0)))
+ group_binary = bytearray(ctypes.c_uint16(int(group, 0)))
+ caps_binary = bytearray(ctypes.c_uint64(caps_value))
+ path_binary = ctypes.create_string_buffer(path,
+ path_length_aligned_64).raw
+
+ out_file.write(length_binary)
+ out_file.write(mode_binary)
+ out_file.write(user_binary)
+ out_file.write(group_binary)
+ out_file.write(caps_binary)
+ out_file.write(path_binary)
+
+ def _emit_entry(self, fs_config):
+ """Returns whether or not the input fs_config entry should be emitted."""
- @staticmethod
- def _gen_inc():
- """Generate the include header lines and print to stdout."""
- for include in FSConfigGen._INCLUDES:
- print '#include %s' % include
+ path = fs_config.path
+
+ if self._partition == 'system':
+ for skip_partition in self._all_partitions.split(','):
+ if path.startswith(skip_partition) or path.startswith(
+ 'system/' + skip_partition):
+ return False
+ return True
+ else:
+ if path.startswith(
+ self._partition) or path.startswith('system/' +
+ self._partition):
+ return True
+ return False
def _generate(self):
"""Generates an OEM android_filesystem_config.h header file to stdout.
@@ -1043,56 +1158,20 @@ class FSConfigGen(BaseGenerator):
entries.
aids ([AIDS]): A list of AID objects for Android Id entries.
"""
- print FSConfigGen._GENERATED
- print
-
- FSConfigGen._gen_inc()
- print
-
dirs = self._oem_parser.dirs
files = self._oem_parser.files
- aids = self._oem_parser.aids
-
- are_dirs = len(dirs) > 0
- are_files = len(files) > 0
- are_aids = len(aids) > 0
-
- if are_aids:
- for aid in aids:
- # use the preserved _path value
- print FSConfigGen._FILE_COMMENT % aid.found
- print FSConfigGen._GENERIC_DEFINE % (aid.identifier, aid.value)
-
- print
-
- if not are_dirs:
- print FSConfigGen._DEFINE_NO_DIRS + '\n'
-
- if not are_files:
- print FSConfigGen._DEFINE_NO_FILES + '\n'
-
- if not are_files and not are_dirs and not are_aids:
- return
-
- if are_files:
- print FSConfigGen._OPEN_FILE_STRUCT
- for fs_config in files:
- self._to_fs_entry(fs_config)
-
- if not are_dirs:
- print FSConfigGen._IFDEF_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
- print(
- ' ' +
- FSConfigGen._NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS_ENTRY)
- print FSConfigGen._ENDIF
- print FSConfigGen._CLOSE_FILE_STRUCT
- if are_dirs:
- print FSConfigGen._OPEN_DIR_STRUCT
- for dir_entry in dirs:
- self._to_fs_entry(dir_entry)
+ if self._generate_files:
+ with open(self._out_file, 'wb') as open_file:
+ for fs_config in files:
+ if self._emit_entry(fs_config):
+ self._to_fs_entry(fs_config, open_file)
- print FSConfigGen._CLOSE_FILE_STRUCT
+ if self._generate_dirs:
+ with open(self._out_file, 'wb') as open_file:
+ for dir_entry in dirs:
+ if self._emit_entry(dir_entry):
+ self._to_fs_entry(dir_entry, open_file)
@generator('aidarray')
@@ -1105,9 +1184,14 @@ class AIDArrayGen(BaseGenerator):
_INCLUDE = '#include <private/android_filesystem_config.h>'
+ # Note that the android_id name field is of type 'const char[]' instead of
+ # 'const char*'. While this seems less straightforward as we need to
+ # calculate the max length of all names, this allows the entire android_ids
+ # table to be placed in .rodata section instead of .data.rel.ro section,
+ # resulting in less memory pressure.
_STRUCT_FS_CONFIG = textwrap.dedent("""
struct android_id_info {
- const char *name;
+ const char name[%d];
unsigned aid;
};""")
@@ -1129,12 +1213,13 @@ class AIDArrayGen(BaseGenerator):
def __call__(self, args):
hdr = AIDHeaderParser(args['hdrfile'])
+ max_name_length = max(len(aid.friendly) + 1 for aid in hdr.aids)
print AIDArrayGen._GENERATED
print
print AIDArrayGen._INCLUDE
print
- print AIDArrayGen._STRUCT_FS_CONFIG
+ print AIDArrayGen._STRUCT_FS_CONFIG % max_name_length
print
print AIDArrayGen._OPEN_ID_ARRAY
@@ -1247,11 +1332,12 @@ class PasswdGen(BaseGenerator):
aids = parser.aids
# nothing to do if no aids defined
- if len(aids) == 0:
+ if not aids:
return
for aid in aids:
- if required_prefix is None or aid.friendly.startswith(required_prefix):
+ if required_prefix is None or aid.friendly.startswith(
+ required_prefix):
self._print_formatted_line(aid)
else:
sys.exit("%s: AID '%s' must start with '%s'" %
@@ -1280,7 +1366,7 @@ class PasswdGen(BaseGenerator):
except ValueError as exception:
sys.exit(exception)
- print "%s::%s:%s::/:/system/bin/sh" % (logon, uid, uid)
+ print "%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell)
@generator('group')
@@ -1308,6 +1394,29 @@ class GroupGen(PasswdGen):
print "%s::%s:" % (logon, uid)
+@generator('print')
+class PrintGen(BaseGenerator):
+ """Prints just the constants and values, separated by spaces, in an easy to
+ parse format for use by other scripts.
+
+ Each line is just the identifier and the value, separated by a space.
+ """
+
+ def add_opts(self, opt_group):
+ opt_group.add_argument(
+ 'aid-header', help='An android_filesystem_config.h file.')
+
+ def __call__(self, args):
+
+ hdr_parser = AIDHeaderParser(args['aid-header'])
+ aids = hdr_parser.aids
+
+ aids.sort(key=lambda item: int(item.normalized_value))
+
+ for aid in aids:
+ print '%s %s' % (aid.identifier, aid.normalized_value)
+
+
def main():
"""Main entry point for execution."""
diff --git a/tools/fs_config/fs_config_test.cpp b/tools/fs_config/fs_config_test.cpp
deleted file mode 100644
index f95a4cad6f..0000000000
--- a/tools/fs_config/fs_config_test.cpp
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <stdio.h>
-#include <sys/cdefs.h>
-
-#include <string>
-#include <vector>
-
-#include <android-base/file.h>
-#include <android-base/macros.h>
-#include <android-base/strings.h>
-#include <android-base/stringprintf.h>
-#include <gtest/gtest.h>
-#include <private/android_filesystem_config.h>
-#include <private/fs_config.h>
-
-#include "android_filesystem_config_test_data.h"
-
-// must run test in the test directory
-const static char fs_config_generate_command[] = "./fs_config_generate_test";
-
-static std::string popenToString(std::string command) {
- std::string ret;
-
- FILE* fp = popen(command.c_str(), "r");
- if (fp) {
- if (!android::base::ReadFdToString(fileno(fp), &ret)) ret = "";
- pclose(fp);
- }
- return ret;
-}
-
-static void confirm(std::string&& data, const fs_path_config* config,
- ssize_t num_config) {
- const struct fs_path_config_from_file* pc =
- reinterpret_cast<const fs_path_config_from_file*>(data.c_str());
- size_t len = data.size();
-
- ASSERT_TRUE(config != NULL);
- ASSERT_LT(0, num_config);
-
- while (len > 0) {
- uint16_t host_len = pc->len;
- if (host_len > len) break;
-
- EXPECT_EQ(config->mode, pc->mode);
- EXPECT_EQ(config->uid, pc->uid);
- EXPECT_EQ(config->gid, pc->gid);
- EXPECT_EQ(config->capabilities, pc->capabilities);
- EXPECT_STREQ(config->prefix, pc->prefix);
-
- EXPECT_LT(0, num_config);
- --num_config;
- if (num_config >= 0) ++config;
- pc = reinterpret_cast<const fs_path_config_from_file*>(
- reinterpret_cast<const char*>(pc) + host_len);
- len -= host_len;
- }
- EXPECT_EQ(0, num_config);
-}
-
-/* See local android_filesystem_config.h for test data */
-
-TEST(fs_conf_test, dirs) {
- confirm(popenToString(
- android::base::StringPrintf("%s -D", fs_config_generate_command)),
- android_device_dirs, arraysize(android_device_dirs));
-}
-
-TEST(fs_conf_test, files) {
- confirm(popenToString(
- android::base::StringPrintf("%s -F", fs_config_generate_command)),
- android_device_files, arraysize(android_device_files));
-}
-
-static const char vendor_str[] = "vendor/";
-static const char vendor_alt_str[] = "system/vendor/";
-static const char oem_str[] = "oem/";
-static const char oem_alt_str[] = "system/oem/";
-static const char odm_str[] = "odm/";
-static const char odm_alt_str[] = "system/odm/";
-
-TEST(fs_conf_test, system_dirs) {
- std::vector<fs_path_config> dirs;
- const fs_path_config* config = android_device_dirs;
- for (size_t num = arraysize(android_device_dirs); num; --num) {
- if (!android::base::StartsWith(config->prefix, vendor_str) &&
- !android::base::StartsWith(config->prefix, vendor_alt_str) &&
- !android::base::StartsWith(config->prefix, oem_str) &&
- !android::base::StartsWith(config->prefix, oem_alt_str) &&
- !android::base::StartsWith(config->prefix, odm_str) &&
- !android::base::StartsWith(config->prefix, odm_alt_str)) {
- dirs.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -D -P -vendor,-oem,-odm", fs_config_generate_command)),
- &dirs[0], dirs.size());
-}
-
-TEST(fs_conf_test, vendor_dirs) {
- std::vector<fs_path_config> dirs;
- const fs_path_config* config = android_device_dirs;
- for (size_t num = arraysize(android_device_dirs); num; --num) {
- if (android::base::StartsWith(config->prefix, vendor_str) ||
- android::base::StartsWith(config->prefix, vendor_alt_str)) {
- dirs.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -D -P vendor", fs_config_generate_command)),
- &dirs[0], dirs.size());
-}
-
-TEST(fs_conf_test, oem_dirs) {
- std::vector<fs_path_config> dirs;
- const fs_path_config* config = android_device_dirs;
- for (size_t num = arraysize(android_device_dirs); num; --num) {
- if (android::base::StartsWith(config->prefix, oem_str) ||
- android::base::StartsWith(config->prefix, oem_alt_str)) {
- dirs.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -D -P oem", fs_config_generate_command)),
- &dirs[0], dirs.size());
-}
-
-TEST(fs_conf_test, odm_dirs) {
- std::vector<fs_path_config> dirs;
- const fs_path_config* config = android_device_dirs;
- for (size_t num = arraysize(android_device_dirs); num; --num) {
- if (android::base::StartsWith(config->prefix, odm_str) ||
- android::base::StartsWith(config->prefix, odm_alt_str)) {
- dirs.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -D -P odm", fs_config_generate_command)),
- &dirs[0], dirs.size());
-}
-
-TEST(fs_conf_test, system_files) {
- std::vector<fs_path_config> files;
- const fs_path_config* config = android_device_files;
- for (size_t num = arraysize(android_device_files); num; --num) {
- if (!android::base::StartsWith(config->prefix, vendor_str) &&
- !android::base::StartsWith(config->prefix, vendor_alt_str) &&
- !android::base::StartsWith(config->prefix, oem_str) &&
- !android::base::StartsWith(config->prefix, oem_alt_str) &&
- !android::base::StartsWith(config->prefix, odm_str) &&
- !android::base::StartsWith(config->prefix, odm_alt_str)) {
- files.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -F -P -vendor,-oem,-odm", fs_config_generate_command)),
- &files[0], files.size());
-}
-
-TEST(fs_conf_test, vendor_files) {
- std::vector<fs_path_config> files;
- const fs_path_config* config = android_device_files;
- for (size_t num = arraysize(android_device_files); num; --num) {
- if (android::base::StartsWith(config->prefix, vendor_str) ||
- android::base::StartsWith(config->prefix, vendor_alt_str)) {
- files.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -F -P vendor", fs_config_generate_command)),
- &files[0], files.size());
-}
-
-TEST(fs_conf_test, oem_files) {
- std::vector<fs_path_config> files;
- const fs_path_config* config = android_device_files;
- for (size_t num = arraysize(android_device_files); num; --num) {
- if (android::base::StartsWith(config->prefix, oem_str) ||
- android::base::StartsWith(config->prefix, oem_alt_str)) {
- files.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -F -P oem", fs_config_generate_command)),
- &files[0], files.size());
-}
-
-TEST(fs_conf_test, odm_files) {
- std::vector<fs_path_config> files;
- const fs_path_config* config = android_device_files;
- for (size_t num = arraysize(android_device_files); num; --num) {
- if (android::base::StartsWith(config->prefix, odm_str) ||
- android::base::StartsWith(config->prefix, odm_alt_str)) {
- files.emplace_back(*config);
- }
- ++config;
- }
- confirm(popenToString(android::base::StringPrintf(
- "%s -F -P odm", fs_config_generate_command)),
- &files[0], files.size());
-}
diff --git a/tools/fs_config/test_fs_config_generator.py b/tools/fs_config/test_fs_config_generator.py
index a49058a813..b7f173eb92 100755
--- a/tools/fs_config/test_fs_config_generator.py
+++ b/tools/fs_config/test_fs_config_generator.py
@@ -45,19 +45,21 @@ class Tests(unittest.TestCase):
def test_aid(self):
"""Test AID class constructor"""
- aid = AID('AID_FOO_BAR', '0xFF', 'myfakefile')
- self.assertEquals(aid.identifier, 'AID_FOO_BAR')
- self.assertEquals(aid.value, '0xFF')
- self.assertEquals(aid.found, 'myfakefile')
- self.assertEquals(aid.normalized_value, '255')
- self.assertEquals(aid.friendly, 'foo_bar')
-
- aid = AID('AID_MEDIA_EX', '1234', 'myfakefile')
- self.assertEquals(aid.identifier, 'AID_MEDIA_EX')
- self.assertEquals(aid.value, '1234')
- self.assertEquals(aid.found, 'myfakefile')
- self.assertEquals(aid.normalized_value, '1234')
- self.assertEquals(aid.friendly, 'mediaex')
+ aid = AID('AID_FOO_BAR', '0xFF', 'myfakefile', '/system/bin/sh')
+ self.assertEqual(aid.identifier, 'AID_FOO_BAR')
+ self.assertEqual(aid.value, '0xFF')
+ self.assertEqual(aid.found, 'myfakefile')
+ self.assertEqual(aid.normalized_value, '255')
+ self.assertEqual(aid.friendly, 'foo_bar')
+ self.assertEqual(aid.login_shell, '/system/bin/sh')
+
+ aid = AID('AID_MEDIA_EX', '1234', 'myfakefile', '/vendor/bin/sh')
+ self.assertEqual(aid.identifier, 'AID_MEDIA_EX')
+ self.assertEqual(aid.value, '1234')
+ self.assertEqual(aid.found, 'myfakefile')
+ self.assertEqual(aid.normalized_value, '1234')
+ self.assertEqual(aid.friendly, 'mediaex')
+ self.assertEqual(aid.login_shell, '/vendor/bin/sh')
def test_aid_header_parser_good(self):
"""Test AID Header Parser good input file"""
@@ -265,9 +267,9 @@ class Tests(unittest.TestCase):
dirs = parser.dirs
aids = parser.aids
- self.assertEquals(len(files), 1)
- self.assertEquals(len(dirs), 1)
- self.assertEquals(len(aids), 1)
+ self.assertEqual(len(files), 1)
+ self.assertEqual(len(dirs), 1)
+ self.assertEqual(len(aids), 1)
aid = aids[0]
fcap = files[0]
@@ -275,14 +277,14 @@ class Tests(unittest.TestCase):
self.assertEqual(fcap,
FSConfig('0777', 'AID_FOO', 'AID_SYSTEM',
- '(1ULL << CAP_BLOCK_SUSPEND)',
+ 'CAP_BLOCK_SUSPEND',
'/system/bin/file', temp_file.name))
self.assertEqual(dcap,
- FSConfig('0777', 'AID_FOO', 'AID_SYSTEM', '(0)',
+ FSConfig('0777', 'AID_FOO', 'AID_SYSTEM', '0',
'/vendor/path/dir/', temp_file.name))
- self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name))
+ self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name, '/vendor/bin/sh'))
def test_fs_config_file_parser_bad(self):
"""Test FSConfig Parser bad input file"""
diff --git a/tools/generate-enforce-rro-android-manifest.py b/tools/generate-enforce-rro-android-manifest.py
index 68331cfad1..2d9382af49 100755
--- a/tools/generate-enforce-rro-android-manifest.py
+++ b/tools/generate-enforce-rro-android-manifest.py
@@ -23,10 +23,10 @@ import os
import sys
ANDROID_MANIFEST_TEMPLATE="""<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- package="%s.auto_generated_rro__"
+ package="%s.auto_generated_rro_%s__"
android:versionCode="1"
android:versionName="1.0">
- <overlay android:targetPackage="%s" android:priority="0" android:isStatic="true"/>
+ <overlay android:targetPackage="%s" android:priority="%s" android:isStatic="true"/>
</manifest>
"""
@@ -40,6 +40,12 @@ def get_args():
'-p', '--package-info', required=True,
help='Manifest package name or manifest file path of source module.')
parser.add_argument(
+ '--partition', required=True,
+ help='The partition this RRO package is installed on.')
+ parser.add_argument(
+ '--priority', required=True,
+ help='The priority for the <overlay>.')
+ parser.add_argument(
'-o', '--output', required=True,
help='Output manifest file path.')
return parser.parse_args()
@@ -48,8 +54,11 @@ def get_args():
def main(argv):
args = get_args()
- package_name = args.package_info
- if not args.use_package_name:
+ partition = args.partition
+ priority = args.priority
+ if args.use_package_name:
+ package_name = args.package_info
+ else:
with open(args.package_info) as f:
data = f.read()
f.close()
@@ -57,7 +66,7 @@ def main(argv):
package_name = dom.documentElement.getAttribute('package')
with open(args.output, 'w+') as f:
- f.write(ANDROID_MANIFEST_TEMPLATE % (package_name, package_name))
+ f.write(ANDROID_MANIFEST_TEMPLATE % (package_name, partition, package_name, priority))
f.close()
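For reference, a minimal sketch of how the updated template is filled: the package name appears twice, and the new partition and priority arguments slot into the generated RRO package name and the <overlay> priority. The package name, partition and priority values here are hypothetical:

ANDROID_MANIFEST_TEMPLATE = """<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="%s.auto_generated_rro_%s__"
    android:versionCode="1"
    android:versionName="1.0">
    <overlay android:targetPackage="%s" android:priority="%s" android:isStatic="true"/>
</manifest>
"""

package_name, partition, priority = 'com.example.settings', 'vendor', '0'
print(ANDROID_MANIFEST_TEMPLATE % (package_name, partition, package_name, priority))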
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index b754174a9f..49011b20f6 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -238,12 +238,14 @@ def main(argv):
if len(included_subdirs) > 0:
matched = False
for subdir in included_subdirs:
- if root.startswith(input_dir + '/' + subdir):
+ if (root == (input_dir + '/' + subdir) or
+ root.startswith(input_dir + '/' + subdir + '/')):
matched = True
break
elif len(excluded_subdirs) > 0:
for subdir in excluded_subdirs:
- if root.startswith(input_dir + '/' + subdir):
+ if (root == (input_dir + '/' + subdir) or
+ root.startswith(input_dir + '/' + subdir + '/')):
matched = False
break
if matched and file.endswith(".txt"):
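The change above tightens the subdir match: a bare startswith() on input_dir + '/' + subdir would also accept sibling directories that merely share the prefix, so the check now requires an exact match or a trailing '/'. A small sketch of the corrected predicate, with hypothetical paths:

def under_subdir(root, input_dir, subdir):
    # Match the subdir itself or anything strictly below it, but not
    # siblings like 'vendor_extra' when subdir is 'vendor'.
    base = input_dir + '/' + subdir
    return root == base or root.startswith(base + '/')

assert under_subdir('/out/NOTICE_FILES/vendor/lib', '/out/NOTICE_FILES', 'vendor')
assert under_subdir('/out/NOTICE_FILES/vendor', '/out/NOTICE_FILES', 'vendor')
assert not under_subdir('/out/NOTICE_FILES/vendor_extra/lib', '/out/NOTICE_FILES', 'vendor')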
diff --git a/tools/releasetools/OWNERS b/tools/releasetools/OWNERS
index 39448cf662..766adb4e59 100644
--- a/tools/releasetools/OWNERS
+++ b/tools/releasetools/OWNERS
@@ -1 +1,2 @@
tbao@google.com
+xunchang@google.com
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index f68976ebc8..1090d57194 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -28,7 +28,8 @@ Usage: add_img_to_target_files [flag] target_files
-r (--rebuild_recovery)
Rebuild the recovery patch and write it to the system image. Only
- meaningful when system image needs to be rebuilt.
+ meaningful when the system image needs to be rebuilt and there are separate
+ boot / recovery images.
--replace_verity_private_key
Replace the private key used for verity signing. (same as the option
@@ -46,15 +47,16 @@ Usage: add_img_to_target_files [flag] target_files
from __future__ import print_function
import datetime
+import logging
import os
import shlex
import shutil
-import subprocess
import sys
import uuid
import zipfile
import build_image
+import build_super_image
import common
import rangelib
import sparse_img
@@ -63,8 +65,9 @@ if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
-OPTIONS = common.OPTIONS
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
OPTIONS.add_missing = False
OPTIONS.rebuild_recovery = False
OPTIONS.replace_updated_files_list = []
@@ -72,24 +75,31 @@ OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
OPTIONS.is_signing = False
-
-# Partitions that should have their care_map added to META/care_map.txt.
-PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product')
+# Use a fixed timestamp (01/01/2009 00:00:00 UTC) for files when packaging
+# images. (b/24377993, b/80600931)
+FIXED_FILE_TIMESTAMP = int((
+ datetime.datetime(2009, 1, 1, 0, 0, 0, 0, None) -
+ datetime.datetime.utcfromtimestamp(0)).total_seconds())
class OutputFile(object):
+ """A helper class to write a generated file to the given dir or zip.
+
+ When generating images, we want the outputs to go into the given zip file, or
+ the given dir.
+
+ Attributes:
+ name: The name of the output file, regardless of the final destination.
+ """
+
def __init__(self, output_zip, input_dir, prefix, name):
+ # We write the intermediate output file under the given input_dir, even if
+ # the final destination is a zip archive.
+ self.name = os.path.join(input_dir, prefix, name)
self._output_zip = output_zip
- self.input_name = os.path.join(input_dir, prefix, name)
-
if self._output_zip:
self._zip_name = os.path.join(prefix, name)
- root, suffix = os.path.splitext(name)
- self.name = common.MakeTempFile(prefix=root + '-', suffix=suffix)
- else:
- self.name = self.input_name
-
def Write(self):
if self._output_zip:
common.ZipWrite(self._output_zip, self.name, self._zip_name)
@@ -106,16 +116,19 @@ def GetCareMap(which, imgname):
(which, care_map_ranges): care_map_ranges is the raw string of the care_map
RangeSet.
"""
- assert which in PARTITIONS_WITH_CARE_MAP
+ assert which in common.PARTITIONS_WITH_CARE_MAP
simg = sparse_img.SparseImage(imgname)
care_map_ranges = simg.care_map
- key = which + "_adjusted_partition_size"
- adjusted_blocks = OPTIONS.info_dict.get(key)
- if adjusted_blocks:
- assert adjusted_blocks > 0, "blocks should be positive for " + which
- care_map_ranges = care_map_ranges.intersect(rangelib.RangeSet(
- "0-%d" % (adjusted_blocks,)))
+ size_key = which + "_image_size"
+ image_size = OPTIONS.info_dict.get(size_key)
+ # image_size excludes the verity metadata blocks of the given image. When
+ # AVB is enabled, it is the max image size returned by the AVB tool.
+ # this size is the max image size returned by the AVB tool
+ image_blocks = int(image_size) / 4096 - 1
+ assert image_blocks > 0, "blocks for {} must be positive".format(which)
+ care_map_ranges = care_map_ranges.intersect(
+ rangelib.RangeSet("0-{}".format(image_blocks)))
return [which, care_map_ranges.to_string_raw()]
@@ -125,23 +138,25 @@ def AddSystem(output_zip, recovery_img=None, boot_img=None):
output_zip. Returns the name of the system image file."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.img")
- if os.path.exists(img.input_name):
- print("system.img already exists; no need to rebuild...")
- return img.input_name
+ if os.path.exists(img.name):
+ logger.info("system.img already exists; no need to rebuild...")
+ return img.name
def output_sink(fn, data):
ofile = open(os.path.join(OPTIONS.input_tmp, "SYSTEM", fn), "w")
ofile.write(data)
ofile.close()
- arc_name = "SYSTEM/" + fn
- if arc_name in output_zip.namelist():
- OPTIONS.replace_updated_files_list.append(arc_name)
- else:
- common.ZipWrite(output_zip, ofile.name, arc_name)
+ if output_zip:
+ arc_name = "SYSTEM/" + fn
+ if arc_name in output_zip.namelist():
+ OPTIONS.replace_updated_files_list.append(arc_name)
+ else:
+ common.ZipWrite(output_zip, ofile.name, arc_name)
- if OPTIONS.rebuild_recovery:
- print("Building new recovery patch")
+ if (OPTIONS.rebuild_recovery and recovery_img is not None and
+ boot_img is not None):
+ logger.info("Building new recovery patch")
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
boot_img, info_dict=OPTIONS.info_dict)
@@ -157,8 +172,8 @@ def AddSystemOther(output_zip):
and store it in output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_other.img")
- if os.path.exists(img.input_name):
- print("system_other.img already exists; no need to rebuild...")
+ if os.path.exists(img.name):
+ logger.info("system_other.img already exists; no need to rebuild...")
return
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
@@ -169,9 +184,9 @@ def AddVendor(output_zip):
output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
- if os.path.exists(img.input_name):
- print("vendor.img already exists; no need to rebuild...")
- return img.input_name
+ if os.path.exists(img.name):
+ logger.info("vendor.img already exists; no need to rebuild...")
+ return img.name
block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "vendor", img,
@@ -184,9 +199,9 @@ def AddProduct(output_zip):
output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "product.img")
- if os.path.exists(img.input_name):
- print("product.img already exists; no need to rebuild...")
- return img.input_name
+ if os.path.exists(img.name):
+ logger.info("product.img already exists; no need to rebuild...")
+ return img.name
block_list = OutputFile(
output_zip, OPTIONS.input_tmp, "IMAGES", "product.map")
@@ -196,6 +211,40 @@ def AddProduct(output_zip):
return img.name
+def AddProductServices(output_zip):
+ """Turn the contents of PRODUCT_SERVICES into a product_services image and
+ store it in output_zip."""
+
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
+ "product_services.img")
+ if os.path.exists(img.name):
+ logger.info("product_services.img already exists; no need to rebuild...")
+ return img.name
+
+ block_list = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "product_services.map")
+ CreateImage(
+ OPTIONS.input_tmp, OPTIONS.info_dict, "product_services", img,
+ block_list=block_list)
+ return img.name
+
+
+def AddOdm(output_zip):
+ """Turn the contents of ODM into an odm image and store it in output_zip."""
+
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "odm.img")
+ if os.path.exists(img.name):
+ logger.info("odm.img already exists; no need to rebuild...")
+ return img.name
+
+ block_list = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "odm.map")
+ CreateImage(
+ OPTIONS.input_tmp, OPTIONS.info_dict, "odm", img,
+ block_list=block_list)
+ return img.name
+
+
def AddDtbo(output_zip):
"""Adds the DTBO image.
@@ -203,9 +252,9 @@ def AddDtbo(output_zip):
image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
"""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
- if os.path.exists(img.input_name):
- print("dtbo.img already exists; no need to rebuild...")
- return img.input_name
+ if os.path.exists(img.name):
+ logger.info("dtbo.img already exists; no need to rebuild...")
+ return img.name
dtbo_prebuilt_path = os.path.join(
OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img")
@@ -214,7 +263,7 @@ def AddDtbo(output_zip):
# AVB-sign the image as needed.
if OPTIONS.info_dict.get("avb_enable") == "true":
- avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+ avbtool = OPTIONS.info_dict["avb_avbtool"]
part_size = OPTIONS.info_dict["dtbo_size"]
# The AVB hash footer will be replaced if already present.
cmd = [avbtool, "add_hash_footer", "--image", img.name,
@@ -223,17 +272,14 @@ def AddDtbo(output_zip):
args = OPTIONS.info_dict.get("avb_dtbo_add_hash_footer_args")
if args and args.strip():
cmd.extend(shlex.split(args))
- p = common.Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, \
- "avbtool add_hash_footer of %s failed" % (img.name,)
+ common.RunAndCheckOutput(cmd)
img.Write()
return img.name
def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
- print("creating " + what + ".img...")
+ logger.info("creating " + what + ".img...")
image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
fstab = info_dict["fstab"]
@@ -241,11 +287,7 @@ def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
if fstab and mount_point in fstab:
image_props["fs_type"] = fstab[mount_point].fs_type
- # Use a fixed timestamp (01/01/2009) when packaging the image.
- # Bug: 24377993
- epoch = datetime.datetime.fromtimestamp(0)
- timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
- image_props["timestamp"] = int(timestamp)
+ image_props["timestamp"] = FIXED_FILE_TIMESTAMP
if what == "system":
fs_config_prefix = ""
@@ -276,26 +318,29 @@ def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
hash_seed = "hash_seed-" + uuid_seed
image_props["hash_seed"] = str(uuid.uuid5(uuid.NAMESPACE_URL, hash_seed))
- succ = build_image.BuildImage(os.path.join(input_dir, what.upper()),
- image_props, output_file.name)
- assert succ, "build " + what + ".img image failed"
+ build_image.BuildImage(
+ os.path.join(input_dir, what.upper()), image_props, output_file.name)
output_file.Write()
if block_list:
block_list.Write()
- # Set the 'adjusted_partition_size' that excludes the verity blocks of the
- # given image. When avb is enabled, this size is the max image size returned
- # by the avb tool.
+ # Set the '_image_size' for given image size.
is_verity_partition = "verity_block_device" in image_props
verity_supported = (image_props.get("verity") == "true" or
image_props.get("avb_enable") == "true")
is_avb_enable = image_props.get("avb_hashtree_enable") == "true"
if verity_supported and (is_verity_partition or is_avb_enable):
- adjusted_blocks_value = image_props.get("partition_size")
- if adjusted_blocks_value:
- adjusted_blocks_key = what + "_adjusted_partition_size"
- info_dict[adjusted_blocks_key] = int(adjusted_blocks_value)/4096 - 1
+ image_size = image_props.get("image_size")
+ if image_size:
+ image_size_key = what + "_image_size"
+ info_dict[image_size_key] = int(image_size)
+
+ use_dynamic_size = (
+ info_dict.get("use_dynamic_partition_size") == "true" and
+ what in shlex.split(info_dict.get("dynamic_partition_list", "").strip()))
+ if use_dynamic_size:
+ info_dict.update(build_image.GlobalDictFromImageProp(image_props, what))
def AddUserdata(output_zip):
@@ -308,8 +353,8 @@ def AddUserdata(output_zip):
"""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "userdata.img")
- if os.path.exists(img.input_name):
- print("userdata.img already exists; no need to rebuild...")
+ if os.path.exists(img.name):
+ logger.info("userdata.img already exists; no need to rebuild...")
return
# Skip userdata.img if no size.
@@ -317,13 +362,9 @@ def AddUserdata(output_zip):
if not image_props.get("partition_size"):
return
- print("creating userdata.img...")
+ logger.info("creating userdata.img...")
- # Use a fixed timestamp (01/01/2009) when packaging the image.
- # Bug: 24377993
- epoch = datetime.datetime.fromtimestamp(0)
- timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
- image_props["timestamp"] = int(timestamp)
+ image_props["timestamp"] = FIXED_FILE_TIMESTAMP
if OPTIONS.info_dict.get("userdata_img_with_data") == "true":
user_dir = os.path.join(OPTIONS.input_tmp, "DATA")
@@ -333,65 +374,78 @@ def AddUserdata(output_zip):
fstab = OPTIONS.info_dict["fstab"]
if fstab:
image_props["fs_type"] = fstab["/data"].fs_type
- succ = build_image.BuildImage(user_dir, image_props, img.name)
- assert succ, "build userdata.img image failed"
+ build_image.BuildImage(user_dir, image_props, img.name)
common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
img.Write()
-def AppendVBMetaArgsForPartition(cmd, partition, img_path, public_key_dir):
- if not img_path:
- return
+def AppendVBMetaArgsForPartition(cmd, partition, image):
+ """Appends the VBMeta arguments for partition.
+ It sets up the VBMeta argument by including the partition descriptor from the
+ given 'image', or by configuring the partition as a chained partition.
+
+ Args:
+ cmd: A list of command args that will be used to generate the vbmeta image.
+ The argument for the partition will be appended to the list.
+ partition: The name of the partition (e.g. "system").
+ image: The path to the partition image.
+ """
# Check if chain partition is used.
key_path = OPTIONS.info_dict.get("avb_" + partition + "_key_path")
if key_path:
- # extract public key in AVB format to be included in vbmeta.img
- avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
- public_key_path = os.path.join(public_key_dir, "%s.avbpubkey" % partition)
- p = common.Run([avbtool, "extract_public_key", "--key", key_path,
- "--output", public_key_path],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, \
- "avbtool extract_public_key fail for partition: %r" % partition
-
- rollback_index_location = OPTIONS.info_dict[
- "avb_" + partition + "_rollback_index_location"]
- cmd.extend(["--chain_partition", "%s:%s:%s" % (
- partition, rollback_index_location, public_key_path)])
+ chained_partition_arg = common.GetAvbChainedPartitionArg(
+ partition, OPTIONS.info_dict)
+ cmd.extend(["--chain_partition", chained_partition_arg])
else:
- cmd.extend(["--include_descriptors_from_image", img_path])
+ cmd.extend(["--include_descriptors_from_image", image])
+
+def AddVBMeta(output_zip, partitions, name, needed_partitions):
+ """Creates a VBMeta image and stores it in output_zip.
-def AddVBMeta(output_zip, partitions):
- """Creates a VBMeta image and store it in output_zip.
+ It generates the requested VBMeta image; whether that is the top-level or a
+ chained VBMeta image is determined by the given name.
Args:
output_zip: The output zip file, which needs to be already open.
partitions: A dict that's keyed by partition names with image paths as
- values. Only valid partition names are accepted, which include 'boot',
- 'recovery', 'system', 'vendor', 'dtbo'.
+ values. Only valid partition names are accepted, as listed in
+ common.AVB_PARTITIONS.
+ name: Name of the VBMeta partition, e.g. 'vbmeta', 'vbmeta_system'.
+ needed_partitions: Partitions whose descriptors should be included into the
+ generated VBMeta image.
+
+ Returns:
+ Path to the created image.
+
+ Raises:
+ AssertionError: On invalid input args.
"""
- img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vbmeta.img")
- if os.path.exists(img.input_name):
- print("vbmeta.img already exists; not rebuilding...")
- return img.input_name
+ assert needed_partitions, "Needed partitions must be specified"
+
+ img = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "{}.img".format(name))
+ if os.path.exists(img.name):
+ logger.info("%s.img already exists; not rebuilding...", name)
+ return img.name
- avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+ avbtool = OPTIONS.info_dict["avb_avbtool"]
cmd = [avbtool, "make_vbmeta_image", "--output", img.name]
- common.AppendAVBSigningArgs(cmd, "vbmeta")
+ common.AppendAVBSigningArgs(cmd, name)
- public_key_dir = common.MakeTempDir(prefix="avbpubkey-")
for partition, path in partitions.items():
- assert partition in common.AVB_PARTITIONS, 'Unknown partition: %s' % (
- partition,)
- assert os.path.exists(path), 'Failed to find %s for partition %s' % (
- path, partition)
- AppendVBMetaArgsForPartition(cmd, partition, path, public_key_dir)
-
- args = OPTIONS.info_dict.get("avb_vbmeta_args")
+ if partition not in needed_partitions:
+ continue
+ assert (partition in common.AVB_PARTITIONS or
+ partition.startswith('vbmeta_')), \
+ 'Unknown partition: {}'.format(partition)
+ assert os.path.exists(path), \
+ 'Failed to find {} for {}'.format(path, partition)
+ AppendVBMetaArgsForPartition(cmd, partition, path)
+
+ args = OPTIONS.info_dict.get("avb_{}_args".format(name))
if args and args.strip():
split_args = shlex.split(args)
for index, arg in enumerate(split_args[:-1]):
@@ -405,20 +459,19 @@ def AddVBMeta(output_zip, partitions):
if os.path.exists(image_path):
continue
found = False
- for dir_name in ['IMAGES', 'RADIO', 'VENDOR_IMAGES', 'PREBUILT_IMAGES']:
+ for dir_name in ['IMAGES', 'RADIO', 'PREBUILT_IMAGES']:
alt_path = os.path.join(
OPTIONS.input_tmp, dir_name, os.path.basename(image_path))
if os.path.exists(alt_path):
split_args[index + 1] = alt_path
found = True
break
- assert found, 'failed to find %s' % (image_path,)
+ assert found, 'Failed to find {}'.format(image_path)
cmd.extend(split_args)
- p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "avbtool make_vbmeta_image failed"
+ common.RunAndCheckOutput(cmd)
img.Write()
+ return img.name
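
# Minimal sketch (hypothetical partition paths) of the avbtool command this
# function assembles for the top-level image: unchained partitions contribute
# --include_descriptors_from_image, chained ones contribute --chain_partition.
def _make_vbmeta_cmd_sketch():
  cmd = ["avbtool", "make_vbmeta_image", "--output", "vbmeta.img"]
  cmd.extend(["--include_descriptors_from_image", "IMAGES/boot.img"])
  cmd.extend(["--chain_partition",
              "vbmeta_system:1:/tmp/avbpubkey-xyz/vbmeta_system.avbpubkey"])
  return cmd
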
def AddPartitionTable(output_zip):
@@ -427,7 +480,7 @@ def AddPartitionTable(output_zip):
img = OutputFile(
output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.img")
bpt = OutputFile(
- output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.bpt")
+ output_zip, OPTIONS.input_tmp, "META", "partition-table.bpt")
# use BPTTOOL from environ, or "bpttool" if empty or not set.
bpttool = os.getenv("BPTTOOL") or "bpttool"
@@ -443,10 +496,7 @@ def AddPartitionTable(output_zip):
args = OPTIONS.info_dict.get("board_bpt_make_table_args")
if args:
cmd.extend(shlex.split(args))
-
- p = common.Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "bpttool make_table failed"
+ common.RunAndCheckOutput(cmd)
img.Write()
bpt.Write()
@@ -456,8 +506,8 @@ def AddCache(output_zip):
"""Create an empty cache image and store it in output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "cache.img")
- if os.path.exists(img.input_name):
- print("cache.img already exists; no need to rebuild...")
+ if os.path.exists(img.name):
+ logger.info("cache.img already exists; no need to rebuild...")
return
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
@@ -465,39 +515,30 @@ def AddCache(output_zip):
if "fs_type" not in image_props:
return
- print("creating cache.img...")
+ logger.info("creating cache.img...")
- # Use a fixed timestamp (01/01/2009) when packaging the image.
- # Bug: 24377993
- epoch = datetime.datetime.fromtimestamp(0)
- timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
- image_props["timestamp"] = int(timestamp)
+ image_props["timestamp"] = FIXED_FILE_TIMESTAMP
user_dir = common.MakeTempDir()
fstab = OPTIONS.info_dict["fstab"]
if fstab:
image_props["fs_type"] = fstab["/cache"].fs_type
- succ = build_image.BuildImage(user_dir, image_props, img.name)
- assert succ, "build cache.img image failed"
+ build_image.BuildImage(user_dir, image_props, img.name)
common.CheckSize(img.name, "cache.img", OPTIONS.info_dict)
img.Write()
-def AddRadioImagesForAbOta(output_zip, ab_partitions):
- """Adds the radio images needed for A/B OTA to the output file.
-
- It parses the list of A/B partitions, looks for the missing ones from RADIO/
- or VENDOR_IMAGES/ dirs, and copies them to IMAGES/ of the output file (or
- dir).
+def CheckAbOtaImages(output_zip, ab_partitions):
+ """Checks that all the listed A/B partitions have their images available.
- It also ensures that on returning from the function all the listed A/B
- partitions must have their images available under IMAGES/.
+  The images need to be available under IMAGES/ or RADIO/, with the former
+  taking priority.
Args:
output_zip: The output zip file (needs to be already open), or None to
- write images to OPTIONS.input_tmp/.
+ find images in OPTIONS.input_tmp/.
ab_partitions: The list of A/B partitions.
Raises:
@@ -505,53 +546,35 @@ def AddRadioImagesForAbOta(output_zip, ab_partitions):
"""
for partition in ab_partitions:
img_name = partition.strip() + ".img"
- prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
- if os.path.exists(prebuilt_path):
- print("%s already exists, no need to overwrite..." % (img_name,))
- continue
-
- img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
- if os.path.exists(img_radio_path):
- if output_zip:
- common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
- else:
- shutil.copy(img_radio_path, prebuilt_path)
- continue
-
- # Walk through VENDOR_IMAGES/ since files could be under subdirs.
- img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
- for root, _, files in os.walk(img_vendor_dir):
- if img_name in files:
- if output_zip:
- common.ZipWrite(output_zip, os.path.join(root, img_name),
- "IMAGES/" + img_name)
- else:
- shutil.copy(os.path.join(root, img_name), prebuilt_path)
- break
# Assert that the image is present under IMAGES/ now.
if output_zip:
# Zip spec says: All slashes MUST be forward slashes.
- img_path = 'IMAGES/' + img_name
- assert img_path in output_zip.namelist(), "cannot find " + img_name
+ images_path = "IMAGES/" + img_name
+ radio_path = "RADIO/" + img_name
+ available = (images_path in output_zip.namelist() or
+ radio_path in output_zip.namelist())
else:
- img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
- assert os.path.exists(img_path), "cannot find " + img_name
+ images_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+ radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+ available = os.path.exists(images_path) or os.path.exists(radio_path)
+ assert available, "Failed to find " + img_name
-def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
- """Generates and adds care_map.txt for system and vendor partitions.
+
+def AddCareMapForAbOta(output_zip, ab_partitions, image_paths):
+ """Generates and adds care_map.pb for a/b partition that has care_map.
Args:
output_zip: The output zip file (needs to be already open), or None to
- write images to OPTIONS.input_tmp/.
+ write care_map.pb to OPTIONS.input_tmp/.
ab_partitions: The list of A/B partitions.
image_paths: A map from the partition name to the image path.
"""
care_map_list = []
for partition in ab_partitions:
partition = partition.strip()
- if partition not in PARTITIONS_WITH_CARE_MAP:
+ if partition not in common.PARTITIONS_WITH_CARE_MAP:
continue
verity_block_device = "{}_verity_block_device".format(partition)
@@ -562,15 +585,41 @@ def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
assert os.path.exists(image_path)
care_map_list += GetCareMap(partition, image_path)
- if care_map_list:
- care_map_path = "META/care_map.txt"
- if output_zip and care_map_path not in output_zip.namelist():
- common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
- else:
- with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
- fp.write('\n'.join(care_map_list))
- if output_zip:
- OPTIONS.replace_updated_files_list.append(care_map_path)
+    # Adds the fingerprint field to the care_map.
+ build_props = OPTIONS.info_dict.get(partition + ".build.prop", {})
+ prop_name_list = ["ro.{}.build.fingerprint".format(partition),
+ "ro.{}.build.thumbprint".format(partition)]
+
+ present_props = [x for x in prop_name_list if x in build_props]
+ if not present_props:
+ logger.warning("fingerprint is not present for partition %s", partition)
+ property_id, fingerprint = "unknown", "unknown"
+ else:
+ property_id = present_props[0]
+ fingerprint = build_props[property_id]
+ care_map_list += [property_id, fingerprint]
+
+ if not care_map_list:
+ return
+
+  # Converts the list into a protobuf message by calling care_map_generator,
+  # and writes the result to a temp file.
+ temp_care_map_text = common.MakeTempFile(prefix="caremap_text-",
+ suffix=".txt")
+ with open(temp_care_map_text, 'w') as text_file:
+ text_file.write('\n'.join(care_map_list))
+
+ temp_care_map = common.MakeTempFile(prefix="caremap-", suffix=".pb")
+ care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
+ common.RunAndCheckOutput(care_map_gen_cmd)
+
+ care_map_path = "META/care_map.pb"
+ if output_zip and care_map_path not in output_zip.namelist():
+ common.ZipWrite(output_zip, temp_care_map, arcname=care_map_path)
+ else:
+ shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
+ if output_zip:
+ OPTIONS.replace_updated_files_list.append(care_map_path)
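
# Sketch (values hypothetical) of the intermediate text handed to
# care_map_generator: per covered partition, GetCareMap() is expected to
# contribute the partition name and its care-map block ranges, followed by the
# fingerprint (or thumbprint) property id and value, or "unknown"/"unknown"
# when neither property is present.
_care_map_text_sketch = '\n'.join([
    'system', '0-32767 32769-65535',
    'ro.system.build.fingerprint',
    'google/device/device:9/PQ1A.181105.017/123:user/release-keys',
    'vendor', '0-4095',
    'unknown', 'unknown',
])
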
def AddPackRadioImages(output_zip, images):
@@ -592,7 +641,7 @@ def AddPackRadioImages(output_zip, images):
prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
if os.path.exists(prebuilt_path):
- print("%s already exists, no need to overwrite..." % (img_name,))
+ logger.info("%s already exists, no need to overwrite...", img_name)
continue
img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
@@ -605,10 +654,31 @@ def AddPackRadioImages(output_zip, images):
shutil.copy(img_radio_path, prebuilt_path)
+def AddSuperEmpty(output_zip):
+ """Create a super_empty.img and store it in output_zip."""
+
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "super_empty.img")
+ build_super_image.BuildSuperImage(OPTIONS.info_dict, img.name)
+ img.Write()
+
+
+def AddSuperSplit(output_zip):
+ """Create split super_*.img and store it in output_zip."""
+
+ outdir = os.path.join(OPTIONS.input_tmp, "OTA")
+ built = build_super_image.BuildSuperImage(OPTIONS.input_tmp, outdir)
+
+ if built:
+ for dev in OPTIONS.info_dict['super_block_devices'].strip().split():
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "OTA",
+ "super_" + dev + ".img")
+ img.Write()
+
+
def ReplaceUpdatedFiles(zip_filename, files_list):
"""Updates all the ZIP entries listed in files_list.
- For now the list includes META/care_map.txt, and the related files under
+ For now the list includes META/care_map.pb, and the related files under
SYSTEM/ after rebuilding recovery.
"""
common.ZipDelete(zip_filename, files_list)
@@ -641,23 +711,33 @@ def AddImagesToTargetFiles(filename):
if not OPTIONS.add_missing:
if os.path.isdir(os.path.join(OPTIONS.input_tmp, "IMAGES")):
- print("target_files appears to already contain images.")
+ logger.warning("target_files appears to already contain images.")
sys.exit(1)
- OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
+ OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, repacking=True)
has_recovery = OPTIONS.info_dict.get("no_recovery") != "true"
- # {vendor,product}.img is unlike system.img or system_other.img. Because it
- # could be built from source, or dropped into target_files.zip as a prebuilt
- # blob. We consider either of them as {vendor,product}.img being available,
- # which could be used when generating vbmeta.img for AVB.
+  # {vendor,odm,product,product_services}.img are unlike system.img or
+  # system_other.img, because each of them could either be built from source
+  # or dropped into target_files.zip as a prebuilt blob. We treat either case
+  # as the image being available, which could then be used when generating
+  # vbmeta.img for AVB.
has_vendor = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) or
os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
"vendor.img")))
+ has_odm = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "ODM")) or
+ os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
+ "odm.img")))
has_product = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) or
os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
"product.img")))
+ has_product_services = (os.path.isdir(os.path.join(OPTIONS.input_tmp,
+ "PRODUCT_SERVICES")) or
+ os.path.exists(os.path.join(OPTIONS.input_tmp,
+ "IMAGES",
+ "product_services.img")))
+ has_system = os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM"))
has_system_other = os.path.isdir(os.path.join(OPTIONS.input_tmp,
"SYSTEM_OTHER"))
@@ -682,7 +762,7 @@ def AddImagesToTargetFiles(filename):
partitions = dict()
def banner(s):
- print("\n\n++++ " + s + " ++++\n\n")
+ logger.info("\n\n++++ " + s + " ++++\n\n")
banner("boot")
# common.GetBootableImage() returns the image directly if present.
@@ -722,9 +802,10 @@ def AddImagesToTargetFiles(filename):
if output_zip:
recovery_two_step_image.AddToZip(output_zip)
- banner("system")
- partitions['system'] = AddSystem(
- output_zip, recovery_img=recovery_image, boot_img=boot_image)
+ if has_system:
+ banner("system")
+ partitions['system'] = AddSystem(
+ output_zip, recovery_img=recovery_image, boot_img=boot_image)
if has_vendor:
banner("vendor")
@@ -734,6 +815,14 @@ def AddImagesToTargetFiles(filename):
banner("product")
partitions['product'] = AddProduct(output_zip)
+ if has_product_services:
+ banner("product_services")
+ partitions['product_services'] = AddProductServices(output_zip)
+
+ if has_odm:
+ banner("odm")
+ partitions['odm'] = AddOdm(output_zip)
+
if has_system_other:
banner("system_other")
AddSystemOther(output_zip)
@@ -753,8 +842,42 @@ def AddImagesToTargetFiles(filename):
partitions['dtbo'] = AddDtbo(output_zip)
if OPTIONS.info_dict.get("avb_enable") == "true":
+    # vbmeta_partitions includes the partitions that should be included in the
+    # top-level vbmeta.img: the ones that are not covered by any chained VBMeta
+    # image, plus the chained VBMeta images themselves.
+ vbmeta_partitions = common.AVB_PARTITIONS[:]
+
+ vbmeta_system = OPTIONS.info_dict.get("avb_vbmeta_system", "").strip()
+ if vbmeta_system:
+ banner("vbmeta_system")
+ partitions["vbmeta_system"] = AddVBMeta(
+ output_zip, partitions, "vbmeta_system", vbmeta_system.split())
+ vbmeta_partitions = [
+ item for item in vbmeta_partitions
+ if item not in vbmeta_system.split()]
+ vbmeta_partitions.append("vbmeta_system")
+
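    # Worked sketch: with avb_vbmeta_system = "system product" in the info
    # dict, vbmeta_system.img chains system and product, and the top-level
    # list becomes
    #   [p for p in common.AVB_PARTITIONS if p not in ('system', 'product')]
    #   + ['vbmeta_system']
    # e.g. ['boot', 'dtbo', ..., 'vendor', 'vbmeta_system'].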
+ vbmeta_vendor = OPTIONS.info_dict.get("avb_vbmeta_vendor", "").strip()
+ if vbmeta_vendor:
+ banner("vbmeta_vendor")
+ partitions["vbmeta_vendor"] = AddVBMeta(
+ output_zip, partitions, "vbmeta_vendor", vbmeta_vendor.split())
+ vbmeta_partitions = [
+ item for item in vbmeta_partitions
+ if item not in vbmeta_vendor.split()]
+ vbmeta_partitions.append("vbmeta_vendor")
+
banner("vbmeta")
- AddVBMeta(output_zip, partitions)
+ AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
+
+ if OPTIONS.info_dict.get("build_super_partition") == "true":
+ banner("super_empty")
+ AddSuperEmpty(output_zip)
+
+ if OPTIONS.info_dict.get(
+ "build_retrofit_dynamic_partitions_ota_package") == "true":
+ banner("super split images")
+ AddSuperSplit(output_zip)
banner("radio")
ab_partitions_txt = os.path.join(OPTIONS.input_tmp, "META",
@@ -763,14 +886,13 @@ def AddImagesToTargetFiles(filename):
with open(ab_partitions_txt, 'r') as f:
ab_partitions = f.readlines()
- # For devices using A/B update, copy over images from RADIO/ and/or
- # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
- # images ready under IMAGES/. All images should have '.img' as extension.
- AddRadioImagesForAbOta(output_zip, ab_partitions)
+ # For devices using A/B update, make sure we have all the needed images
+ # ready under IMAGES/ or RADIO/.
+ CheckAbOtaImages(output_zip, ab_partitions)
- # Generate care_map.txt for system and vendor partitions (if present), then
- # write this file to target_files package.
- AddCareMapTxtForAbOta(output_zip, ab_partitions, partitions)
+ # Generate care_map.pb for ab_partitions, then write this file to
+ # target_files package.
+ AddCareMapForAbOta(output_zip, ab_partitions, partitions)
# Radio images that need to be packed into IMAGES/, and product-img.zip.
pack_radioimages_txt = os.path.join(
@@ -810,20 +932,21 @@ def main(argv):
"is_signing"],
extra_option_handler=option_handler)
-
if len(args) != 1:
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
AddImagesToTargetFiles(args[0])
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
- except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
sys.exit(1)
finally:
common.Cleanup()
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
new file mode 100644
index 0000000000..66715ca24c
--- /dev/null
+++ b/tools/releasetools/apex_utils.py
@@ -0,0 +1,221 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os.path
+import re
+import shlex
+import sys
+import zipfile
+
+import common
+
+logger = logging.getLogger(__name__)
+
+OPTIONS = common.OPTIONS
+
+
+class ApexInfoError(Exception):
+ """An Exception raised during Apex Information command."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+class ApexSigningError(Exception):
+ """An Exception raised during Apex Payload signing."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+def SignApexPayload(payload_file, payload_key_path, payload_key_name, algorithm,
+ salt, signing_args=None):
+ """Signs a given payload_file with the payload key."""
+ # Add the new footer. Old footer, if any, will be replaced by avbtool.
+ cmd = ['avbtool', 'add_hashtree_footer',
+ '--do_not_generate_fec',
+ '--algorithm', algorithm,
+ '--key', payload_key_path,
+ '--prop', 'apex.key:{}'.format(payload_key_name),
+ '--image', payload_file,
+ '--salt', salt]
+ if signing_args:
+ cmd.extend(shlex.split(signing_args))
+
+ try:
+ common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexSigningError, \
+ 'Failed to sign APEX payload {} with {}:\n{}'.format(
+ payload_file, payload_key_path, e), sys.exc_info()[2]
+
+ # Verify the signed payload image with specified public key.
+ logger.info('Verifying %s', payload_file)
+ VerifyApexPayload(payload_file, payload_key_path)
+
+
+def VerifyApexPayload(payload_file, payload_key):
+ """Verifies the APEX payload signature with the given key."""
+ cmd = ['avbtool', 'verify_image', '--image', payload_file,
+ '--key', payload_key]
+ try:
+ common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexSigningError, \
+ 'Failed to validate payload signing for {} with {}:\n{}'.format(
+ payload_file, payload_key, e), sys.exc_info()[2]
+
+
+def ParseApexPayloadInfo(payload_path):
+ """Parses the APEX payload info.
+
+ Args:
+ payload_path: The path to the payload image.
+
+ Raises:
+ ApexInfoError on parsing errors.
+
+ Returns:
+ A dict that contains payload property-value pairs. The dict should at least
+ contain Algorithm, Salt and apex.key.
+ """
+ if not os.path.exists(payload_path):
+ raise ApexInfoError('Failed to find image: {}'.format(payload_path))
+
+ cmd = ['avbtool', 'info_image', '--image', payload_path]
+ try:
+ output = common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexInfoError, \
+ 'Failed to get APEX payload info for {}:\n{}'.format(
+ payload_path, e), sys.exc_info()[2]
+
+ # Extract the Algorithm / Salt / Prop info from payload (i.e. an image signed
+ # with avbtool). For example,
+ # Algorithm: SHA256_RSA4096
+ PAYLOAD_INFO_PATTERN = (
+ r'^\s*(?P<key>Algorithm|Salt|Prop)\:\s*(?P<value>.*?)$')
+ payload_info_matcher = re.compile(PAYLOAD_INFO_PATTERN)
+
+ payload_info = {}
+ for line in output.split('\n'):
+ line_info = payload_info_matcher.match(line)
+ if not line_info:
+ continue
+
+ key, value = line_info.group('key'), line_info.group('value')
+
+ if key == 'Prop':
+ # Further extract the property key-value pair, from a 'Prop:' line. For
+ # example,
+ # Prop: apex.key -> 'com.android.runtime'
+ # Note that avbtool writes single or double quotes around values.
+ PROPERTY_DESCRIPTOR_PATTERN = r'^\s*(?P<key>.*?)\s->\s*(?P<value>.*?)$'
+
+ prop_matcher = re.compile(PROPERTY_DESCRIPTOR_PATTERN)
+ prop = prop_matcher.match(value)
+ if not prop:
+ raise ApexInfoError(
+ 'Failed to parse prop string {}'.format(value))
+
+ prop_key, prop_value = prop.group('key'), prop.group('value')
+ if prop_key == 'apex.key':
+ # avbtool dumps the prop value with repr(), which contains single /
+ # double quotes that we don't want.
+ payload_info[prop_key] = prop_value.strip('\"\'')
+
+ else:
+ payload_info[key] = value
+
+ # Sanity check.
+ for key in ('Algorithm', 'Salt', 'apex.key'):
+ if key not in payload_info:
+ raise ApexInfoError(
+ 'Failed to find {} prop in {}'.format(key, payload_path))
+
+ return payload_info
+
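# Illustrative parse (the avbtool output lines below are hypothetical): the same
# key/value regex used above, applied to a few typical `avbtool info_image`
# lines. The 'Prop' value still needs the "key -> 'value'" split performed in
# the parser.
def _payload_info_parse_sketch():
  sample = ("Algorithm:                SHA256_RSA4096\n"
            "Salt:                     bf8b1f9d\n"
            "Prop: apex.key -> 'com.android.example.apex'\n")
  matcher = re.compile(r'^\s*(?P<key>Algorithm|Salt|Prop)\:\s*(?P<value>.*?)$')
  return {m.group('key'): m.group('value')
          for m in (matcher.match(line) for line in sample.splitlines()) if m}
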
+
+def SignApex(apex_data, payload_key, container_key, container_pw,
+ codename_to_api_level_map, signing_args=None):
+ """Signs the current APEX with the given payload/container keys.
+
+ Args:
+ apex_data: Raw APEX data.
+ payload_key: The path to payload signing key (w/ extension).
+ container_key: The path to container signing key (w/o extension).
+ container_pw: The matching password of the container_key, or None.
+ codename_to_api_level_map: A dict that maps from codename to API level.
+ signing_args: Additional args to be passed to the payload signer.
+
+ Returns:
+ The path to the signed APEX file.
+ """
+ apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
+ with open(apex_file, 'wb') as apex_fp:
+ apex_fp.write(apex_data)
+
+ APEX_PAYLOAD_IMAGE = 'apex_payload.img'
+ APEX_PUBKEY = 'apex_pubkey'
+
+ # 1a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
+ # payload_key.
+ payload_dir = common.MakeTempDir(prefix='apex-payload-')
+ with zipfile.ZipFile(apex_file) as apex_fd:
+ payload_file = apex_fd.extract(APEX_PAYLOAD_IMAGE, payload_dir)
+ zip_items = apex_fd.namelist()
+
+ payload_info = ParseApexPayloadInfo(payload_file)
+ SignApexPayload(
+ payload_file,
+ payload_key,
+ payload_info['apex.key'],
+ payload_info['Algorithm'],
+ payload_info['Salt'],
+ signing_args)
+
+ # 1b. Update the embedded payload public key.
+ payload_public_key = common.ExtractAvbPublicKey(payload_key)
+
+ common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
+ if APEX_PUBKEY in zip_items:
+ common.ZipDelete(apex_file, APEX_PUBKEY)
+ apex_zip = zipfile.ZipFile(apex_file, 'a')
+ common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
+ common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
+ common.ZipClose(apex_zip)
+
+ # 2. Align the files at page boundary (same as in apexer).
+ aligned_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+ common.RunAndCheckOutput(['zipalign', '-f', '4096', apex_file, aligned_apex])
+
+ # 3. Sign the APEX container with container_key.
+ signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+
+ # Specify the 4K alignment when calling SignApk.
+ extra_signapk_args = OPTIONS.extra_signapk_args[:]
+ extra_signapk_args.extend(['-a', '4096'])
+
+ common.SignFile(
+ aligned_apex,
+ signed_apex,
+ container_key,
+ container_pw,
+ codename_to_api_level_map=codename_to_api_level_map,
+ extra_signapk_args=extra_signapk_args)
+
+ return signed_apex
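
# Usage sketch (file and key paths are hypothetical): a caller such as a
# signing script reads the raw APEX bytes and hands them to SignApex()
# together with the payload and container keys.
def _sign_apex_usage_sketch():
  with open('com.android.example.apex', 'rb') as f:
    apex_data = f.read()
  return SignApex(
      apex_data,
      payload_key='keys/payload_key.pem',    # AVB payload key (w/ extension)
      container_key='keys/container_key',    # container key (w/o extension)
      container_pw=None,
      codename_to_api_level_map={},
      signing_args=None)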
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 24c5b2de7f..b23eef112c 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -19,24 +19,30 @@ import copy
import functools
import heapq
import itertools
+import logging
import multiprocessing
import os
import os.path
import re
-import subprocess
import sys
import threading
-from collections import deque, OrderedDict
+import zlib
+from collections import deque, namedtuple, OrderedDict
from hashlib import sha1
import common
from rangelib import RangeSet
-
__all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
+logger = logging.getLogger(__name__)
+
+# The tuple contains the style and bytes of a bsdiff|imgdiff patch.
+PatchInfo = namedtuple("PatchInfo", ["imgdiff", "content"])
+
def compute_patch(srcfile, tgtfile, imgdiff=False):
+ """Calls bsdiff|imgdiff to compute the patch data, returns a PatchInfo."""
patchfile = common.MakeTempFile(prefix='patch-')
cmd = ['imgdiff', '-z'] if imgdiff else ['bsdiff']
@@ -44,15 +50,14 @@ def compute_patch(srcfile, tgtfile, imgdiff=False):
# Don't dump the bsdiff/imgdiff commands, which are not useful for the case
# here, since they contain temp filenames only.
- p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output, _ = p.communicate()
+ proc = common.Run(cmd, verbose=False)
+ output, _ = proc.communicate()
- if p.returncode != 0:
+ if proc.returncode != 0:
raise ValueError(output)
with open(patchfile, 'rb') as f:
- return f.read()
+ return PatchInfo(imgdiff, f.read())
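
# Minimal sketch of the PatchInfo contract callers rely on below: .imgdiff says
# whether .content holds an imgdiff or a bsdiff patch (the bytes here are
# placeholders, not a real patch).
_patch_info_sketch = PatchInfo(imgdiff=False, content=b'\x00' * 8)
# _patch_info_sketch.imgdiff -> False; _patch_info_sketch.content -> patch bytes
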
class Image(object):
@@ -79,6 +84,7 @@ class EmptyImage(Image):
self.extended = RangeSet()
self.total_blocks = 0
self.file_map = {}
+ self.hashtree_info = None
def RangeSha1(self, ranges):
return sha1().hexdigest()
@@ -163,12 +169,12 @@ class DataImage(Image):
def RangeSha1(self, ranges):
h = sha1()
- for data in self._GetRangeData(ranges):
+ for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
h.update(data)
return h.hexdigest()
def ReadRangeSet(self, ranges):
- return [self._GetRangeData(ranges)]
+ return list(self._GetRangeData(ranges))
def TotalSha1(self, include_clobbered_blocks=False):
if not include_clobbered_blocks:
@@ -177,7 +183,84 @@ class DataImage(Image):
return sha1(self.data).hexdigest()
def WriteRangeDataToFd(self, ranges, fd):
- for data in self._GetRangeData(ranges):
+ for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+ fd.write(data)
+
+
+class FileImage(Image):
+ """An image wrapped around a raw image file."""
+
+ def __init__(self, path, hashtree_info_generator=None):
+ self.path = path
+ self.blocksize = 4096
+ self._file_size = os.path.getsize(self.path)
+ self._file = open(self.path, 'r')
+
+ if self._file_size % self.blocksize != 0:
+ raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
+ % self.path, self.blocksize, self._file_size)
+
+ self.total_blocks = self._file_size / self.blocksize
+ self.care_map = RangeSet(data=(0, self.total_blocks))
+ self.clobbered_blocks = RangeSet()
+ self.extended = RangeSet()
+
+ self.generator_lock = threading.Lock()
+
+ self.hashtree_info = None
+ if hashtree_info_generator:
+ self.hashtree_info = hashtree_info_generator.Generate(self)
+
+ zero_blocks = []
+ nonzero_blocks = []
+ reference = '\0' * self.blocksize
+
+ for i in range(self.total_blocks):
+ d = self._file.read(self.blocksize)
+ if d == reference:
+ zero_blocks.append(i)
+ zero_blocks.append(i+1)
+ else:
+ nonzero_blocks.append(i)
+ nonzero_blocks.append(i+1)
+
+ assert zero_blocks or nonzero_blocks
+
+ self.file_map = {}
+ if zero_blocks:
+ self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
+ if nonzero_blocks:
+ self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
+ if self.hashtree_info:
+ self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range
+
+ def __del__(self):
+ self._file.close()
+
+ def _GetRangeData(self, ranges):
+ # Use a lock to protect the generator so that we will not run two
+ # instances of this generator on the same object simultaneously.
+ with self.generator_lock:
+ for s, e in ranges:
+ self._file.seek(s * self.blocksize)
+ for _ in range(s, e):
+ yield self._file.read(self.blocksize)
+
+ def RangeSha1(self, ranges):
+ h = sha1()
+ for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+ h.update(data)
+ return h.hexdigest()
+
+ def ReadRangeSet(self, ranges):
+ return list(self._GetRangeData(ranges))
+
+ def TotalSha1(self, include_clobbered_blocks=False):
+ assert not self.clobbered_blocks
+ return self.RangeSha1(self.care_map)
+
+ def WriteRangeDataToFd(self, ranges, fd):
+ for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
fd.write(data)
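
# Usage sketch (path is hypothetical): wrap a raw, block-aligned image file and
# inspect the block map built by the constructor above; the file size must be a
# multiple of the 4096-byte block size or __init__ raises ValueError.
def _file_image_usage_sketch():
  img = FileImage('/tmp/system.raw.img')
  block_map_keys = sorted(img.file_map)      # e.g. ['__NONZERO', '__ZERO']
  return img.total_blocks, block_map_keys, img.TotalSha1()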
@@ -203,17 +286,17 @@ class Transfer(object):
self.id = len(by_id)
by_id.append(self)
- self._patch = None
+ self._patch_info = None
@property
- def patch(self):
- return self._patch
+ def patch_info(self):
+ return self._patch_info
- @patch.setter
- def patch(self, patch):
- if patch:
+ @patch_info.setter
+ def patch_info(self, info):
+ if info:
assert self.style == "diff"
- self._patch = patch
+ self._patch_info = info
def NetStashChange(self):
return (sum(sr.size() for (_, sr) in self.stash_before) -
@@ -224,7 +307,7 @@ class Transfer(object):
self.use_stash = []
self.style = "new"
self.src_ranges = RangeSet()
- self.patch = None
+ self.patch_info = None
def __str__(self):
return (str(self.id) + ": <" + str(self.src_ranges) + " " + self.style +
@@ -270,7 +353,6 @@ class ImgdiffStats(object):
USED_IMGDIFF_LARGE_APK = "Large APK files split and diff'd with imgdiff"
# Reasons for not applying imgdiff on APKs.
- SKIPPED_TRIMMED = "Not used imgdiff due to trimmed RangeSet"
SKIPPED_NONMONOTONIC = "Not used imgdiff due to having non-monotonic ranges"
SKIPPED_SHARED_BLOCKS = "Not used imgdiff due to using shared blocks"
SKIPPED_INCOMPLETE = "Not used imgdiff due to incomplete RangeSet"
@@ -279,7 +361,6 @@ class ImgdiffStats(object):
REASONS = (
USED_IMGDIFF,
USED_IMGDIFF_LARGE_APK,
- SKIPPED_TRIMMED,
SKIPPED_NONMONOTONIC,
SKIPPED_SHARED_BLOCKS,
SKIPPED_INCOMPLETE,
@@ -309,8 +390,8 @@ class ImgdiffStats(object):
"""Prints a report of the collected imgdiff stats."""
def print_header(header, separator):
- print(header)
- print(separator * len(header) + '\n')
+ logger.info(header)
+ logger.info(separator * len(header) + '\n')
print_header(' Imgdiff Stats Report ', '=')
for key in self.REASONS:
@@ -319,49 +400,48 @@ class ImgdiffStats(object):
values = self.stats[key]
section_header = ' {} (count: {}) '.format(key, len(values))
print_header(section_header, '-')
- print(''.join([' {}\n'.format(name) for name in values]))
+ logger.info(''.join([' {}\n'.format(name) for name in values]))
-# BlockImageDiff works on two image objects. An image object is
-# anything that provides the following attributes:
-#
-# blocksize: the size in bytes of a block, currently must be 4096.
-#
-# total_blocks: the total size of the partition/image, in blocks.
-#
-# care_map: a RangeSet containing which blocks (in the range [0,
-# total_blocks) we actually care about; i.e. which blocks contain
-# data.
-#
-# file_map: a dict that partitions the blocks contained in care_map
-# into smaller domains that are useful for doing diffs on.
-# (Typically a domain is a file, and the key in file_map is the
-# pathname.)
-#
-# clobbered_blocks: a RangeSet containing which blocks contain data
-# but may be altered by the FS. They need to be excluded when
-# verifying the partition integrity.
-#
-# ReadRangeSet(): a function that takes a RangeSet and returns the
-# data contained in the image blocks of that RangeSet. The data
-# is returned as a list or tuple of strings; concatenating the
-# elements together should produce the requested data.
-# Implementations are free to break up the data into list/tuple
-# elements in any way that is convenient.
-#
-# RangeSha1(): a function that returns (as a hex string) the SHA-1
-# hash of all the data in the specified range.
-#
-# TotalSha1(): a function that returns (as a hex string) the SHA-1
-# hash of all the data in the image (ie, all the blocks in the
-# care_map minus clobbered_blocks, or including the clobbered
-# blocks if include_clobbered_blocks is True).
-#
-# When creating a BlockImageDiff, the src image may be None, in which
-# case the list of transfers produced will never read from the
-# original image.
-
class BlockImageDiff(object):
+ """Generates the diff of two block image objects.
+
+ BlockImageDiff works on two image objects. An image object is anything that
+ provides the following attributes:
+
+ blocksize: the size in bytes of a block, currently must be 4096.
+
+ total_blocks: the total size of the partition/image, in blocks.
+
+ care_map: a RangeSet containing which blocks (in the range [0,
+ total_blocks) we actually care about; i.e. which blocks contain data.
+
+ file_map: a dict that partitions the blocks contained in care_map into
+ smaller domains that are useful for doing diffs on. (Typically a domain
+ is a file, and the key in file_map is the pathname.)
+
+ clobbered_blocks: a RangeSet containing which blocks contain data but may
+ be altered by the FS. They need to be excluded when verifying the
+ partition integrity.
+
+ ReadRangeSet(): a function that takes a RangeSet and returns the data
+ contained in the image blocks of that RangeSet. The data is returned as
+ a list or tuple of strings; concatenating the elements together should
+ produce the requested data. Implementations are free to break up the
+ data into list/tuple elements in any way that is convenient.
+
+ RangeSha1(): a function that returns (as a hex string) the SHA-1 hash of
+ all the data in the specified range.
+
+ TotalSha1(): a function that returns (as a hex string) the SHA-1 hash of
+ all the data in the image (ie, all the blocks in the care_map minus
+ clobbered_blocks, or including the clobbered blocks if
+ include_clobbered_blocks is True).
+
+ When creating a BlockImageDiff, the src image may be None, in which case the
+ list of transfers produced will never read from the original image.
+ """
+
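  # Usage sketch (paths hypothetical), assuming the Compute(prefix) entry point
  # that releasetools drives this class through:
  #   tgt = FileImage('/tmp/target.raw.img')
  #   src = FileImage('/tmp/source.raw.img')
  #   BlockImageDiff(tgt, src, threads=4).Compute('/tmp/system')
  #   # emits /tmp/system.new.dat and /tmp/system.patch.dat, among others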
def __init__(self, tgt, src=None, threads=None, version=4,
disable_imgdiff=False):
if threads is None:
@@ -449,10 +529,6 @@ class BlockImageDiff(object):
self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_INCOMPLETE)
return False
- if tgt_ranges.extra.get('trimmed') or src_ranges.extra.get('trimmed'):
- self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_TRIMMED)
- return False
-
reason = (ImgdiffStats.USED_IMGDIFF_LARGE_APK if large_apk
else ImgdiffStats.USED_IMGDIFF)
self.imgdiff_stats.Log(name, reason)
@@ -469,19 +545,28 @@ class BlockImageDiff(object):
self.AbbreviateSourceNames()
self.FindTransfers()
- # Find the ordering dependencies among transfers (this is O(n^2)
- # in the number of transfers).
- self.GenerateDigraph()
- # Find a sequence of transfers that satisfies as many ordering
- # dependencies as possible (heuristically).
- self.FindVertexSequence()
- # Fix up the ordering dependencies that the sequence didn't
- # satisfy.
- self.ReverseBackwardEdges()
- self.ImproveVertexSequence()
+ self.FindSequenceForTransfers()
# Ensure the runtime stash size is under the limit.
if common.OPTIONS.cache_size is not None:
+ stash_limit = (common.OPTIONS.cache_size *
+ common.OPTIONS.stash_threshold / self.tgt.blocksize)
+ # Ignore the stash limit and calculate the maximum simultaneously stashed
+ # blocks needed.
+ _, max_stashed_blocks = self.ReviseStashSize(ignore_stash_limit=True)
+
+      # We cannot stash more blocks than the stash limit simultaneously. As a
+      # result, some 'diff' commands will be converted to new, leading to an
+      # unnecessarily large package. To mitigate this issue, we can carefully
+ # choose the transfers for conversion. The number '1024' can be further
+ # tweaked here to balance the package size and build time.
+ if max_stashed_blocks > stash_limit + 1024:
+ self.SelectAndConvertDiffTransfersToNew(
+ max_stashed_blocks - stash_limit)
+ # Regenerate the sequence as the graph has changed.
+ self.FindSequenceForTransfers()
+
+ # Revise the stash size again to keep the size under limit.
self.ReviseStashSize()
# Double-check our work.
@@ -492,7 +577,7 @@ class BlockImageDiff(object):
self.WriteTransfers(prefix)
# Report the imgdiff stats.
- if common.OPTIONS.verbose and not self.disable_imgdiff:
+ if not self.disable_imgdiff:
self.imgdiff_stats.Report()
def WriteTransfers(self, prefix):
@@ -656,6 +741,14 @@ class BlockImageDiff(object):
self.touched_src_sha1 = self.src.RangeSha1(self.touched_src_ranges)
+ if self.tgt.hashtree_info:
+ out.append("compute_hash_tree {} {} {} {} {}\n".format(
+ self.tgt.hashtree_info.hashtree_range.to_string_raw(),
+ self.tgt.hashtree_info.filesystem_range.to_string_raw(),
+ self.tgt.hashtree_info.hash_algorithm,
+ self.tgt.hashtree_info.salt,
+ self.tgt.hashtree_info.root_hash))
+
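      # Illustrative transfer-list line emitted above (values hypothetical; the
      # ranges use RangeSet.to_string_raw(), i.e. "<count>,<start>,<end>,..."):
      #   compute_hash_tree 2,229376,229440 2,0,229376 sha256 <salt> <root-hash>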
# Zero out extended blocks as a workaround for bug 20881595.
if self.tgt.extended:
assert (WriteSplitTransfers(out, "zero", self.tgt.extended) ==
@@ -694,16 +787,31 @@ class BlockImageDiff(object):
OPTIONS = common.OPTIONS
if OPTIONS.cache_size is not None:
max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
- print("max stashed blocks: %d (%d bytes), "
- "limit: %d bytes (%.2f%%)\n" % (
- max_stashed_blocks, self._max_stashed_size, max_allowed,
- self._max_stashed_size * 100.0 / max_allowed))
+ logger.info(
+ "max stashed blocks: %d (%d bytes), limit: %d bytes (%.2f%%)\n",
+ max_stashed_blocks, self._max_stashed_size, max_allowed,
+ self._max_stashed_size * 100.0 / max_allowed)
else:
- print("max stashed blocks: %d (%d bytes), limit: <unknown>\n" % (
- max_stashed_blocks, self._max_stashed_size))
+ logger.info(
+ "max stashed blocks: %d (%d bytes), limit: <unknown>\n",
+ max_stashed_blocks, self._max_stashed_size)
- def ReviseStashSize(self):
- print("Revising stash size...")
+ def ReviseStashSize(self, ignore_stash_limit=False):
+ """ Revises the transfers to keep the stash size within the size limit.
+
+ Iterates through the transfer list and calculates the stash size each
+ transfer generates. Converts the affected transfers to new if we reach the
+ stash limit.
+
+ Args:
+ ignore_stash_limit: Ignores the stash limit and calculates the max
+ simultaneous stashed blocks instead. No change will be made to the
+ transfer list with this flag.
+
+    Returns:
+ A tuple of (tgt blocks converted to new, max stashed blocks)
+ """
+ logger.info("Revising stash size...")
stash_map = {}
# Create the map between a stash and its def/use points. For example, for a
@@ -717,16 +825,19 @@ class BlockImageDiff(object):
for stash_raw_id, _ in xf.use_stash:
stash_map[stash_raw_id] += (xf,)
- # Compute the maximum blocks available for stash based on /cache size and
- # the threshold.
- cache_size = common.OPTIONS.cache_size
- stash_threshold = common.OPTIONS.stash_threshold
- max_allowed = cache_size * stash_threshold / self.tgt.blocksize
+ max_allowed_blocks = None
+ if not ignore_stash_limit:
+ # Compute the maximum blocks available for stash based on /cache size and
+ # the threshold.
+ cache_size = common.OPTIONS.cache_size
+ stash_threshold = common.OPTIONS.stash_threshold
+ max_allowed_blocks = cache_size * stash_threshold / self.tgt.blocksize
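      # Worked example (hypothetical cache size; 0.8 is the usual default
      # threshold): 512 MiB * 0.8 / 4096 = 104857.6, i.e. at most ~104857
      # 4096-byte blocks may be stashed at any point in time.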
# See the comments for 'stashes' in WriteTransfers().
stashes = {}
stashed_blocks = 0
new_blocks = 0
+ max_stashed_blocks = 0
# Now go through all the commands. Compute the required stash size on the
# fly. If a command requires excess stash than available, it deletes the
@@ -743,12 +854,12 @@ class BlockImageDiff(object):
if sh not in stashes:
stashed_blocks_after += sr.size()
- if stashed_blocks_after > max_allowed:
+ if max_allowed_blocks and stashed_blocks_after > max_allowed_blocks:
# We cannot stash this one for a later command. Find out the command
# that will use this stash and replace the command with "new".
use_cmd = stash_map[stash_raw_id][2]
replaced_cmds.append(use_cmd)
- print("%10d %9s %s" % (sr.size(), "explicit", use_cmd))
+ logger.info("%10d %9s %s", sr.size(), "explicit", use_cmd)
else:
# Update the stashes map.
if sh in stashes:
@@ -756,15 +867,21 @@ class BlockImageDiff(object):
else:
stashes[sh] = 1
stashed_blocks = stashed_blocks_after
+ max_stashed_blocks = max(max_stashed_blocks, stashed_blocks)
# "move" and "diff" may introduce implicit stashes in BBOTA v3. Prior to
# ComputePatches(), they both have the style of "diff".
if xf.style == "diff":
assert xf.tgt_ranges and xf.src_ranges
if xf.src_ranges.overlaps(xf.tgt_ranges):
- if stashed_blocks + xf.src_ranges.size() > max_allowed:
+ if (max_allowed_blocks and
+ stashed_blocks + xf.src_ranges.size() > max_allowed_blocks):
replaced_cmds.append(xf)
- print("%10d %9s %s" % (xf.src_ranges.size(), "implicit", xf))
+ logger.info("%10d %9s %s", xf.src_ranges.size(), "implicit", xf)
+ else:
+ # The whole source ranges will be stashed for implicit stashes.
+ max_stashed_blocks = max(max_stashed_blocks,
+ stashed_blocks + xf.src_ranges.size())
# Replace the commands in replaced_cmds with "new"s.
for cmd in replaced_cmds:
@@ -790,28 +907,30 @@ class BlockImageDiff(object):
stashes.pop(sh)
num_of_bytes = new_blocks * self.tgt.blocksize
- print(" Total %d blocks (%d bytes) are packed as new blocks due to "
- "insufficient cache size." % (new_blocks, num_of_bytes))
- return new_blocks
+ logger.info(
+ " Total %d blocks (%d bytes) are packed as new blocks due to "
+ "insufficient cache size. Maximum blocks stashed simultaneously: %d",
+ new_blocks, num_of_bytes, max_stashed_blocks)
+ return new_blocks, max_stashed_blocks
def ComputePatches(self, prefix):
- print("Reticulating splines...")
+ logger.info("Reticulating splines...")
diff_queue = []
patch_num = 0
with open(prefix + ".new.dat", "wb") as new_f:
for index, xf in enumerate(self.transfers):
if xf.style == "zero":
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s" % (
- tgt_size, tgt_size, 100.0, xf.style, xf.tgt_name,
- str(xf.tgt_ranges)))
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+ xf.style, xf.tgt_name, str(xf.tgt_ranges))
elif xf.style == "new":
self.tgt.WriteRangeDataToFd(xf.tgt_ranges, new_f)
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s" % (
- tgt_size, tgt_size, 100.0, xf.style,
- xf.tgt_name, str(xf.tgt_ranges)))
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+ xf.style, xf.tgt_name, str(xf.tgt_ranges))
elif xf.style == "diff":
# We can't compare src and tgt directly because they may have
@@ -826,24 +945,20 @@ class BlockImageDiff(object):
# These are identical; we don't need to generate a patch,
# just issue copy commands on the device.
xf.style = "move"
- xf.patch = None
+ xf.patch_info = None
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
if xf.src_ranges != xf.tgt_ranges:
- print("%10d %10d (%6.2f%%) %7s %s %s (from %s)" % (
- tgt_size, tgt_size, 100.0, xf.style,
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s (from %s)", tgt_size, tgt_size,
+ 100.0, xf.style,
xf.tgt_name if xf.tgt_name == xf.src_name else (
xf.tgt_name + " (from " + xf.src_name + ")"),
- str(xf.tgt_ranges), str(xf.src_ranges)))
+ str(xf.tgt_ranges), str(xf.src_ranges))
else:
- if xf.patch:
- # We have already generated the patch with imgdiff. Check if the
- # transfer is intact.
- assert not self.disable_imgdiff
- imgdiff = True
- if (xf.src_ranges.extra.get('trimmed') or
- xf.tgt_ranges.extra.get('trimmed')):
- imgdiff = False
- xf.patch = None
+ if xf.patch_info:
+ # We have already generated the patch (e.g. during split of large
+ # APKs or reduction of stash size)
+ imgdiff = xf.patch_info.imgdiff
else:
imgdiff = self.CanUseImgdiff(
xf.tgt_name, xf.tgt_ranges, xf.src_ranges)
@@ -854,104 +969,24 @@ class BlockImageDiff(object):
else:
assert False, "unknown style " + xf.style
- if diff_queue:
- if self.threads > 1:
- print("Computing patches (using %d threads)..." % (self.threads,))
- else:
- print("Computing patches...")
-
- diff_total = len(diff_queue)
- patches = [None] * diff_total
- error_messages = []
-
- # Using multiprocessing doesn't give additional benefits, due to the
- # pattern of the code. The diffing work is done by subprocess.call, which
- # already runs in a separate process (not affected much by the GIL -
- # Global Interpreter Lock). Using multiprocess also requires either a)
- # writing the diff input files in the main process before forking, or b)
- # reopening the image file (SparseImage) in the worker processes. Doing
- # neither of them further improves the performance.
- lock = threading.Lock()
- def diff_worker():
- while True:
- with lock:
- if not diff_queue:
- return
- xf_index, imgdiff, patch_index = diff_queue.pop()
- xf = self.transfers[xf_index]
-
- if sys.stdout.isatty():
- diff_left = len(diff_queue)
- progress = (diff_total - diff_left) * 100 / diff_total
- # '\033[K' is to clear to EOL.
- print(' [%3d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
- sys.stdout.flush()
-
- patch = xf.patch
- if not patch:
- src_ranges = xf.src_ranges
- tgt_ranges = xf.tgt_ranges
-
- src_file = common.MakeTempFile(prefix="src-")
- with open(src_file, "wb") as fd:
- self.src.WriteRangeDataToFd(src_ranges, fd)
-
- tgt_file = common.MakeTempFile(prefix="tgt-")
- with open(tgt_file, "wb") as fd:
- self.tgt.WriteRangeDataToFd(tgt_ranges, fd)
-
- message = []
- try:
- patch = compute_patch(src_file, tgt_file, imgdiff)
- except ValueError as e:
- message.append(
- "Failed to generate %s for %s: tgt=%s, src=%s:\n%s" % (
- "imgdiff" if imgdiff else "bsdiff",
- xf.tgt_name if xf.tgt_name == xf.src_name else
- xf.tgt_name + " (from " + xf.src_name + ")",
- xf.tgt_ranges, xf.src_ranges, e.message))
- if message:
- with lock:
- error_messages.extend(message)
-
- with lock:
- patches[patch_index] = (xf_index, patch)
-
- threads = [threading.Thread(target=diff_worker)
- for _ in range(self.threads)]
- for th in threads:
- th.start()
- while threads:
- threads.pop().join()
-
- if sys.stdout.isatty():
- print('\n')
-
- if error_messages:
- print('ERROR:')
- print('\n'.join(error_messages))
- print('\n\n\n')
- sys.exit(1)
- else:
- patches = []
+ patches = self.ComputePatchesForInputList(diff_queue, False)
offset = 0
with open(prefix + ".patch.dat", "wb") as patch_fd:
- for index, patch in patches:
+ for index, patch_info, _ in patches:
xf = self.transfers[index]
- xf.patch_len = len(patch)
+ xf.patch_len = len(patch_info.content)
xf.patch_start = offset
offset += xf.patch_len
- patch_fd.write(patch)
+ patch_fd.write(patch_info.content)
- if common.OPTIONS.verbose:
- tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s %s" % (
- xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
- xf.style,
- xf.tgt_name if xf.tgt_name == xf.src_name else (
- xf.tgt_name + " (from " + xf.src_name + ")"),
- xf.tgt_ranges, xf.src_ranges))
+ tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s %s", xf.patch_len, tgt_size,
+ xf.patch_len * 100.0 / tgt_size, xf.style,
+ xf.tgt_name if xf.tgt_name == xf.src_name else (
+ xf.tgt_name + " (from " + xf.src_name + ")"),
+ xf.tgt_ranges, xf.src_ranges)
def AssertSha1Good(self):
"""Check the SHA-1 of the src & tgt blocks in the transfer list.
@@ -999,13 +1034,45 @@ class BlockImageDiff(object):
assert touched[i] == 0
touched[i] = 1
+ if self.tgt.hashtree_info:
+ for s, e in self.tgt.hashtree_info.hashtree_range:
+ for i in range(s, e):
+ assert touched[i] == 0
+ touched[i] = 1
+
# Check that we've written every target block.
for s, e in self.tgt.care_map:
for i in range(s, e):
assert touched[i] == 1
+ def FindSequenceForTransfers(self):
+ """Finds a sequence for the given transfers.
+
+ The goal is to minimize the violation of order dependencies between these
+ transfers, so that fewer blocks are stashed when applying the update.
+ """
+
+ # Clear the existing dependency between transfers
+ for xf in self.transfers:
+ xf.goes_before = OrderedDict()
+ xf.goes_after = OrderedDict()
+
+ xf.stash_before = []
+ xf.use_stash = []
+
+ # Find the ordering dependencies among transfers (this is O(n^2)
+ # in the number of transfers).
+ self.GenerateDigraph()
+ # Find a sequence of transfers that satisfies as many ordering
+ # dependencies as possible (heuristically).
+ self.FindVertexSequence()
+ # Fix up the ordering dependencies that the sequence didn't
+ # satisfy.
+ self.ReverseBackwardEdges()
+ self.ImproveVertexSequence()
+
def ImproveVertexSequence(self):
- print("Improving vertex order...")
+ logger.info("Improving vertex order...")
# At this point our digraph is acyclic; we reversed any edges that
# were backwards in the heuristically-generated sequence. The
@@ -1045,42 +1112,6 @@ class BlockImageDiff(object):
for i, xf in enumerate(L):
xf.order = i
- def RemoveBackwardEdges(self):
- print("Removing backward edges...")
- in_order = 0
- out_of_order = 0
- lost_source = 0
-
- for xf in self.transfers:
- lost = 0
- size = xf.src_ranges.size()
- for u in xf.goes_before:
- # xf should go before u
- if xf.order < u.order:
- # it does, hurray!
- in_order += 1
- else:
- # it doesn't, boo. trim the blocks that u writes from xf's
- # source, so that xf can go after u.
- out_of_order += 1
- assert xf.src_ranges.overlaps(u.tgt_ranges)
- xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
- xf.src_ranges.extra['trimmed'] = True
-
- if xf.style == "diff" and not xf.src_ranges:
- # nothing left to diff from; treat as new data
- xf.style = "new"
-
- lost = size - xf.src_ranges.size()
- lost_source += lost
-
- print((" %d/%d dependencies (%.2f%%) were violated; "
- "%d source blocks removed.") %
- (out_of_order, in_order + out_of_order,
- (out_of_order * 100.0 / (in_order + out_of_order))
- if (in_order + out_of_order) else 0.0,
- lost_source))
-
def ReverseBackwardEdges(self):
"""Reverse unsatisfying edges and compute pairs of stashed blocks.
@@ -1093,7 +1124,7 @@ class BlockImageDiff(object):
blocks will be written to the same stash slot in WriteTransfers().
"""
- print("Reversing backward edges...")
+ logger.info("Reversing backward edges...")
in_order = 0
out_of_order = 0
stash_raw_id = 0
@@ -1125,15 +1156,15 @@ class BlockImageDiff(object):
xf.goes_after[u] = None # value doesn't matter
u.goes_before[xf] = None
- print((" %d/%d dependencies (%.2f%%) were violated; "
- "%d source blocks stashed.") %
- (out_of_order, in_order + out_of_order,
- (out_of_order * 100.0 / (in_order + out_of_order))
- if (in_order + out_of_order) else 0.0,
- stash_size))
+ logger.info(
+ " %d/%d dependencies (%.2f%%) were violated; %d source blocks "
+ "stashed.", out_of_order, in_order + out_of_order,
+ (out_of_order * 100.0 / (in_order + out_of_order)) if (
+ in_order + out_of_order) else 0.0,
+ stash_size)
def FindVertexSequence(self):
- print("Finding vertex sequence...")
+ logger.info("Finding vertex sequence...")
# This is based on "A Fast & Effective Heuristic for the Feedback
# Arc Set Problem" by P. Eades, X. Lin, and W.F. Smyth. Think of
@@ -1246,7 +1277,7 @@ class BlockImageDiff(object):
self.transfers = new_transfers
def GenerateDigraph(self):
- print("Generating digraph...")
+ logger.info("Generating digraph...")
# Each item of source_ranges will be:
# - None, if that block is not used as a source,
@@ -1289,6 +1320,176 @@ class BlockImageDiff(object):
b.goes_before[a] = size
a.goes_after[b] = size
+ def ComputePatchesForInputList(self, diff_queue, compress_target):
+ """Returns a list of patch information for the input list of transfers.
+
+ Args:
+ diff_queue: a list of transfers with style 'diff'
+ compress_target: If True, compresses the target ranges of each
+ transfers; and save the size.
+
+ Returns:
+ A list of (transfer order, patch_info, compressed_size) tuples.
+ """
+
+ if not diff_queue:
+ return []
+
+ if self.threads > 1:
+ logger.info("Computing patches (using %d threads)...", self.threads)
+ else:
+ logger.info("Computing patches...")
+
+ diff_total = len(diff_queue)
+ patches = [None] * diff_total
+ error_messages = []
+
+ # Using multiprocessing doesn't give additional benefits, due to the
+ # pattern of the code. The diffing work is done by subprocess.call, which
+ # already runs in a separate process (not affected much by the GIL -
+ # Global Interpreter Lock). Using multiprocess also requires either a)
+ # writing the diff input files in the main process before forking, or b)
+ # reopening the image file (SparseImage) in the worker processes. Doing
+ # neither of them further improves the performance.
+ lock = threading.Lock()
+
+ def diff_worker():
+ while True:
+ with lock:
+ if not diff_queue:
+ return
+ xf_index, imgdiff, patch_index = diff_queue.pop()
+ xf = self.transfers[xf_index]
+
+ message = []
+ compressed_size = None
+
+ patch_info = xf.patch_info
+ if not patch_info:
+ src_file = common.MakeTempFile(prefix="src-")
+ with open(src_file, "wb") as fd:
+ self.src.WriteRangeDataToFd(xf.src_ranges, fd)
+
+ tgt_file = common.MakeTempFile(prefix="tgt-")
+ with open(tgt_file, "wb") as fd:
+ self.tgt.WriteRangeDataToFd(xf.tgt_ranges, fd)
+
+ try:
+ patch_info = compute_patch(src_file, tgt_file, imgdiff)
+ except ValueError as e:
+ message.append(
+ "Failed to generate %s for %s: tgt=%s, src=%s:\n%s" % (
+ "imgdiff" if imgdiff else "bsdiff",
+ xf.tgt_name if xf.tgt_name == xf.src_name else
+ xf.tgt_name + " (from " + xf.src_name + ")",
+ xf.tgt_ranges, xf.src_ranges, e.message))
+
+ if compress_target:
+ tgt_data = self.tgt.ReadRangeSet(xf.tgt_ranges)
+ try:
+ # Compresses with the default level
+ compress_obj = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ compressed_data = (compress_obj.compress("".join(tgt_data))
+ + compress_obj.flush())
+ compressed_size = len(compressed_data)
+ except zlib.error as e:
+ message.append(
+ "Failed to compress the data in target range {} for {}:\n"
+ "{}".format(xf.tgt_ranges, xf.tgt_name, e.message))
+
+ if message:
+ with lock:
+ error_messages.extend(message)
+
+ with lock:
+ patches[patch_index] = (xf_index, patch_info, compressed_size)
+
+ threads = [threading.Thread(target=diff_worker)
+ for _ in range(self.threads)]
+ for th in threads:
+ th.start()
+ while threads:
+ threads.pop().join()
+
+ if error_messages:
+ logger.error('ERROR:')
+ logger.error('\n'.join(error_messages))
+ logger.error('\n\n\n')
+ sys.exit(1)
+
+ return patches
+
+ def SelectAndConvertDiffTransfersToNew(self, violated_stash_blocks):
+ """Converts the diff transfers to reduce the max simultaneous stash.
+
+    Since the 'new' data is compressed with deflate, we can select the 'diff'
+    transfers for conversion by comparing each transfer's patch size with the
+    size of its compressed target data. Ideally, we want to convert the
+    transfers that add little size but free a large number of stashed blocks.
+ """
+ TransferSizeScore = namedtuple("TransferSizeScore",
+ "xf, used_stash_blocks, score")
+
+ logger.info("Selecting diff commands to convert to new.")
+ diff_queue = []
+ for xf in self.transfers:
+ if xf.style == "diff" and xf.src_sha1 != xf.tgt_sha1:
+ use_imgdiff = self.CanUseImgdiff(xf.tgt_name, xf.tgt_ranges,
+ xf.src_ranges)
+ diff_queue.append((xf.order, use_imgdiff, len(diff_queue)))
+
+ # Remove the 'move' transfers, and compute the patch & compressed size
+ # for the remaining.
+ result = self.ComputePatchesForInputList(diff_queue, True)
+
+ conversion_candidates = []
+ for xf_index, patch_info, compressed_size in result:
+ xf = self.transfers[xf_index]
+ if not xf.patch_info:
+ xf.patch_info = patch_info
+
+ size_ratio = len(xf.patch_info.content) * 100.0 / compressed_size
+ diff_style = "imgdiff" if xf.patch_info.imgdiff else "bsdiff"
+ logger.info("%s, target size: %d blocks, style: %s, patch size: %d,"
+ " compression_size: %d, ratio %.2f%%", xf.tgt_name,
+ xf.tgt_ranges.size(), diff_style,
+ len(xf.patch_info.content), compressed_size, size_ratio)
+
+ used_stash_blocks = sum(sr.size() for _, sr in xf.use_stash)
+ # Convert the transfer to new if the compressed size is smaller or equal.
+ # We don't need to maintain the stash_before lists here because the
+ # graph will be regenerated later.
+ if len(xf.patch_info.content) >= compressed_size:
+ # Add the transfer to the candidate list with negative score. And it
+ # will be converted later.
+ conversion_candidates.append(TransferSizeScore(xf, used_stash_blocks,
+ -1))
+ elif used_stash_blocks > 0:
+ # This heuristic represents the size increase in the final package to
+ # remove per unit of stashed data.
+ score = ((compressed_size - len(xf.patch_info.content)) * 100.0
+ / used_stash_blocks)
+ conversion_candidates.append(TransferSizeScore(xf, used_stash_blocks,
+ score))
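        # Worked example of the score above (numbers hypothetical): a patch of
        # 40960 bytes whose target compresses to 65536 bytes, with 50 blocks
        # kept stashed, scores (65536 - 40960) * 100.0 / 50 = 49152.0;
        # lower-scoring (cheaper) candidates are converted first.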
+ # Transfers with lower score (i.e. less expensive to convert) will be
+ # converted first.
+ conversion_candidates.sort(key=lambda x: x.score)
+
+ # TODO(xunchang), improve the logic to find the transfers to convert, e.g.
+ # convert the ones that contribute to the max stash, run ReviseStashSize
+ # multiple times etc.
+ removed_stashed_blocks = 0
+ for xf, used_stash_blocks, _ in conversion_candidates:
+ logger.info("Converting %s to new", xf.tgt_name)
+ xf.ConvertToNew()
+ removed_stashed_blocks += used_stash_blocks
+ # Experiments show that we will get a smaller package size if we remove
+ # slightly more stashed blocks than the violated stash blocks.
+ if removed_stashed_blocks >= violated_stash_blocks:
+ break
+
+ logger.info("Removed %d stashed blocks", removed_stashed_blocks)
+
def FindTransfers(self):
"""Parse the file_map to generate all the transfers."""
@@ -1412,9 +1613,9 @@ class BlockImageDiff(object):
if tgt_changed < tgt_size * crop_threshold:
assert tgt_changed + tgt_skipped.size() == tgt_size
- print('%10d %10d (%6.2f%%) %s' % (
- tgt_skipped.size(), tgt_size,
- tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
+ logger.info(
+ '%10d %10d (%6.2f%%) %s', tgt_skipped.size(), tgt_size,
+ tgt_skipped.size() * 100.0 / tgt_size, tgt_name)
AddSplitTransfers(
"%s-skipped" % (tgt_name,),
"%s-skipped" % (src_name,),
@@ -1528,9 +1729,9 @@ class BlockImageDiff(object):
"--block-limit={}".format(max_blocks_per_transfer),
"--split-info=" + patch_info_file,
src_file, tgt_file, patch_file]
- p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- imgdiff_output, _ = p.communicate()
- assert p.returncode == 0, \
+ proc = common.Run(cmd)
+ imgdiff_output, _ = proc.communicate()
+ assert proc.returncode == 0, \
"Failed to create imgdiff patch between {} and {}:\n{}".format(
src_name, tgt_name, imgdiff_output)
@@ -1555,7 +1756,7 @@ class BlockImageDiff(object):
split_src_ranges,
patch_content))
- print("Finding transfers...")
+ logger.info("Finding transfers...")
large_apks = []
split_large_apks = []
@@ -1580,6 +1781,9 @@ class BlockImageDiff(object):
AddTransfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
continue
+ elif tgt_fn == "__HASHTREE":
+ continue
+
elif tgt_fn in self.src.file_map:
# Look for an exact pathname match in the source.
AddTransfer(tgt_fn, tgt_fn, tgt_ranges, self.src.file_map[tgt_fn],
@@ -1623,7 +1827,7 @@ class BlockImageDiff(object):
self.tgt.RangeSha1(tgt_ranges),
self.src.RangeSha1(src_ranges),
"diff", self.transfers)
- transfer_split.patch = patch
+ transfer_split.patch_info = PatchInfo(True, patch)
def AbbreviateSourceNames(self):
for k in self.src.file_map.keys():
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index ca96d01cc5..4136ed432e 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -24,295 +24,90 @@ Usage: build_image.py input_directory properties_file output_image \\
from __future__ import print_function
+import logging
import os
import os.path
import re
-import shlex
import shutil
-import subprocess
import sys
import common
-import sparse_img
+import verity_utils
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
+BLOCK_SIZE = common.BLOCK_SIZE
+BYTES_IN_MB = 1024 * 1024
-FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
-BLOCK_SIZE = 4096
+class BuildImageError(Exception):
+ """An Exception raised during image building."""
-def RunCommand(cmd, verbose=None):
- """Echo and run the given command.
+ def __init__(self, message):
+ Exception.__init__(self, message)
- Args:
- cmd: the command represented as a list of strings.
- verbose: show commands being executed.
- Returns:
- A tuple of the output and the exit code.
- """
- if verbose is None:
- verbose = OPTIONS.verbose
- if verbose:
- print("Running: " + " ".join(cmd))
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- output, _ = p.communicate()
-
- if verbose:
- print(output.rstrip())
- return (output, p.returncode)
-
-
-def GetVerityFECSize(partition_size):
- cmd = ["fec", "-s", str(partition_size)]
- output, exit_code = RunCommand(cmd, False)
- if exit_code != 0:
- return False, 0
- return True, int(output)
-
-
-def GetVerityTreeSize(partition_size):
- cmd = ["build_verity_tree", "-s", str(partition_size)]
- output, exit_code = RunCommand(cmd, False)
- if exit_code != 0:
- return False, 0
- return True, int(output)
-
-
-def GetVerityMetadataSize(partition_size):
- cmd = ["system/extras/verity/build_verity_metadata.py", "size",
- str(partition_size)]
- output, exit_code = RunCommand(cmd, False)
- if exit_code != 0:
- return False, 0
- return True, int(output)
-
-
-def GetVeritySize(partition_size, fec_supported):
- success, verity_tree_size = GetVerityTreeSize(partition_size)
- if not success:
- return 0
- success, verity_metadata_size = GetVerityMetadataSize(partition_size)
- if not success:
- return 0
- verity_size = verity_tree_size + verity_metadata_size
- if fec_supported:
- success, fec_size = GetVerityFECSize(partition_size + verity_size)
- if not success:
- return 0
- return verity_size + fec_size
- return verity_size
-
-
-def GetSimgSize(image_file):
- simg = sparse_img.SparseImage(image_file, build_map=False)
- return simg.blocksize * simg.total_blocks
-
-
-def ZeroPadSimg(image_file, pad_size):
- blocks = pad_size // BLOCK_SIZE
- print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
- simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
- simg.AppendFillChunk(0, blocks)
-
-
-def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
- """Calculates max image size for a given partition size.
-
- Args:
- avbtool: String with path to avbtool.
- footer_type: 'hash' or 'hashtree' for generating footer.
- partition_size: The size of the partition in question.
- additional_args: Additional arguments to pass to 'avbtool
- add_hashtree_image'.
- Returns:
- The maximum image size or 0 if an error occurred.
- """
- cmd = [avbtool, "add_%s_footer" % footer_type,
- "--partition_size", partition_size, "--calc_max_image_size"]
- cmd.extend(shlex.split(additional_args))
-
- (output, exit_code) = RunCommand(cmd)
- if exit_code != 0:
- return 0
- else:
- return int(output)
-
-def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
- partition_name, key_path, algorithm, salt,
- additional_args):
- """Adds dm-verity hashtree and AVB metadata to an image.
+def GetDiskUsage(path):
+ """Returns the number of bytes that "path" occupies on host.
Args:
- image_path: Path to image to modify.
- avbtool: String with path to avbtool.
- footer_type: 'hash' or 'hashtree' for generating footer.
- partition_size: The size of the partition in question.
- partition_name: The name of the partition - will be embedded in metadata.
- key_path: Path to key to use or None.
- algorithm: Name of algorithm to use or None.
- salt: The salt to use (a hexadecimal string) or None.
- additional_args: Additional arguments to pass to 'avbtool
- add_hashtree_image'.
+ path: The directory or file to calculate size on.
Returns:
- True if the operation succeeded.
+ The number of bytes occupied, computed from du's 1 KiB block count.
"""
- cmd = [avbtool, "add_%s_footer" % footer_type,
- "--partition_size", partition_size,
- "--partition_name", partition_name,
- "--image", image_path]
-
- if key_path and algorithm:
- cmd.extend(["--key", key_path, "--algorithm", algorithm])
- if salt:
- cmd.extend(["--salt", salt])
-
- cmd.extend(shlex.split(additional_args))
-
- (_, exit_code) = RunCommand(cmd)
- return exit_code == 0
+ cmd = ["du", "-k", "-s", path]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
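+ # `du -k` reports usage in 1 KiB blocks; convert the first field to bytes.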
+ return int(output.split()[0]) * 1024
-def AdjustPartitionSizeForVerity(partition_size, fec_supported):
- """Modifies the provided partition size to account for the verity metadata.
-
- This information is used to size the created image appropriately.
+def GetInodeUsage(path):
+ """Returns the number of inodes that "path" occupies on host.
Args:
- partition_size: the size of the partition to be verified.
+ path: The directory or file to calculate inode number on.
Returns:
- A tuple of the size of the partition adjusted for verity metadata, and
- the size of verity metadata.
+ The number of inodes used.
"""
- key = "%d %d" % (partition_size, fec_supported)
- if key in AdjustPartitionSizeForVerity.results:
- return AdjustPartitionSizeForVerity.results[key]
-
- hi = partition_size
- if hi % BLOCK_SIZE != 0:
- hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
-
- # verity tree and fec sizes depend on the partition size, which
- # means this estimate is always going to be unnecessarily small
- verity_size = GetVeritySize(hi, fec_supported)
- lo = partition_size - verity_size
- result = lo
-
- # do a binary search for the optimal size
- while lo < hi:
- i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
- v = GetVeritySize(i, fec_supported)
- if i + v <= partition_size:
- if result < i:
- result = i
- verity_size = v
- lo = i + BLOCK_SIZE
- else:
- hi = i
-
- if OPTIONS.verbose:
- print("Adjusted partition size for verity, partition_size: {},"
- " verity_size: {}".format(result, verity_size))
- AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
- return (result, verity_size)
-
+ cmd = ["find", path, "-print"]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ # Add a cushion of more than 4%: the raw count of files and directories
+ # understates the number of inodes the filesystem will actually need.
+ inodes = output.count('\n')
+ spare_inodes = inodes * 4 // 100
+ min_spare_inodes = 12
+ if spare_inodes < min_spare_inodes:
+ spare_inodes = min_spare_inodes
+ return inodes + spare_inodes
-AdjustPartitionSizeForVerity.results = {}
-
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
- padding_size):
- cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
- verity_path, verity_fec_path]
- output, exit_code = RunCommand(cmd)
- if exit_code != 0:
- print("Could not build FEC data! Error: %s" % output)
- return False
- return True
-
-
-def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
- cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
- verity_image_path]
- output, exit_code = RunCommand(cmd)
- if exit_code != 0:
- print("Could not build verity tree! Error: %s" % output)
- return False
- root, salt = output.split()
- prop_dict["verity_root_hash"] = root
- prop_dict["verity_salt"] = salt
- return True
-
-
-def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
- block_device, signer_path, key, signer_args,
- verity_disable):
- cmd = ["system/extras/verity/build_verity_metadata.py", "build",
- str(image_size), verity_metadata_path, root_hash, salt, block_device,
- signer_path, key]
- if signer_args:
- cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
- if verity_disable:
- cmd.append("--verity_disable")
- output, exit_code = RunCommand(cmd)
- if exit_code != 0:
- print("Could not build verity metadata! Error: %s" % output)
- return False
- return True
-
-
-def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
- """Appends the unsparse image to the given sparse image.
+def GetFilesystemCharacteristics(image_path, sparse_image=True):
+ """Returns various filesystem characteristics of "image_path".
Args:
- sparse_image_path: the path to the (sparse) image
- unsparse_image_path: the path to the (unsparse) image
+ image_path: The file to analyze.
+ sparse_image: Whether the image file is in sparse format.
+
Returns:
- True on success, False on failure.
+ The characteristics dictionary.
"""
- cmd = ["append2simg", sparse_image_path, unsparse_image_path]
- output, exit_code = RunCommand(cmd)
- if exit_code != 0:
- print("%s: %s" % (error_message, output))
- return False
- return True
-
+ unsparse_image_path = image_path
+ if sparse_image:
+ unsparse_image_path = UnsparseImage(image_path, replace=False)
-def Append(target, file_to_append, error_message):
- """Appends file_to_append to target."""
+ cmd = ["tune2fs", "-l", unsparse_image_path]
try:
- with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
- for line in input_file:
- out_file.write(line)
- except IOError:
- print(error_message)
- return False
- return True
-
-
-def BuildVerifiedImage(data_image_path, verity_image_path,
- verity_metadata_path, verity_fec_path,
- padding_size, fec_supported):
- if not Append(verity_image_path, verity_metadata_path,
- "Could not append verity metadata!"):
- return False
-
- if fec_supported:
- # build FEC for the entire partition, including metadata
- if not BuildVerityFEC(data_image_path, verity_image_path,
- verity_fec_path, padding_size):
- return False
-
- if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"):
- return False
-
- if not Append2Simg(data_image_path, verity_image_path,
- "Could not append verity data!"):
- return False
- return True
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ finally:
+ if sparse_image:
+ os.remove(unsparse_image_path)
+ fs_dict = {}
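+ # tune2fs -l prints "key: value" lines (e.g. "Block size:  4096"); keep only
+ # the lines that split cleanly into a single key/value pair.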
+ for line in output.splitlines():
+ fields = line.split(":")
+ if len(fields) == 2:
+ fs_dict[fields[0].strip()] = fields[1].strip()
+ return fs_dict
def UnsparseImage(sparse_image_path, replace=True):
@@ -323,81 +118,75 @@ def UnsparseImage(sparse_image_path, replace=True):
if replace:
os.unlink(unsparse_image_path)
else:
- return True, unsparse_image_path
+ return unsparse_image_path
inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
- (inflate_output, exit_code) = RunCommand(inflate_command)
- if exit_code != 0:
- print("Error: '%s' failed with exit code %d:\n%s" % (
- inflate_command, exit_code, inflate_output))
+ try:
+ common.RunAndCheckOutput(inflate_command)
+ except:
os.remove(unsparse_image_path)
- return False, None
- return True, unsparse_image_path
-
+ raise
+ return unsparse_image_path
-def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
- """Creates an image that is verifiable using dm-verity.
- Args:
- out_file: the location to write the verifiable image at
- prop_dict: a dictionary of properties required for image creation and
- verification
- Returns:
- True on success, False otherwise.
- """
- # get properties
- image_size = int(prop_dict["partition_size"])
- block_dev = prop_dict["verity_block_device"]
- signer_key = prop_dict["verity_key"] + ".pk8"
- if OPTIONS.verity_signer_path is not None:
- signer_path = OPTIONS.verity_signer_path
- else:
- signer_path = prop_dict["verity_signer_cmd"]
- signer_args = OPTIONS.verity_signer_args
-
- # make a tempdir
- tempdir_name = common.MakeTempDir(suffix="_verity_images")
-
- # get partial image paths
- verity_image_path = os.path.join(tempdir_name, "verity.img")
- verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
- verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
+def ConvertBlockMapToBaseFs(block_map_file):
+ base_fs_file = common.MakeTempFile(prefix="script_gen_", suffix=".base_fs")
+ convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
+ common.RunAndCheckOutput(convert_command)
+ return base_fs_file
- # build the verity tree and get the root hash and salt
- if not BuildVerityTree(out_file, verity_image_path, prop_dict):
- return False
- # build the metadata blocks
- root_hash = prop_dict["verity_root_hash"]
- salt = prop_dict["verity_salt"]
- verity_disable = "verity_disable" in prop_dict
- if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
- block_dev, signer_path, signer_key, signer_args,
- verity_disable):
- return False
+def SetUpInDirAndFsConfig(origin_in, prop_dict):
+ """Returns the in_dir and fs_config that should be used for image building.
- # build the full verified image
- target_size = int(prop_dict["original_partition_size"])
- verity_size = int(prop_dict["verity_size"])
+ When building system.img for all targets, it creates and returns a staged dir
+ that combines the contents of /system (i.e. in the given in_dir) and root.
- padding_size = target_size - image_size - verity_size
- assert padding_size >= 0
+ Args:
+ origin_in: Path to the input directory.
+ prop_dict: A property dict that contains info like partition size. Values
+ may be updated.
- if not BuildVerifiedImage(out_file,
- verity_image_path,
- verity_metadata_path,
- verity_fec_path,
- padding_size,
- fec_supported):
- return False
+ Returns:
+ A tuple of in_dir and fs_config that should be used to build the image.
+ """
+ fs_config = prop_dict.get("fs_config")
- return True
+ if prop_dict["mount_point"] == "system_other":
+ prop_dict["mount_point"] = "system"
+ return origin_in, fs_config
+ if prop_dict["mount_point"] != "system":
+ return origin_in, fs_config
-def ConvertBlockMapToBaseFs(block_map_file):
- base_fs_file = common.MakeTempFile(prefix="script_gen_", suffix=".base_fs")
- convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
- (_, exit_code) = RunCommand(convert_command)
- return base_fs_file if exit_code == 0 else None
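+ # A previous invocation stashed its (in_dir, fs_config) pair in prop_dict as
+ # "first_pass"; reuse it instead of staging the root directory again.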
+ if "first_pass" in prop_dict:
+ prop_dict["mount_point"] = "/"
+ return prop_dict["first_pass"]
+
+ # Construct a staging directory of the root file system.
+ in_dir = common.MakeTempDir()
+ root_dir = prop_dict.get("root_dir")
+ if root_dir:
+ shutil.rmtree(in_dir)
+ shutil.copytree(root_dir, in_dir, symlinks=True)
+ in_dir_system = os.path.join(in_dir, "system")
+ shutil.rmtree(in_dir_system, ignore_errors=True)
+ shutil.copytree(origin_in, in_dir_system, symlinks=True)
+
+ # Change the mount point to "/".
+ prop_dict["mount_point"] = "/"
+ if fs_config:
+ # We need to merge the fs_config files of system and root.
+ merged_fs_config = common.MakeTempFile(
+ prefix="merged_fs_config", suffix=".txt")
+ with open(merged_fs_config, "w") as fw:
+ if "root_fs_config" in prop_dict:
+ with open(prop_dict["root_fs_config"]) as fr:
+ fw.writelines(fr.readlines())
+ with open(fs_config) as fr:
+ fw.writelines(fr.readlines())
+ fs_config = merged_fs_config
+ prop_dict["first_pass"] = (in_dir, fs_config)
+ return in_dir, fs_config
def CheckHeadroom(ext4fs_output, prop_dict):
@@ -412,11 +201,9 @@ def CheckHeadroom(ext4fs_output, prop_dict):
ext4fs_output: The output string from mke2fs command.
prop_dict: The property dict.
- Returns:
- The check result.
-
Raises:
AssertionError: On invalid input.
+ BuildImageError: On check failure.
"""
assert ext4fs_output is not None
assert prop_dict.get('fs_type', '').startswith('ext4')
@@ -430,95 +217,38 @@ def CheckHeadroom(ext4fs_output, prop_dict):
m = ext4fs_stats.match(last_line)
used_blocks = int(m.groupdict().get('used_blocks'))
total_blocks = int(m.groupdict().get('total_blocks'))
- headroom_blocks = int(prop_dict['partition_headroom']) / BLOCK_SIZE
+ headroom_blocks = int(prop_dict['partition_headroom']) // BLOCK_SIZE
adjusted_blocks = total_blocks - headroom_blocks
if used_blocks > adjusted_blocks:
mount_point = prop_dict["mount_point"]
- print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
- "headroom: %d blocks, available: %d blocks)" % (
- mount_point, total_blocks, used_blocks, headroom_blocks,
- adjusted_blocks))
- return False
- return True
+ raise BuildImageError(
+ "Error: Not enough room on {} (total: {} blocks, used: {} blocks, "
+ "headroom: {} blocks, available: {} blocks)".format(
+ mount_point, total_blocks, used_blocks, headroom_blocks,
+ adjusted_blocks))
-def BuildImage(in_dir, prop_dict, out_file, target_out=None):
- """Build an image to out_file from in_dir with property prop_dict.
+def BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config):
+ """Builds a pure image for the files under in_dir and writes it to out_file.
Args:
- in_dir: path of input directory.
- prop_dict: property dictionary.
- out_file: path of the output image file.
- target_out: path of the product out directory to read device specific FS
- config files.
+ in_dir: Path to input directory.
+ prop_dict: A property dict that contains info like partition size. Values
+ will be updated with computed values.
+ out_file: The output image file.
+ target_out: Path to the TARGET_OUT directory as in Makefile. It actually
+ points to the /system directory under PRODUCT_OUT. fs_config (the one
+ under system/core/libcutils) reads device specific FS config files from
+ there.
+ fs_config: The fs_config file that specifies ownership and permissions for
+ the files in the image.
- Returns:
- True iff the image is built successfully.
+ Raises:
+ BuildImageError: On build image failures.
"""
- # system_root_image=true: build a system.img that combines the contents of
- # /system and the ramdisk, and can be mounted at the root of the file system.
- origin_in = in_dir
- fs_config = prop_dict.get("fs_config")
- if (prop_dict.get("system_root_image") == "true" and
- prop_dict["mount_point"] == "system"):
- in_dir = common.MakeTempDir()
- # Change the mount point to "/".
- prop_dict["mount_point"] = "/"
- if fs_config:
- # We need to merge the fs_config files of system and ramdisk.
- merged_fs_config = common.MakeTempFile(prefix="root_fs_config",
- suffix=".txt")
- with open(merged_fs_config, "w") as fw:
- if "ramdisk_fs_config" in prop_dict:
- with open(prop_dict["ramdisk_fs_config"]) as fr:
- fw.writelines(fr.readlines())
- with open(fs_config) as fr:
- fw.writelines(fr.readlines())
- fs_config = merged_fs_config
-
build_command = []
fs_type = prop_dict.get("fs_type", "")
run_e2fsck = False
- fs_spans_partition = True
- if fs_type.startswith("squash"):
- fs_spans_partition = False
-
- is_verity_partition = "verity_block_device" in prop_dict
- verity_supported = prop_dict.get("verity") == "true"
- verity_fec_supported = prop_dict.get("verity_fec") == "true"
-
- # Adjust the partition size to make room for the hashes if this is to be
- # verified.
- if verity_supported and is_verity_partition:
- partition_size = int(prop_dict.get("partition_size"))
- (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(
- partition_size, verity_fec_supported)
- if not adjusted_size:
- return False
- prop_dict["partition_size"] = str(adjusted_size)
- prop_dict["original_partition_size"] = str(partition_size)
- prop_dict["verity_size"] = str(verity_size)
-
- # Adjust partition size for AVB hash footer or AVB hashtree footer.
- avb_footer_type = ''
- if prop_dict.get("avb_hash_enable") == "true":
- avb_footer_type = 'hash'
- elif prop_dict.get("avb_hashtree_enable") == "true":
- avb_footer_type = 'hashtree'
-
- if avb_footer_type:
- avbtool = prop_dict["avb_avbtool"]
- partition_size = prop_dict["partition_size"]
- # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
- additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
- max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type,
- partition_size, additional_args)
- if max_image_size == 0:
- return False
- prop_dict["partition_size"] = str(max_image_size)
- prop_dict["original_partition_size"] = partition_size
-
if fs_type.startswith("ext"):
build_command = [prop_dict["ext_mkuserimg"]]
if "extfs_sparse_flag" in prop_dict:
@@ -526,7 +256,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
run_e2fsck = True
build_command.extend([in_dir, out_file, fs_type,
prop_dict["mount_point"]])
- build_command.append(prop_dict["partition_size"])
+ build_command.append(prop_dict["image_size"])
if "journal_size" in prop_dict:
build_command.extend(["-j", prop_dict["journal_size"]])
if "timestamp" in prop_dict:
@@ -539,8 +269,6 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
build_command.extend(["-B", prop_dict["block_list"]])
if "base_fs_file" in prop_dict:
base_fs_file = ConvertBlockMapToBaseFs(prop_dict["base_fs_file"])
- if base_fs_file is None:
- return False
build_command.extend(["-d", base_fs_file])
build_command.extend(["-L", prop_dict["mount_point"]])
if "extfs_inode_count" in prop_dict:
@@ -552,13 +280,14 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
if "flash_logical_block_size" in prop_dict:
build_command.extend(["-o", prop_dict["flash_logical_block_size"]])
# Specify UUID and hash_seed if using mke2fs.
- if prop_dict["ext_mkuserimg"] == "mkuserimg_mke2fs.sh":
+ if prop_dict["ext_mkuserimg"] == "mkuserimg_mke2fs":
if "uuid" in prop_dict:
build_command.extend(["-U", prop_dict["uuid"]])
if "hash_seed" in prop_dict:
build_command.extend(["-S", prop_dict["hash_seed"]])
if "ext4_share_dup_blocks" in prop_dict:
build_command.append("-c")
+ build_command.extend(["--inode_size", "256"])
if "selinux_fc" in prop_dict:
build_command.append(prop_dict["selinux_fc"])
elif fs_type.startswith("squash"):
@@ -585,7 +314,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
build_command.extend(["-a"])
elif fs_type.startswith("f2fs"):
build_command = ["mkf2fsuserimg.sh"]
- build_command.extend([out_file, prop_dict["partition_size"]])
+ build_command.extend([out_file, prop_dict["image_size"]])
if fs_config:
build_command.extend(["-C", fs_config])
build_command.extend(["-f", in_dir])
@@ -598,79 +327,165 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None):
build_command.extend(["-T", str(prop_dict["timestamp"])])
build_command.extend(["-L", prop_dict["mount_point"]])
else:
- print("Error: unknown filesystem type '%s'" % (fs_type))
- return False
+ raise BuildImageError(
+ "Error: unknown filesystem type: {}".format(fs_type))
- if in_dir != origin_in:
- # Construct a staging directory of the root file system.
- ramdisk_dir = prop_dict.get("ramdisk_dir")
- if ramdisk_dir:
- shutil.rmtree(in_dir)
- shutil.copytree(ramdisk_dir, in_dir, symlinks=True)
- staging_system = os.path.join(in_dir, "system")
- shutil.rmtree(staging_system, ignore_errors=True)
- shutil.copytree(origin_in, staging_system, symlinks=True)
-
- (mkfs_output, exit_code) = RunCommand(build_command)
- if exit_code != 0:
- print("Error: '%s' failed with exit code %d:\n%s" % (
- build_command, exit_code, mkfs_output))
- return False
-
- # Check if there's enough headroom space available for ext4 image.
- if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
- if not CheckHeadroom(mkfs_output, prop_dict):
- return False
-
- if not fs_spans_partition:
- mount_point = prop_dict.get("mount_point")
- partition_size = int(prop_dict.get("partition_size"))
- image_size = GetSimgSize(out_file)
- if image_size > partition_size:
- print("Error: %s image size of %d is larger than partition size of "
- "%d" % (mount_point, image_size, partition_size))
- return False
- if verity_supported and is_verity_partition:
- ZeroPadSimg(out_file, partition_size - image_size)
-
- # Create the verified image if this is to be verified.
- if verity_supported and is_verity_partition:
- if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
- return False
-
- # Add AVB HASH or HASHTREE footer (metadata).
- if avb_footer_type:
- avbtool = prop_dict["avb_avbtool"]
- original_partition_size = prop_dict["original_partition_size"]
- partition_name = prop_dict["partition_name"]
- # key_path and algorithm are only available when chain partition is used.
- key_path = prop_dict.get("avb_key_path")
- algorithm = prop_dict.get("avb_algorithm")
- salt = prop_dict.get("avb_salt")
- # avb_add_hash_footer_args or avb_add_hashtree_footer_args
- additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
- if not AVBAddFooter(out_file, avbtool, avb_footer_type,
- original_partition_size, partition_name, key_path,
- algorithm, salt, additional_args):
- return False
+ try:
+ mkfs_output = common.RunAndCheckOutput(build_command)
+ except:
+ try:
+ du = GetDiskUsage(in_dir)
+ du_str = "{} bytes ({} MB)".format(du, du // BYTES_IN_MB)
+ # Suppress any errors from GetDiskUsage() to avoid hiding the real errors
+ # from common.RunAndCheckOutput().
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Failed to compute disk usage with du")
+ du_str = "unknown"
+ print(
+ "Out of space? Out of inodes? The tree size of {} is {}, "
+ "with reserved space of {} bytes ({} MB).".format(
+ in_dir, du_str,
+ int(prop_dict.get("partition_reserved_size", 0)),
+ int(prop_dict.get("partition_reserved_size", 0)) // BYTES_IN_MB))
+ print(
+ "The max image size for filesystem files is {} bytes ({} MB), out of a "
+ "total partition size of {} bytes ({} MB).".format(
+ int(prop_dict["image_size"]),
+ int(prop_dict["image_size"]) // BYTES_IN_MB,
+ int(prop_dict["partition_size"]),
+ int(prop_dict["partition_size"]) // BYTES_IN_MB))
+ raise
if run_e2fsck and prop_dict.get("skip_fsck") != "true":
- success, unsparse_image = UnsparseImage(out_file, replace=False)
- if not success:
- return False
+ unsparse_image = UnsparseImage(out_file, replace=False)
# Run e2fsck on the inflated image file
e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
- (e2fsck_output, exit_code) = RunCommand(e2fsck_command)
+ try:
+ common.RunAndCheckOutput(e2fsck_command)
+ finally:
+ os.remove(unsparse_image)
+
+ return mkfs_output
- os.remove(unsparse_image)
- if exit_code != 0:
- print("Error: '%s' failed with exit code %d:\n%s" % (
- e2fsck_command, exit_code, e2fsck_output))
- return False
+def BuildImage(in_dir, prop_dict, out_file, target_out=None):
+ """Builds an image for the files under in_dir and writes it to out_file.
- return True
+ Args:
+ in_dir: Path to input directory.
+ prop_dict: A property dict that contains info like partition size. Values
+ will be updated with computed values.
+ out_file: The output image file.
+ target_out: Path to the TARGET_OUT directory as in Makefile. It actually
+ points to the /system directory under PRODUCT_OUT. fs_config (the one
+ under system/core/libcutils) reads device specific FS config files from
+ there.
+
+ Raises:
+ BuildImageError: On build image failures.
+ """
+ in_dir, fs_config = SetUpInDirAndFsConfig(in_dir, prop_dict)
+
+ build_command = []
+ fs_type = prop_dict.get("fs_type", "")
+
+ fs_spans_partition = True
+ if fs_type.startswith("squash"):
+ fs_spans_partition = False
+
+ # Get a builder for creating an image that's to be verified by Verified Boot,
+ # or None if not applicable.
+ verity_image_builder = verity_utils.CreateVerityImageBuilder(prop_dict)
+
+ if (prop_dict.get("use_dynamic_partition_size") == "true" and
+ "partition_size" not in prop_dict):
+ # If partition_size is not defined, use output of `du' + reserved_size.
+ size = GetDiskUsage(in_dir)
+ logger.info(
+ "The tree size of %s is %d MB.", in_dir, size // BYTES_IN_MB)
+ # If not specified, allow a 16MB margin to absorb GetDiskUsage() estimation error.
+ reserved_size = int(prop_dict.get("partition_reserved_size", BYTES_IN_MB * 16))
+ partition_headroom = int(prop_dict.get("partition_headroom", 0))
+ if fs_type.startswith("ext4") and partition_headroom > reserved_size:
+ reserved_size = partition_headroom
+ size += reserved_size
+ # Round this up to a multiple of 4K so that avbtool works
+ size = common.RoundUpTo4K(size)
+ if fs_type.startswith("ext"):
+ prop_dict["partition_size"] = str(size)
+ prop_dict["image_size"] = str(size)
+ if "extfs_inode_count" not in prop_dict:
+ prop_dict["extfs_inode_count"] = str(GetInodeUsage(in_dir))
+ logger.info(
+ "First Pass based on estimates of %d MB and %s inodes.",
+ size // BYTES_IN_MB, prop_dict["extfs_inode_count"])
+ BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+ sparse_image = False
+ if "extfs_sparse_flag" in prop_dict:
+ sparse_image = True
+ fs_dict = GetFilesystemCharacteristics(out_file, sparse_image)
+ os.remove(out_file)
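+ # Second pass: shrink the first-pass estimate by the free space left in the
+ # trial image, then re-apply the reserved space (or a 0.3% margin when no
+ # reserved space is configured).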
+ block_size = int(fs_dict.get("Block size", "4096"))
+ free_size = int(fs_dict.get("Free blocks", "0")) * block_size
+ reserved_size = int(prop_dict.get("partition_reserved_size", 0))
+ partition_headroom = int(fs_dict.get("partition_headroom", 0))
+ if fs_type.startswith("ext4") and partition_headroom > reserved_size:
+ reserved_size = partition_headroom
+ if free_size <= reserved_size:
+ logger.info(
+ "Not worth reducing image %d <= %d.", free_size, reserved_size)
+ else:
+ size -= free_size
+ size += reserved_size
+ if reserved_size == 0:
+ # add .3% margin
+ size = size * 1003 // 1000
+ # Use a minimum size, otherwise we will fail to calculate an AVB footer
+ # or fail to construct an ext4 image.
+ size = max(size, 256 * 1024)
+ if block_size <= 4096:
+ size = common.RoundUpTo4K(size)
+ else:
+ size = ((size + block_size - 1) // block_size) * block_size
+ extfs_inode_count = prop_dict["extfs_inode_count"]
+ inodes = int(fs_dict.get("Inode count", extfs_inode_count))
+ inodes -= int(fs_dict.get("Free inodes", "0"))
+ # add .2% margin or 1 inode, whichever is greater
+ spare_inodes = inodes * 2 // 1000
+ min_spare_inodes = 1
+ if spare_inodes < min_spare_inodes:
+ spare_inodes = min_spare_inodes
+ inodes += spare_inodes
+ prop_dict["extfs_inode_count"] = str(inodes)
+ prop_dict["partition_size"] = str(size)
+ logger.info(
+ "Allocating %d Inodes for %s.", inodes, out_file)
+ if verity_image_builder:
+ size = verity_image_builder.CalculateDynamicPartitionSize(size)
+ prop_dict["partition_size"] = str(size)
+ logger.info(
+ "Allocating %d MB for %s.", size // BYTES_IN_MB, out_file)
+
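+ # image_size is what gets handed to mkfs; it matches partition_size unless a
+ # verity/AVB footer must also fit, in which case it is reduced below.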
+ prop_dict["image_size"] = prop_dict["partition_size"]
+
+ # Adjust the image size to make room for the hashes if this is to be verified.
+ if verity_image_builder:
+ max_image_size = verity_image_builder.CalculateMaxImageSize()
+ prop_dict["image_size"] = str(max_image_size)
+
+ mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+
+ # Check if there's enough headroom space available for ext4 image.
+ if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
+ CheckHeadroom(mkfs_output, prop_dict)
+
+ if not fs_spans_partition and verity_image_builder:
+ verity_image_builder.PadSparseImage(out_file)
+
+ # Create the verified image if this is to be verified.
+ if verity_image_builder:
+ verity_image_builder.Build(out_file)
def ImagePropFromGlobalDict(glob_dict, mount_point):
@@ -715,6 +530,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
"avb_enable",
"avb_avbtool",
"avb_salt",
+ "use_dynamic_partition_size",
)
for p in common_props:
copy_prop(p, p)
@@ -736,8 +552,8 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
d["journal_size"] = "0"
copy_prop("system_verity_block_device", "verity_block_device")
copy_prop("system_root_image", "system_root_image")
- copy_prop("ramdisk_dir", "ramdisk_dir")
- copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
+ copy_prop("root_dir", "root_dir")
+ copy_prop("root_fs_config", "root_fs_config")
copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
copy_prop("system_squashfs_compressor", "squashfs_compressor")
copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
@@ -747,21 +563,22 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
copy_prop("system_extfs_inode_count", "extfs_inode_count")
if not copy_prop("system_extfs_rsv_pct", "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
+ copy_prop("system_reserved_size", "partition_reserved_size")
elif mount_point == "system_other":
# We inherit the selinux policies of /system since we contain some of its
# files.
- d["mount_point"] = "system"
- copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
- copy_prop("avb_system_add_hashtree_footer_args",
+ copy_prop("avb_system_other_hashtree_enable", "avb_hashtree_enable")
+ copy_prop("avb_system_other_add_hashtree_footer_args",
"avb_add_hashtree_footer_args")
- copy_prop("avb_system_key_path", "avb_key_path")
- copy_prop("avb_system_algorithm", "avb_algorithm")
+ copy_prop("avb_system_other_key_path", "avb_key_path")
+ copy_prop("avb_system_other_algorithm", "avb_algorithm")
copy_prop("fs_type", "fs_type")
copy_prop("system_fs_type", "fs_type")
- copy_prop("system_size", "partition_size")
+ copy_prop("system_other_size", "partition_size")
if not copy_prop("system_journal_size", "journal_size"):
d["journal_size"] = "0"
copy_prop("system_verity_block_device", "verity_block_device")
+ copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
copy_prop("system_squashfs_compressor", "squashfs_compressor")
copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -769,6 +586,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
copy_prop("system_extfs_inode_count", "extfs_inode_count")
if not copy_prop("system_extfs_rsv_pct", "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
+ copy_prop("system_reserved_size", "partition_reserved_size")
elif mount_point == "data":
# Copy the generic fs type first, override with specific one if available.
copy_prop("fs_type", "fs_type")
@@ -799,6 +617,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
if not copy_prop("vendor_extfs_rsv_pct", "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
+ copy_prop("vendor_reserved_size", "partition_reserved_size")
elif mount_point == "product":
copy_prop("avb_product_hashtree_enable", "avb_hashtree_enable")
copy_prop("avb_product_add_hashtree_footer_args",
@@ -810,6 +629,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
if not copy_prop("product_journal_size", "journal_size"):
d["journal_size"] = "0"
copy_prop("product_verity_block_device", "verity_block_device")
+ copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
copy_prop("product_squashfs_compressor", "squashfs_compressor")
copy_prop("product_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("product_squashfs_block_size", "squashfs_block_size")
@@ -818,12 +638,58 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
copy_prop("product_extfs_inode_count", "extfs_inode_count")
if not copy_prop("product_extfs_rsv_pct", "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
+ copy_prop("product_reserved_size", "partition_reserved_size")
+ elif mount_point == "product_services":
+ copy_prop("avb_product_services_hashtree_enable", "avb_hashtree_enable")
+ copy_prop("avb_product_services_add_hashtree_footer_args",
+ "avb_add_hashtree_footer_args")
+ copy_prop("avb_product_services_key_path", "avb_key_path")
+ copy_prop("avb_product_services_algorithm", "avb_algorithm")
+ copy_prop("product_services_fs_type", "fs_type")
+ copy_prop("product_services_size", "partition_size")
+ if not copy_prop("product_services_journal_size", "journal_size"):
+ d["journal_size"] = "0"
+ copy_prop("product_services_verity_block_device", "verity_block_device")
+ copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
+ copy_prop("product_services_squashfs_compressor", "squashfs_compressor")
+ copy_prop("product_services_squashfs_compressor_opt",
+ "squashfs_compressor_opt")
+ copy_prop("product_services_squashfs_block_size", "squashfs_block_size")
+ copy_prop("product_services_squashfs_disable_4k_align",
+ "squashfs_disable_4k_align")
+ copy_prop("product_services_base_fs_file", "base_fs_file")
+ copy_prop("product_services_extfs_inode_count", "extfs_inode_count")
+ if not copy_prop("product_services_extfs_rsv_pct", "extfs_rsv_pct"):
+ d["extfs_rsv_pct"] = "0"
+ copy_prop("product_services_reserved_size", "partition_reserved_size")
+ elif mount_point == "odm":
+ copy_prop("avb_odm_hashtree_enable", "avb_hashtree_enable")
+ copy_prop("avb_odm_add_hashtree_footer_args",
+ "avb_add_hashtree_footer_args")
+ copy_prop("avb_odm_key_path", "avb_key_path")
+ copy_prop("avb_odm_algorithm", "avb_algorithm")
+ copy_prop("odm_fs_type", "fs_type")
+ copy_prop("odm_size", "partition_size")
+ if not copy_prop("odm_journal_size", "journal_size"):
+ d["journal_size"] = "0"
+ copy_prop("odm_verity_block_device", "verity_block_device")
+ copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
+ copy_prop("odm_squashfs_compressor", "squashfs_compressor")
+ copy_prop("odm_squashfs_compressor_opt", "squashfs_compressor_opt")
+ copy_prop("odm_squashfs_block_size", "squashfs_block_size")
+ copy_prop("odm_squashfs_disable_4k_align", "squashfs_disable_4k_align")
+ copy_prop("odm_base_fs_file", "base_fs_file")
+ copy_prop("odm_extfs_inode_count", "extfs_inode_count")
+ if not copy_prop("odm_extfs_rsv_pct", "extfs_rsv_pct"):
+ d["extfs_rsv_pct"] = "0"
+ copy_prop("odm_reserved_size", "partition_reserved_size")
elif mount_point == "oem":
copy_prop("fs_type", "fs_type")
copy_prop("oem_size", "partition_size")
if not copy_prop("oem_journal_size", "journal_size"):
d["journal_size"] = "0"
copy_prop("oem_extfs_inode_count", "extfs_inode_count")
+ copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
if not copy_prop("oem_extfs_rsv_pct", "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
d["partition_name"] = mount_point
@@ -844,11 +710,36 @@ def LoadGlobalDict(filename):
return d
+def GlobalDictFromImageProp(image_prop, mount_point):
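+ # Reflect computed image properties back into the global dict: currently just
+ # the (possibly dynamically sized) partition_size, stored under the
+ # per-partition "*_size" key.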
+ d = {}
+ def copy_prop(src_p, dest_p):
+ if src_p in image_prop:
+ d[dest_p] = image_prop[src_p]
+ return True
+ return False
+
+ if mount_point == "system":
+ copy_prop("partition_size", "system_size")
+ elif mount_point == "system_other":
+ copy_prop("partition_size", "system_other_size")
+ elif mount_point == "vendor":
+ copy_prop("partition_size", "vendor_size")
+ elif mount_point == "odm":
+ copy_prop("partition_size", "odm_size")
+ elif mount_point == "product":
+ copy_prop("partition_size", "product_size")
+ elif mount_point == "product_services":
+ copy_prop("partition_size", "product_services_size")
+ return d
+
+
def main(argv):
if len(argv) != 4:
print(__doc__)
sys.exit(1)
+ common.InitLogging()
+
in_dir = argv[0]
glob_dict_file = argv[1]
out_file = argv[2]
@@ -856,7 +747,7 @@ def main(argv):
glob_dict = LoadGlobalDict(glob_dict_file)
if "mount_point" in glob_dict:
- # The caller knows the mount point and provides a dictionay needed by
+ # The caller knows the mount point and provides a dictionary needed by
# BuildImage().
image_properties = glob_dict
else:
@@ -872,20 +763,25 @@ def main(argv):
mount_point = "cache"
elif image_filename == "vendor.img":
mount_point = "vendor"
+ elif image_filename == "odm.img":
+ mount_point = "odm"
elif image_filename == "oem.img":
mount_point = "oem"
elif image_filename == "product.img":
mount_point = "product"
+ elif image_filename == "product_services.img":
+ mount_point = "product_services"
else:
- print("error: unknown image file name ", image_filename, file=sys.stderr)
+ logger.error("Unknown image file name %s", image_filename)
sys.exit(1)
image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
- if not BuildImage(in_dir, image_properties, out_file, target_out):
- print("error: failed to build %s from %s" % (out_file, in_dir),
- file=sys.stderr)
- sys.exit(1)
+ try:
+ BuildImage(in_dir, image_properties, out_file, target_out)
+ except:
+ logger.error("Failed to build %s from %s", out_file, in_dir)
+ raise
if __name__ == '__main__':
diff --git a/tools/releasetools/build_super_image.py b/tools/releasetools/build_super_image.py
new file mode 100755
index 0000000000..f63453d8e1
--- /dev/null
+++ b/tools/releasetools/build_super_image.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Usage: build_super_image input_file output_dir_or_file
+
+input_file: one of the following:
+ - directory containing extracted target files. It will load info from
+ META/misc_info.txt and build full super image / split images using source
+ images from IMAGES/.
+ - target files package. Same as above, but extracts the archive before
+ building super image.
+ - a dictionary file containing input arguments to build. Check
+ `dump-super-image-info' for details.
+ In addition:
+ - If source images should be included in the output image (for super.img
+ and super split images), the corresponding "*_image" entries should give
+ the path of each source image.
+
+output_dir_or_file:
+ If a single super image is built (for super_empty.img, or super.img for
+ launch devices), this argument is the output file.
+ If a collection of split images is built (for retrofit devices), this
+ argument is the output directory.
+"""
+
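+# Typical invocations (illustrative):
+#   build_super_image.py path/to/target_files.zip super.img
+#   build_super_image.py misc_info.txt super_empty.img
+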
+from __future__ import print_function
+
+import logging
+import os.path
+import shlex
+import sys
+import zipfile
+
+import common
+import sparse_img
+
+if sys.hexversion < 0x02070000:
+ print("Python 2.7 or newer is required.", file=sys.stderr)
+ sys.exit(1)
+
+logger = logging.getLogger(__name__)
+
+
+UNZIP_PATTERN = ["IMAGES/*", "META/*"]
+
+
+def GetArgumentsForImage(partition, group, image=None):
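+ # Builds the lpmake arguments for one logical partition. For example
+ # (illustrative values), ("system_a", "group_a", ".../system.img") yields
+ # ["--partition", "system_a:readonly:<image size>:group_a",
+ #  "--image", "system_a=.../system.img"].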
+ image_size = sparse_img.GetImagePartitionSize(image) if image else 0
+
+ cmd = ["--partition",
+ "{}:readonly:{}:{}".format(partition, image_size, group)]
+ if image:
+ cmd += ["--image", "{}={}".format(partition, image)]
+
+ return cmd
+
+
+def BuildSuperImageFromDict(info_dict, output):
+
+ cmd = [info_dict["lpmake"],
+ "--metadata-size", "65536",
+ "--super-name", info_dict["super_metadata_device"]]
+
+ ab_update = info_dict.get("ab_update") == "true"
+ retrofit = info_dict.get("dynamic_partition_retrofit") == "true"
+ block_devices = shlex.split(info_dict.get("super_block_devices", "").strip())
+ groups = shlex.split(info_dict.get("super_partition_groups", "").strip())
+
+ if ab_update and retrofit:
+ cmd += ["--metadata-slots", "2"]
+ elif ab_update:
+ cmd += ["--metadata-slots", "3"]
+ else:
+ cmd += ["--metadata-slots", "2"]
+
+ if ab_update and retrofit:
+ cmd.append("--auto-slot-suffixing")
+
+ for device in block_devices:
+ size = info_dict["super_{}_device_size".format(device)]
+ cmd += ["--device", "{}:{}".format(device, size)]
+
+ append_suffix = ab_update and not retrofit
+ has_image = False
+ for group in groups:
+ group_size = info_dict["super_{}_group_size".format(group)]
+ if append_suffix:
+ cmd += ["--group", "{}_a:{}".format(group, group_size),
+ "--group", "{}_b:{}".format(group, group_size)]
+ else:
+ cmd += ["--group", "{}:{}".format(group, group_size)]
+
+ partition_list = shlex.split(
+ info_dict["super_{}_partition_list".format(group)].strip())
+
+ for partition in partition_list:
+ image = info_dict.get("{}_image".format(partition))
+ if image:
+ has_image = True
+
+ if not append_suffix:
+ cmd += GetArgumentsForImage(partition, group, image)
+ continue
+
+ # For A/B devices, the super partition always contains sub-partitions in
+ # the _a slot, because this image should only be used for
+ # bootstrapping / initializing the device. When flashing the image, the
+ # bootloader (fastboot) should always mark the _a slot as bootable.
+ cmd += GetArgumentsForImage(partition + "_a", group + "_a", image)
+
+ other_image = None
+ if partition == "system" and "system_other_image" in info_dict:
+ other_image = info_dict["system_other_image"]
+ has_image = True
+
+ cmd += GetArgumentsForImage(partition + "_b", group + "_b", other_image)
+
+ if info_dict.get("build_non_sparse_super_partition") != "true":
+ cmd.append("--sparse")
+
+ cmd += ["--output", output]
+
+ common.RunAndCheckOutput(cmd)
+
+ if retrofit and has_image:
+ logger.info("Done writing images to directory %s", output)
+ else:
+ logger.info("Done writing image %s", output)
+
+ return True
+
+
+def BuildSuperImageFromExtractedTargetFiles(inp, out):
+ info_dict = common.LoadInfoDict(inp)
+ partition_list = shlex.split(
+ info_dict.get("dynamic_partition_list", "").strip())
+
+ if "system" in partition_list:
+ image_path = os.path.join(inp, "IMAGES", "system_other.img")
+ if os.path.isfile(image_path):
+ info_dict["system_other_image"] = image_path
+
+ missing_images = []
+ for partition in partition_list:
+ image_path = os.path.join(inp, "IMAGES", "{}.img".format(partition))
+ if not os.path.isfile(image_path):
+ missing_images.append(image_path)
+ else:
+ info_dict["{}_image".format(partition)] = image_path
+ if missing_images:
+ logger.warning("Skip building super image because the following "
+ "images are missing from target files:\n%s",
+ "\n".join(missing_images))
+ return False
+ return BuildSuperImageFromDict(info_dict, out)
+
+
+def BuildSuperImageFromTargetFiles(inp, out):
+ input_tmp = common.UnzipTemp(inp, UNZIP_PATTERN)
+ return BuildSuperImageFromExtractedTargetFiles(input_tmp, out)
+
+
+def BuildSuperImage(inp, out):
+
+ if isinstance(inp, dict):
+ logger.info("Building super image from info dict...")
+ return BuildSuperImageFromDict(inp, out)
+
+ if isinstance(inp, str):
+ if os.path.isdir(inp):
+ logger.info("Building super image from extracted target files...")
+ return BuildSuperImageFromExtractedTargetFiles(inp, out)
+
+ if zipfile.is_zipfile(inp):
+ logger.info("Building super image from target files...")
+ return BuildSuperImageFromTargetFiles(inp, out)
+
+ if os.path.isfile(inp):
+ with open(inp) as f:
+ lines = f.read()
+ logger.info("Building super image from info dict...")
+ return BuildSuperImageFromDict(common.LoadDictionaryFromLines(lines.split("\n")), out)
+
+ raise ValueError("{} is not a dictionary or a valid path".format(inp))
+
+
+def main(argv):
+
+ args = common.ParseOptions(argv, __doc__)
+
+ if len(args) != 2:
+ common.Usage(__doc__)
+ sys.exit(1)
+
+ common.InitLogging()
+
+ BuildSuperImage(args[0], args[1])
+
+
+if __name__ == "__main__":
+ try:
+ common.CloseInheritedPipes()
+ main(sys.argv[1:])
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
+ sys.exit(1)
+ finally:
+ common.Cleanup()
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 3cac90a014..7d3424b81a 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -21,17 +21,18 @@ Verify a given OTA package with the specified certificate.
from __future__ import print_function
import argparse
+import logging
import re
import subprocess
import sys
-import tempfile
import zipfile
-
from hashlib import sha1
from hashlib import sha256
import common
+logger = logging.getLogger(__name__)
+
def CertUsesSha256(cert):
"""Check if the cert uses SHA-256 hashing algorithm."""
@@ -165,11 +166,11 @@ def VerifyAbOtaPayload(cert, package):
cmd = ['delta_generator',
'--in_file=' + payload_file,
'--public_key=' + pubkey]
- proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = common.Run(cmd)
stdoutdata, _ = proc.communicate()
assert proc.returncode == 0, \
- 'Failed to verify payload with delta_generator: %s\n%s' % (package,
- stdoutdata)
+ 'Failed to verify payload with delta_generator: {}\n{}'.format(
+ package, stdoutdata)
common.ZipClose(package_zip)
# Verified successfully upon reaching here.
@@ -182,6 +183,8 @@ def main():
parser.add_argument('package', help='The OTA package to be verified.')
args = parser.parse_args()
+ common.InitLogging()
+
VerifyPackage(args.certificate, args.package)
VerifyAbOtaPayload(args.certificate, args.package)
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index b9f39a6f73..4b0d4c752d 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,6 +39,7 @@ Usage: check_target_file_signatures [flags] target_files
"""
+import logging
import os
import re
import subprocess
@@ -52,6 +53,8 @@ if sys.hexversion < 0x02070000:
sys.exit(1)
+logger = logging.getLogger(__name__)
+
# Work around a bug in Python's zipfile module that prevents opening of zipfiles
# if any entry has an extra field of between 1 and 3 bytes (which is common with
# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
@@ -165,6 +168,7 @@ def CertFromPKCS7(data, filename):
class APK(object):
+
def __init__(self, full_filename, filename):
self.filename = filename
self.certs = None
@@ -241,12 +245,12 @@ class TargetFiles(object):
# must decompress them individually before we perform any analysis.
# This is the list of wildcards of files we extract from |filename|.
- apk_extensions = ['*.apk']
+ apk_extensions = ['*.apk', '*.apex']
self.certmap, compressed_extension = common.ReadApkCerts(
- zipfile.ZipFile(filename, "r"))
+ zipfile.ZipFile(filename))
if compressed_extension:
- apk_extensions.append("*.apk" + compressed_extension)
+ apk_extensions.append('*.apk' + compressed_extension)
d = common.UnzipTemp(filename, apk_extensions)
self.apks = {}
@@ -269,7 +273,7 @@ class TargetFiles(object):
os.remove(os.path.join(dirpath, fn))
fn = uncompressed_fn
- if fn.endswith(".apk"):
+ if fn.endswith(('.apk', '.apex')):
fullname = os.path.join(dirpath, fn)
displayname = fullname[len(d)+1:]
apk = APK(fullname, displayname)
@@ -415,6 +419,8 @@ def main(argv):
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
ALL_CERTS.FindLocalCerts()
Push("input target_files:")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 6defb2b080..d743c3e2dd 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -14,12 +14,17 @@
from __future__ import print_function
+import collections
import copy
import errno
+import fnmatch
import getopt
import getpass
import gzip
import imp
+import json
+import logging
+import logging.config
import os
import platform
import re
@@ -37,14 +42,24 @@ from hashlib import sha1, sha256
import blockimgdiff
import sparse_img
+logger = logging.getLogger(__name__)
+
+
class Options(object):
def __init__(self):
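+ # Mirror the build system's OUT_DIR_COMMON_BASE handling: when it is set,
+ # build artifacts live under $OUT_DIR_COMMON_BASE/<source tree basename>
+ # rather than ./out.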
+ base_out_path = os.getenv('OUT_DIR_COMMON_BASE')
+ if base_out_path is None:
+ base_search_path = "out"
+ else:
+ base_search_path = os.path.join(base_out_path,
+ os.path.basename(os.getcwd()))
+
platform_search_path = {
- "linux2": "out/host/linux-x86",
- "darwin": "out/host/darwin-x86",
+ "linux2": os.path.join(base_search_path, "host/linux-x86"),
+ "darwin": os.path.join(base_search_path, "host/darwin-x86"),
}
- self.search_path = platform_search_path.get(sys.platform, None)
+ self.search_path = platform_search_path.get(sys.platform)
self.signapk_path = "framework/signapk.jar" # Relative to search_path
self.signapk_shared_library_path = "lib64" # Relative to search_path
self.extra_signapk_args = []
@@ -72,13 +87,19 @@ class Options(object):
OPTIONS = Options()
+# The block size that's used across the releasetools scripts.
+BLOCK_SIZE = 4096
# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
-
# The partitions allowed to be signed by AVB (Android verified boot 2.0).
-AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product', 'dtbo')
+AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product',
+ 'product_services', 'dtbo', 'odm')
+
+# Partitions that should have their care_map added to META/care_map.pb
+PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product', 'product_services',
+ 'odm')
class ErrorCode(object):
@@ -109,23 +130,126 @@ class ErrorCode(object):
TUNE_PARTITION_FAILURE = 3007
APPLY_PATCH_FAILURE = 3008
+
class ExternalError(RuntimeError):
pass
+def InitLogging():
+ DEFAULT_LOGGING_CONFIG = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'formatters': {
+ 'standard': {
+ 'format':
+ '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s',
+ 'datefmt': '%Y-%m-%d %H:%M:%S',
+ },
+ },
+ 'handlers': {
+ 'default': {
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'standard',
+ },
+ },
+ 'loggers': {
+ '': {
+ 'handlers': ['default'],
+ 'level': 'WARNING',
+ 'propagate': True,
+ }
+ }
+ }
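+ # LOGGING_CONFIG, if set, names a JSON file in logging.config.dictConfig
+ # format that replaces the default configuration above.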
+ env_config = os.getenv('LOGGING_CONFIG')
+ if env_config:
+ with open(env_config) as f:
+ config = json.load(f)
+ else:
+ config = DEFAULT_LOGGING_CONFIG
+
+ # Increase the logging level for verbose mode.
+ if OPTIONS.verbose:
+ config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
+ config['loggers']['']['level'] = 'INFO'
+
+ logging.config.dictConfig(config)
+
+
def Run(args, verbose=None, **kwargs):
- """Create and return a subprocess.Popen object.
+ """Creates and returns a subprocess.Popen object.
- Caller can specify if the command line should be printed. The global
- OPTIONS.verbose will be used if not specified.
+ Args:
+ args: The command represented as a list of strings.
+ verbose: Whether the commands should be shown. Defaults to the global
+ verbosity if unspecified.
+ kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+ stdin, etc. stdout and stderr will default to subprocess.PIPE and
+ subprocess.STDOUT respectively unless caller specifies any of them.
+
+ Returns:
+ A subprocess.Popen object.
"""
- if verbose is None:
- verbose = OPTIONS.verbose
- if verbose:
- print(" running: ", " ".join(args))
+ if 'stdout' not in kwargs and 'stderr' not in kwargs:
+ kwargs['stdout'] = subprocess.PIPE
+ kwargs['stderr'] = subprocess.STDOUT
+ # Don't log anything if the caller explicitly asked not to.
+ if verbose is not False:
+ logger.info(" Running: \"%s\"", " ".join(args))
return subprocess.Popen(args, **kwargs)
+def RunAndWait(args, verbose=None, **kwargs):
+ """Runs the given command waiting for it to complete.
+
+ Args:
+ args: The command represented as a list of strings.
+ verbose: Whether the commands should be shown. Defaults to the global
+ verbosity if unspecified.
+ kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+ stdin, etc. stdout and stderr will default to subprocess.PIPE and
+ subprocess.STDOUT respectively unless caller specifies any of them.
+
+ Raises:
+ ExternalError: On non-zero exit from the command.
+ """
+ proc = Run(args, verbose=verbose, **kwargs)
+ proc.wait()
+
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to run command '{}' (exit code {})".format(
+ args, proc.returncode))
+
+
+def RunAndCheckOutput(args, verbose=None, **kwargs):
+ """Runs the given command and returns the output.
+
+ Args:
+ args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown. Defaults to the global
+ verbosity if unspecified.
+ kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+ stdin, etc. stdout and stderr will default to subprocess.PIPE and
+      subprocess.STDOUT respectively, unless the caller specifies any of them.
+
+ Returns:
+ The output string.
+
+ Raises:
+ ExternalError: On non-zero exit from the command.
+ """
+ proc = Run(args, verbose=verbose, **kwargs)
+ output, _ = proc.communicate()
+  # Don't log anything if the caller explicitly says so.
+ if verbose != False:
+ logger.info("%s", output.rstrip())
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to run command '{}' (exit code {}):\n{}".format(
+ args, proc.returncode, output))
+ return output
+
+
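Not part of the patch: a brief caller-side sketch of the three process helpers above, assuming a releasetools script that has already done `import common` (the commands are placeholders).

  import common

  # Run() returns the Popen object; stdout/stderr default to a combined pipe.
  proc = common.Run(["ls", "-l"])
  output, _ = proc.communicate()

  # RunAndWait() blocks and raises ExternalError on a non-zero exit code.
  common.RunAndWait(["true"])

  # RunAndCheckOutput() blocks, logs the combined output and returns it.
  listing = common.RunAndCheckOutput(["ls", "-l"])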
def RoundUpTo4K(value):
rounded_up = value + 4095
return rounded_up - (rounded_up % 4096)
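Worked examples for the rounding helper above (illustrative only; assumes it is exercised from within this module):

  assert RoundUpTo4K(1) == 4096     # 1 + 4095 = 4096, already a multiple of 4096
  assert RoundUpTo4K(4096) == 4096  # 4K-aligned values are unchanged
  assert RoundUpTo4K(4097) == 8192  # 4097 + 4095 = 8192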
@@ -147,9 +271,41 @@ def CloseInheritedPipes():
pass
-def LoadInfoDict(input_file, input_dir=None):
- """Read and parse the META/misc_info.txt key/value pairs from the
- input target files and return a dict."""
+def LoadInfoDict(input_file, repacking=False):
+ """Loads the key/value pairs from the given input target_files.
+
+  It reads the `META/misc_info.txt` file in the target_files input, performs
+  sanity checks, and returns the parsed key/value pairs for the given build.
+  It's usually called early when working on input target_files files, e.g. when
+  generating OTAs or signing builds. Note that the function may be called
+  against an old target_files file (i.e. from past dessert releases), so the
+  property parsing needs to be backward compatible.
+
+  In `META/misc_info.txt`, a few properties are stored as links to files in the
+  PRODUCT_OUT directory. That works fine with the build system, but those links
+  are no longer valid when (re)generating images from a target_files zip. When
+  `repacking` is True, redirect these properties to the actual files in the
+  unzipped directory.
+
+ Args:
+ input_file: The input target_files file, which could be an open
+ zipfile.ZipFile instance, or a str for the dir that contains the files
+ unzipped from a target_files file.
+    repacking: Whether it's trying to repack a target_files file after loading
+      the info dict (default: False). If so, it will rewrite a few loaded
+      properties (e.g. selinux_fc, root_dir) to point to the actual files in the
+      target_files file. When doing repacking, `input_file` must be a dir.
+
+ Returns:
+ A dict that contains the parsed key/value pairs.
+
+ Raises:
+ AssertionError: On invalid input arguments.
+ ValueError: On malformed input values.
+ """
+ if repacking:
+ assert isinstance(input_file, str), \
+ "input_file must be a path str when doing repacking"
def read_helper(fn):
if isinstance(input_file, zipfile.ZipFile):
@@ -166,59 +322,48 @@ def LoadInfoDict(input_file, input_dir=None):
try:
d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
except KeyError:
- raise ValueError("can't find META/misc_info.txt in input target-files")
-
- assert "recovery_api_version" in d
- assert "fstab_version" in d
-
- # A few properties are stored as links to the files in the out/ directory.
- # It works fine with the build system. However, they are no longer available
- # when (re)generating from target_files zip. If input_dir is not None, we
- # are doing repacking. Redirect those properties to the actual files in the
- # unzipped directory.
- if input_dir is not None:
- # We carry a copy of file_contexts.bin under META/. If not available,
- # search BOOT/RAMDISK/. Note that sometimes we may need a different file
- # to build images than the one running on device, such as when enabling
- # system_root_image. In that case, we must have the one for image
- # generation copied to META/.
+ raise ValueError("Failed to find META/misc_info.txt in input target-files")
+
+ if "recovery_api_version" not in d:
+ raise ValueError("Failed to find 'recovery_api_version'")
+ if "fstab_version" not in d:
+ raise ValueError("Failed to find 'fstab_version'")
+
+ if repacking:
+ # We carry a copy of file_contexts.bin under META/. If not available, search
+ # BOOT/RAMDISK/. Note that sometimes we may need a different file to build
+    # images than the one running on device; in that case, we must have the one
+ # for image generation copied to META/.
fc_basename = os.path.basename(d.get("selinux_fc", "file_contexts"))
- fc_config = os.path.join(input_dir, "META", fc_basename)
- if d.get("system_root_image") == "true":
- assert os.path.exists(fc_config)
- if not os.path.exists(fc_config):
- fc_config = os.path.join(input_dir, "BOOT", "RAMDISK", fc_basename)
- if not os.path.exists(fc_config):
- fc_config = None
-
- if fc_config:
- d["selinux_fc"] = fc_config
-
- # Similarly we need to redirect "ramdisk_dir" and "ramdisk_fs_config".
- if d.get("system_root_image") == "true":
- d["ramdisk_dir"] = os.path.join(input_dir, "ROOT")
- d["ramdisk_fs_config"] = os.path.join(
- input_dir, "META", "root_filesystem_config.txt")
+ fc_config = os.path.join(input_file, "META", fc_basename)
+ assert os.path.exists(fc_config)
+
+ d["selinux_fc"] = fc_config
+
+ # Similarly we need to redirect "root_dir", and "root_fs_config".
+ d["root_dir"] = os.path.join(input_file, "ROOT")
+ d["root_fs_config"] = os.path.join(
+ input_file, "META", "root_filesystem_config.txt")
# Redirect {system,vendor}_base_fs_file.
if "system_base_fs_file" in d:
basename = os.path.basename(d["system_base_fs_file"])
- system_base_fs_file = os.path.join(input_dir, "META", basename)
+ system_base_fs_file = os.path.join(input_file, "META", basename)
if os.path.exists(system_base_fs_file):
d["system_base_fs_file"] = system_base_fs_file
else:
- print("Warning: failed to find system base fs file: %s" % (
- system_base_fs_file,))
+ logger.warning(
+ "Failed to find system base fs file: %s", system_base_fs_file)
del d["system_base_fs_file"]
if "vendor_base_fs_file" in d:
basename = os.path.basename(d["vendor_base_fs_file"])
- vendor_base_fs_file = os.path.join(input_dir, "META", basename)
+ vendor_base_fs_file = os.path.join(input_file, "META", basename)
if os.path.exists(vendor_base_fs_file):
d["vendor_base_fs_file"] = vendor_base_fs_file
else:
- print("Warning: failed to find vendor base fs file: %s" % (
- vendor_base_fs_file,))
+ logger.warning(
+ "Failed to find vendor base fs file: %s", vendor_base_fs_file)
del d["vendor_base_fs_file"]
def makeint(key):
@@ -235,20 +380,50 @@ def LoadInfoDict(input_file, input_dir=None):
makeint("boot_size")
makeint("fstab_version")
- system_root_image = d.get("system_root_image", None) == "true"
- if d.get("no_recovery", None) != "true":
- recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab"
- d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"],
- recovery_fstab_path, system_root_image)
- elif d.get("recovery_as_boot", None) == "true":
- recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab"
- d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"],
- recovery_fstab_path, system_root_image)
+ # We changed recovery.fstab path in Q, from ../RAMDISK/etc/recovery.fstab to
+ # ../RAMDISK/system/etc/recovery.fstab. LoadInfoDict() has to handle both
+ # cases, since it may load the info_dict from an old build (e.g. when
+ # generating incremental OTAs from that build).
+ system_root_image = d.get("system_root_image") == "true"
+ if d.get("no_recovery") != "true":
+ recovery_fstab_path = "RECOVERY/RAMDISK/system/etc/recovery.fstab"
+ if isinstance(input_file, zipfile.ZipFile):
+ if recovery_fstab_path not in input_file.namelist():
+ recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab"
+ else:
+ path = os.path.join(input_file, *recovery_fstab_path.split("/"))
+ if not os.path.exists(path):
+ recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab"
+ d["fstab"] = LoadRecoveryFSTab(
+ read_helper, d["fstab_version"], recovery_fstab_path, system_root_image)
+
+ elif d.get("recovery_as_boot") == "true":
+ recovery_fstab_path = "BOOT/RAMDISK/system/etc/recovery.fstab"
+ if isinstance(input_file, zipfile.ZipFile):
+ if recovery_fstab_path not in input_file.namelist():
+ recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab"
+ else:
+ path = os.path.join(input_file, *recovery_fstab_path.split("/"))
+ if not os.path.exists(path):
+ recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab"
+ d["fstab"] = LoadRecoveryFSTab(
+ read_helper, d["fstab_version"], recovery_fstab_path, system_root_image)
+
else:
d["fstab"] = None
- d["build.prop"] = LoadBuildProp(read_helper, 'SYSTEM/build.prop')
- d["vendor.build.prop"] = LoadBuildProp(read_helper, 'VENDOR/build.prop')
+ # Tries to load the build props for all partitions with care_map, including
+ # system and vendor.
+ for partition in PARTITIONS_WITH_CARE_MAP:
+ partition_prop = "{}.build.prop".format(partition)
+ d[partition_prop] = LoadBuildProp(
+ read_helper, "{}/build.prop".format(partition.upper()))
+    # Some partitions might use /<partition>/etc/build.prop as the new path.
+    # TODO: try the new path first when the majority of them switch to it.
+ if not d[partition_prop]:
+ d[partition_prop] = LoadBuildProp(
+ read_helper, "{}/etc/build.prop".format(partition.upper()))
+ d["build.prop"] = d["system.build.prop"]
# Set up the salt (based on fingerprint or thumbprint) that will be used when
# adding AVB footer.
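Not part of the patch: a hypothetical caller-side sketch of the reworked LoadInfoDict(), assuming `import common` in a releasetools script; the zip name is made up.

  import common

  input_dir = common.UnzipTemp("target_files.zip")    # repacking needs a dir
  info = common.LoadInfoDict(input_dir, repacking=True)
  fstab = info["fstab"]                      # parsed recovery.fstab, or None
  system_props = info["system.build.prop"]   # per-partition build.prop dicts
  selinux_fc = info["selinux_fc"]            # redirected to META/<file_contexts>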
@@ -270,7 +445,7 @@ def LoadBuildProp(read_helper, prop_file):
try:
data = read_helper(prop_file)
except KeyError:
- print("Warning: could not read %s" % (prop_file,))
+ logger.warning("Failed to read %s", prop_file)
data = ""
return LoadDictionaryFromLines(data.split("\n"))
@@ -300,7 +475,7 @@ def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path,
try:
data = read_helper(recovery_fstab_path)
except KeyError:
- print("Warning: could not find {}".format(recovery_fstab_path))
+ logger.warning("Failed to find %s", recovery_fstab_path)
data = ""
assert fstab_version == 2
@@ -353,7 +528,7 @@ def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path,
def DumpInfoDict(d):
for k, v in sorted(d.items()):
- print("%-25s = (%s) %s" % (k, type(v).__name__, v))
+ logger.info("%-25s = (%s) %s", k, type(v).__name__, v)
def AppendAVBSigningArgs(cmd, partition):
@@ -365,10 +540,32 @@ def AppendAVBSigningArgs(cmd, partition):
cmd.extend(["--key", key_path, "--algorithm", algorithm])
avb_salt = OPTIONS.info_dict.get("avb_salt")
# make_vbmeta_image doesn't like "--salt" (and it's not needed).
- if avb_salt and partition != "vbmeta":
+ if avb_salt and not partition.startswith("vbmeta"):
cmd.extend(["--salt", avb_salt])
+def GetAvbChainedPartitionArg(partition, info_dict, key=None):
+ """Constructs and returns the arg to build or verify a chained partition.
+
+ Args:
+ partition: The partition name.
+ info_dict: The info dict to look up the key info and rollback index
+ location.
+ key: The key to be used for building or verifying the partition. Defaults to
+ the key listed in info_dict.
+
+ Returns:
+    A string of the form "partition:rollback_index_location:key" that can be
+    used to build or verify a vbmeta image.
+ """
+ if key is None:
+ key = info_dict["avb_" + partition + "_key_path"]
+ pubkey_path = ExtractAvbPublicKey(key)
+ rollback_index_location = info_dict[
+ "avb_" + partition + "_rollback_index_location"]
+ return "{}:{}:{}".format(partition, rollback_index_location, pubkey_path)
+
+
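Not part of the patch: a hypothetical sketch of feeding the returned descriptor to avbtool. The rollback index location and key path come from a made-up info dict, and a real make_vbmeta_image invocation would normally carry additional signing arguments.

  # Assumes info_dict defines avb_system_key_path and
  # avb_system_rollback_index_location (imagined here as 1).
  arg = common.GetAvbChainedPartitionArg("system", common.OPTIONS.info_dict)
  # arg looks like "system:1:/tmp/avb-XXXX.avbpubkey"
  common.RunAndCheckOutput(
      ["avbtool", "make_vbmeta_image", "--output", "vbmeta.img",
       "--chain_partition", arg])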
def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
has_ramdisk=False, two_step_image=False):
"""Build a bootable image from the specified sourcedir.
@@ -424,6 +621,11 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
cmd.append("--second")
cmd.append(fn)
+ fn = os.path.join(sourcedir, "dtb")
+ if os.access(fn, os.F_OK):
+ cmd.append("--dtb")
+ cmd.append(fn)
+
fn = os.path.join(sourcedir, "cmdline")
if os.access(fn, os.F_OK):
cmd.append("--cmdline")
@@ -439,11 +641,11 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
cmd.append("--pagesize")
cmd.append(open(fn).read().rstrip("\n"))
- args = info_dict.get("mkbootimg_args", None)
+ args = info_dict.get("mkbootimg_args")
if args and args.strip():
cmd.extend(shlex.split(args))
- args = info_dict.get("mkbootimg_version_args", None)
+ args = info_dict.get("mkbootimg_version_args")
if args and args.strip():
cmd.extend(shlex.split(args))
@@ -451,7 +653,7 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
cmd.extend(["--ramdisk", ramdisk_img.name])
img_unsigned = None
- if info_dict.get("vboot", None):
+ if info_dict.get("vboot"):
img_unsigned = tempfile.NamedTemporaryFile()
cmd.extend(["--output", img_unsigned.name])
else:
@@ -460,17 +662,18 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
# "boot" or "recovery", without extension.
partition_name = os.path.basename(sourcedir).lower()
- if (partition_name == "recovery" and
- info_dict.get("include_recovery_dtbo") == "true"):
- fn = os.path.join(sourcedir, "recovery_dtbo")
- cmd.extend(["--recovery_dtbo", fn])
+ if partition_name == "recovery":
+ if info_dict.get("include_recovery_dtbo") == "true":
+ fn = os.path.join(sourcedir, "recovery_dtbo")
+ cmd.extend(["--recovery_dtbo", fn])
+ if info_dict.get("include_recovery_acpio") == "true":
+ fn = os.path.join(sourcedir, "recovery_acpio")
+ cmd.extend(["--recovery_acpio", fn])
- p = Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "mkbootimg of %s image failed" % (partition_name,)
+ RunAndCheckOutput(cmd)
- if (info_dict.get("boot_signer", None) == "true" and
- info_dict.get("verity_key", None)):
+ if (info_dict.get("boot_signer") == "true" and
+ info_dict.get("verity_key")):
# Hard-code the path as "/boot" for two-step special recovery image (which
# will be loaded into /boot during the two-step OTA).
if two_step_image:
@@ -482,12 +685,10 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
cmd.extend([path, img.name,
info_dict["verity_key"] + ".pk8",
info_dict["verity_key"] + ".x509.pem", img.name])
- p = Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "boot_signer of %s image failed" % path
+ RunAndCheckOutput(cmd)
# Sign the image if vboot is non-empty.
- elif info_dict.get("vboot", None):
+ elif info_dict.get("vboot"):
path = "/" + partition_name
img_keyblock = tempfile.NamedTemporaryFile()
# We have switched from the prebuilt futility binary to using the tool
@@ -502,9 +703,7 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
info_dict["vboot_subkey"] + ".vbprivk",
img_keyblock.name,
img.name]
- p = Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "vboot_signer of %s image failed" % path
+ RunAndCheckOutput(cmd)
# Clean up the temp files.
img_unsigned.close()
@@ -512,7 +711,7 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
# AVB: if enabled, calculate and add hash to boot.img or recovery.img.
if info_dict.get("avb_enable") == "true":
- avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
+ avbtool = info_dict["avb_avbtool"]
part_size = info_dict[partition_name + "_size"]
cmd = [avbtool, "add_hash_footer", "--image", img.name,
"--partition_size", str(part_size), "--partition_name",
@@ -521,10 +720,7 @@ def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
if args and args.strip():
cmd.extend(shlex.split(args))
- p = Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, "avbtool add_hash_footer of %s failed" % (
- partition_name,)
+ RunAndCheckOutput(cmd)
img.seek(os.SEEK_SET, 0)
data = img.read()
@@ -546,15 +742,15 @@ def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print("using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,))
+ logger.info("using prebuilt %s from BOOTABLE_IMAGES...", prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print("using prebuilt %s from IMAGES..." % (prebuilt_name,))
+ logger.info("using prebuilt %s from IMAGES...", prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
- print("building image from target_files %s..." % (tree_subdir,))
+ logger.info("building image from target_files %s...", tree_subdir)
if info_dict is None:
info_dict = OPTIONS.info_dict
@@ -576,45 +772,137 @@ def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
def Gunzip(in_filename, out_filename):
- """Gunzip the given gzip compressed file to a given output file.
- """
- with gzip.open(in_filename, "rb") as in_file, open(out_filename, "wb") as out_file:
+ """Gunzips the given gzip compressed file to a given output file."""
+ with gzip.open(in_filename, "rb") as in_file, \
+ open(out_filename, "wb") as out_file:
shutil.copyfileobj(in_file, out_file)
+def UnzipToDir(filename, dirname, patterns=None):
+ """Unzips the archive to the given directory.
+
+ Args:
+ filename: The name of the zip file to unzip.
+    dirname: Where the unzipped files will land.
+ patterns: Files to unzip from the archive. If omitted, will unzip the entire
+      archive. Non-matching patterns will be filtered out. If there's no match
+ after the filtering, no file will be unzipped.
+ """
+ cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
+ if patterns is not None:
+ # Filter out non-matching patterns. unzip will complain otherwise.
+ with zipfile.ZipFile(filename) as input_zip:
+ names = input_zip.namelist()
+ filtered = [
+ pattern for pattern in patterns if fnmatch.filter(names, pattern)]
+
+    # There aren't any matching files. Don't unzip anything.
+ if not filtered:
+ return
+ cmd.extend(filtered)
+
+ RunAndCheckOutput(cmd)
+
+
def UnzipTemp(filename, pattern=None):
"""Unzips the given archive into a temporary directory and returns the name.
- If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a temp dir,
- then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
+ Args:
+ filename: If filename is of the form "foo.zip+bar.zip", unzip foo.zip into
+ a temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
+
+ pattern: Files to unzip from the archive. If omitted, will unzip the entire
+      archive.
Returns:
The name of the temporary directory.
"""
- def unzip_to_dir(filename, dirname):
- cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
- if pattern is not None:
- cmd.extend(pattern)
- p = Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- if p.returncode != 0:
- raise ExternalError("failed to unzip input target-files \"%s\"" %
- (filename,))
-
tmp = MakeTempDir(prefix="targetfiles-")
m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
if m:
- unzip_to_dir(m.group(1), tmp)
- unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
+ UnzipToDir(m.group(1), tmp, pattern)
+ UnzipToDir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"), pattern)
filename = m.group(1)
else:
- unzip_to_dir(filename, tmp)
+ UnzipToDir(filename, tmp, pattern)
return tmp
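Not part of the patch: a hypothetical usage sketch for the two unzip helpers above, assuming `import common` (file names and patterns are made up).

  # Unzip only the pieces needed for image generation into a managed temp dir.
  tmp = common.UnzipTemp("target_files.zip", ["IMAGES/*", "META/*"])

  # Or unzip selected entries into an existing directory. Patterns that match
  # nothing are silently dropped; if none match, nothing is unzipped.
  common.UnzipToDir("target_files.zip", "/tmp/extracted",
                    patterns=["OTA/android-info.txt"])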
-def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
+def GetUserImage(which, tmpdir, input_zip,
+ info_dict=None,
+ allow_shared_blocks=None,
+ hashtree_info_generator=None,
+ reset_file_map=False):
+ """Returns an Image object suitable for passing to BlockImageDiff.
+
+ This function loads the specified image from the given path. If the specified
+  image is sparse, it also performs additional processing for OTA purposes. For
+  example, it always adds block 0 to the clobbered blocks list. It also detects
+  files that cannot be reconstructed from the block list, for which we should
+  avoid applying imgdiff.
+
+ Args:
+ which: The partition name.
+ tmpdir: The directory that contains the prebuilt image and block map file.
+ input_zip: The target-files ZIP archive.
+ info_dict: The dict to be looked up for relevant info.
+ allow_shared_blocks: If image is sparse, whether having shared blocks is
+      allowed. If None, it is looked up from info_dict.
+ hashtree_info_generator: If present and image is sparse, generates the
+ hashtree_info for this sparse image.
+ reset_file_map: If true and image is sparse, reset file map before returning
+ the image.
+ Returns:
+    An Image object. If it is a sparse image and reset_file_map is False, the
+ image will have file_map info loaded.
+ """
+  if info_dict is None:
+ info_dict = LoadInfoDict(input_zip)
+
+ is_sparse = info_dict.get("extfs_sparse_flag")
+
+ # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
+ # shared blocks (i.e. some blocks will show up in multiple files' block
+ # list). We can only allocate such shared blocks to the first "owner", and
+ # disable imgdiff for all later occurrences.
+ if allow_shared_blocks is None:
+ allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true"
+
+ if is_sparse:
+ img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
+ hashtree_info_generator)
+ if reset_file_map:
+ img.ResetFileMap()
+ return img
+ else:
+ return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
+
+
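Not part of the patch: an illustrative sketch of how an OTA script might obtain an Image object through the new GetUserImage() dispatcher, assuming `import common` and a made-up target_files name.

  import zipfile
  import common

  tmp = common.UnzipTemp("target_files.zip", ["IMAGES/*", "META/*"])
  info = common.LoadInfoDict(tmp, repacking=True)
  with zipfile.ZipFile("target_files.zip") as input_zip:
    # Dispatches to GetSparseImage() or GetNonSparseImage() depending on the
    # extfs_sparse_flag entry in the info dict.
    system_img = common.GetUserImage("system", tmp, input_zip, info_dict=info)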
+def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
+  """Returns an Image object suitable for passing to BlockImageDiff.
+
+ This function loads the specified non-sparse image from the given path.
+
+ Args:
+ which: The partition name.
+ tmpdir: The directory that contains the prebuilt image and block map file.
+ Returns:
+    An Image object.
+ """
+ path = os.path.join(tmpdir, "IMAGES", which + ".img")
+ mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
+
+ # The image and map files must have been created prior to calling
+ # ota_from_target_files.py (since LMP).
+ assert os.path.exists(path) and os.path.exists(mappath)
+
+  return blockimgdiff.FileImage(
+      path, hashtree_info_generator=hashtree_info_generator)
+
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
+ hashtree_info_generator=None):
"""Returns a SparseImage object suitable for passing to BlockImageDiff.
This function loads the specified sparse image from the given path, and
@@ -623,16 +911,15 @@ def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
reconstructed from the block list, for whom we should avoid applying imgdiff.
Args:
- which: The partition name, which must be "system" or "vendor".
+ which: The partition name, e.g. "system", "vendor".
tmpdir: The directory that contains the prebuilt image and block map file.
input_zip: The target-files ZIP archive.
allow_shared_blocks: Whether having shared blocks is allowed.
-
+ hashtree_info_generator: If present, generates the hashtree_info for this
+ sparse image.
Returns:
A SparseImage object, with file_map info loaded.
"""
- assert which in ("system", "vendor")
-
path = os.path.join(tmpdir, "IMAGES", which + ".img")
mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
@@ -645,27 +932,40 @@ def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
# unconditionally. Note that they are still part of care_map. (Bug: 20939131)
clobbered_blocks = "0"
- image = sparse_img.SparseImage(path, mappath, clobbered_blocks,
- allow_shared_blocks=allow_shared_blocks)
+ image = sparse_img.SparseImage(
+ path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks,
+ hashtree_info_generator=hashtree_info_generator)
# block.map may contain less blocks, because mke2fs may skip allocating blocks
# if they contain all zeros. We can't reconstruct such a file from its block
# list. Tag such entries accordingly. (Bug: 65213616)
for entry in image.file_map:
- # "/system/framework/am.jar" => "SYSTEM/framework/am.jar".
- arcname = string.replace(entry, which, which.upper(), 1)[1:]
# Skip artificial names, such as "__ZERO", "__NONZERO-1".
- if arcname not in input_zip.namelist():
+ if not entry.startswith('/'):
continue
+ # "/system/framework/am.jar" => "SYSTEM/framework/am.jar". Note that the
+ # filename listed in system.map may contain an additional leading slash
+ # (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
+ # results.
+ arcname = string.replace(entry, which, which.upper(), 1).lstrip('/')
+
+    # Special handling for another case, where files not under /system
+ # (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
+ if which == 'system' and not arcname.startswith('SYSTEM'):
+ arcname = 'ROOT/' + arcname
+
+ assert arcname in input_zip.namelist(), \
+ "Failed to find the ZIP entry for {}".format(entry)
+
info = input_zip.getinfo(arcname)
ranges = image.file_map[entry]
# If a RangeSet has been tagged as using shared blocks while loading the
- # image, its block list must be already incomplete due to that reason. Don't
- # give it 'incomplete' tag to avoid messing up the imgdiff stats.
+ # image, check the original block list to determine its completeness. Note
+    # that the 'incomplete' flag will be set on the original RangeSet only.
if ranges.extra.get('uses_shared_blocks'):
- continue
+ ranges = ranges.extra['uses_shared_blocks']
if RoundUpTo4K(info.file_size) > ranges.size() * 4096:
ranges.extra['incomplete'] = True
@@ -718,23 +1018,36 @@ def GetKeyPasswords(keylist):
devnull.close()
key_passwords.update(PasswordManager().GetPasswords(need_passwords))
- key_passwords.update(dict.fromkeys(no_passwords, None))
+ key_passwords.update(dict.fromkeys(no_passwords))
return key_passwords
def GetMinSdkVersion(apk_name):
- """Get the minSdkVersion delared in the APK. This can be both a decimal number
- (API Level) or a codename.
- """
+ """Gets the minSdkVersion declared in the APK.
- p = Run(["aapt", "dump", "badging", apk_name], stdout=subprocess.PIPE)
- output, err = p.communicate()
- if err:
- raise ExternalError("Failed to obtain minSdkVersion: aapt return code %s"
- % (p.returncode,))
+ It calls 'aapt' to query the embedded minSdkVersion from the given APK file.
+  This can be either a decimal number (API Level) or a codename.
- for line in output.split("\n"):
- # Looking for lines such as sdkVersion:'23' or sdkVersion:'M'
+ Args:
+ apk_name: The APK filename.
+
+ Returns:
+ The parsed SDK version string.
+
+ Raises:
+ ExternalError: On failing to obtain the min SDK version.
+ """
+ proc = Run(
+ ["aapt", "dump", "badging", apk_name], stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdoutdata, stderrdata = proc.communicate()
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to obtain minSdkVersion: aapt return code {}:\n{}\n{}".format(
+ proc.returncode, stdoutdata, stderrdata))
+
+ for line in stdoutdata.split("\n"):
+ # Looking for lines such as sdkVersion:'23' or sdkVersion:'M'.
m = re.match(r'sdkVersion:\'([^\']*)\'', line)
if m:
return m.group(1)
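For reference (not part of the patch), the badging line being matched typically looks like the sample below; a standalone check with a made-up value:

  import re

  line = "sdkVersion:'23'"  # sample line from `aapt dump badging`
  m = re.match(r"sdkVersion:'([^']*)'", line)
  assert m and m.group(1) == "23"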
@@ -742,11 +1055,20 @@ def GetMinSdkVersion(apk_name):
def GetMinSdkVersionInt(apk_name, codename_to_api_level_map):
- """Get the minSdkVersion declared in the APK as a number (API Level). If
- minSdkVersion is set to a codename, it is translated to a number using the
+ """Returns the minSdkVersion declared in the APK as a number (API Level).
+
+ If minSdkVersion is set to a codename, it is translated to a number using the
provided map.
- """
+ Args:
+ apk_name: The APK filename.
+
+ Returns:
+ The parsed SDK version number.
+
+ Raises:
+ ExternalError: On failing to get the min SDK version number.
+ """
version = GetMinSdkVersion(apk_name)
try:
return int(version)
@@ -755,13 +1077,14 @@ def GetMinSdkVersionInt(apk_name, codename_to_api_level_map):
if version in codename_to_api_level_map:
return codename_to_api_level_map[version]
else:
- raise ExternalError("Unknown minSdkVersion: '%s'. Known codenames: %s"
- % (version, codename_to_api_level_map))
+ raise ExternalError(
+ "Unknown minSdkVersion: '{}'. Known codenames: {}".format(
+ version, codename_to_api_level_map))
def SignFile(input_name, output_name, key, password, min_api_level=None,
- codename_to_api_level_map=dict(),
- whole_file=False):
+ codename_to_api_level_map=None, whole_file=False,
+ extra_signapk_args=None):
"""Sign the input_name zip/jar/apk, producing output_name. Use the
given key and password (the latter may be None if the key does not
have a password.
@@ -776,7 +1099,14 @@ def SignFile(input_name, output_name, key, password, min_api_level=None,
codename_to_api_level_map is needed to translate the codename which may be
encountered as the APK's minSdkVersion.
+
+  The caller may optionally specify extra args to be passed to SignApk, which
+  default to OPTIONS.extra_signapk_args if omitted.
"""
+ if codename_to_api_level_map is None:
+ codename_to_api_level_map = {}
+ if extra_signapk_args is None:
+ extra_signapk_args = OPTIONS.extra_signapk_args
java_library_path = os.path.join(
OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
@@ -784,7 +1114,7 @@ def SignFile(input_name, output_name, key, password, min_api_level=None,
cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
["-Djava.library.path=" + java_library_path,
"-jar", os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)] +
- OPTIONS.extra_signapk_args)
+ extra_signapk_args)
if whole_file:
cmd.append("-w")
@@ -800,12 +1130,14 @@ def SignFile(input_name, output_name, key, password, min_api_level=None,
key + OPTIONS.private_key_suffix,
input_name, output_name])
- p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ proc = Run(cmd, stdin=subprocess.PIPE)
if password is not None:
password += "\n"
- p.communicate(password)
- if p.returncode != 0:
- raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
+ stdoutdata, _ = proc.communicate(password)
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to run signapk.jar: return code {}:\n{}".format(
+ proc.returncode, stdoutdata))
def CheckSize(data, target, info_dict):
@@ -835,7 +1167,7 @@ def CheckSize(data, target, info_dict):
device = p.device
if "/" in device:
device = device[device.rfind("/")+1:]
- limit = info_dict.get(device + "_size", None)
+ limit = info_dict.get(device + "_size")
if not fs_type or not limit:
return
@@ -853,9 +1185,9 @@ def CheckSize(data, target, info_dict):
if pct >= 99.0:
raise ExternalError(msg)
elif pct >= 95.0:
- print("\n WARNING: %s\n" % (msg,))
- elif OPTIONS.verbose:
- print(" ", msg)
+ logger.warning("\n WARNING: %s\n", msg)
+ else:
+ logger.info(" %s", msg)
def ReadApkCerts(tf_zip):
@@ -938,17 +1270,18 @@ def ReadApkCerts(tf_zip):
COMMON_DOCSTRING = """
- -p (--path) <dir>
- Prepend <dir>/bin to the list of places to search for binaries
- run by this script, and expect to find jars in <dir>/framework.
+Global options
+
+ -p (--path) <dir>
+ Prepend <dir>/bin to the list of places to search for binaries run by this
+ script, and expect to find jars in <dir>/framework.
-s (--device_specific) <file>
- Path to the python module containing device-specific
- releasetools code.
+ Path to the Python module containing device-specific releasetools code.
- -x (--extra) <key=value>
- Add a key/value pair to the 'extras' dict, which device-specific
- extension code may look at.
+ -x (--extra) <key=value>
+ Add a key/value pair to the 'extras' dict, which device-specific extension
+ code may look at.
-v (--verbose)
Show command lines being executed.
@@ -1064,8 +1397,8 @@ def Cleanup():
class PasswordManager(object):
def __init__(self):
- self.editor = os.getenv("EDITOR", None)
- self.pwfile = os.getenv("ANDROID_PW_FILE", None)
+ self.editor = os.getenv("EDITOR")
+ self.pwfile = os.getenv("ANDROID_PW_FILE")
def GetPasswords(self, items):
"""Get passwords corresponding to each string in 'items',
@@ -1137,8 +1470,7 @@ class PasswordManager(object):
first_line = i + 4
f.close()
- p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
- _, _ = p.communicate()
+ RunAndCheckOutput([self.editor, "+%d" % (first_line,), self.pwfile])
return self.ReadFile()
@@ -1154,13 +1486,13 @@ class PasswordManager(object):
continue
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
if not m:
- print("failed to parse password file: ", line)
+ logger.warning("Failed to parse password file: %s", line)
else:
result[m.group(2)] = m.group(1)
f.close()
except IOError as e:
if e.errno != errno.ENOENT:
- print("error reading password file: ", str(e))
+ logger.exception("Error reading password file:")
return result
@@ -1193,8 +1525,12 @@ def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
os.chmod(filename, perms)
# Use a fixed timestamp so the output is repeatable.
- epoch = datetime.datetime.fromtimestamp(0)
- timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
+ # Note: Use of fromtimestamp rather than utcfromtimestamp here is
+ # intentional. zip stores datetimes in local time without a time zone
+ # attached, so we need "epoch" but in the local time zone to get 2009/01/01
+ # in the zip archive.
+ local_epoch = datetime.datetime.fromtimestamp(0)
+ timestamp = (datetime.datetime(2009, 1, 1) - local_epoch).total_seconds()
os.utime(filename, (timestamp, timestamp))
zip_file.write(filename, arcname=arcname, compress_type=compress_type)
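A small worked example of the timestamp computation above (illustrative; the numeric value assumes the build machine runs with TZ=UTC):

  import datetime

  local_epoch = datetime.datetime.fromtimestamp(0)
  timestamp = (datetime.datetime(2009, 1, 1) - local_epoch).total_seconds()
  # With TZ=UTC this is 1230768000.0. Because both operands are expressed in
  # local time, os.utime() followed by zip_file.write() always records
  # 2009-01-01 in the archive, independent of the machine's time zone.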
@@ -1262,10 +1598,7 @@ def ZipDelete(zip_filename, entries):
if isinstance(entries, basestring):
entries = [entries]
cmd = ["zip", "-d", zip_filename] + entries
- proc = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- stdoutdata, _ = proc.communicate()
- assert proc.returncode == 0, "Failed to delete %s:\n%s" % (entries,
- stdoutdata)
+ RunAndCheckOutput(cmd)
def ZipClose(zip_file):
@@ -1303,10 +1636,10 @@ class DeviceSpecificParams(object):
if x == ".py":
f = b
info = imp.find_module(f, [d])
- print("loaded device-specific extensions from", path)
+ logger.info("loaded device-specific extensions from %s", path)
self.module = imp.load_module("device_specific", *info)
except ImportError:
- print("unable to load device-specific module; assuming none")
+ logger.info("unable to load device-specific module; assuming none")
def _DoCall(self, function_name, *args, **kwargs):
"""Call the named function in the device-specific module, passing
@@ -1315,7 +1648,7 @@ class DeviceSpecificParams(object):
module does not define the function, return the value of the
'default' kwarg (which itself defaults to None)."""
if self.module is None or not hasattr(self.module, function_name):
- return kwargs.get("default", None)
+ return kwargs.get("default")
return getattr(self.module, function_name)(*((self,) + args), **kwargs)
def FullOTA_Assertions(self):
@@ -1328,6 +1661,13 @@ class DeviceSpecificParams(object):
"""Called at the start of full OTA installation."""
return self._DoCall("FullOTA_InstallBegin")
+ def FullOTA_GetBlockDifferences(self):
+ """Called during full OTA installation and verification.
+ Implementation should return a list of BlockDifference objects describing
+ the update on each additional partitions.
+    the update on each additional partition.
+ return self._DoCall("FullOTA_GetBlockDifferences")
+
def FullOTA_InstallEnd(self):
"""Called at the end of full OTA installation; typically this is
used to install the image for the device's baseband processor."""
@@ -1356,6 +1696,13 @@ class DeviceSpecificParams(object):
verification is complete)."""
return self._DoCall("IncrementalOTA_InstallBegin")
+ def IncrementalOTA_GetBlockDifferences(self):
+ """Called during incremental OTA installation and verification.
+ Implementation should return a list of BlockDifference objects describing
+    the update on each additional partition.
+ """
+ return self._DoCall("IncrementalOTA_GetBlockDifferences")
+
def IncrementalOTA_InstallEnd(self):
"""Called at the end of incremental OTA installation; typically
this is used to install the image for the device's baseband
@@ -1365,8 +1712,9 @@ class DeviceSpecificParams(object):
def VerifyOTA_Assertions(self):
return self._DoCall("VerifyOTA_Assertions")
+
class File(object):
- def __init__(self, name, data, compress_size = None):
+ def __init__(self, name, data, compress_size=None):
self.name = name
self.data = data
self.size = len(data)
@@ -1393,6 +1741,7 @@ class File(object):
def AddToZip(self, z, compression=None):
ZipWriteStr(z, self.name, self.data, compress_type=compression)
+
DIFF_PROGRAM_BY_EXT = {
".gz" : "imgdiff",
".zip" : ["imgdiff", "-z"],
@@ -1401,6 +1750,7 @@ DIFF_PROGRAM_BY_EXT = {
".img" : "imgdiff",
}
+
class Difference(object):
def __init__(self, tf, sf, diff_program=None):
self.tf = tf
@@ -1445,7 +1795,7 @@ class Difference(object):
th.start()
th.join(timeout=300) # 5 mins
if th.is_alive():
- print("WARNING: diff command timed out")
+ logger.warning("diff command timed out")
p.terminate()
th.join(5)
if th.is_alive():
@@ -1453,8 +1803,7 @@ class Difference(object):
th.join()
if p.returncode != 0:
- print("WARNING: failure running %s:\n%s\n" % (
- diff_program, "".join(err)))
+ logger.warning("Failure running %s:\n%s\n", diff_program, "".join(err))
self.patch = None
return None, None, None
diff = ptemp.read()
@@ -1468,15 +1817,17 @@ class Difference(object):
def GetPatch(self):
- """Return a tuple (target_file, source_file, patch_data).
+ """Returns a tuple of (target_file, source_file, patch_data).
+
patch_data may be None if ComputePatch hasn't been called, or if
- computing the patch failed."""
+ computing the patch failed.
+ """
return self.tf, self.sf, self.patch
def ComputeDifferences(diffs):
"""Call ComputePatch on all the Difference objects in 'diffs'."""
- print(len(diffs), "diffs to compute")
+ logger.info("%d diffs to compute", len(diffs))
# Do the largest files first, to try and reduce the long-pole effect.
by_size = [(i.tf.size, i) for i in diffs]
@@ -1502,13 +1853,14 @@ def ComputeDifferences(diffs):
else:
name = "%s (%s)" % (tf.name, sf.name)
if patch is None:
- print("patching failed! %s" % (name,))
+ logger.error("patching failed! %40s", name)
else:
- print("%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
- dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name))
+ logger.info(
+ "%8.2f sec %8d / %8d bytes (%6.2f%%) %s", dur, len(patch),
+ tf.size, 100.0 * len(patch) / tf.size, name)
lock.release()
- except Exception as e:
- print(e)
+ except Exception:
+ logger.exception("Failed to compute diff from worker")
raise
# start worker threads; wait for them all to finish.
@@ -1545,17 +1897,42 @@ class BlockDifference(object):
self.touched_src_ranges = b.touched_src_ranges
self.touched_src_sha1 = b.touched_src_sha1
- if src is None:
- _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
+ # On devices with dynamic partitions, for new partitions,
+ # src is None but OPTIONS.source_info_dict is not.
+ if OPTIONS.source_info_dict is None:
+ is_dynamic_build = OPTIONS.info_dict.get(
+ "use_dynamic_partitions") == "true"
+ is_dynamic_source = False
else:
- _, self.device = GetTypeAndDevice("/" + partition,
- OPTIONS.source_info_dict)
+ is_dynamic_build = OPTIONS.source_info_dict.get(
+ "use_dynamic_partitions") == "true"
+ is_dynamic_source = partition in shlex.split(
+ OPTIONS.source_info_dict.get("dynamic_partition_list", "").strip())
+
+ is_dynamic_target = partition in shlex.split(
+ OPTIONS.info_dict.get("dynamic_partition_list", "").strip())
+
+ # For dynamic partitions builds, check partition list in both source
+ # and target build because new partitions may be added, and existing
+ # partitions may be removed.
+ is_dynamic = is_dynamic_build and (is_dynamic_source or is_dynamic_target)
+
+ if is_dynamic:
+ self.device = 'map_partition("%s")' % partition
+ else:
+ if OPTIONS.source_info_dict is None:
+ _, device_path = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
+ else:
+ _, device_path = GetTypeAndDevice("/" + partition,
+ OPTIONS.source_info_dict)
+ self.device = '"%s"' % device_path
@property
def required_cache(self):
return self._required_cache
- def WriteScript(self, script, output_zip, progress=None):
+ def WriteScript(self, script, output_zip, progress=None,
+ write_verify_script=False):
if not self.src:
# write the output unconditionally
script.Print("Patching %s image unconditionally..." % (self.partition,))
@@ -1565,8 +1942,9 @@ class BlockDifference(object):
if progress:
script.ShowProgress(progress, 0)
self._WriteUpdate(script, output_zip)
- if OPTIONS.verify:
- self._WritePostInstallVerifyScript(script)
+
+ if write_verify_script:
+ self.WritePostInstallVerifyScript(script)
def WriteStrictVerifyScript(self, script):
"""Verify all the blocks in the care_map, including clobbered blocks.
@@ -1579,12 +1957,12 @@ class BlockDifference(object):
script.Print("Verifying %s..." % (partition,))
ranges = self.tgt.care_map
ranges_str = ranges.to_string_raw()
- script.AppendExtra('range_sha1("%s", "%s") == "%s" && '
- 'ui_print(" Verified.") || '
- 'ui_print("\\"%s\\" has unexpected contents.");' % (
- self.device, ranges_str,
- self.tgt.TotalSha1(include_clobbered_blocks=True),
- self.device))
+ script.AppendExtra(
+ 'range_sha1(%s, "%s") == "%s" && ui_print(" Verified.") || '
+ 'ui_print("%s has unexpected contents.");' % (
+ self.device, ranges_str,
+ self.tgt.TotalSha1(include_clobbered_blocks=True),
+ self.partition))
script.AppendExtra("")
def WriteVerifyScript(self, script, touched_blocks_only=False):
@@ -1608,12 +1986,12 @@ class BlockDifference(object):
return
ranges_str = ranges.to_string_raw()
- script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
- 'block_image_verify("%s", '
- 'package_extract_file("%s.transfer.list"), '
- '"%s.new.dat", "%s.patch.dat")) then') % (
- self.device, ranges_str, expected_sha1,
- self.device, partition, partition, partition))
+ script.AppendExtra(
+ 'if (range_sha1(%s, "%s") == "%s" || block_image_verify(%s, '
+ 'package_extract_file("%s.transfer.list"), "%s.new.dat", '
+ '"%s.patch.dat")) then' % (
+ self.device, ranges_str, expected_sha1,
+ self.device, partition, partition, partition))
script.Print('Verified %s image...' % (partition,))
script.AppendExtra('else')
@@ -1626,7 +2004,7 @@ class BlockDifference(object):
# this check fails, give an explicit log message about the partition
# having been remounted R/W (the most likely explanation).
if self.check_first_block:
- script.AppendExtra('check_first_block("%s");' % (self.device,))
+ script.AppendExtra('check_first_block(%s);' % (self.device,))
# If version >= 4, try block recovery before abort update
if partition == "system":
@@ -1634,8 +2012,8 @@ class BlockDifference(object):
else:
code = ErrorCode.VENDOR_RECOVER_FAILURE
script.AppendExtra((
- 'ifelse (block_image_recover("{device}", "{ranges}") && '
- 'block_image_verify("{device}", '
+ 'ifelse (block_image_recover({device}, "{ranges}") && '
+ 'block_image_verify({device}, '
'package_extract_file("{partition}.transfer.list"), '
'"{partition}.new.dat", "{partition}.patch.dat"), '
'ui_print("{partition} recovered successfully."), '
@@ -1657,23 +2035,25 @@ class BlockDifference(object):
'abort("E%d: %s partition has unexpected contents");\n'
'endif;') % (code, partition))
- def _WritePostInstallVerifyScript(self, script):
+ def WritePostInstallVerifyScript(self, script):
partition = self.partition
script.Print('Verifying the updated %s image...' % (partition,))
# Unlike pre-install verification, clobbered_blocks should not be ignored.
ranges = self.tgt.care_map
ranges_str = ranges.to_string_raw()
- script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
- self.device, ranges_str,
- self.tgt.TotalSha1(include_clobbered_blocks=True)))
+ script.AppendExtra(
+ 'if range_sha1(%s, "%s") == "%s" then' % (
+ self.device, ranges_str,
+ self.tgt.TotalSha1(include_clobbered_blocks=True)))
# Bug: 20881595
# Verify that extended blocks are really zeroed out.
if self.tgt.extended:
ranges_str = self.tgt.extended.to_string_raw()
- script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
- self.device, ranges_str,
- self._HashZeroBlocks(self.tgt.extended.size())))
+ script.AppendExtra(
+ 'if range_sha1(%s, "%s") == "%s" then' % (
+ self.device, ranges_str,
+ self._HashZeroBlocks(self.tgt.extended.size())))
script.Print('Verified the updated %s image.' % (partition,))
if partition == "system":
code = ErrorCode.SYSTEM_NONZERO_CONTENTS
@@ -1703,9 +2083,9 @@ class BlockDifference(object):
'{}.transfer.list'.format(self.path),
'{}.transfer.list'.format(self.partition))
- # For full OTA, compress the new.dat with brotli with quality 6 to reduce its size. Quailty 9
- # almost triples the compression time but doesn't further reduce the size too much.
- # For a typical 1.8G system.new.dat
+ # For full OTA, compress the new.dat with brotli with quality 6 to reduce
+    # its size. Quality 9 almost triples the compression time but doesn't
+    # reduce the size much further. For a typical 1.8G system.new.dat
# zip | brotli(quality 6) | brotli(quality 9)
# compressed_size: 942M | 869M (~8% reduced) | 854M
# compression_time: 75s | 265s | 719s
@@ -1716,10 +2096,7 @@ class BlockDifference(object):
'--output={}.new.dat.br'.format(self.path),
'{}.new.dat'.format(self.path)]
print("Compressing {}.new.dat with brotli".format(self.partition))
- p = Run(brotli_cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0,\
- 'compression of {}.new.dat failed'.format(self.partition)
+ RunAndCheckOutput(brotli_cmd)
new_data_name = '{}.new.dat.br'.format(self.partition)
ZipWrite(output_zip,
@@ -1740,7 +2117,7 @@ class BlockDifference(object):
else:
code = ErrorCode.VENDOR_UPDATE_FAILURE
- call = ('block_image_update("{device}", '
+ call = ('block_image_update({device}, '
'package_extract_file("{partition}.transfer.list"), '
'"{new_data_name}", "{partition}.patch.dat") ||\n'
' abort("E{code}: Failed to update {partition} image.");'.format(
@@ -1768,6 +2145,7 @@ class BlockDifference(object):
DataImage = blockimgdiff.DataImage
+EmptyImage = blockimgdiff.EmptyImage
# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
@@ -1777,6 +2155,7 @@ PARTITION_TYPES = {
"squashfs": "EMMC"
}
+
def GetTypeAndDevice(mount_point, info):
fstab = info["fstab"]
if fstab:
@@ -1831,6 +2210,21 @@ def ExtractPublicKey(cert):
return pubkey
+def ExtractAvbPublicKey(key):
+ """Extracts the AVB public key from the given public or private key.
+
+ Args:
+ key: The input key file, which should be PEM-encoded public or private key.
+
+ Returns:
+ The path to the extracted AVB public key file.
+ """
+ output = MakeTempFile(prefix='avb-', suffix='.avbpubkey')
+ RunAndCheckOutput(
+ ['avbtool', 'extract_public_key', "--key", key, "--output", output])
+ return output
+
+
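Not part of the patch: an illustrative call, with a hypothetical key path, assuming `import common`.

  # Accepts a PEM-encoded public or private key; returns the path to a
  # temporary .avbpubkey file suitable for chained-partition descriptors.
  pubkey = common.ExtractAvbPublicKey("path/to/avb_key.pem")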
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
info_dict=None):
"""Generates the recovery-from-boot patch and writes the script to output.
@@ -1871,7 +2265,7 @@ def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
if os.path.exists(path):
diff_program.append("-b")
diff_program.append(path)
- bonus_args = "-b /system/etc/recovery-resource.dat"
+ bonus_args = "--bonus /system/etc/recovery-resource.dat"
else:
bonus_args = ""
@@ -1889,8 +2283,12 @@ def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
if full_recovery_image:
sh = """#!/system/bin/sh
-if ! applypatch -c %(type)s:%(device)s:%(size)d:%(sha1)s; then
- applypatch /system/etc/recovery.img %(type)s:%(device)s %(sha1)s %(size)d && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
+if ! applypatch --check %(type)s:%(device)s:%(size)d:%(sha1)s; then
+ applypatch \\
+ --flash /system/etc/recovery.img \\
+ --target %(type)s:%(device)s:%(size)d:%(sha1)s && \\
+ log -t recovery "Installing new recovery image: succeeded" || \\
+ log -t recovery "Installing new recovery image: failed"
else
log -t recovery "Recovery image already installed"
fi
@@ -1900,8 +2298,13 @@ fi
'size': recovery_img.size}
else:
sh = """#!/system/bin/sh
-if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
- applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
+if ! applypatch --check %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
+ applypatch %(bonus_args)s \\
+ --patch /system/recovery-from-boot.p \\
+ --source %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s \\
+ --target %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s && \\
+ log -t recovery "Installing new recovery image: succeeded" || \\
+ log -t recovery "Installing new recovery image: failed"
else
log -t recovery "Recovery image already installed"
fi
@@ -1919,6 +2322,222 @@ fi
# in the L release.
sh_location = "bin/install-recovery.sh"
- print("putting script in", sh_location)
+ logger.info("putting script in %s", sh_location)
output_sink(sh_location, sh)
+
+
+class DynamicPartitionUpdate(object):
+ def __init__(self, src_group=None, tgt_group=None, progress=None,
+ block_difference=None):
+ self.src_group = src_group
+ self.tgt_group = tgt_group
+ self.progress = progress
+ self.block_difference = block_difference
+
+ @property
+ def src_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.src)
+
+ @property
+ def tgt_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.tgt)
+
+ @staticmethod
+ def _GetSparseImageSize(img):
+ if not img:
+ return 0
+ return img.blocksize * img.total_blocks
+
+
+class DynamicGroupUpdate(object):
+ def __init__(self, src_size=None, tgt_size=None):
+ # None: group does not exist. 0: no size limits.
+ self.src_size = src_size
+ self.tgt_size = tgt_size
+
+
+class DynamicPartitionsDifference(object):
+ def __init__(self, info_dict, block_diffs, progress_dict=None,
+ source_info_dict=None):
+ if progress_dict is None:
+ progress_dict = dict()
+
+ self._remove_all_before_apply = False
+ if source_info_dict is None:
+ self._remove_all_before_apply = True
+ source_info_dict = dict()
+
+    block_diff_dict = {e.partition: e for e in block_diffs}
+ assert len(block_diff_dict) == len(block_diffs), \
+ "Duplicated BlockDifference object for {}".format(
+ [partition for partition, count in
+ collections.Counter(e.partition for e in block_diffs).items()
+ if count > 1])
+
+ self._partition_updates = collections.OrderedDict()
+
+ for p, block_diff in block_diff_dict.items():
+ self._partition_updates[p] = DynamicPartitionUpdate()
+ self._partition_updates[p].block_difference = block_diff
+
+ for p, progress in progress_dict.items():
+ if p in self._partition_updates:
+ self._partition_updates[p].progress = progress
+
+ tgt_groups = shlex.split(info_dict.get(
+ "super_partition_groups", "").strip())
+ src_groups = shlex.split(source_info_dict.get(
+ "super_partition_groups", "").strip())
+
+ for g in tgt_groups:
+ for p in shlex.split(info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in target super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].tgt_group = g
+
+ for g in src_groups:
+ for p in shlex.split(source_info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in source super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].src_group = g
+
+ target_dynamic_partitions = set(shlex.split(info_dict.get(
+ "dynamic_partition_list", "").strip()))
+ block_diffs_with_target = set(p for p, u in self._partition_updates.items()
+ if u.tgt_size)
+ assert block_diffs_with_target == target_dynamic_partitions, \
+ "Target Dynamic partitions: {}, BlockDifference with target: {}".format(
+ list(target_dynamic_partitions), list(block_diffs_with_target))
+
+ source_dynamic_partitions = set(shlex.split(source_info_dict.get(
+ "dynamic_partition_list", "").strip()))
+ block_diffs_with_source = set(p for p, u in self._partition_updates.items()
+ if u.src_size)
+ assert block_diffs_with_source == source_dynamic_partitions, \
+ "Source Dynamic partitions: {}, BlockDifference with source: {}".format(
+ list(source_dynamic_partitions), list(block_diffs_with_source))
+
+ if self._partition_updates:
+ logger.info("Updating dynamic partitions %s",
+ self._partition_updates.keys())
+
+ self._group_updates = collections.OrderedDict()
+
+ for g in tgt_groups:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].tgt_size = int(info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ for g in src_groups:
+ if g not in self._group_updates:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].src_size = int(source_info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ self._Compute()
+
+ def WriteScript(self, script, output_zip, write_verify_script=False):
+ script.Comment('--- Start patching dynamic partitions ---')
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=False)
+
+ op_list_path = MakeTempFile()
+ with open(op_list_path, 'w') as f:
+ for line in self._op_list:
+ f.write('{}\n'.format(line))
+
+ ZipWrite(output_zip, op_list_path, "dynamic_partitions_op_list")
+
+ script.Comment('Update dynamic partition metadata')
+ script.AppendExtra('assert(update_dynamic_partitions('
+ 'package_extract_file("dynamic_partitions_op_list")));')
+
+ if write_verify_script:
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ u.block_difference.WritePostInstallVerifyScript(script)
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size <= u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=write_verify_script)
+ if write_verify_script:
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ script.Comment('--- End patching dynamic partitions ---')
+
+ def _Compute(self):
+ self._op_list = list()
+
+ def append(line):
+ self._op_list.append(line)
+
+ def comment(line):
+ self._op_list.append("# %s" % line)
+
+ if self._remove_all_before_apply:
+ comment('Remove all existing dynamic partitions and groups before '
+ 'applying full OTA')
+ append('remove_all_groups')
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and not u.tgt_group:
+ append('remove %s' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from %s to default' % (p, u.src_group))
+ append('move %s default' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ comment('Shrink partition %s from %d to %d' %
+ (p, u.src_size, u.tgt_size))
+ append('resize %s %s' % (p, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is not None and u.tgt_size is None:
+ append('remove_group %s' % g)
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size > u.tgt_size):
+ comment('Shrink group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is None and u.tgt_size is not None:
+ comment('Add group %s with maximum size %d' % (g, u.tgt_size))
+ append('add_group %s %d' % (g, u.tgt_size))
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size < u.tgt_size):
+ comment('Grow group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_group and not u.src_group:
+ comment('Add partition %s to group %s' % (p, u.tgt_group))
+ append('add %s %s' % (p, u.tgt_group))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size < u.tgt_size:
+ comment('Grow partition %s from %d to %d' % (p, u.src_size, u.tgt_size))
+ append('resize %s %d' % (p, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from default to %s' %
+ (p, u.tgt_group))
+ append('move %s %s' % (p, u.tgt_group))
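For illustration only (not part of the patch): with made-up partition and group names and sizes, the dynamic_partitions_op_list emitted by _Compute() for an incremental update could look like this:

  # Shrink partition product from 209715200 to 104857600
  resize product 104857600
  # Grow group group_foo from 1073741824 to 2147483648
  resize_group group_foo 2147483648
  # Add partition odm to group group_foo
  add odm group_foo
  # Grow partition odm from 0 to 52428800
  resize odm 52428800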
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 7a819288e0..7ed85fecc9 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -31,14 +31,6 @@ class EdifyGenerator(object):
else:
self.fstab = fstab
- def MakeTemporary(self):
- """Make a temporary script object whose commands can latter be
- appended to the parent script with AppendScript(). Used when the
- caller wants to generate script commands out-of-order."""
- x = EdifyGenerator(self.version, self.info)
- x.mounts = self.mounts
- return x
-
@property
def required_cache(self):
"""Return the minimum cache size to apply the update."""
@@ -140,8 +132,8 @@ class EdifyGenerator(object):
self.script.append(
('(!less_than_int(%s, getprop("ro.build.date.utc"))) || '
'abort("E%d: Can\'t install this package (%s) over newer '
- 'build (" + getprop("ro.build.date") + ").");') % (timestamp,
- common.ErrorCode.OLDER_BUILD, timestamp_text))
+ 'build (" + getprop("ro.build.date") + ").");') % (
+ timestamp, common.ErrorCode.OLDER_BUILD, timestamp_text))
def AssertDevice(self, device):
"""Assert that the device identifier is the given string."""
@@ -171,31 +163,32 @@ class EdifyGenerator(object):
[0,1]."""
self.script.append("set_progress(%f);" % (frac,))
- def PatchCheck(self, filename, *sha1):
- """Check that the given file has one of the
- given *sha1 hashes, checking the version saved in cache if the
- file does not match."""
- self.script.append(
- 'apply_patch_check("%s"' % (filename,) +
- "".join([', "%s"' % (i,) for i in sha1]) +
- ') || abort("E%d: \\"%s\\" has unexpected contents.");' % (
- common.ErrorCode.BAD_PATCH_FILE, filename))
-
- def Verify(self, filename):
- """Check that the given file has one of the
- given hashes (encoded in the filename)."""
- self.script.append(
- 'apply_patch_check("{filename}") && '
- 'ui_print(" Verified.") || '
- 'ui_print("\\"{filename}\\" has unexpected contents.");'.format(
- filename=filename))
-
- def FileCheck(self, filename, *sha1):
- """Check that the given file has one of the
- given *sha1 hashes."""
- self.script.append('assert(sha1_check(read_file("%s")' % (filename,) +
- "".join([', "%s"' % (i,) for i in sha1]) +
- '));')
+ def PatchCheck(self, filename, *sha1): # pylint: disable=unused-argument
+ """Checks that the given partition has the desired checksum.
+
+ The call to this function is being deprecated in favor of
+ PatchPartitionCheck(). It will try to parse and handle the old format,
+ unless the format is unknown.
+ """
+ tokens = filename.split(':')
+ assert len(tokens) == 6 and tokens[0] == 'EMMC', \
+ "Failed to handle unknown format. Use PatchPartitionCheck() instead."
+ source = '{}:{}:{}:{}'.format(tokens[0], tokens[1], tokens[2], tokens[3])
+ target = '{}:{}:{}:{}'.format(tokens[0], tokens[1], tokens[4], tokens[5])
+ self.PatchPartitionCheck(target, source)
+
+ def PatchPartitionCheck(self, target, source):
+ """Checks whether updater can patch the given partitions.
+
+ It checks the checksums of the given partitions. If none of them matches the
+ expected checksum, updater will additionally look for a backup on /cache.
+ """
+ self.script.append(self.WordWrap((
+ 'patch_partition_check("{target}",\0"{source}") ||\n abort('
+ '"E{code}: \\"{target}\\" or \\"{source}\\" has unexpected '
+ 'contents.");').format(
+ target=target, source=source,
+ code=common.ErrorCode.BAD_PATCH_FILE)))
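# A hedged illustration of the legacy six-token descriptor that PatchCheck()
# still accepts; the device path, sizes and SHA-1 strings are placeholders:
#   EMMC:<device>:<src_size>:<src_sha1>:<tgt_size>:<tgt_sha1>
legacy = 'EMMC:/dev/block/by-name/boot:12345678:srcsha1:23456789:tgtsha1'
tokens = legacy.split(':')
source = ':'.join(tokens[:4])               # EMMC:<device>:<src_size>:<src_sha1>
target = ':'.join(tokens[:2] + tokens[4:])  # EMMC:<device>:<tgt_size>:<tgt_sha1>
# PatchCheck(legacy) then simply forwards to PatchPartitionCheck(target, source).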
def CacheFreeSpaceCheck(self, amount):
"""Check that there's at least 'amount' space that can be made
@@ -275,17 +268,41 @@ class EdifyGenerator(object):
def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
"""Apply binary patches (in *patchpairs) to the given srcfile to
produce tgtfile (which may be "-" to indicate overwriting the
- source file."""
- if len(patchpairs) % 2 != 0 or len(patchpairs) == 0:
- raise ValueError("bad patches given to ApplyPatch")
- cmd = ['apply_patch("%s",\0"%s",\0%s,\0%d'
- % (srcfile, tgtfile, tgtsha1, tgtsize)]
- for i in range(0, len(patchpairs), 2):
- cmd.append(',\0%s,\0package_extract_file("%s")' % patchpairs[i:i+2])
- cmd.append(') ||\n abort("E%d: Failed to apply patch to %s");' % (
- common.ErrorCode.APPLY_PATCH_FAILURE, srcfile))
- cmd = "".join(cmd)
- self.script.append(self.WordWrap(cmd))
+ source file.
+
+ This edify function is being deprecated in favor of PatchPartition(). It
+ will try to redirect calls to PatchPartition() if possible. On unknown /
+ invalid inputs, raises an exception.
+ """
+ tokens = srcfile.split(':')
+ assert (len(tokens) == 6 and tokens[0] == 'EMMC' and tgtfile == '-' and
+ len(patchpairs) == 2), \
+ "Failed to handle unknown format. Use PatchPartition() instead."
+
+ # Also sanity check the args.
+ assert tokens[3] == patchpairs[0], \
+ "Found mismatching values for source SHA-1: {} vs {}".format(
+ tokens[3], patchpairs[0])
+ assert int(tokens[4]) == tgtsize, \
+ "Found mismatching values for target size: {} vs {}".format(
+ tokens[4], tgtsize)
+ assert tokens[5] == tgtsha1, \
+ "Found mismatching values for target SHA-1: {} vs {}".format(
+ tokens[5], tgtsha1)
+
+ source = '{}:{}:{}:{}'.format(tokens[0], tokens[1], tokens[2], tokens[3])
+ target = '{}:{}:{}:{}'.format(tokens[0], tokens[1], tokens[4], tokens[5])
+ patch = patchpairs[1]
+ self.PatchPartition(target, source, patch)
+
+ def PatchPartition(self, target, source, patch):
+ """Applies the patch to the source partition and writes it to target."""
+ self.script.append(self.WordWrap((
+ 'patch_partition("{target}",\0"{source}",\0'
+ 'package_extract_file("{patch}")) ||\n'
+ ' abort("E{code}: Failed to apply patch to {source}");').format(
+ target=target, source=source, patch=patch,
+ code=common.ErrorCode.APPLY_PATCH_FAILURE)))
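# Continuing the hypothetical descriptor above, a legacy call such as
#   ApplyPatch('EMMC:/dev/block/by-name/boot:12345678:srcsha1:23456789:tgtsha1',
#              '-', 23456789, 'tgtsha1', 'srcsha1', 'patch_data/boot.p')
# passes the assertions and gets redirected to
#   PatchPartition('EMMC:/dev/block/by-name/boot:23456789:tgtsha1',
#                  'EMMC:/dev/block/by-name/boot:12345678:srcsha1',
#                  'patch_data/boot.p')
# which emits a single patch_partition(...) || abort(...) edify command; the
# patch entry name here is made up for illustration.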
def WriteRawImage(self, mount_point, fn, mapfn=None):
"""Write the given package file into the partition for the given
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index e6e8c9fff9..e01b5e8ecd 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -28,34 +28,127 @@ Usage: img_from_target_files [flags] input_target_files output_image_zip
from __future__ import print_function
+import logging
+import os
+import shutil
import sys
+import zipfile
+
+import common
+from build_super_image import BuildSuperImage
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
-import os
-import shutil
-import zipfile
-
-import common
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
-def CopyInfo(output_zip):
+def LoadOptions(input_file):
+ """
+ Load information from input_file to OPTIONS.
+
+ Args:
+ input_file: A Zipfile instance of input zip file, or path to the directory
+ of extracted zip.
+ """
+ info = OPTIONS.info_dict = common.LoadInfoDict(input_file)
+
+ OPTIONS.put_super = info.get("super_image_in_update_package") == "true"
+ OPTIONS.dynamic_partition_list = info.get("dynamic_partition_list",
+ "").strip().split()
+ OPTIONS.super_device_list = info.get("super_block_devices",
+ "").strip().split()
+ OPTIONS.retrofit_dap = info.get("dynamic_partition_retrofit") == "true"
+ OPTIONS.build_super = info.get("build_super_partition") == "true"
+ OPTIONS.sparse_userimages = bool(info.get("extfs_sparse_flag"))
+
+
+def CopyInfo(input_tmp, output_zip):
"""Copy the android-info.txt file from the input to the output."""
common.ZipWrite(
- output_zip, os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"),
+ output_zip, os.path.join(input_tmp, "OTA", "android-info.txt"),
"android-info.txt")
+def CopyUserImages(input_tmp, output_zip):
+ """
+ Copy user images from the unzipped input and write to output_zip.
+
+ Args:
+ input_tmp: path to the unzipped input.
+ output_zip: a ZipFile instance to write images to.
+ """
+ dynamic_images = [p + ".img" for p in OPTIONS.dynamic_partition_list]
+
+ # Filter out system_other for launch DAP devices because it is in super image.
+ if not OPTIONS.retrofit_dap and "system" in OPTIONS.dynamic_partition_list:
+ dynamic_images.append("system_other.img")
+
+ images_path = os.path.join(input_tmp, "IMAGES")
+ # A target-files zip must contain the images since Lollipop.
+ assert os.path.exists(images_path)
+ for image in sorted(os.listdir(images_path)):
+ if OPTIONS.bootable_only and image not in ("boot.img", "recovery.img"):
+ continue
+ if not image.endswith(".img"):
+ continue
+ if image == "recovery-two-step.img":
+ continue
+ if OPTIONS.put_super:
+ if image == "super_empty.img":
+ continue
+ if image in dynamic_images:
+ continue
+ logger.info("writing %s to archive...", os.path.join("IMAGES", image))
+ common.ZipWrite(output_zip, os.path.join(images_path, image), image)
+
+
+def WriteSuperImages(input_tmp, output_zip):
+ """
+  Write super images from the unzipped input to output_zip. This is only done
+  if super_image_in_update_package is set to "true".
+
+ - For retrofit dynamic partition devices, copy split super images from target
+ files package.
+ - For devices launched with dynamic partitions, build super image from target
+ files package.
+
+ Args:
+ input_tmp: path to the unzipped input.
+ output_zip: a ZipFile instance to write images to.
+ """
+ if not OPTIONS.build_super or not OPTIONS.put_super:
+ return
+
+ if OPTIONS.retrofit_dap:
+ # retrofit devices already have split super images under OTA/
+ images_path = os.path.join(input_tmp, "OTA")
+ for device in OPTIONS.super_device_list:
+ image = "super_%s.img" % device
+ image_path = os.path.join(images_path, image)
+ assert os.path.exists(image_path)
+ logger.info("writing %s to archive...", os.path.join("OTA", image))
+ common.ZipWrite(output_zip, image_path, image)
+ else:
+    # The super image for non-retrofit devices isn't in the target files
+    # package, so build it.
+ super_file = common.MakeTempFile("super_", ".img")
+ logger.info("building super image %s...", super_file)
+ BuildSuperImage(input_tmp, super_file)
+ logger.info("writing super.img to archive...")
+ common.ZipWrite(output_zip, super_file, "super.img")
+
+
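# A hedged sketch of what WriteSuperImages() contributes when
# super_image_in_update_package is "true"; the block device names are made up:
#   retrofit device:  super_sda.img, super_sdb.img copied straight from OTA/
#   launch device:    a single super.img built on the fly via BuildSuperImage()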
def main(argv):
- bootable_only = [False]
+  # This allows the inner function to modify the value.
+ bootable_only_array = [False]
def option_handler(o, _):
if o in ("-z", "--bootable_zip"):
- bootable_only[0] = True
+ bootable_only_array[0] = True
else:
return False
return True
@@ -65,35 +158,32 @@ def main(argv):
extra_long_opts=["bootable_zip"],
extra_option_handler=option_handler)
- bootable_only = bootable_only[0]
+ OPTIONS.bootable_only = bootable_only_array[0]
if len(args) != 2:
common.Usage(__doc__)
sys.exit(1)
- OPTIONS.input_tmp = common.UnzipTemp(args[0], ["IMAGES/*", "OTA/*"])
- output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
- CopyInfo(output_zip)
+ common.InitLogging()
- try:
- images_path = os.path.join(OPTIONS.input_tmp, "IMAGES")
- # A target-files zip must contain the images since Lollipop.
- assert os.path.exists(images_path)
- for image in sorted(os.listdir(images_path)):
- if bootable_only and image not in ("boot.img", "recovery.img"):
- continue
- if not image.endswith(".img"):
- continue
- if image == "recovery-two-step.img":
- continue
- common.ZipWrite(output_zip, os.path.join(images_path, image), image)
+ # We need files under IMAGES/, OTA/, META/ for img_from_target_files.py.
+ # However, common.LoadInfoDict() may read additional files under BOOT/,
+ # RECOVERY/ and ROOT/. So unzip everything from the target_files.zip.
+ OPTIONS.input_tmp = common.UnzipTemp(args[0])
+ LoadOptions(OPTIONS.input_tmp)
+ output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED,
+ allowZip64=not OPTIONS.sparse_userimages)
+ try:
+ CopyInfo(OPTIONS.input_tmp, output_zip)
+ CopyUserImages(OPTIONS.input_tmp, output_zip)
+ WriteSuperImages(OPTIONS.input_tmp, output_zip)
finally:
- print("cleaning up...")
+ logger.info("cleaning up...")
common.ZipClose(output_zip)
shutil.rmtree(OPTIONS.input_tmp)
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
@@ -101,5 +191,5 @@ if __name__ == '__main__':
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ logger.exception("\n ERROR:\n")
sys.exit(1)
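# A hedged usage sketch for the reworked flow above; the file names are
# placeholders. Without -z, every IMAGES/*.img is copied except
# recovery-two-step.img and, when super_image_in_update_package is "true",
# super_empty.img and the individual dynamic partition images; with -z only
# boot.img and recovery.img are kept:
#
#   img_from_target_files target_files.zip img.zip
#   img_from_target_files -z target_files.zip bootable-img.zip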
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 7c6007e5f3..725b3550a6 100755
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -16,24 +16,27 @@
from __future__ import print_function
+import logging
+import os
import sys
+import common
+
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
-import os
-import common
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
-def main(argv):
- # def option_handler(o, a):
- # return False
+def main(argv):
args = common.ParseOptions(argv, __doc__)
input_dir, output_dir = args
+ common.InitLogging()
+
OPTIONS.info_dict = common.LoadInfoDict(input_dir)
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
new file mode 100755
index 0000000000..3b72551a37
--- /dev/null
+++ b/tools/releasetools/merge_target_files.py
@@ -0,0 +1,814 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+"""
+This script merges two partial target files packages (one of which contains
+system files, and the other contains non-system files) together, producing a
+complete target files package that can be used to generate an OTA package.
+
+Usage: merge_target_files.py [args]
+
+ --system-target-files system-target-files-zip-archive
+ The input target files package containing system bits. This is a zip
+ archive.
+
+ --system-item-list system-item-list-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_system_item_list if provided.
+
+ --system-misc-info-keys system-misc-info-keys-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_system_misc_info_keys if provided.
+
+ --other-target-files other-target-files-zip-archive
+ The input target files package containing other bits. This is a zip
+ archive.
+
+ --other-item-list other-item-list-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_other_item_list if provided.
+
+ --output-target-files output-target-files-package
+ The output merged target files package. Also a zip archive.
+
+ --rebuild_recovery
+ Rebuild the recovery patch used by non-A/B devices and write it to the
+ system image.
+
+ --keep-tmp
+    Keep temporary files for debugging purposes.
+"""
+
+from __future__ import print_function
+
+import fnmatch
+import logging
+import os
+import sys
+import zipfile
+
+import common
+import add_img_to_target_files
+
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
+OPTIONS.verbose = True
+OPTIONS.system_target_files = None
+OPTIONS.system_item_list = None
+OPTIONS.system_misc_info_keys = None
+OPTIONS.other_target_files = None
+OPTIONS.other_item_list = None
+OPTIONS.output_target_files = None
+OPTIONS.rebuild_recovery = False
+OPTIONS.keep_tmp = False
+
+# default_system_item_list is a list of items to extract from the partial
+# system target files package as is, meaning these items will land in the
+# output target files package exactly as they appear in the input partial
+# system target files package.
+
+default_system_item_list = [
+ 'META/apkcerts.txt',
+ 'META/filesystem_config.txt',
+ 'META/root_filesystem_config.txt',
+ 'META/system_manifest.xml',
+ 'META/system_matrix.xml',
+ 'META/update_engine_config.txt',
+ 'PRODUCT/*',
+ 'ROOT/*',
+ 'SYSTEM/*',
+]
+
+# system_extract_special_item_list is a list of items to extract from the
+# partial system target files package that need some special processing, such
+# as some sort of combination with items from the partial other target files
+# package.
+
+system_extract_special_item_list = [
+ 'META/*',
+]
+
+# default_system_misc_info_keys is a list of keys to obtain from the system
+# instance of META/misc_info.txt. The remaining keys come from the other
+# instance.
+
+default_system_misc_info_keys = [
+ 'avb_system_hashtree_enable',
+ 'avb_system_add_hashtree_footer_args',
+ 'avb_system_key_path',
+ 'avb_system_algorithm',
+ 'avb_system_rollback_index_location',
+ 'avb_product_hashtree_enable',
+ 'avb_product_add_hashtree_footer_args',
+ 'avb_product_services_hashtree_enable',
+ 'avb_product_services_add_hashtree_footer_args',
+ 'system_root_image',
+ 'root_dir',
+ 'ab_update',
+ 'default_system_dev_certificate',
+ 'system_size',
+]
+
+# default_other_item_list is a list of items to extract from the partial
+# other target files package as is, meaning these items will land in the output
+# target files package exactly as they appear in the input partial other target
+# files package.
+
+default_other_item_list = [
+ 'META/boot_filesystem_config.txt',
+ 'META/otakeys.txt',
+ 'META/releasetools.py',
+ 'META/vendor_filesystem_config.txt',
+ 'META/vendor_manifest.xml',
+ 'META/vendor_matrix.xml',
+ 'BOOT/*',
+ 'DATA/*',
+ 'ODM/*',
+ 'OTA/android-info.txt',
+ 'PREBUILT_IMAGES/*',
+ 'RADIO/*',
+ 'VENDOR/*',
+]
+
+# other_extract_special_item_list is a list of items to extract from the
+# partial other target files package that need some special processing, such as
+# some sort of combination with items from the partial system target files
+# package.
+
+other_extract_special_item_list = [
+ 'META/*',
+]
+
+
+def extract_items(target_files, target_files_temp_dir, extract_item_list):
+ """Extract items from target files to temporary directory.
+
+ This function extracts from the specified target files zip archive into the
+ specified temporary directory, the items specified in the extract item list.
+
+ Args:
+ target_files: The target files zip archive from which to extract items.
+
+ target_files_temp_dir: The temporary directory where the extracted items
+ will land.
+
+ extract_item_list: A list of items to extract.
+ """
+
+ logger.info('extracting from %s', target_files)
+
+ # Filter the extract_item_list to remove any items that do not exist in the
+ # zip file. Otherwise, the extraction step will fail.
+
+ with zipfile.ZipFile(
+ target_files,
+ 'r',
+ allowZip64=True) as target_files_zipfile:
+ target_files_namelist = target_files_zipfile.namelist()
+
+ filtered_extract_item_list = []
+ for pattern in extract_item_list:
+ matching_namelist = fnmatch.filter(target_files_namelist, pattern)
+ if not matching_namelist:
+ logger.warning('no match for %s', pattern)
+ else:
+ filtered_extract_item_list.append(pattern)
+
+ # Extract from target_files into target_files_temp_dir the
+ # filtered_extract_item_list.
+
+ common.UnzipToDir(
+ target_files,
+ target_files_temp_dir,
+ filtered_extract_item_list)
+
+
+def read_config_list(config_file_path):
+ """Reads a config file into a list of strings.
+
+ Expects the file to be newline-separated.
+
+ Args:
+ config_file_path: The path to the config file to open and read.
+ """
+ with open(config_file_path) as config_file:
+ return config_file.read().splitlines()
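# A small sketch of the newline-separated format read_config_list() expects;
# the entries are hypothetical. A file with this content, passed via
# --system-item-list, replaces default_system_item_list wholesale:
example_item_list = '\n'.join([
    'META/apkcerts.txt',
    'SYSTEM/*',
    'PRODUCT/*',
])
# read_config_list() just splits on newlines, so such a file yields
# ['META/apkcerts.txt', 'SYSTEM/*', 'PRODUCT/*'].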
+
+
+def validate_config_lists(
+ system_item_list,
+ system_misc_info_keys,
+ other_item_list):
+ """Performs validations on the merge config lists.
+
+ Args:
+ system_item_list: The list of items to extract from the partial
+ system target files package as is.
+
+ system_misc_info_keys: A list of keys to obtain from the system instance
+      of META/misc_info.txt. The remaining keys come from the other instance.
+
+ other_item_list: The list of items to extract from the partial
+ other target files package as is.
+
+ Returns:
+ False if a validation fails, otherwise true.
+ """
+ default_combined_item_set = set(default_system_item_list)
+ default_combined_item_set.update(default_other_item_list)
+
+ combined_item_set = set(system_item_list)
+ combined_item_set.update(other_item_list)
+
+ # Check that the merge config lists are not missing any item specified
+ # by the default config lists.
+ difference = default_combined_item_set.difference(combined_item_set)
+ if difference:
+ logger.error('Missing merge config items: %s' % list(difference))
+ logger.error('Please ensure missing items are in either the '
+ 'system-item-list or other-item-list files provided to '
+ 'this script.')
+ return False
+
+ if ('dynamic_partition_list' in system_misc_info_keys) or (
+ 'super_partition_groups' in system_misc_info_keys):
+ logger.error('Dynamic partition misc info keys should come from '
+ 'the other instance of META/misc_info.txt.')
+ return False
+
+ return True
+
+
+def process_ab_partitions_txt(
+ system_target_files_temp_dir,
+ other_target_files_temp_dir,
+ output_target_files_temp_dir):
+ """Perform special processing for META/ab_partitions.txt
+
+ This function merges the contents of the META/ab_partitions.txt files from
+ the system directory and the other directory, placing the merged result in
+  the output directory. The precondition is that the files are already
+  extracted. The post condition is that the output META/ab_partitions.txt
+  contains the merged content. The format of each ab_partitions.txt is one
+  partition name per line. The output file contains the union of the
+  partition names.
+
+ Args:
+ system_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the system target files package.
+
+ other_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the other target files package.
+
+ output_target_files_temp_dir: The name of a directory that will be used
+ to create the output target files package after all the special cases
+ are processed.
+ """
+
+ system_ab_partitions_txt = os.path.join(
+ system_target_files_temp_dir, 'META', 'ab_partitions.txt')
+
+ other_ab_partitions_txt = os.path.join(
+ other_target_files_temp_dir, 'META', 'ab_partitions.txt')
+
+ with open(system_ab_partitions_txt) as f:
+ system_ab_partitions = f.read().splitlines()
+
+ with open(other_ab_partitions_txt) as f:
+ other_ab_partitions = f.read().splitlines()
+
+ output_ab_partitions = set(system_ab_partitions + other_ab_partitions)
+
+ output_ab_partitions_txt = os.path.join(
+ output_target_files_temp_dir, 'META', 'ab_partitions.txt')
+
+ with open(output_ab_partitions_txt, 'w') as output:
+ for partition in sorted(output_ab_partitions):
+ output.write('%s\n' % partition)
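# A toy illustration of the union-and-sort behaviour above, with hypothetical
# partition names; duplicates such as "boot" collapse to one line in the merged
# META/ab_partitions.txt:
system_ab = ['boot', 'system', 'product']
other_ab = ['boot', 'vendor', 'odm']
merged = sorted(set(system_ab + other_ab))
# -> ['boot', 'odm', 'product', 'system', 'vendor']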
+
+
+def append_recovery_to_filesystem_config(output_target_files_temp_dir):
+ """Perform special processing for META/filesystem_config.txt
+
+ This function appends recovery information to META/filesystem_config.txt
+ so that recovery patch regeneration will succeed.
+
+ Args:
+ output_target_files_temp_dir: The name of a directory that will be used
+ to create the output target files package after all the special cases
+ are processed. We find filesystem_config.txt here.
+ """
+
+ filesystem_config_txt = os.path.join(
+ output_target_files_temp_dir,
+ 'META',
+ 'filesystem_config.txt')
+
+ with open(filesystem_config_txt, 'a') as f:
+ # TODO(bpeckham) this data is hard coded. It should be generated
+ # programmatically.
+ f.write(
+ 'system/bin/install-recovery.sh 0 0 750 '
+ 'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
+ f.write(
+ 'system/recovery-from-boot.p 0 0 644 '
+ 'selabel=u:object_r:system_file:s0 capabilities=0x0\n')
+ f.write(
+ 'system/etc/recovery.img 0 0 440 '
+ 'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
+
+
+def process_misc_info_txt(
+ system_target_files_temp_dir,
+ other_target_files_temp_dir,
+ output_target_files_temp_dir,
+ system_misc_info_keys):
+ """Perform special processing for META/misc_info.txt
+
+ This function merges the contents of the META/misc_info.txt files from the
+ system directory and the other directory, placing the merged result in the
+  output directory. The precondition is that the files are already extracted.
+ The post condition is that the output META/misc_info.txt contains the merged
+ content.
+
+ Args:
+ system_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the system target files package.
+
+ other_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the other target files package.
+
+ output_target_files_temp_dir: The name of a directory that will be used
+ to create the output target files package after all the special cases
+ are processed.
+
+ system_misc_info_keys: A list of keys to obtain from the system instance
+      of META/misc_info.txt. The remaining keys come from the other instance.
+ """
+
+ def read_helper(d):
+ misc_info_txt = os.path.join(d, 'META', 'misc_info.txt')
+ with open(misc_info_txt) as f:
+ return list(f.read().splitlines())
+
+ system_info_dict = common.LoadDictionaryFromLines(
+ read_helper(system_target_files_temp_dir))
+
+ # We take most of the misc info from the other target files.
+
+ merged_info_dict = common.LoadDictionaryFromLines(
+ read_helper(other_target_files_temp_dir))
+
+ # Replace certain values in merged_info_dict with values from
+ # system_info_dict.
+
+ for key in system_misc_info_keys:
+ merged_info_dict[key] = system_info_dict[key]
+
+ # Merge misc info keys used for Dynamic Partitions.
+ if (merged_info_dict.get('use_dynamic_partitions') == 'true') and (
+ system_info_dict.get('use_dynamic_partitions') == 'true'):
+ merged_info_dict['dynamic_partition_list'] = '%s %s' % (
+ system_info_dict.get('dynamic_partition_list', ''),
+ merged_info_dict.get('dynamic_partition_list', ''))
+ # Partition groups and group sizes are defined by the other (non-system)
+ # misc info file because these values may vary for each board that uses
+ # a shared system image.
+ for partition_group in merged_info_dict['super_partition_groups'].split(' '):
+ if ('super_%s_group_size' % partition_group) not in merged_info_dict:
+ raise common.ExternalError(
+ 'Other META/misc_info.txt does not contain required key '
+ 'super_%s_group_size.' % partition_group)
+ key = 'super_%s_partition_list' % partition_group
+ merged_info_dict[key] = '%s %s' % (
+ system_info_dict.get(key, ''),
+ merged_info_dict.get(key, ''))
+
+ output_misc_info_txt = os.path.join(
+ output_target_files_temp_dir,
+ 'META', 'misc_info.txt')
+
+ sorted_keys = sorted(merged_info_dict.keys())
+
+ with open(output_misc_info_txt, 'w') as output:
+ for key in sorted_keys:
+ output.write('{}={}\n'.format(key, merged_info_dict[key]))
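# A hedged sketch of the dynamic-partition merging above, with made-up values.
# When both sides set use_dynamic_partitions=true, the partition lists are
# simply concatenated (system first), and each super_<group>_partition_list is
# rebuilt the same way:
system_list = 'system product'
other_list = 'vendor odm'
merged_list = '%s %s' % (system_list, other_list)  # 'system product vendor odm'
# super_<group>_group_size must already be defined by the other misc_info.txt,
# otherwise the code above raises common.ExternalError.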
+
+
+def process_file_contexts_bin(temp_dir, output_target_files_temp_dir):
+ """Perform special processing for META/file_contexts.bin.
+
+  This function combines plat_file_contexts and vendor_file_contexts, which
+  are expected to already be extracted in output_target_files_temp_dir, to
+  produce a merged file_contexts.bin that will land in
+  output_target_files_temp_dir at META/file_contexts.bin.
+
+ Args:
+ temp_dir: The name of a scratch directory that this function can use for
+ intermediate files generated during processing.
+
+ output_target_files_temp_dir: The name of the working directory that must
+ already contain plat_file_contexts and vendor_file_contexts (in the
+ appropriate sub directories), and to which META/file_contexts.bin will be
+ written.
+ """
+
+ # To create a merged file_contexts.bin file, we use the system and vendor
+ # file contexts files as input, the m4 tool to combine them, the sorting tool
+ # to sort, and finally the sefcontext_compile tool to generate the final
+ # output. We currently omit a checkfc step since the files had been checked
+ # as part of the build.
+
+  # The m4 step concatenates the input file_contexts files. Since m4
+ # writes to stdout, we receive that into an array of bytes, and then write it
+ # to a file.
+
+ # Collect the file contexts that we're going to combine from SYSTEM, VENDOR,
+ # PRODUCT, and ODM. We require SYSTEM and VENDOR, but others are optional.
+
+ file_contexts_list = []
+
+ for partition in ['SYSTEM', 'VENDOR', 'PRODUCT', 'ODM']:
+ prefix = 'plat' if partition == 'SYSTEM' else partition.lower()
+
+ file_contexts = os.path.join(
+ output_target_files_temp_dir,
+ partition, 'etc', 'selinux', prefix + '_file_contexts')
+
+ mandatory = partition in ['SYSTEM', 'VENDOR']
+
+ if mandatory or os.path.isfile(file_contexts):
+ file_contexts_list.append(file_contexts)
+ else:
+ logger.warning('file not found: %s', file_contexts)
+
+ command = ['m4', '--fatal-warnings', '-s'] + file_contexts_list
+
+ merged_content = common.RunAndCheckOutput(command, verbose=False)
+
+ merged_file_contexts_txt = os.path.join(temp_dir, 'merged_file_contexts.txt')
+
+ with open(merged_file_contexts_txt, 'wb') as f:
+ f.write(merged_content)
+
+ # The sort step sorts the concatenated file.
+
+ sorted_file_contexts_txt = os.path.join(temp_dir, 'sorted_file_contexts.txt')
+ command = ['fc_sort', merged_file_contexts_txt, sorted_file_contexts_txt]
+ common.RunAndWait(command, verbose=True)
+
+ # Finally, the compile step creates the final META/file_contexts.bin.
+
+ file_contexts_bin = os.path.join(
+ output_target_files_temp_dir,
+ 'META', 'file_contexts.bin')
+
+ command = [
+ 'sefcontext_compile',
+ '-o', file_contexts_bin,
+ sorted_file_contexts_txt,
+ ]
+
+ common.RunAndWait(command, verbose=True)
+
+
+def process_special_cases(
+ temp_dir,
+ system_target_files_temp_dir,
+ other_target_files_temp_dir,
+ output_target_files_temp_dir,
+ system_misc_info_keys,
+ rebuild_recovery
+):
+ """Perform special-case processing for certain target files items.
+
+ Certain files in the output target files package require special-case
+ processing. This function performs all that special-case processing.
+
+ Args:
+ temp_dir: The name of a scratch directory that this function can use for
+ intermediate files generated during processing.
+
+ system_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the system target files package.
+
+ other_target_files_temp_dir: The name of a directory containing the
+ special items extracted from the other target files package.
+
+ output_target_files_temp_dir: The name of a directory that will be used
+ to create the output target files package after all the special cases
+ are processed.
+
+ system_misc_info_keys: A list of keys to obtain from the system instance
+      of META/misc_info.txt. The remaining keys come from the other instance.
+
+ rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
+ devices and write it to the system image.
+ """
+
+ if 'ab_update' in system_misc_info_keys:
+ process_ab_partitions_txt(
+ system_target_files_temp_dir=system_target_files_temp_dir,
+ other_target_files_temp_dir=other_target_files_temp_dir,
+ output_target_files_temp_dir=output_target_files_temp_dir)
+
+ if rebuild_recovery:
+ append_recovery_to_filesystem_config(
+ output_target_files_temp_dir=output_target_files_temp_dir)
+
+ process_misc_info_txt(
+ system_target_files_temp_dir=system_target_files_temp_dir,
+ other_target_files_temp_dir=other_target_files_temp_dir,
+ output_target_files_temp_dir=output_target_files_temp_dir,
+ system_misc_info_keys=system_misc_info_keys)
+
+ process_file_contexts_bin(
+ temp_dir=temp_dir,
+ output_target_files_temp_dir=output_target_files_temp_dir)
+
+
+def merge_target_files(
+ temp_dir,
+ system_target_files,
+ system_item_list,
+ system_misc_info_keys,
+ other_target_files,
+ other_item_list,
+ output_target_files,
+ rebuild_recovery):
+ """Merge two target files packages together.
+
+ This function takes system and other target files packages as input, performs
+ various file extractions, special case processing, and finally creates a
+ merged zip archive as output.
+
+ Args:
+ temp_dir: The name of a directory we use when we extract items from the
+ input target files packages, and also a scratch directory that we use for
+ temporary files.
+
+ system_target_files: The name of the zip archive containing the system
+ partial target files package.
+
+ system_item_list: The list of items to extract from the partial system
+ target files package as is, meaning these items will land in the output
+ target files package exactly as they appear in the input partial system
+ target files package.
+
+ system_misc_info_keys: The list of keys to obtain from the system instance
+      of META/misc_info.txt. The remaining keys come from the other instance.
+
+ other_target_files: The name of the zip archive containing the other
+ partial target files package.
+
+ other_item_list: The list of items to extract from the partial other
+ target files package as is, meaning these items will land in the output
+ target files package exactly as they appear in the input partial other
+ target files package.
+
+ output_target_files: The name of the output zip archive target files
+ package created by merging system and other.
+
+ rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
+ devices and write it to the system image.
+ """
+
+ logger.info(
+ 'starting: merge system %s and other %s into output %s',
+ system_target_files,
+ other_target_files,
+ output_target_files)
+
+ # Create directory names that we'll use when we extract files from system,
+ # and other, and for zipping the final output.
+
+ system_target_files_temp_dir = os.path.join(temp_dir, 'system')
+ other_target_files_temp_dir = os.path.join(temp_dir, 'other')
+ output_target_files_temp_dir = os.path.join(temp_dir, 'output')
+
+ # Extract "as is" items from the input system partial target files package.
+ # We extract them directly into the output temporary directory since the
+ # items do not need special case processing.
+
+ extract_items(
+ target_files=system_target_files,
+ target_files_temp_dir=output_target_files_temp_dir,
+ extract_item_list=system_item_list)
+
+ # Extract "as is" items from the input other partial target files package. We
+ # extract them directly into the output temporary directory since the items
+ # do not need special case processing.
+
+ extract_items(
+ target_files=other_target_files,
+ target_files_temp_dir=output_target_files_temp_dir,
+ extract_item_list=other_item_list)
+
+ # Extract "special" items from the input system partial target files package.
+ # We extract these items to different directory since they require special
+  # We extract these items to a different directory since they require special
+
+ extract_items(
+ target_files=system_target_files,
+ target_files_temp_dir=system_target_files_temp_dir,
+ extract_item_list=system_extract_special_item_list)
+
+ # Extract "special" items from the input other partial target files package.
+  # We extract these items to a different directory since they require special
+ # processing before they will end up in the output directory.
+
+ extract_items(
+ target_files=other_target_files,
+ target_files_temp_dir=other_target_files_temp_dir,
+ extract_item_list=other_extract_special_item_list)
+
+ # Now that the temporary directories contain all the extracted files, perform
+ # special case processing on any items that need it. After this function
+ # completes successfully, all the files we need to create the output target
+ # files package are in place.
+
+ process_special_cases(
+ temp_dir=temp_dir,
+ system_target_files_temp_dir=system_target_files_temp_dir,
+ other_target_files_temp_dir=other_target_files_temp_dir,
+ output_target_files_temp_dir=output_target_files_temp_dir,
+ system_misc_info_keys=system_misc_info_keys,
+ rebuild_recovery=rebuild_recovery)
+
+ # Regenerate IMAGES in the temporary directory.
+
+ add_img_args = ['--verbose']
+ if rebuild_recovery:
+ add_img_args.append('--rebuild_recovery')
+ add_img_args.append(output_target_files_temp_dir)
+
+ add_img_to_target_files.main(add_img_args)
+
+ # Finally, create the output target files zip archive.
+
+ output_zip = os.path.abspath(output_target_files)
+ output_target_files_list = os.path.join(temp_dir, 'output.list')
+ output_target_files_meta_dir = os.path.join(
+ output_target_files_temp_dir, 'META')
+
+ command = [
+ 'find',
+ output_target_files_meta_dir,
+ ]
+ # TODO(bpeckham): sort this to be more like build.
+ meta_content = common.RunAndCheckOutput(command, verbose=False)
+ command = [
+ 'find',
+ output_target_files_temp_dir,
+ '-path',
+ output_target_files_meta_dir,
+ '-prune',
+ '-o',
+ '-print'
+ ]
+ # TODO(bpeckham): sort this to be more like build.
+ other_content = common.RunAndCheckOutput(command, verbose=False)
+
+ with open(output_target_files_list, 'wb') as f:
+ f.write(meta_content)
+ f.write(other_content)
+
+ command = [
+ 'soong_zip',
+ '-d',
+ '-o', output_zip,
+ '-C', output_target_files_temp_dir,
+ '-l', output_target_files_list,
+ ]
+ logger.info('creating %s', output_target_files)
+ common.RunAndWait(command, verbose=True)
+
+
+def call_func_with_temp_dir(func, keep_tmp):
+ """Manage the creation and cleanup of the temporary directory.
+
+ This function calls the given function after first creating a temporary
+ directory. It also cleans up the temporary directory.
+
+ Args:
+ func: The function to call. Should accept one parameter, the path to
+ the temporary directory.
+
+ keep_tmp: Keep the temporary directory after processing is complete.
+ """
+
+ # Create a temporary directory. This will serve as the parent of directories
+ # we use when we extract items from the input target files packages, and also
+ # a scratch directory that we use for temporary files.
+
+ temp_dir = common.MakeTempDir(prefix='merge_target_files_')
+
+ try:
+ func(temp_dir)
+ finally:
+ if keep_tmp:
+ logger.info('keeping %s', temp_dir)
+ else:
+ common.Cleanup()
+
+
+def main():
+ """The main function.
+
+ Process command line arguments, then call merge_target_files to
+ perform the heavy lifting.
+ """
+
+ common.InitLogging()
+
+ def option_handler(o, a):
+ if o == '--system-target-files':
+ OPTIONS.system_target_files = a
+ elif o == '--system-item-list':
+ OPTIONS.system_item_list = a
+ elif o == '--system-misc-info-keys':
+ OPTIONS.system_misc_info_keys = a
+ elif o == '--other-target-files':
+ OPTIONS.other_target_files = a
+ elif o == '--other-item-list':
+ OPTIONS.other_item_list = a
+ elif o == '--output-target-files':
+ OPTIONS.output_target_files = a
+ elif o == '--rebuild_recovery':
+ OPTIONS.rebuild_recovery = True
+ elif o == '--keep-tmp':
+ OPTIONS.keep_tmp = True
+ else:
+ return False
+ return True
+
+ args = common.ParseOptions(
+ sys.argv[1:], __doc__,
+ extra_long_opts=[
+ 'system-target-files=',
+ 'system-item-list=',
+ 'system-misc-info-keys=',
+ 'other-target-files=',
+ 'other-item-list=',
+ 'output-target-files=',
+ 'rebuild_recovery',
+ 'keep-tmp',
+ ],
+ extra_option_handler=option_handler)
+
+ if (len(args) != 0 or
+ OPTIONS.system_target_files is None or
+ OPTIONS.other_target_files is None or
+ OPTIONS.output_target_files is None):
+ common.Usage(__doc__)
+ sys.exit(1)
+
+ if OPTIONS.system_item_list:
+ system_item_list = read_config_list(OPTIONS.system_item_list)
+ else:
+ system_item_list = default_system_item_list
+
+ if OPTIONS.system_misc_info_keys:
+ system_misc_info_keys = read_config_list(OPTIONS.system_misc_info_keys)
+ else:
+ system_misc_info_keys = default_system_misc_info_keys
+
+ if OPTIONS.other_item_list:
+ other_item_list = read_config_list(OPTIONS.other_item_list)
+ else:
+ other_item_list = default_other_item_list
+
+ if not validate_config_lists(
+ system_item_list=system_item_list,
+ system_misc_info_keys=system_misc_info_keys,
+ other_item_list=other_item_list):
+ sys.exit(1)
+
+ call_func_with_temp_dir(
+ lambda temp_dir: merge_target_files(
+ temp_dir=temp_dir,
+ system_target_files=OPTIONS.system_target_files,
+ system_item_list=system_item_list,
+ system_misc_info_keys=system_misc_info_keys,
+ other_target_files=OPTIONS.other_target_files,
+ other_item_list=other_item_list,
+ output_target_files=OPTIONS.output_target_files,
+ rebuild_recovery=OPTIONS.rebuild_recovery),
+ OPTIONS.keep_tmp)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 10a19b3683..8b55f034b3 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -15,54 +15,12 @@
# limitations under the License.
"""
-Given a target-files zipfile, produces an OTA package that installs
-that build. An incremental OTA is produced if -i is given, otherwise
-a full OTA is produced.
+Given a target-files zipfile, produces an OTA package that installs that build.
+An incremental OTA is produced if -i is given, otherwise a full OTA is produced.
-Usage: ota_from_target_files [flags] input_target_files output_ota_package
+Usage: ota_from_target_files [options] input_target_files output_ota_package
- -k (--package_key) <key> Key to use to sign the package (default is
- the value of default_system_dev_certificate from the input
- target-files's META/misc_info.txt, or
- "build/target/product/security/testkey" if that value is not
- specified).
-
- For incremental OTAs, the default value is based on the source
- target-file, not the target build.
-
- -i (--incremental_from) <file>
- Generate an incremental OTA using the given target-files zip as
- the starting build.
-
- --full_radio
- When generating an incremental OTA, always include a full copy of
- radio image. This option is only meaningful when -i is specified,
- because a full radio is always included in a full OTA if applicable.
-
- --full_bootloader
- Similar to --full_radio. When generating an incremental OTA, always
- include a full copy of bootloader image.
-
- --verify
- Remount and verify the checksums of the files written to the system and
- vendor (if used) partitions. Non-A/B incremental OTAs only.
-
- -o (--oem_settings) <main_file[,additional_files...]>
- Comma seperated list of files used to specify the expected OEM-specific
- properties on the OEM partition of the intended device. Multiple expected
- values can be used by providing multiple files. Only the first dict will
- be used to compute fingerprint, while the rest will be used to assert
- OEM-specific properties.
-
- --oem_no_mount
- For devices with OEM-specific properties but without an OEM partition,
- do not mount the OEM partition in the updater-script. This should be
- very rarely used, since it's expected to have a dedicated OEM partition
- for OEM-specific properties. Only meaningful when -o is specified.
-
- --wipe_user_data
- Generate an OTA package that will wipe the user data partition
- when installed.
+Common options that apply to both non-A/B and A/B OTAs
--downgrade
Intentionally generate an incremental OTA that updates from a newer build
@@ -73,6 +31,19 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
will be used in the OTA package, unless --binary flag is specified. Please
also check the comment for --override_timestamp below.
+ -i (--incremental_from) <file>
+ Generate an incremental OTA using the given target-files zip as the
+ starting build.
+
+ -k (--package_key) <key>
+ Key to use to sign the package (default is the value of
+ default_system_dev_certificate from the input target-files's
+ META/misc_info.txt, or "build/target/product/security/testkey" if that
+ value is not specified).
+
+ For incremental OTAs, the default value is based on the source
+ target-file, not the target build.
+
--override_timestamp
Intentionally generate an incremental OTA that updates from a newer build
to an older one (based on timestamp comparison), by setting the downgrade
@@ -89,13 +60,84 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
based on timestamp) with the same "ota-downgrade=yes" flag, with the
difference being whether "ota-wipe=yes" is set.
- -e (--extra_script) <file>
+ --wipe_user_data
+ Generate an OTA package that will wipe the user data partition when
+ installed.
+
+ --retrofit_dynamic_partitions
+ Generates an OTA package that updates a device to support dynamic
+ partitions (default False). This flag is implied when generating
+ an incremental OTA where the base build does not support dynamic
+ partitions but the target build does. For A/B, when this flag is set,
+ --skip_postinstall is implied.
+
+ --skip_compatibility_check
+ Skip adding the compatibility package to the generated OTA package.
+
+ --output_metadata_path
+    Write a copy of the metadata to a separate file, so that users can read
+    the post-build fingerprint without extracting the OTA package.
+
+Non-A/B OTA specific options
+
+ -b (--binary) <file>
+ Use the given binary as the update-binary in the output package, instead
+ of the binary in the build's target_files. Use for development only.
+
+ --block
+    Generate a block-based OTA for non-A/B devices. We have deprecated the
+ support for file-based OTA since O. Block-based OTA will be used by
+ default for all non-A/B devices. Keeping this flag here to not break
+ existing callers.
+
+ -e (--extra_script) <file>
Insert the contents of file at the end of the update script.
+ --full_bootloader
+ Similar to --full_radio. When generating an incremental OTA, always
+ include a full copy of bootloader image.
+
+ --full_radio
+ When generating an incremental OTA, always include a full copy of radio
+ image. This option is only meaningful when -i is specified, because a full
+ radio is always included in a full OTA if applicable.
+
+ --log_diff <file>
+ Generate a log file that shows the differences in the source and target
+ builds for an incremental package. This option is only meaningful when -i
+ is specified.
+
+ -o (--oem_settings) <main_file[,additional_files...]>
+    Comma-separated list of files used to specify the expected OEM-specific
+ properties on the OEM partition of the intended device. Multiple expected
+ values can be used by providing multiple files. Only the first dict will
+ be used to compute fingerprint, while the rest will be used to assert
+ OEM-specific properties.
+
+ --oem_no_mount
+ For devices with OEM-specific properties but without an OEM partition, do
+ not mount the OEM partition in the updater-script. This should be very
+ rarely used, since it's expected to have a dedicated OEM partition for
+ OEM-specific properties. Only meaningful when -o is specified.
+
+ --stash_threshold <float>
+ Specify the threshold that will be used to compute the maximum allowed
+ stash size (defaults to 0.8).
+
+ -t (--worker_threads) <int>
+ Specify the number of worker-threads that will be used when generating
+ patches for incremental updates (defaults to 3).
+
+ --verify
+ Verify the checksums of the updated system and vendor (if any) partitions.
+ Non-A/B incremental OTAs only.
+
-2 (--two_step)
- Generate a 'two-step' OTA package, where recovery is updated
- first, so that any changes made to the system partition are done
- using the new recovery (new kernel, etc.).
+ Generate a 'two-step' OTA package, where recovery is updated first, so
+ that any changes made to the system partition are done using the new
+ recovery (new kernel, etc.).
+
+A/B OTA specific options
--include_secondary
Additionally include the payload for secondary slot images (default:
@@ -115,30 +157,6 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
Due to the special install procedure, the secondary payload will be always
generated as a full payload.
- --block
- Generate a block-based OTA for non-A/B device. We have deprecated the
- support for file-based OTA since O. Block-based OTA will be used by
- default for all non-A/B devices. Keeping this flag here to not break
- existing callers.
-
- -b (--binary) <file>
- Use the given binary as the update-binary in the output package,
- instead of the binary in the build's target_files. Use for
- development only.
-
- -t (--worker_threads) <int>
- Specifies the number of worker-threads that will be used when
- generating patches for incremental updates (defaults to 3).
-
- --stash_threshold <float>
- Specifies the threshold that will be used to compute the maximum
- allowed stash size (defaults to 0.8).
-
- --log_diff <file>
- Generate a log file that shows the differences in the source and target
- builds for an incremental package. This option is only meaningful when
- -i is specified.
-
--payload_signer <signer>
Specify the signer when signing the payload and metadata for A/B OTAs.
By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
@@ -150,6 +168,9 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
--payload_signer_args <args>
Specify the arguments needed for payload signer.
+ --payload_signer_key_size <key_size>
+ Specify the key size in bytes of the payload signer.
+
--skip_postinstall
Skip the postinstall hooks when generating an A/B OTA package (default:
False). Note that this discards ALL the hooks, including non-optional
@@ -160,23 +181,25 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
from __future__ import print_function
+import logging
import multiprocessing
import os.path
import shlex
import shutil
import struct
-import subprocess
import sys
import tempfile
import zipfile
import common
import edify_generator
+import verity_utils
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
@@ -204,14 +227,21 @@ OPTIONS.stash_threshold = 0.8
OPTIONS.log_diff = None
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
+OPTIONS.payload_signer_key_size = None
OPTIONS.extracted_input = None
OPTIONS.key_passwords = []
OPTIONS.skip_postinstall = False
+OPTIONS.retrofit_dynamic_partitions = False
+OPTIONS.skip_compatibility_check = False
+OPTIONS.output_metadata_path = None
METADATA_NAME = 'META-INF/com/android/metadata'
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
-UNZIP_PATTERN = ['IMAGES/*', 'META/*']
+DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
+AB_PARTITIONS = 'META/ab_partitions.txt'
+UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'RADIO/*']
+RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
class BuildInfo(object):
@@ -243,15 +273,23 @@ class BuildInfo(object):
device: The device name, which could come from OEM dicts if applicable.
"""
+ _RO_PRODUCT_RESOLVE_PROPS = ["ro.product.brand", "ro.product.device",
+ "ro.product.manufacturer", "ro.product.model",
+ "ro.product.name"]
+ _RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER = ["product", "product_services",
+ "odm", "vendor", "system"]
+
def __init__(self, info_dict, oem_dicts):
"""Initializes a BuildInfo instance with the given dicts.
+ Note that it only wraps up the given dicts, without making copies.
+
Arguments:
info_dict: The build-time info dict.
oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
that it always uses the first dict to calculate the fingerprint or the
device name. The rest would be used for asserting OEM properties only
- (e.g. one package can be installed on one of these devices).
+ (e.g. one package can be installed on one of these devices).
"""
self.info_dict = info_dict
self.oem_dicts = oem_dicts
@@ -279,22 +317,82 @@ class BuildInfo(object):
return self._fingerprint
@property
+ def vendor_fingerprint(self):
+ return self._fingerprint_of("vendor")
+
+ @property
+ def product_fingerprint(self):
+ return self._fingerprint_of("product")
+
+ @property
+ def odm_fingerprint(self):
+ return self._fingerprint_of("odm")
+
+ def _fingerprint_of(self, partition):
+ if partition + ".build.prop" not in self.info_dict:
+ return None
+ build_prop = self.info_dict[partition + ".build.prop"]
+ if "ro." + partition + ".build.fingerprint" in build_prop:
+ return build_prop["ro." + partition + ".build.fingerprint"]
+ if "ro." + partition + ".build.thumbprint" in build_prop:
+ return build_prop["ro." + partition + ".build.thumbprint"]
+ return None
+
+ @property
def oem_props(self):
return self._oem_props
def __getitem__(self, key):
return self.info_dict[key]
+ def __setitem__(self, key, value):
+ self.info_dict[key] = value
+
def get(self, key, default=None):
return self.info_dict.get(key, default)
+ def items(self):
+ return self.info_dict.items()
+
def GetBuildProp(self, prop):
"""Returns the inquired build property."""
+ if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
+ return self._ResolveRoProductBuildProp(prop)
+
try:
return self.info_dict.get("build.prop", {})[prop]
except KeyError:
raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+ def _ResolveRoProductBuildProp(self, prop):
+ """Resolves the inquired ro.product.* build property"""
+ prop_val = self.info_dict.get("build.prop", {}).get(prop)
+ if prop_val:
+ return prop_val
+
+ source_order_val = self.info_dict.get("build.prop", {}).get(
+ "ro.product.property_source_order")
+ if source_order_val:
+ source_order = source_order_val.split(",")
+ else:
+ source_order = BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
+
+ # Check that all sources in ro.product.property_source_order are valid
+ if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
+ for x in source_order]):
+ raise common.ExternalError(
+ "Invalid ro.product.property_source_order '{}'".format(source_order))
+
+ for source in source_order:
+ source_prop = prop.replace("ro.product", "ro.product.{}".format(source),
+ 1)
+ prop_val = self.info_dict.get("{}.build.prop".format(source), {}).get(
+ source_prop)
+ if prop_val:
+ return prop_val
+
+ raise common.ExternalError("couldn't resolve {}".format(prop))
+
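# A toy illustration of the resolution above, using a fabricated info dict: the
# plain ro.product.device wins if build.prop defines it; otherwise the first
# partition in ro.product.property_source_order (or the default order) that
# defines ro.product.<partition>.device is used.
example_info = {
    "build.prop": {"ro.product.property_source_order": "odm,vendor,system"},
    "odm.build.prop": {},
    "vendor.build.prop": {"ro.product.vendor.device": "sample_device"},
}
# For a BuildInfo whose info_dict looks like example_info,
# _ResolveRoProductBuildProp("ro.product.device") would return "sample_device":
# odm defines nothing, so vendor is checked next.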
def GetVendorBuildProp(self, prop):
"""Returns the inquired vendor build property."""
try:
@@ -310,7 +408,18 @@ class BuildInfo(object):
def CalculateFingerprint(self):
if self.oem_props is None:
- return self.GetBuildProp("ro.build.fingerprint")
+ try:
+ return self.GetBuildProp("ro.build.fingerprint")
+ except common.ExternalError:
+ return "{}/{}/{}:{}/{}/{}:{}/{}".format(
+ self.GetBuildProp("ro.product.brand"),
+ self.GetBuildProp("ro.product.name"),
+ self.GetBuildProp("ro.product.device"),
+ self.GetBuildProp("ro.build.version.release"),
+ self.GetBuildProp("ro.build.id"),
+ self.GetBuildProp("ro.build.version.incremental"),
+ self.GetBuildProp("ro.build.type"),
+ self.GetBuildProp("ro.build.tags"))
return "%s/%s/%s:%s" % (
self.GetOemProperty("ro.product.brand"),
self.GetOemProperty("ro.product.name"),
@@ -369,28 +478,46 @@ class PayloadSigner(object):
cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
cmd.extend(["-out", signing_key])
-
- get_signing_key = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- stdoutdata, _ = get_signing_key.communicate()
- assert get_signing_key.returncode == 0, \
- "Failed to get signing key: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd, verbose=False)
self.signer = "openssl"
self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
"-pkeyopt", "digest:sha256"]
+ self.key_size = self._GetKeySizeInBytes(signing_key)
else:
self.signer = OPTIONS.payload_signer
self.signer_args = OPTIONS.payload_signer_args
+ if OPTIONS.payload_signer_key_size:
+ self.key_size = int(OPTIONS.payload_signer_key_size)
+ assert self.key_size == 256 or self.key_size == 512, \
+ "Unsupported key size {}".format(OPTIONS.payload_signer_key_size)
+ else:
+ self.key_size = 256
+
+ @staticmethod
+ def _GetKeySizeInBytes(signing_key):
+ modulus_file = common.MakeTempFile(prefix="modulus-")
+ cmd = ["openssl", "rsa", "-inform", "PEM", "-in", signing_key, "-modulus",
+ "-noout", "-out", modulus_file]
+ common.RunAndCheckOutput(cmd, verbose=False)
+
+ with open(modulus_file) as f:
+ modulus_string = f.read()
+ # The modulus string has the format "Modulus=$data", where $data is the
+ # concatenation of hex dump of the modulus.
+ MODULUS_PREFIX = "Modulus="
+ assert modulus_string.startswith(MODULUS_PREFIX)
+ modulus_string = modulus_string[len(MODULUS_PREFIX):]
+ key_size = len(modulus_string) / 2
+ assert key_size == 256 or key_size == 512, \
+ "Unsupported key size {}".format(key_size)
+ return key_size
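# A short worked example of the check above: for a hypothetical 2048-bit RSA
# key, `openssl rsa -modulus` prints "Modulus=" followed by 512 hex characters,
# so the computed size is 256 bytes; a 4096-bit key yields 512 bytes, and
# anything else trips the assertion.
MODULUS_PREFIX = "Modulus="
modulus_string = MODULUS_PREFIX + "AB" * 256  # stand-in for a real modulus dump
key_size = len(modulus_string[len(MODULUS_PREFIX):]) / 2
assert key_size == 256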
def Sign(self, in_file):
"""Signs the given input file. Returns the output filename."""
out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
- signing = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- stdoutdata, _ = signing.communicate()
- assert signing.returncode == 0, \
- "Failed to sign the input file: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd)
return out_file
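For reference, the modulus-based size check above can be exercised on its own;
a minimal sketch, assuming a PEM-encoded RSA key at a hypothetical path (two
hex characters per modulus byte, so a 2048-bit key yields 256):

  # Illustrative sketch only; mirrors _GetKeySizeInBytes() without temp files.
  import subprocess

  def rsa_key_size_bytes(pem_key_path):
    # "openssl rsa -modulus -noout" prints a single "Modulus=<hex>" line.
    out = subprocess.check_output(
        ["openssl", "rsa", "-inform", "PEM", "-in", pem_key_path,
         "-modulus", "-noout"]).decode("ascii").strip()
    assert out.startswith("Modulus=")
    return len(out[len("Modulus="):]) // 2  # e.g. 2048-bit key -> 256

  # rsa_key_size_bytes("path/to/signing.key")  # hypothetical key path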
@@ -408,8 +535,6 @@ class Payload(object):
Args:
secondary: Whether it's generating a secondary payload (default: False).
"""
- # The place where the output from the subprocess should go.
- self._log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
self.payload_file = None
self.payload_properties = None
self.secondary = secondary
@@ -434,10 +559,7 @@ class Payload(object):
if source_file is not None:
cmd.extend(["--source_image", source_file])
cmd.extend(additional_args)
- p = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- stdoutdata, _ = p.communicate()
- assert p.returncode == 0, \
- "brillo_update_payload generate failed: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd)
self.payload_file = payload_file
self.payload_properties = None
@@ -458,12 +580,10 @@ class Payload(object):
metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
cmd = ["brillo_update_payload", "hash",
"--unsigned_payload", self.payload_file,
- "--signature_size", "256",
+ "--signature_size", str(payload_signer.key_size),
"--metadata_hash_file", metadata_sig_file,
"--payload_hash_file", payload_sig_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload hash failed"
+ common.RunAndCheckOutput(cmd)
# 2. Sign the hashes.
signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
@@ -475,12 +595,10 @@ class Payload(object):
cmd = ["brillo_update_payload", "sign",
"--unsigned_payload", self.payload_file,
"--payload", signed_payload_file,
- "--signature_size", "256",
+ "--signature_size", str(payload_signer.key_size),
"--metadata_signature_file", signed_metadata_sig_file,
"--payload_signature_file", signed_payload_sig_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload sign failed"
+ common.RunAndCheckOutput(cmd)
# 4. Dump the signed payload properties.
properties_file = common.MakeTempFile(prefix="payload-properties-",
@@ -488,9 +606,7 @@ class Payload(object):
cmd = ["brillo_update_payload", "properties",
"--payload", signed_payload_file,
"--properties_file", properties_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload properties failed"
+ common.RunAndCheckOutput(cmd)
if self.secondary:
with open(properties_file, "a") as f:
@@ -572,11 +688,11 @@ def _WriteRecoveryImageToBoot(script, output_zip):
OPTIONS.input_tmp, "RECOVERY")
common.ZipWriteStr(
output_zip, recovery_two_step_img_name, recovery_two_step_img.data)
- print("two-step package: using %s in stage 1/3" % (
- recovery_two_step_img_name,))
+ logger.info(
+ "two-step package: using %s in stage 1/3", recovery_two_step_img_name)
script.WriteRawImage("/boot", recovery_two_step_img_name)
else:
- print("two-step package: using recovery.img in stage 1/3")
+ logger.info("two-step package: using recovery.img in stage 1/3")
# The "recovery.img" entry has been written into package earlier.
script.WriteRawImage("/boot", "recovery.img")
@@ -587,14 +703,26 @@ def HasRecoveryPatch(target_files_zip):
"SYSTEM/etc/recovery.img" in namelist)
-def HasVendorPartition(target_files_zip):
+def HasPartition(target_files_zip, partition):
try:
- target_files_zip.getinfo("VENDOR/")
+ target_files_zip.getinfo(partition.upper() + "/")
return True
except KeyError:
return False
+def HasVendorPartition(target_files_zip):
+ return HasPartition(target_files_zip, "vendor")
+
+
+def HasProductPartition(target_files_zip):
+ return HasPartition(target_files_zip, "product")
+
+
+def HasOdmPartition(target_files_zip):
+ return HasPartition(target_files_zip, "odm")
+
+
def HasTrebleEnabled(target_files_zip, target_info):
return (HasVendorPartition(target_files_zip) and
target_info.GetBuildProp("ro.treble.enabled") == "true")
@@ -640,23 +768,24 @@ def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
generating an incremental OTA; None otherwise.
"""
- def AddCompatibilityArchive(system_updated, vendor_updated):
- """Adds compatibility info based on system/vendor update status.
+ def AddCompatibilityArchive(framework_updated, device_updated):
+ """Adds compatibility info based on update status of both sides of Treble
+ boundary.
Args:
- system_updated: If True, the system image will be updated and therefore
- its metadata should be included.
- vendor_updated: If True, the vendor image will be updated and therefore
- its metadata should be included.
+      framework_updated: If True, the system / product images will be updated
+        and therefore their metadata should be included.
+      device_updated: If True, the vendor / odm images will be updated and
+        therefore their metadata should be included.
"""
# Determine what metadata we need. Files are names relative to META/.
compatibility_files = []
- vendor_metadata = ("vendor_manifest.xml", "vendor_matrix.xml")
- system_metadata = ("system_manifest.xml", "system_matrix.xml")
- if vendor_updated:
- compatibility_files += vendor_metadata
- if system_updated:
- compatibility_files += system_metadata
+ device_metadata = ("vendor_manifest.xml", "vendor_matrix.xml")
+ framework_metadata = ("system_manifest.xml", "system_matrix.xml")
+ if device_updated:
+ compatibility_files += device_metadata
+ if framework_updated:
+ compatibility_files += framework_metadata
# Create new archive.
compatibility_archive = tempfile.NamedTemporaryFile()
@@ -680,33 +809,43 @@ def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
arcname="compatibility.zip",
compress_type=zipfile.ZIP_STORED)
+ def FingerprintChanged(source_fp, target_fp):
+ if source_fp is None or target_fp is None:
+ return True
+ return source_fp != target_fp
+
# Will only proceed if the target has enabled the Treble support (as well as
# having a /vendor partition).
if not HasTrebleEnabled(target_zip, target_info):
return
- # We don't support OEM thumbprint in Treble world (which calculates
- # fingerprints in a different way as shown in CalculateFingerprint()).
- assert not target_info.oem_props
+  # Skip adding the compatibility package as a workaround for b/114240221. The
+  # compatibility check will always fail on devices without qualified kernels.
+ if OPTIONS.skip_compatibility_check:
+ return
- # Full OTA carries the info for system/vendor both.
+ # Full OTA carries the info for system/vendor/product/odm
if source_info is None:
AddCompatibilityArchive(True, True)
return
- assert not source_info.oem_props
-
source_fp = source_info.fingerprint
target_fp = target_info.fingerprint
system_updated = source_fp != target_fp
- source_fp_vendor = source_info.GetVendorBuildProp(
- "ro.vendor.build.fingerprint")
- target_fp_vendor = target_info.GetVendorBuildProp(
- "ro.vendor.build.fingerprint")
- vendor_updated = source_fp_vendor != target_fp_vendor
+  # Other build fingerprints could possibly be blacklisted at build time. In
+  # such a case, we consider those images as changed.
+ vendor_updated = FingerprintChanged(source_info.vendor_fingerprint,
+ target_info.vendor_fingerprint)
+ product_updated = HasProductPartition(target_zip) and \
+ FingerprintChanged(source_info.product_fingerprint,
+ target_info.product_fingerprint)
+ odm_updated = HasOdmPartition(target_zip) and \
+ FingerprintChanged(source_info.odm_fingerprint,
+ target_info.odm_fingerprint)
- AddCompatibilityArchive(system_updated, vendor_updated)
+ AddCompatibilityArchive(system_updated or product_updated,
+ vendor_updated or odm_updated)
def WriteFullOTAPackage(input_zip, output_file):
@@ -810,33 +949,51 @@ else if get_stage("%(bcb_dev)s") == "3/3" then
script.ShowProgress(system_progress, 0)
- # See the notes in WriteBlockIncrementalOTAPackage().
- allow_shared_blocks = target_info.get('ext4_share_dup_blocks') == "true"
-
- # Full OTA is done as an "incremental" against an empty source image. This
- # has the effect of writing new data from the package to the entire
- # partition, but lets us reuse the updater code that writes incrementals to
- # do it.
- system_tgt = common.GetSparseImage("system", OPTIONS.input_tmp, input_zip,
- allow_shared_blocks)
- system_tgt.ResetFileMap()
- system_diff = common.BlockDifference("system", system_tgt, src=None)
- system_diff.WriteScript(script, output_zip)
-
- boot_img = common.GetBootableImage(
- "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-
+ def GetBlockDifference(partition):
+ # Full OTA is done as an "incremental" against an empty source image. This
+ # has the effect of writing new data from the package to the entire
+ # partition, but lets us reuse the updater code that writes incrementals to
+ # do it.
+ tgt = common.GetUserImage(partition, OPTIONS.input_tmp, input_zip,
+ info_dict=target_info,
+ reset_file_map=True)
+ diff = common.BlockDifference(partition, tgt, src=None)
+ return diff
+
+ device_specific_diffs = device_specific.FullOTA_GetBlockDifferences()
+ if device_specific_diffs:
+ assert all(isinstance(diff, common.BlockDifference)
+ for diff in device_specific_diffs), \
+ "FullOTA_GetBlockDifferences is not returning a list of " \
+ "BlockDifference objects"
+
+ progress_dict = dict()
+ block_diffs = [GetBlockDifference("system")]
if HasVendorPartition(input_zip):
- script.ShowProgress(0.1, 0)
-
- vendor_tgt = common.GetSparseImage("vendor", OPTIONS.input_tmp, input_zip,
- allow_shared_blocks)
- vendor_tgt.ResetFileMap()
- vendor_diff = common.BlockDifference("vendor", vendor_tgt)
- vendor_diff.WriteScript(script, output_zip)
+ block_diffs.append(GetBlockDifference("vendor"))
+ progress_dict["vendor"] = 0.1
+ if device_specific_diffs:
+ block_diffs += device_specific_diffs
+
+ if target_info.get('use_dynamic_partitions') == "true":
+ # Use empty source_info_dict to indicate that all partitions / groups must
+ # be re-added.
+ dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ info_dict=OPTIONS.info_dict,
+ block_diffs=block_diffs,
+ progress_dict=progress_dict)
+ dynamic_partitions_diff.WriteScript(script, output_zip,
+ write_verify_script=OPTIONS.verify)
+ else:
+ for block_diff in block_diffs:
+ block_diff.WriteScript(script, output_zip,
+ progress=progress_dict.get(block_diff.partition),
+ write_verify_script=OPTIONS.verify)
AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
+ boot_img = common.GetBootableImage(
+ "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
common.CheckSize(boot_img.data, "boot.img", target_info)
common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
@@ -886,10 +1043,22 @@ endif;
FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
-def WriteMetadata(metadata, output_zip):
+def WriteMetadata(metadata, output):
+ """Writes the metadata to the zip archive or a file.
+
+ Args:
+ metadata: The metadata dict for the package.
+ output: A ZipFile object or a string of the output file path.
+ """
+
value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.iteritems())])
- common.ZipWriteStr(output_zip, METADATA_NAME, value,
- compress_type=zipfile.ZIP_STORED)
+ if isinstance(output, zipfile.ZipFile):
+ common.ZipWriteStr(output, METADATA_NAME, value,
+ compress_type=zipfile.ZIP_STORED)
+ return
+
+ with open(output, 'w') as f:
+ f.write(value)
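As a rough illustration of the serialized form (keys are taken from this file;
values are placeholders), WriteMetadata emits sorted key=value lines such as:

  ota-retrofit-dynamic-partitions=yes
  ota-wipe=yes
  pre-build=<source build fingerprint>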
def HandleDowngradeMetadata(metadata, target_info, source_info):
@@ -951,6 +1120,9 @@ def GetPackageMetadata(target_info, source_info=None):
if OPTIONS.wipe_user_data:
metadata['ota-wipe'] = 'yes'
+ if OPTIONS.retrofit_dynamic_partitions:
+ metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+
is_incremental = source_info is not None
if is_incremental:
metadata['pre-build'] = source_info.fingerprint
@@ -1020,7 +1192,7 @@ class PropertyFiles(object):
A string with placeholders for the metadata offset/size info, e.g.
"payload.bin:679:343,payload_properties.txt:378:45,metadata: ".
"""
- return self._GetPropertyFilesString(input_zip, reserve_space=True)
+ return self.GetPropertyFilesString(input_zip, reserve_space=True)
class InsufficientSpaceException(Exception):
pass
@@ -1049,7 +1221,7 @@ class PropertyFiles(object):
InsufficientSpaceException: If the reserved length is insufficient to hold
the final string.
"""
- result = self._GetPropertyFilesString(input_zip, reserve_space=False)
+ result = self.GetPropertyFilesString(input_zip, reserve_space=False)
if len(result) > reserved_length:
raise self.InsufficientSpaceException(
'Insufficient reserved space: reserved={}, actual={}'.format(
@@ -1068,17 +1240,29 @@ class PropertyFiles(object):
Raises:
AssertionError: On finding a mismatch.
"""
- actual = self._GetPropertyFilesString(input_zip)
+ actual = self.GetPropertyFilesString(input_zip)
assert actual == expected, \
"Mismatching streaming metadata: {} vs {}.".format(actual, expected)
- def _GetPropertyFilesString(self, zip_file, reserve_space=False):
- """Constructs the property-files string per request."""
+ def GetPropertyFilesString(self, zip_file, reserve_space=False):
+ """
+ Constructs the property-files string per request.
+
+ Args:
+ zip_file: The input ZIP file.
+      reserve_space: Whether to reserve placeholder space for the metadata
+        entry, whose offset/size info is not available yet.
+
+ Returns:
+ A property-files string including the metadata offset/size info, e.g.
+ "payload.bin:679:343,payload_properties.txt:378:45,metadata: ".
+ """
def ComputeEntryOffsetSize(name):
"""Computes the zip entry offset and size."""
info = zip_file.getinfo(name)
- offset = info.header_offset + len(info.FileHeader())
+ offset = info.header_offset
+ offset += zipfile.sizeFileHeader
+ offset += len(info.extra) + len(info.filename)
size = info.file_size
return '%s:%d:%d' % (os.path.basename(name), offset, size)
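A minimal standalone sketch of the offset arithmetic used above (the archive
path and entry name are hypothetical): zipfile.sizeFileHeader is the fixed size
of a local file header, to which the variable-length filename and extra fields
are added before the entry data begins.

  # Illustrative only; mirrors ComputeEntryOffsetSize() outside the class.
  import os
  import zipfile

  def entry_offset_size(zip_path, entry_name):
    with zipfile.ZipFile(zip_path) as zf:
      info = zf.getinfo(entry_name)
      offset = (info.header_offset + zipfile.sizeFileHeader +
                len(info.filename) + len(info.extra))
      return '%s:%d:%d' % (os.path.basename(entry_name), offset, info.file_size)

  # entry_offset_size("ota.zip", "payload.bin")  # e.g. "payload.bin:679:343"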
@@ -1133,7 +1317,8 @@ class StreamingPropertyFiles(PropertyFiles):
'payload_properties.txt',
)
self.optional = (
- # care_map.txt is available only if dm-verity is enabled.
+ # care_map is available only if dm-verity is enabled.
+ 'care_map.pb',
'care_map.txt',
# compatibility.zip is available only if target supports Treble.
'compatibility.zip',
@@ -1202,7 +1387,9 @@ class AbOtaPropertyFiles(StreamingPropertyFiles):
      payload, till the end of 'metadata_signature_message'.
"""
payload_info = input_zip.getinfo('payload.bin')
- payload_offset = payload_info.header_offset + len(payload_info.FileHeader())
+ payload_offset = payload_info.header_offset
+ payload_offset += zipfile.sizeFileHeader
+ payload_offset += len(payload_info.extra) + len(payload_info.filename)
payload_size = payload_info.file_size
with input_zip.open('payload.bin', 'r') as payload_fp:
@@ -1318,6 +1505,11 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
for property_files in needed_property_files:
property_files.Verify(output_zip, metadata[property_files.name].strip())
+ # If requested, dump the metadata to a separate file.
+ output_metadata_path = OPTIONS.output_metadata_path
+ if output_metadata_path:
+ WriteMetadata(metadata, output_metadata_path)
+
def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
@@ -1326,8 +1518,8 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
target_api_version = target_info["recovery_api_version"]
source_api_version = source_info["recovery_api_version"]
if source_api_version == 0:
- print("WARNING: generating edify script for a source that "
- "can't install it.")
+ logger.warning(
+ "Generating edify script for a source that can't install it.")
script = edify_generator.EdifyGenerator(
source_api_version, target_info, fstab=source_info["fstab"])
@@ -1349,8 +1541,10 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
device_specific = common.DeviceSpecificParams(
source_zip=source_zip,
source_version=source_api_version,
+ source_tmp=OPTIONS.source_tmp,
target_zip=target_zip,
target_version=target_api_version,
+ target_tmp=OPTIONS.target_tmp,
output_zip=output_zip,
script=script,
metadata=metadata,
@@ -1366,16 +1560,20 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
target_recovery = common.GetBootableImage(
"/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
- # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
- # shared blocks (i.e. some blocks will show up in multiple files' block
- # list). We can only allocate such shared blocks to the first "owner", and
- # disable imgdiff for all later occurrences.
+ # See notes in common.GetUserImage()
allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or
target_info.get('ext4_share_dup_blocks') == "true")
- system_src = common.GetSparseImage("system", OPTIONS.source_tmp, source_zip,
- allow_shared_blocks)
- system_tgt = common.GetSparseImage("system", OPTIONS.target_tmp, target_zip,
- allow_shared_blocks)
+ system_src = common.GetUserImage("system", OPTIONS.source_tmp, source_zip,
+ info_dict=source_info,
+ allow_shared_blocks=allow_shared_blocks)
+
+ hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
+ "system", 4096, target_info)
+ system_tgt = common.GetUserImage("system", OPTIONS.target_tmp, target_zip,
+ info_dict=target_info,
+ allow_shared_blocks=allow_shared_blocks,
+ hashtree_info_generator=
+ hashtree_info_generator)
blockimgdiff_version = max(
int(i) for i in target_info.get("blockimgdiff_versions", "1").split(","))
@@ -1400,10 +1598,16 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
if HasVendorPartition(target_zip):
if not HasVendorPartition(source_zip):
raise RuntimeError("can't generate incremental that adds /vendor")
- vendor_src = common.GetSparseImage("vendor", OPTIONS.source_tmp, source_zip,
- allow_shared_blocks)
- vendor_tgt = common.GetSparseImage("vendor", OPTIONS.target_tmp, target_zip,
- allow_shared_blocks)
+ vendor_src = common.GetUserImage("vendor", OPTIONS.source_tmp, source_zip,
+ info_dict=source_info,
+ allow_shared_blocks=allow_shared_blocks)
+ hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
+ "vendor", 4096, target_info)
+ vendor_tgt = common.GetUserImage(
+ "vendor", OPTIONS.target_tmp, target_zip,
+ info_dict=target_info,
+ allow_shared_blocks=allow_shared_blocks,
+ hashtree_info_generator=hashtree_info_generator)
# Check first block of vendor partition for remount R/W only if
# disk type is ext4
@@ -1498,15 +1702,18 @@ else if get_stage("%(bcb_dev)s") != "3/3" then
else:
include_full_boot = False
- print("boot target: %d source: %d diff: %d" % (
- target_boot.size, source_boot.size, len(d)))
+ logger.info(
+ "boot target: %d source: %d diff: %d", target_boot.size,
+ source_boot.size, len(d))
+
+ common.ZipWriteStr(output_zip, "boot.img.p", d)
- common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
+ script.PatchPartitionCheck(
+ "{}:{}:{}:{}".format(
+ boot_type, boot_device, target_boot.size, target_boot.sha1),
+ "{}:{}:{}:{}".format(
+ boot_type, boot_device, source_boot.size, source_boot.sha1))
- script.PatchCheck("%s:%s:%d:%s:%d:%s" %
- (boot_type, boot_device,
- source_boot.size, source_boot.sha1,
- target_boot.size, target_boot.sha1))
size.append(target_boot.size)
if size:
@@ -1531,44 +1738,70 @@ else
system_diff.WriteVerifyScript(script, touched_blocks_only=True)
if vendor_diff:
vendor_diff.WriteVerifyScript(script, touched_blocks_only=True)
+ device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences()
+ if device_specific_diffs:
+ assert all(isinstance(diff, common.BlockDifference)
+ for diff in device_specific_diffs), \
+ "IncrementalOTA_GetBlockDifferences is not returning a list of " \
+ "BlockDifference objects"
+ for diff in device_specific_diffs:
+ diff.WriteVerifyScript(script, touched_blocks_only=True)
script.Comment("---- start making changes here ----")
device_specific.IncrementalOTA_InstallBegin()
- system_diff.WriteScript(script, output_zip,
- progress=0.8 if vendor_diff else 0.9)
-
+ block_diffs = [system_diff]
+ progress_dict = {"system": 0.8 if vendor_diff else 0.9}
if vendor_diff:
- vendor_diff.WriteScript(script, output_zip, progress=0.1)
+ block_diffs.append(vendor_diff)
+ progress_dict["vendor"] = 0.1
+ if device_specific_diffs:
+ block_diffs += device_specific_diffs
+
+ if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true":
+ if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true":
+ raise RuntimeError(
+ "can't generate incremental that disables dynamic partitions")
+ dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ info_dict=OPTIONS.target_info_dict,
+ source_info_dict=OPTIONS.source_info_dict,
+ block_diffs=block_diffs,
+ progress_dict=progress_dict)
+ dynamic_partitions_diff.WriteScript(
+ script, output_zip, write_verify_script=OPTIONS.verify)
+ else:
+ for block_diff in block_diffs:
+ block_diff.WriteScript(script, output_zip,
+ progress=progress_dict.get(block_diff.partition),
+ write_verify_script=OPTIONS.verify)
if OPTIONS.two_step:
common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
script.WriteRawImage("/boot", "boot.img")
- print("writing full boot image (forced by two-step mode)")
+ logger.info("writing full boot image (forced by two-step mode)")
if not OPTIONS.two_step:
if updating_boot:
if include_full_boot:
- print("boot image changed; including full.")
+ logger.info("boot image changed; including full.")
script.Print("Installing boot image...")
script.WriteRawImage("/boot", "boot.img")
else:
# Produce the boot image by applying a patch to the current
# contents of the boot partition, and write it back to the
# partition.
- print("boot image changed; including patch.")
+ logger.info("boot image changed; including patch.")
script.Print("Patching boot image...")
script.ShowProgress(0.1, 10)
- script.ApplyPatch("%s:%s:%d:%s:%d:%s"
- % (boot_type, boot_device,
- source_boot.size, source_boot.sha1,
- target_boot.size, target_boot.sha1),
- "-",
- target_boot.size, target_boot.sha1,
- source_boot.sha1, "patch/boot.img.p")
+ script.PatchPartition(
+ '{}:{}:{}:{}'.format(
+ boot_type, boot_device, target_boot.size, target_boot.sha1),
+ '{}:{}:{}:{}'.format(
+ boot_type, boot_device, source_boot.size, source_boot.sha1),
+ 'boot.img.p')
else:
- print("boot image unchanged; skipping.")
+ logger.info("boot image unchanged; skipping.")
# Do device-specific installation (eg, write radio image).
device_specific.IncrementalOTA_InstallEnd()
@@ -1629,10 +1862,11 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
- input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
with zipfile.ZipFile(input_file, 'r') as input_zip:
infolist = input_zip.infolist()
+ namelist = input_zip.namelist()
+ input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
for info in infolist:
unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
if info.filename == 'IMAGES/system_other.img':
@@ -1647,7 +1881,7 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
pass
- elif info.filename.startswith(('META/', 'IMAGES/')):
+ elif info.filename.startswith(('META/', 'IMAGES/', 'RADIO/')):
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
common.ZipClose(target_zip)
@@ -1680,6 +1914,90 @@ def GetTargetFilesZipWithoutPostinstallConfig(input_file):
return target_file
+def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
+ super_block_devices,
+ dynamic_partition_list):
+ """Returns a target-files.zip for retrofitting dynamic partitions.
+
+ This allows brillo_update_payload to generate an OTA based on the exact
+ bits on the block devices. Postinstall is disabled.
+
+ Args:
+ input_file: The input target-files.zip filename.
+    super_block_devices: The list of super block devices.
+    dynamic_partition_list: The list of dynamic partitions.
+
+ Returns:
+ The filename of target-files.zip with *.img replaced with super_*.img for
+ each block device in super_block_devices.
+ """
+ assert super_block_devices, "No super_block_devices are specified."
+
+ replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
+ for dev in super_block_devices}
+
+ target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+ shutil.copyfile(input_file, target_file)
+
+ with zipfile.ZipFile(input_file, 'r') as input_zip:
+ namelist = input_zip.namelist()
+
+ input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
+
+  # Remove partitions from META/ab_partitions.txt that are in
+  # dynamic_partition_list but not in super_block_devices, so that
+  # brillo_update_payload won't generate updates for those logical partitions.
+ ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
+ with open(ab_partitions_file) as f:
+ ab_partitions_lines = f.readlines()
+ ab_partitions = [line.strip() for line in ab_partitions_lines]
+ # Assert that all super_block_devices are in ab_partitions
+ super_device_not_updated = [partition for partition in super_block_devices
+ if partition not in ab_partitions]
+ assert not super_device_not_updated, \
+ "{} is in super_block_devices but not in {}".format(
+ super_device_not_updated, AB_PARTITIONS)
+ # ab_partitions -= (dynamic_partition_list - super_block_devices)
+ new_ab_partitions = common.MakeTempFile(prefix="ab_partitions", suffix=".txt")
+ with open(new_ab_partitions, 'w') as f:
+ for partition in ab_partitions:
+ if (partition in dynamic_partition_list and
+ partition not in super_block_devices):
+ logger.info("Dropping %s from ab_partitions.txt", partition)
+ continue
+ f.write(partition + "\n")
+ to_delete = [AB_PARTITIONS]
+
+ # Always skip postinstall for a retrofit update.
+ to_delete += [POSTINSTALL_CONFIG]
+
+ # Delete dynamic_partitions_info.txt so that brillo_update_payload thinks this
+ # is a regular update on devices without dynamic partitions support.
+ to_delete += [DYNAMIC_PARTITION_INFO]
+
+ # Remove the existing partition images as well as the map files.
+ to_delete += replace.values()
+ to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
+
+ common.ZipDelete(target_file, to_delete)
+
+ target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
+
+ # Write super_{foo}.img as {foo}.img.
+ for src, dst in replace.items():
+ assert src in namelist, \
+ 'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
+ unzipped_file = os.path.join(input_tmp, *src.split('/'))
+ common.ZipWrite(target_zip, unzipped_file, arcname=dst)
+
+ # Write new ab_partitions.txt file
+ common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
+
+ common.ZipClose(target_zip)
+
+ return target_file
+
+
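As a concrete (hypothetical) example of the renaming above: with
super_block_devices = ["system", "vendor"], the replace map becomes

  {"OTA/super_system.img": "IMAGES/system.img",
   "OTA/super_vendor.img": "IMAGES/vendor.img"}

so the bits captured from the super block devices are shipped in place of the
regular partition images.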
def WriteABOTAPackageWithBrilloScript(target_file, output_file,
source_file=None):
"""Generates an Android OTA package that has A/B update payload."""
@@ -1701,7 +2019,11 @@ def WriteABOTAPackageWithBrilloScript(target_file, output_file,
# Metadata to comply with Android OTA package format.
metadata = GetPackageMetadata(target_info, source_info)
- if OPTIONS.skip_postinstall:
+ if OPTIONS.retrofit_dynamic_partitions:
+ target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
+ target_file, target_info.get("super_block_devices").strip().split(),
+ target_info.get("dynamic_partition_list").strip().split())
+ elif OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
# Generate payload.
@@ -1741,16 +2063,19 @@ def WriteABOTAPackageWithBrilloScript(target_file, output_file,
target_zip = zipfile.ZipFile(target_file, "r")
if (target_info.get("verity") == "true" or
target_info.get("avb_enable") == "true"):
- care_map_path = "META/care_map.txt"
- namelist = target_zip.namelist()
- if care_map_path in namelist:
- care_map_data = target_zip.read(care_map_path)
- # In order to support streaming, care_map.txt needs to be packed as
+ care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
+ "META/" + x in target_zip.namelist()]
+
+ # Adds care_map if either the protobuf format or the plain text one exists.
+ if care_map_list:
+ care_map_name = care_map_list[0]
+ care_map_data = target_zip.read("META/" + care_map_name)
+ # In order to support streaming, care_map needs to be packed as
# ZIP_STORED.
- common.ZipWriteStr(output_zip, "care_map.txt", care_map_data,
+ common.ZipWriteStr(output_zip, care_map_name, care_map_data,
compress_type=zipfile.ZIP_STORED)
else:
- print("Warning: cannot find care map file in target_file package")
+ logger.warning("Cannot find care map file in target_file package")
AddCompatibilityArchiveIfTrebleEnabled(
target_zip, output_zip, target_info, source_info)
@@ -1826,10 +2151,18 @@ def main(argv):
OPTIONS.payload_signer = a
elif o == "--payload_signer_args":
OPTIONS.payload_signer_args = shlex.split(a)
+ elif o == "--payload_signer_key_size":
+ OPTIONS.payload_signer_key_size = a
elif o == "--extracted_input_target_files":
OPTIONS.extracted_input = a
elif o == "--skip_postinstall":
OPTIONS.skip_postinstall = True
+ elif o == "--retrofit_dynamic_partitions":
+ OPTIONS.retrofit_dynamic_partitions = True
+ elif o == "--skip_compatibility_check":
+ OPTIONS.skip_compatibility_check = True
+ elif o == "--output_metadata_path":
+ OPTIONS.output_metadata_path = a
else:
return False
return True
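A hypothetical invocation exercising the new flags (file names are
placeholders; the two positional arguments remain the input target-files zip
and the output package):

  ota_from_target_files.py \
      --retrofit_dynamic_partitions \
      --skip_compatibility_check \
      --output_metadata_path ota_metadata.txt \
      target-files.zip ota-update.zip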
@@ -1858,14 +2191,20 @@ def main(argv):
"log_diff=",
"payload_signer=",
"payload_signer_args=",
+ "payload_signer_key_size=",
"extracted_input_target_files=",
"skip_postinstall",
+ "retrofit_dynamic_partitions",
+ "skip_compatibility_check",
+ "output_metadata_path=",
], extra_option_handler=option_handler)
if len(args) != 2:
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
if OPTIONS.downgrade:
# We should only allow downgrading incrementals (as opposed to full).
# Otherwise the device may go back from arbitrary build with this full
@@ -1886,9 +2225,8 @@ def main(argv):
with zipfile.ZipFile(args[0], 'r') as input_zip:
OPTIONS.info_dict = common.LoadInfoDict(input_zip)
- if OPTIONS.verbose:
- print("--- target info ---")
- common.DumpInfoDict(OPTIONS.info_dict)
+ logger.info("--- target info ---")
+ common.DumpInfoDict(OPTIONS.info_dict)
# Load the source build dict if applicable.
if OPTIONS.incremental_source is not None:
@@ -1896,13 +2234,29 @@ def main(argv):
with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
- if OPTIONS.verbose:
- print("--- source info ---")
- common.DumpInfoDict(OPTIONS.source_info_dict)
+ logger.info("--- source info ---")
+ common.DumpInfoDict(OPTIONS.source_info_dict)
# Load OEM dicts if provided.
OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
+ # Assume retrofitting dynamic partitions when base build does not set
+ # use_dynamic_partitions but target build does.
+ if (OPTIONS.source_info_dict and
+ OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
+ OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+ if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
+ raise common.ExternalError(
+ "Expect to generate incremental OTA for retrofitting dynamic "
+ "partitions, but dynamic_partition_retrofit is not set in target "
+ "build.")
+ logger.info("Implicitly generating retrofit incremental OTA.")
+ OPTIONS.retrofit_dynamic_partitions = True
+
+ # Skip postinstall for retrofitting dynamic partitions.
+ if OPTIONS.retrofit_dynamic_partitions:
+ OPTIONS.skip_postinstall = True
+
ab_update = OPTIONS.info_dict.get("ab_update") == "true"
# Use the default key to sign the package if not specified with package_key.
@@ -1922,7 +2276,7 @@ def main(argv):
output_file=args[1],
source_file=OPTIONS.incremental_source)
- print("done.")
+ logger.info("done.")
return
# Sanity check the loaded info dicts first.
@@ -1933,7 +2287,7 @@ def main(argv):
# Non-A/B OTAs rely on /cache partition to store temporary files.
cache_size = OPTIONS.info_dict.get("cache_size")
if cache_size is None:
- print("--- can't determine the cache partition size ---")
+ logger.warning("--- can't determine the cache partition size ---")
OPTIONS.cache_size = cache_size
if OPTIONS.extra_script is not None:
@@ -1942,7 +2296,7 @@ def main(argv):
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
else:
- print("unzipping target target-files...")
+ logger.info("unzipping target target-files...")
OPTIONS.input_tmp = common.UnzipTemp(args[0], UNZIP_PATTERN)
OPTIONS.target_tmp = OPTIONS.input_tmp
@@ -1954,7 +2308,7 @@ def main(argv):
if OPTIONS.device_specific is None:
from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
if os.path.exists(from_input):
- print("(using device-specific extensions from target_files)")
+ logger.info("(using device-specific extensions from target_files)")
OPTIONS.device_specific = from_input
else:
OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
@@ -1971,7 +2325,7 @@ def main(argv):
# Generate an incremental OTA.
else:
- print("unzipping source target-files...")
+ logger.info("unzipping source target-files...")
OPTIONS.source_tmp = common.UnzipTemp(
OPTIONS.incremental_source, UNZIP_PATTERN)
with zipfile.ZipFile(args[0], 'r') as input_zip, \
@@ -1987,15 +2341,15 @@ def main(argv):
target_files_diff.recursiveDiff(
'', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
- except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
sys.exit(1)
finally:
common.Cleanup()
diff --git a/tools/releasetools/pylintrc b/tools/releasetools/pylintrc
index 7b3405c851..2a307421ba 100644
--- a/tools/releasetools/pylintrc
+++ b/tools/releasetools/pylintrc
@@ -62,7 +62,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
-disable=invalid-name,missing-docstring,too-many-branches,too-many-locals,too-many-arguments,too-many-statements,duplicate-code,too-few-public-methods,too-many-instance-attributes,too-many-lines,too-many-public-methods,locally-disabled,fixme
+disable=invalid-name,missing-docstring,too-many-branches,too-many-locals,too-many-arguments,too-many-statements,duplicate-code,too-few-public-methods,too-many-instance-attributes,too-many-lines,too-many-public-methods,locally-disabled,fixme,not-callable
[REPORTS]
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
new file mode 100755
index 0000000000..1778615a64
--- /dev/null
+++ b/tools/releasetools/sign_apex.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Signs a standalone APEX file.
+
+Usage: sign_apex [flags] input_apex_file output_apex_file
+
+ --container_key <key>
+ Mandatory flag that specifies the container signing key.
+
+ --payload_key <key>
+ Mandatory flag that specifies the payload signing key.
+
+ --payload_extra_args <args>
+ Optional flag that specifies any extra args to be passed to payload signer
+ (e.g. --payload_extra_args="--signing_helper_with_files /path/to/helper").
+"""
+
+import logging
+import shutil
+import sys
+
+import apex_utils
+import common
+
+logger = logging.getLogger(__name__)
+
+
+def main(argv):
+
+ options = {}
+
+ def option_handler(o, a):
+ if o == '--container_key':
+ # Strip the suffix if any, as common.SignFile expects no suffix.
+ DEFAULT_CONTAINER_KEY_SUFFIX = '.x509.pem'
+ if a.endswith(DEFAULT_CONTAINER_KEY_SUFFIX):
+ a = a[:-len(DEFAULT_CONTAINER_KEY_SUFFIX)]
+ options['container_key'] = a
+ elif o == '--payload_key':
+ options['payload_key'] = a
+ elif o == '--payload_extra_args':
+ options['payload_extra_args'] = a
+ else:
+ return False
+ return True
+
+ args = common.ParseOptions(
+ argv, __doc__,
+ extra_opts='',
+ extra_long_opts=[
+ 'container_key=',
+ 'payload_extra_args=',
+ 'payload_key=',
+ ],
+ extra_option_handler=option_handler)
+
+ if (len(args) != 2 or 'container_key' not in options or
+ 'payload_key' not in options):
+ common.Usage(__doc__)
+ sys.exit(1)
+
+ common.InitLogging()
+
+ input_zip = args[0]
+ output_zip = args[1]
+ with open(input_zip) as input_fp:
+ apex_data = input_fp.read()
+
+ signed_apex = apex_utils.SignApex(
+ apex_data,
+ payload_key=options['payload_key'],
+ container_key=options['container_key'],
+ container_pw=None,
+ codename_to_api_level_map=None,
+ signing_args=options.get('payload_extra_args'))
+
+ shutil.copyfile(signed_apex, output_zip)
+ logger.info("done.")
+
+
+if __name__ == '__main__':
+ try:
+ main(sys.argv[1:])
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
+ sys.exit(1)
+ finally:
+ common.Cleanup()
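An illustrative invocation of the new tool, following the usage string above
(key paths and APEX file names are placeholders):

  sign_apex \
      --container_key path/to/container_key.x509.pem \
      --payload_key path/to/payload_key.pem \
      com.android.example.apex com.android.example.signed.apex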
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index fa62c8f63c..e8c4163e95 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -21,11 +21,23 @@ target-files zip.
Usage: sign_target_files_apks [flags] input_target_files output_target_files
-e (--extra_apks) <name,name,...=key>
- Add extra APK name/key pairs as though they appeared in
- apkcerts.txt (so mappings specified by -k and -d are applied).
- Keys specified in -e override any value for that app contained
- in the apkcerts.txt file. Option may be repeated to give
- multiple extra packages.
+ Add extra APK/APEX name/key pairs as though they appeared in apkcerts.txt
+ or apexkeys.txt (so mappings specified by -k and -d are applied). Keys
+ specified in -e override any value for that app contained in the
+ apkcerts.txt file, or the container key for an APEX. Option may be
+ repeated to give multiple extra packages.
+
+ --extra_apex_payload_key <name=key>
+ Add a mapping for APEX package name to payload signing key, which will
+ override the default payload signing key in apexkeys.txt. Note that the
+ container key should be overridden via the `--extra_apks` flag above.
+ Option may be repeated for multiple APEXes.
+
+ --skip_apks_with_path_prefix <prefix>
+    Skip signing an APK if it has a matching prefix in its path. The prefix
+    should match the entry name, which uses upper-case partition names, e.g.
+    "VENDOR/app/" or "SYSTEM_OTHER/preloads/". Option may be repeated to give
+    multiple prefixes.
-k (--key_mapping) <src_key=dest_key>
Add a mapping from the key name as specified in apkcerts.txt (the
@@ -79,12 +91,15 @@ Usage: sign_target_files_apks [flags] input_target_files output_target_files
Replace the veritykeyid in BOOT/cmdline of input_target_file_zip
with keyid of the cert pointed by <path_to_X509_PEM_cert_file>.
- --avb_{boot,system,vendor,dtbo,vbmeta}_algorithm <algorithm>
- --avb_{boot,system,vendor,dtbo,vbmeta}_key <key>
+ --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+ vbmeta_vendor}_algorithm <algorithm>
+ --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+ vbmeta_vendor}_key <key>
Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
the specified image. Otherwise it uses the existing values in info dict.
- --avb_{boot,system,vendor,dtbo,vbmeta}_extra_args <args>
+ --avb_{apex,boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+ vbmeta_vendor}_extra_args <args>
Specify any additional args that are needed to AVB-sign the image
(e.g. "--signing_helper /path/to/helper"). The args will be appended to
the existing ones in info dict.
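A hypothetical example combining several of the options documented above
(package names and key paths are placeholders):

  sign_target_files_apks \
      -e com.android.example.apex=path/to/container_key \
      --extra_apex_payload_key com.android.example.apex=path/to/payload_key.pem \
      --skip_apks_with_path_prefix SYSTEM_OTHER/preloads/ \
      input-target-files.zip signed-target-files.zip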
@@ -96,6 +111,8 @@ import base64
import copy
import errno
import gzip
+import itertools
+import logging
import os
import re
import shutil
@@ -107,6 +124,7 @@ import zipfile
from xml.etree import ElementTree
import add_img_to_target_files
+import apex_utils
import common
@@ -115,9 +133,13 @@ if sys.hexversion < 0x02070000:
sys.exit(1)
+logger = logging.getLogger(__name__)
+
OPTIONS = common.OPTIONS
OPTIONS.extra_apks = {}
+OPTIONS.extra_apex_payload_keys = {}
+OPTIONS.skip_apks_with_path_prefix = set()
OPTIONS.key_map = {}
OPTIONS.rebuild_recovery = False
OPTIONS.replace_ota_keys = False
@@ -144,28 +166,158 @@ def GetApkCerts(certmap):
return certmap
-def CheckAllApksSigned(input_tf_zip, apk_key_map, compressed_extension):
- """Check that all the APKs we want to sign have keys specified, and
- error out if they don't."""
- unknown_apks = []
- compressed_apk_extension = None
- if compressed_extension:
- compressed_apk_extension = ".apk" + compressed_extension
+def GetApexKeys(keys_info, key_map):
+ """Gets APEX payload and container signing keys by applying the mapping rules.
+
+ Presigned payload / container keys will be set accordingly.
+
+ Args:
+ keys_info: A dict that maps from APEX filenames to a tuple of (payload_key,
+ container_key).
+ key_map: A dict that overrides the keys, specified via command-line input.
+
+ Returns:
+ A dict that contains the updated APEX key mapping, which should be used for
+ the current signing.
+ """
+ # Apply all the --extra_apex_payload_key options to override the payload
+ # signing keys in the given keys_info.
+ for apex, key in OPTIONS.extra_apex_payload_keys.items():
+ if not key:
+ key = 'PRESIGNED'
+ if apex not in keys_info:
+ logger.warning('Failed to find %s in target_files; Ignored', apex)
+ continue
+ keys_info[apex] = (key, keys_info[apex][1])
+
+ # Apply the key remapping to container keys.
+ for apex, (payload_key, container_key) in keys_info.items():
+ keys_info[apex] = (payload_key, key_map.get(container_key, container_key))
+
+ # Apply all the --extra_apks options to override the container keys.
+ for apex, key in OPTIONS.extra_apks.items():
+ # Skip non-APEX containers.
+ if apex not in keys_info:
+ continue
+ if not key:
+ key = 'PRESIGNED'
+ keys_info[apex] = (keys_info[apex][0], key_map.get(key, key))
+
+ return keys_info
+
+
+def GetApkFileInfo(filename, compressed_extension, skipped_prefixes):
+ """Returns the APK info based on the given filename.
+
+ Checks if the given filename (with path) looks like an APK file, by taking the
+ compressed extension into consideration. If it appears to be an APK file,
+ further checks if the APK file should be skipped when signing, based on the
+ given path prefixes.
+
+ Args:
+ filename: Path to the file.
+ compressed_extension: The extension string of compressed APKs (e.g. ".gz"),
+ or None if there's no compressed APKs.
+ skipped_prefixes: A set/list/tuple of the path prefixes to be skipped.
+
+ Returns:
+ (is_apk, is_compressed, should_be_skipped): is_apk indicates whether the
+ given filename is an APK file. is_compressed indicates whether the APK file
+ is compressed (only meaningful when is_apk is True). should_be_skipped
+ indicates whether the filename matches any of the given prefixes to be
+ skipped.
+
+ Raises:
+ AssertionError: On invalid compressed_extension or skipped_prefixes inputs.
+ """
+ assert compressed_extension is None or compressed_extension.startswith('.'), \
+ "Invalid compressed_extension arg: '{}'".format(compressed_extension)
+
+ # skipped_prefixes should be one of set/list/tuple types. Other types such as
+ # str shouldn't be accepted.
+ assert isinstance(skipped_prefixes, (set, list, tuple)), \
+ "Invalid skipped_prefixes input type: {}".format(type(skipped_prefixes))
+
+ compressed_apk_extension = (
+ ".apk" + compressed_extension if compressed_extension else None)
+ is_apk = (filename.endswith(".apk") or
+ (compressed_apk_extension and
+ filename.endswith(compressed_apk_extension)))
+ if not is_apk:
+ return (False, False, False)
+
+ is_compressed = (compressed_apk_extension and
+ filename.endswith(compressed_apk_extension))
+ should_be_skipped = filename.startswith(tuple(skipped_prefixes))
+ return (True, is_compressed, should_be_skipped)
+
+
+def CheckApkAndApexKeysAvailable(input_tf_zip, known_keys,
+ compressed_extension, apex_keys):
+ """Checks that all the APKs and APEXes have keys specified.
+
+ Args:
+ input_tf_zip: An open target_files zip file.
+ known_keys: A set of APKs and APEXes that have known signing keys.
+ compressed_extension: The extension string of compressed APKs, such as
+ '.gz', or None if there's no compressed APKs.
+ apex_keys: A dict that contains the key mapping from APEX name to
+ (payload_key, container_key).
+
+ Raises:
+ AssertionError: On finding unknown APKs and APEXes.
+ """
+ unknown_files = []
for info in input_tf_zip.infolist():
- if (info.filename.endswith(".apk") or
- (compressed_apk_extension and
- info.filename.endswith(compressed_apk_extension))):
+ # Handle APEXes first, e.g. SYSTEM/apex/com.android.tzdata.apex.
+ if (info.filename.startswith('SYSTEM/apex') and
+ info.filename.endswith('.apex')):
name = os.path.basename(info.filename)
- if compressed_apk_extension and name.endswith(compressed_apk_extension):
- name = name[:-len(compressed_extension)]
- if name not in apk_key_map:
- unknown_apks.append(name)
- if unknown_apks:
- print("ERROR: no key specified for:\n")
- print(" " + "\n ".join(unknown_apks))
- print("\nUse '-e <apkname>=' to specify a key (which may be an empty "
- "string to not sign this apk).")
- sys.exit(1)
+ if name not in known_keys:
+ unknown_files.append(name)
+ continue
+
+ # And APKs.
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ info.filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix)
+ if not is_apk or should_be_skipped:
+ continue
+
+ name = os.path.basename(info.filename)
+ if is_compressed:
+ name = name[:-len(compressed_extension)]
+ if name not in known_keys:
+ unknown_files.append(name)
+
+ assert not unknown_files, \
+ ("No key specified for:\n {}\n"
+ "Use '-e <apkname>=' to specify a key (which may be an empty string to "
+ "not sign this apk).".format("\n ".join(unknown_files)))
+
+ # For all the APEXes, double check that we won't have an APEX that has only
+ # one of the payload / container keys set.
+ if not apex_keys:
+ return
+
+ invalid_apexes = []
+ for info in input_tf_zip.infolist():
+ if (not info.filename.startswith('SYSTEM/apex') or
+ not info.filename.endswith('.apex')):
+ continue
+
+ name = os.path.basename(info.filename)
+ (payload_key, container_key) = apex_keys[name]
+ if ((payload_key in common.SPECIAL_CERT_STRINGS and
+ container_key not in common.SPECIAL_CERT_STRINGS) or
+ (payload_key not in common.SPECIAL_CERT_STRINGS and
+ container_key in common.SPECIAL_CERT_STRINGS)):
+ invalid_apexes.append(
+ "{}: payload_key {}, container_key {}".format(
+ name, payload_key, container_key))
+
+ assert not invalid_apexes, \
+ "Invalid APEX keys specified:\n {}\n".format(
+ "\n ".join(invalid_apexes))
def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map,
@@ -232,39 +384,44 @@ def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map,
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
- apk_key_map, key_passwords, platform_api_level,
- codename_to_api_level_map,
+ apk_keys, apex_keys, key_passwords,
+ platform_api_level, codename_to_api_level_map,
compressed_extension):
-
- compressed_apk_extension = None
- if compressed_extension:
- compressed_apk_extension = ".apk" + compressed_extension
-
+ # maxsize measures the maximum filename length, including the ones to be
+ # skipped.
maxsize = max(
[len(os.path.basename(i.filename)) for i in input_tf_zip.infolist()
- if (i.filename.endswith('.apk') or
- (compressed_apk_extension and
- i.filename.endswith(compressed_apk_extension)))])
+ if GetApkFileInfo(i.filename, compressed_extension, [])[0]])
system_root_image = misc_info.get("system_root_image") == "true"
for info in input_tf_zip.infolist():
- if info.filename.startswith("IMAGES/"):
+ filename = info.filename
+ if filename.startswith("IMAGES/"):
continue
- data = input_tf_zip.read(info.filename)
+ # Skip split super images, which will be re-generated during signing.
+ if filename.startswith("OTA/") and filename.endswith(".img"):
+ continue
+
+ data = input_tf_zip.read(filename)
out_info = copy.copy(info)
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix)
+
+ if is_apk and should_be_skipped:
+ # Copy skipped APKs verbatim.
+ print(
+ "NOT signing: %s\n"
+ " (skipped due to matching prefix)" % (filename,))
+ common.ZipWriteStr(output_tf_zip, out_info, data)
# Sign APKs.
- if (info.filename.endswith(".apk") or
- (compressed_apk_extension and
- info.filename.endswith(compressed_apk_extension))):
- is_compressed = (compressed_extension and
- info.filename.endswith(compressed_apk_extension))
- name = os.path.basename(info.filename)
+ elif is_apk:
+ name = os.path.basename(filename)
if is_compressed:
name = name[:-len(compressed_extension)]
- key = apk_key_map[name]
+ key = apk_keys[name]
if key not in common.SPECIAL_CERT_STRINGS:
print(" signing: %-*s (%s)" % (maxsize, name, key))
signed_data = SignApk(data, key, key_passwords[key], platform_api_level,
@@ -272,19 +429,63 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
common.ZipWriteStr(output_tf_zip, out_info, signed_data)
else:
# an APK we're not supposed to sign.
- print("NOT signing: %s" % (name,))
+ print(
+ "NOT signing: %s\n"
+ " (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
+ # Sign bundled APEX files.
+ elif filename.startswith("SYSTEM/apex") and filename.endswith(".apex"):
+ name = os.path.basename(filename)
+ payload_key, container_key = apex_keys[name]
+
+ # We've asserted not having a case with only one of them PRESIGNED.
+ if (payload_key not in common.SPECIAL_CERT_STRINGS and
+ container_key not in common.SPECIAL_CERT_STRINGS):
+ print(" signing: %-*s container (%s)" % (
+ maxsize, name, container_key))
+ print(" : %-*s payload (%s)" % (
+ maxsize, name, payload_key))
+
+ signed_apex = apex_utils.SignApex(
+ data,
+ payload_key,
+ container_key,
+ key_passwords[container_key],
+ codename_to_api_level_map,
+ OPTIONS.avb_extra_args.get('apex'))
+ common.ZipWrite(output_tf_zip, signed_apex, filename)
+
+ else:
+ print(
+ "NOT signing: %s\n"
+ " (skipped due to special cert string)" % (name,))
+ common.ZipWriteStr(output_tf_zip, out_info, data)
+
+ # AVB public keys for the installed APEXes, which will be updated later.
+ elif (os.path.dirname(filename) == 'SYSTEM/etc/security/apex' and
+ filename != 'SYSTEM/etc/security/apex/'):
+ continue
+
# System properties.
- elif info.filename in ("SYSTEM/build.prop",
- "VENDOR/build.prop",
- "SYSTEM/etc/prop.default",
- "BOOT/RAMDISK/prop.default",
- "BOOT/RAMDISK/default.prop", # legacy
- "ROOT/default.prop", # legacy
- "RECOVERY/RAMDISK/prop.default",
- "RECOVERY/RAMDISK/default.prop"): # legacy
- print("Rewriting %s:" % (info.filename,))
+ elif filename in ("SYSTEM/build.prop",
+ "VENDOR/build.prop",
+ "SYSTEM/vendor/build.prop",
+ "ODM/build.prop", # legacy
+ "ODM/etc/build.prop",
+ "VENDOR/odm/build.prop", # legacy
+ "VENDOR/odm/etc/build.prop",
+ "PRODUCT/build.prop",
+ "SYSTEM/product/build.prop",
+ "PRODUCT_SERVICES/build.prop",
+ "SYSTEM/product_services/build.prop",
+ "SYSTEM/etc/prop.default",
+ "BOOT/RAMDISK/prop.default",
+ "BOOT/RAMDISK/default.prop", # legacy
+ "ROOT/default.prop", # legacy
+ "RECOVERY/RAMDISK/prop.default",
+ "RECOVERY/RAMDISK/default.prop"): # legacy
+ print("Rewriting %s:" % (filename,))
if stat.S_ISLNK(info.external_attr >> 16):
new_data = data
else:
@@ -293,46 +494,65 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
# Replace the certs in *mac_permissions.xml (there could be multiple, such
# as {system,vendor}/etc/selinux/{plat,nonplat}_mac_permissions.xml).
- elif info.filename.endswith("mac_permissions.xml"):
- print("Rewriting %s with new keys." % (info.filename,))
+ elif filename.endswith("mac_permissions.xml"):
+ print("Rewriting %s with new keys." % (filename,))
new_data = ReplaceCerts(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
# Ask add_img_to_target_files to rebuild the recovery patch if needed.
- elif info.filename in ("SYSTEM/recovery-from-boot.p",
- "SYSTEM/etc/recovery.img",
- "SYSTEM/bin/install-recovery.sh"):
+ elif filename in ("SYSTEM/recovery-from-boot.p",
+ "SYSTEM/etc/recovery.img",
+ "SYSTEM/bin/install-recovery.sh"):
OPTIONS.rebuild_recovery = True
- # Don't copy OTA keys if we're replacing them.
- elif (OPTIONS.replace_ota_keys and
- info.filename in (
- "BOOT/RAMDISK/res/keys",
- "BOOT/RAMDISK/etc/update_engine/update-payload-key.pub.pem",
- "RECOVERY/RAMDISK/res/keys",
- "SYSTEM/etc/security/otacerts.zip",
- "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
+ # Don't copy OTA certs if we're replacing them.
+ elif (
+ OPTIONS.replace_ota_keys and
+ filename in (
+ "BOOT/RAMDISK/system/etc/security/otacerts.zip",
+ "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
+ "RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
+ "SYSTEM/etc/security/otacerts.zip",
+ "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
pass
# Skip META/misc_info.txt since we will write back the new values later.
- elif info.filename == "META/misc_info.txt":
+ elif filename == "META/misc_info.txt":
pass
# Skip verity public key if we will replace it.
elif (OPTIONS.replace_verity_public_key and
- info.filename in ("BOOT/RAMDISK/verity_key",
- "ROOT/verity_key")):
+ filename in ("BOOT/RAMDISK/verity_key",
+ "ROOT/verity_key")):
pass
# Skip verity keyid (for system_root_image use) if we will replace it.
- elif (OPTIONS.replace_verity_keyid and
- info.filename == "BOOT/cmdline"):
+ elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
pass
# Skip the care_map as we will regenerate the system/vendor images.
- elif info.filename == "META/care_map.txt":
+ elif filename == "META/care_map.pb" or filename == "META/care_map.txt":
pass
+ # Updates system_other.avbpubkey in /product/etc/.
+ elif filename in (
+ "PRODUCT/etc/security/avb/system_other.avbpubkey",
+ "SYSTEM/product/etc/security/avb/system_other.avbpubkey"):
+ # Only update system_other's public key, if the corresponding signing
+ # key is specified via --avb_system_other_key.
+ signing_key = OPTIONS.avb_keys.get("system_other")
+ if signing_key:
+ public_key = common.ExtractAvbPublicKey(signing_key)
+ print(" Rewriting AVB public key of system_other in /product")
+ common.ZipWrite(output_tf_zip, public_key, filename)
+
+ # Should NOT sign boot-debug.img.
+ elif filename in (
+ "BOOT/RAMDISK/force_debuggable",
+ "RECOVERY/RAMDISK/force_debuggable"
+ "RECOVERY/RAMDISK/first_stage_ramdisk/force_debuggable"):
+ raise common.ExternalError("debuggable boot.img cannot be signed")
+
# A non-APK file; copy it verbatim.
else:
common.ZipWriteStr(output_tf_zip, out_info, data)
@@ -453,8 +673,8 @@ def RewriteProps(data):
original_line = line
if line and line[0] != '#' and "=" in line:
key, value = line.split("=", 1)
- if key in ("ro.build.fingerprint", "ro.build.thumbprint",
- "ro.vendor.build.fingerprint", "ro.vendor.build.thumbprint"):
+ if (key.startswith("ro.") and
+ key.endswith((".build.fingerprint", ".build.thumbprint"))):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
@@ -467,7 +687,7 @@ def RewriteProps(data):
assert len(pieces) == 5
pieces[-1] = EditTags(pieces[-1])
value = " ".join(pieces)
- elif key == "ro.build.tags":
+ elif key.startswith("ro.") and key.endswith(".build.tags"):
value = EditTags(value)
elif key == "ro.build.display.id":
# change, eg, "JWR66N dev-keys" to "JWR66N"
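To illustrate the broadened matching in RewriteProps above, partition-specific
properties such as the following (values are hypothetical) now have their tag
suffixes rewritten as well, not just the ro.build.* and ro.vendor.build.*
variants:

  ro.odm.build.fingerprint=brand/product/device:9/BUILDID/1234:user/test-keys
  ro.product.build.tags=test-keys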
@@ -483,6 +703,27 @@ def RewriteProps(data):
return "\n".join(output) + "\n"
+def WriteOtacerts(output_zip, filename, keys):
+ """Constructs a zipfile from given keys; and writes it to output_zip.
+
+ Args:
+ output_zip: The output target_files zip.
+ filename: The archive name in the output zip.
+ keys: A list of public keys to use during OTA package verification.
+ """
+
+ try:
+ from StringIO import StringIO
+ except ImportError:
+ from io import StringIO
+ temp_file = StringIO()
+ certs_zip = zipfile.ZipFile(temp_file, "w")
+ for k in keys:
+ common.ZipWrite(certs_zip, k)
+ common.ZipClose(certs_zip)
+ common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
+
+
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
try:
keylist = input_tf_zip.read("META/otakeys.txt").split()
@@ -520,40 +761,20 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
print("META/otakeys.txt has no keys; using %s for OTA package"
" verification." % (mapped_keys[0],))
- # recovery uses a version of the key that has been slightly
- # predigested (by DumpPublicKey.java) and put in res/keys.
+ # recovery now uses the same x509.pem version of the keys.
# extra_recovery_keys are used only in recovery.
- cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
- ["-jar",
- os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")] +
- mapped_keys + extra_recovery_keys)
- p = common.Run(cmd, stdout=subprocess.PIPE)
- new_recovery_keys, _ = p.communicate()
- if p.returncode != 0:
- raise common.ExternalError("failed to run dumpkeys")
-
- # system_root_image puts the recovery keys at BOOT/RAMDISK.
- if misc_info.get("system_root_image") == "true":
- recovery_keys_location = "BOOT/RAMDISK/res/keys"
+ if misc_info.get("recovery_as_boot") == "true":
+ recovery_keys_location = "BOOT/RAMDISK/system/etc/security/otacerts.zip"
else:
- recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
- common.ZipWriteStr(output_tf_zip, recovery_keys_location, new_recovery_keys)
+ recovery_keys_location = "RECOVERY/RAMDISK/system/etc/security/otacerts.zip"
+
+ WriteOtacerts(output_tf_zip, recovery_keys_location,
+ mapped_keys + extra_recovery_keys)
# SystemUpdateActivity uses the x509.pem version of the keys, but
# put into a zipfile system/etc/security/otacerts.zip.
# We DO NOT include the extra_recovery_keys (if any) here.
-
- try:
- from StringIO import StringIO
- except ImportError:
- from io import StringIO
- temp_file = StringIO()
- certs_zip = zipfile.ZipFile(temp_file, "w")
- for k in mapped_keys:
- common.ZipWrite(certs_zip, k)
- common.ZipClose(certs_zip)
- common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
- temp_file.getvalue())
+ WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
# For A/B devices, update the payload verification key.
if misc_info.get("ab_update") == "true":
@@ -571,11 +792,9 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
pubkey)
common.ZipWriteStr(
output_tf_zip,
- "BOOT/RAMDISK/etc/update_engine/update-payload-key.pub.pem",
+ "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
pubkey)
- return new_recovery_keys
-
def ReplaceVerityPublicKey(output_zip, filename, key_path):
"""Replaces the verity public key at the given path in the given zip.
@@ -658,8 +877,11 @@ def ReplaceAvbSigningKeys(misc_info):
'dtbo' : 'avb_dtbo_add_hash_footer_args',
'recovery' : 'avb_recovery_add_hash_footer_args',
'system' : 'avb_system_add_hashtree_footer_args',
+ 'system_other' : 'avb_system_other_add_hashtree_footer_args',
'vendor' : 'avb_vendor_add_hashtree_footer_args',
'vbmeta' : 'avb_vbmeta_args',
+ 'vbmeta_system' : 'avb_vbmeta_system_args',
+ 'vbmeta_vendor' : 'avb_vbmeta_vendor_args',
}
def ReplaceAvbPartitionSigningKey(partition):
@@ -748,11 +970,70 @@ def GetCodenameToApiLevelMap(input_tf_zip):
result = dict()
for codename in codenames:
codename = codename.strip()
- if len(codename) > 0:
+ if codename:
result[codename] = api_level
return result
+def ReadApexKeysInfo(tf_zip):
+ """Parses the APEX keys info from a given target-files zip.
+
+ Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+ dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+ tuple of (payload_key, container_key).
+
+ Args:
+ tf_zip: The input target_files ZipFile (already open).
+
+ Returns:
+    A dict that maps each APEX name to a (payload_key, container_key) tuple,
+    where payload_key is the path to the payload signing key and container_key
+    is the path to the container signing key.
+ """
+ keys = {}
+ for line in tf_zip.read("META/apexkeys.txt").split("\n"):
+ line = line.strip()
+ if not line:
+ continue
+ matches = re.match(
+ r'^name="(?P<NAME>.*)"\s+'
+ r'public_key="(?P<PAYLOAD_PUBLIC_KEY>.*)"\s+'
+ r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
+ r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
+ r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*)"$',
+ line)
+ if not matches:
+ continue
+
+ name = matches.group('NAME')
+ payload_private_key = matches.group("PAYLOAD_PRIVATE_KEY")
+
+ def CompareKeys(pubkey, pubkey_suffix, privkey, privkey_suffix):
+ pubkey_suffix_len = len(pubkey_suffix)
+ privkey_suffix_len = len(privkey_suffix)
+ return (pubkey.endswith(pubkey_suffix) and
+ privkey.endswith(privkey_suffix) and
+ pubkey[:-pubkey_suffix_len] == privkey[:-privkey_suffix_len])
+
+    # Sanity check on the container key names, as we'll carry them without the
+    # extensions. This doesn't apply to payload keys though, which we use with
+    # their full names.
+ container_cert = matches.group("CONTAINER_CERT")
+ container_private_key = matches.group("CONTAINER_PRIVATE_KEY")
+ if container_cert == 'PRESIGNED' and container_private_key == 'PRESIGNED':
+ container_key = 'PRESIGNED'
+ elif CompareKeys(
+ container_cert, OPTIONS.public_key_suffix,
+ container_private_key, OPTIONS.private_key_suffix):
+ container_key = container_cert[:-len(OPTIONS.public_key_suffix)]
+ else:
+ raise ValueError("Failed to parse container keys: \n{}".format(line))
+
+ keys[name] = (payload_private_key, container_key)
+
+ return keys
+
+
def main(argv):
key_mapping_options = []
@@ -763,6 +1044,15 @@ def main(argv):
names = names.split(",")
for n in names:
OPTIONS.extra_apks[n] = key
+ elif o == "--extra_apex_payload_key":
+ apex_name, key = a.split("=")
+ OPTIONS.extra_apex_payload_keys[apex_name] = key
+ elif o == "--skip_apks_with_path_prefix":
+ # Sanity check the prefix, which must be in all upper case.
+ prefix = a.split('/')[0]
+ if not prefix or prefix != prefix.upper():
+ raise ValueError("Invalid path prefix '%s'" % (a,))
+ OPTIONS.skip_apks_with_path_prefix.add(a)
elif o in ("-d", "--default_key_mappings"):
key_mapping_options.append((None, a))
elif o in ("-k", "--key_mapping"):
@@ -807,12 +1097,32 @@ def main(argv):
OPTIONS.avb_algorithms['system'] = a
elif o == "--avb_system_extra_args":
OPTIONS.avb_extra_args['system'] = a
+ elif o == "--avb_system_other_key":
+ OPTIONS.avb_keys['system_other'] = a
+ elif o == "--avb_system_other_algorithm":
+ OPTIONS.avb_algorithms['system_other'] = a
+ elif o == "--avb_system_other_extra_args":
+ OPTIONS.avb_extra_args['system_other'] = a
elif o == "--avb_vendor_key":
OPTIONS.avb_keys['vendor'] = a
elif o == "--avb_vendor_algorithm":
OPTIONS.avb_algorithms['vendor'] = a
elif o == "--avb_vendor_extra_args":
OPTIONS.avb_extra_args['vendor'] = a
+ elif o == "--avb_vbmeta_system_key":
+ OPTIONS.avb_keys['vbmeta_system'] = a
+ elif o == "--avb_vbmeta_system_algorithm":
+ OPTIONS.avb_algorithms['vbmeta_system'] = a
+ elif o == "--avb_vbmeta_system_extra_args":
+ OPTIONS.avb_extra_args['vbmeta_system'] = a
+ elif o == "--avb_vbmeta_vendor_key":
+ OPTIONS.avb_keys['vbmeta_vendor'] = a
+ elif o == "--avb_vbmeta_vendor_algorithm":
+ OPTIONS.avb_algorithms['vbmeta_vendor'] = a
+ elif o == "--avb_vbmeta_vendor_extra_args":
+ OPTIONS.avb_extra_args['vbmeta_vendor'] = a
+ elif o == "--avb_apex_extra_args":
+ OPTIONS.avb_extra_args['apex'] = a
else:
return False
return True
@@ -822,6 +1132,8 @@ def main(argv):
extra_opts="e:d:k:ot:",
extra_long_opts=[
"extra_apks=",
+ "extra_apex_payload_key=",
+ "skip_apks_with_path_prefix=",
"default_key_mappings=",
"key_mapping=",
"replace_ota_keys",
@@ -829,6 +1141,7 @@ def main(argv):
"replace_verity_public_key=",
"replace_verity_private_key=",
"replace_verity_keyid=",
+ "avb_apex_extra_args=",
"avb_vbmeta_algorithm=",
"avb_vbmeta_key=",
"avb_vbmeta_extra_args=",
@@ -841,9 +1154,18 @@ def main(argv):
"avb_system_algorithm=",
"avb_system_key=",
"avb_system_extra_args=",
+ "avb_system_other_algorithm=",
+ "avb_system_other_key=",
+ "avb_system_other_extra_args=",
"avb_vendor_algorithm=",
"avb_vendor_key=",
"avb_vendor_extra_args=",
+ "avb_vbmeta_system_algorithm=",
+ "avb_vbmeta_system_key=",
+ "avb_vbmeta_system_extra_args=",
+ "avb_vbmeta_vendor_algorithm=",
+ "avb_vbmeta_vendor_key=",
+ "avb_vbmeta_vendor_extra_args=",
],
extra_option_handler=option_handler)
@@ -851,6 +1173,8 @@ def main(argv):
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
input_zip = zipfile.ZipFile(args[0], "r")
output_zip = zipfile.ZipFile(args[1], "w",
compression=zipfile.ZIP_DEFLATED,
@@ -860,18 +1184,26 @@ def main(argv):
BuildKeyMap(misc_info, key_mapping_options)
- certmap, compressed_extension = common.ReadApkCerts(input_zip)
- apk_key_map = GetApkCerts(certmap)
- CheckAllApksSigned(input_zip, apk_key_map, compressed_extension)
+ apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
+ apk_keys = GetApkCerts(apk_keys_info)
+
+ apex_keys_info = ReadApexKeysInfo(input_zip)
+ apex_keys = GetApexKeys(apex_keys_info, apk_keys)
+
+ CheckApkAndApexKeysAvailable(
+ input_zip,
+ set(apk_keys.keys()) | set(apex_keys.keys()),
+ compressed_extension,
+ apex_keys)
- key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
+ key_passwords = common.GetKeyPasswords(
+ set(apk_keys.values()) | set(itertools.chain(*apex_keys.values())))
platform_api_level, _ = GetApiLevelAndCodename(input_zip)
codename_to_api_level_map = GetCodenameToApiLevelMap(input_zip)
ProcessTargetFiles(input_zip, output_zip, misc_info,
- apk_key_map, key_passwords,
- platform_api_level,
- codename_to_api_level_map,
+ apk_keys, apex_keys, key_passwords,
+ platform_api_level, codename_to_api_level_map,
compressed_extension)
common.ZipClose(input_zip)
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 083da7a490..33678966e8 100644..100755
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +14,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import print_function
+
+import argparse
import bisect
+import logging
import os
import struct
import threading
@@ -20,6 +26,8 @@ from hashlib import sha1
import rangelib
+logger = logging.getLogger(__name__)
+
class SparseImage(object):
"""Wraps a sparse image file into an image object.
@@ -33,7 +41,8 @@ class SparseImage(object):
"""
def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
- mode="rb", build_map=True, allow_shared_blocks=False):
+ mode="rb", build_map=True, allow_shared_blocks=False,
+ hashtree_info_generator=None):
self.simg_f = f = open(simg_fn, mode)
header_bin = f.read(28)
@@ -60,10 +69,13 @@ class SparseImage(object):
raise ValueError("Chunk header size was expected to be 12, but is %u." %
(chunk_hdr_sz,))
- print("Total of %u %u-byte output blocks in %u input chunks."
- % (total_blks, blk_sz, total_chunks))
+ logger.info(
+ "Total of %u %u-byte output blocks in %u input chunks.", total_blks,
+ blk_sz, total_chunks)
if not build_map:
+ assert not hashtree_info_generator, \
+ "Cannot generate the hashtree info without building the offset map."
return
pos = 0 # in blocks
@@ -102,8 +114,18 @@ class SparseImage(object):
if data_sz != 0:
raise ValueError("Don't care chunk input size is non-zero (%u)" %
(data_sz))
- else:
- pos += chunk_sz
+ # Fills the don't care data ranges with zeros.
+ # TODO(xunchang) pass the care_map to hashtree info generator.
+ if hashtree_info_generator:
+ fill_data = '\x00' * 4
+        # In order to compute the verity hashtree on device, we need to write
+        # zeros explicitly to the don't care ranges, because these ranges may
+        # contain non-zero data from the previous build.
+ care_data.append(pos)
+ care_data.append(pos + chunk_sz)
+ offset_map.append((pos, chunk_sz, None, fill_data))
+
+ pos += chunk_sz
elif chunk_type == 0xCAC4:
raise ValueError("CRC32 chunks are not supported")
@@ -128,6 +150,10 @@ class SparseImage(object):
extended = extended.intersect(all_blocks).subtract(self.care_map)
self.extended = extended
+ self.hashtree_info = None
+ if hashtree_info_generator:
+ self.hashtree_info = hashtree_info_generator.Generate(self)
+
if file_map_fn:
self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
allow_shared_blocks)
@@ -227,15 +253,21 @@ class SparseImage(object):
ranges = rangelib.RangeSet.parse(ranges)
if allow_shared_blocks:
- # Find the shared blocks that have been claimed by others.
+ # Find the shared blocks that have been claimed by others. If so, tag
+ # the entry so that we can skip applying imgdiff on this file.
shared_blocks = ranges.subtract(remaining)
if shared_blocks:
- ranges = ranges.subtract(shared_blocks)
- if not ranges:
+ non_shared = ranges.subtract(shared_blocks)
+ if not non_shared:
continue
- # Tag the entry so that we can skip applying imgdiff on this file.
- ranges.extra['uses_shared_blocks'] = True
+      # There shouldn't be anything in the extra dict yet.
+ assert not ranges.extra, "Non-empty RangeSet.extra"
+
+ # Put the non-shared RangeSet as the value in the block map, which
+ # has a copy of the original RangeSet.
+ non_shared.extra['uses_shared_blocks'] = ranges
+ ranges = non_shared
out[fn] = ranges
assert ranges.size() == ranges.intersect(remaining).size()
@@ -246,6 +278,8 @@ class SparseImage(object):
remaining = remaining.subtract(ranges)
remaining = remaining.subtract(clobbered_blocks)
+ if self.hashtree_info:
+ remaining = remaining.subtract(self.hashtree_info.hashtree_range)
# For all the remaining blocks in the care_map (ie, those that
# aren't part of the data for any file nor part of the clobbered_blocks),
@@ -308,8 +342,28 @@ class SparseImage(object):
out["__NONZERO-%d" % i] = rangelib.RangeSet(data=blocks)
if clobbered_blocks:
out["__COPY"] = clobbered_blocks
+ if self.hashtree_info:
+ out["__HASHTREE"] = self.hashtree_info.hashtree_range
def ResetFileMap(self):
"""Throw away the file map and treat the entire image as
undifferentiated data."""
self.file_map = {"__DATA": self.care_map}
+
+
+def GetImagePartitionSize(img):
+ try:
+ simg = SparseImage(img, build_map=False)
+ return simg.blocksize * simg.total_blocks
+ except ValueError:
+ return os.path.getsize(img)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('image')
+ parser.add_argument('--get_partition_size', action='store_true',
+ help='Return partition size of the image')
+ args = parser.parse_args()
+ if args.get_partition_size:
+ print(GetImagePartitionSize(args.image))
diff --git a/tools/releasetools/target_files_diff.py b/tools/releasetools/target_files_diff.py
index 7415f276f4..4402c8d20e 100755
--- a/tools/releasetools/target_files_diff.py
+++ b/tools/releasetools/target_files_diff.py
@@ -71,7 +71,6 @@ def rewrite_build_property(original, new):
'ro.build.user=',
'ro.build.description=',
'ro.build.fingerprint=',
- 'ro.expect.recovery_id=',
'ro.vendor.build.date=',
'ro.vendor.build.date.utc=',
'ro.vendor.build.fingerprint=']
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index 9a0f78ed15..482f86c7cd 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -16,27 +16,35 @@
import os
import os.path
-import unittest
import zipfile
import common
import test_utils
from add_img_to_target_files import (
- AddCareMapTxtForAbOta, AddPackRadioImages, AddRadioImagesForAbOta,
- GetCareMap)
+ AddCareMapForAbOta, AddPackRadioImages, AppendVBMetaArgsForPartition,
+ CheckAbOtaImages, GetCareMap)
from rangelib import RangeSet
OPTIONS = common.OPTIONS
-class AddImagesToTargetFilesTest(unittest.TestCase):
+class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase):
def setUp(self):
OPTIONS.input_tmp = common.MakeTempDir()
- def tearDown(self):
- common.Cleanup()
+ def _verifyCareMap(self, expected, file_name):
+ """Parses the care_map.pb; and checks the content in plain text."""
+ text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt")
+
+ # Calls an external binary to convert the proto message.
+ cmd = ["care_map_generator", "--parse_proto", file_name, text_file]
+ common.RunAndCheckOutput(cmd)
+
+ with open(text_file, 'r') as verify_fp:
+ plain_text = verify_fp.read()
+ self.assertEqual('\n'.join(expected), plain_text)
@staticmethod
def _create_images(images, prefix):
@@ -55,73 +63,25 @@ class AddImagesToTargetFilesTest(unittest.TestCase):
os.mkdir(images_path)
return images, images_path
- def test_AddRadioImagesForAbOta_imageExists(self):
+ def test_CheckAbOtaImages_imageExistsUnderImages(self):
"""Tests the case with existing images under IMAGES/."""
- images, images_path = self._create_images(['aboot', 'xbl'], 'IMAGES')
- AddRadioImagesForAbOta(None, images)
-
- for image in images:
- self.assertTrue(
- os.path.exists(os.path.join(images_path, image + '.img')))
-
- def test_AddRadioImagesForAbOta_copyFromRadio(self):
- """Tests the case that copies images from RADIO/."""
- images, images_path = self._create_images(['aboot', 'xbl'], 'RADIO')
- AddRadioImagesForAbOta(None, images)
-
- for image in images:
- self.assertTrue(
- os.path.exists(os.path.join(images_path, image + '.img')))
-
- def test_AddRadioImagesForAbOta_copyFromRadio_zipOutput(self):
+ images, _ = self._create_images(['aboot', 'xbl'], 'IMAGES')
+ CheckAbOtaImages(None, images)
+
+ def test_CheckAbOtaImages_imageExistsUnderRadio(self):
+ """Tests the case with some image under RADIO/."""
+ images, _ = self._create_images(['system', 'vendor'], 'IMAGES')
+ radio_path = os.path.join(OPTIONS.input_tmp, 'RADIO')
+ if not os.path.exists(radio_path):
+ os.mkdir(radio_path)
+ with open(os.path.join(radio_path, 'modem.img'), 'wb') as image_fp:
+ image_fp.write('modem'.encode())
+ CheckAbOtaImages(None, images + ['modem'])
+
+ def test_CheckAbOtaImages_missingImages(self):
images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
-
- # Set up the output zip.
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
- AddRadioImagesForAbOta(output_zip, images)
-
- with zipfile.ZipFile(output_file, 'r') as verify_zip:
- for image in images:
- self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
-
- def test_AddRadioImagesForAbOta_copyFromVendorImages(self):
- """Tests the case that copies images from VENDOR_IMAGES/."""
- vendor_images_path = os.path.join(OPTIONS.input_tmp, 'VENDOR_IMAGES')
- os.mkdir(vendor_images_path)
-
- partitions = ['aboot', 'xbl']
- for index, partition in enumerate(partitions):
- subdir = os.path.join(vendor_images_path, 'subdir-{}'.format(index))
- os.mkdir(subdir)
-
- partition_image_path = os.path.join(subdir, partition + '.img')
- with open(partition_image_path, 'wb') as partition_fp:
- partition_fp.write(partition.encode())
-
- # Set up the output dir.
- images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
- os.mkdir(images_path)
-
- AddRadioImagesForAbOta(None, partitions)
-
- for partition in partitions:
- self.assertTrue(
- os.path.exists(os.path.join(images_path, partition + '.img')))
-
- def test_AddRadioImagesForAbOta_missingImages(self):
- images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
- self.assertRaises(AssertionError, AddRadioImagesForAbOta, None,
- images + ['baz'])
-
- def test_AddRadioImagesForAbOta_missingImages_zipOutput(self):
- images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
-
- # Set up the output zip.
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
- self.assertRaises(AssertionError, AddRadioImagesForAbOta, output_zip,
- images + ['baz'])
+ self.assertRaises(
+ AssertionError, CheckAbOtaImages, None, images + ['baz'])
def test_AddPackRadioImages(self):
images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
@@ -172,11 +132,18 @@ class AddImagesToTargetFilesTest(unittest.TestCase):
images + ['baz'])
@staticmethod
- def _test_AddCareMapTxtForAbOta():
- """Helper function to set up the test for test_AddCareMapTxtForAbOta()."""
+ def _test_AddCareMapForAbOta():
+ """Helper function to set up the test for test_AddCareMapForAbOta()."""
OPTIONS.info_dict = {
- 'system_verity_block_device' : '/dev/block/system',
- 'vendor_verity_block_device' : '/dev/block/vendor',
+ 'system_verity_block_device': '/dev/block/system',
+ 'vendor_verity_block_device': '/dev/block/vendor',
+ 'system.build.prop': {
+ 'ro.system.build.fingerprint':
+ 'google/sailfish/12345:user/dev-keys',
+ },
+ 'vendor.build.prop': {
+ 'ro.vendor.build.fingerprint': 'google/sailfish/678:user/dev-keys',
+ }
}
# Prepare the META/ folder.
@@ -197,121 +164,197 @@ class AddImagesToTargetFilesTest(unittest.TestCase):
}
return image_paths
- def test_AddCareMapTxtForAbOta(self):
- image_paths = self._test_AddCareMapTxtForAbOta()
+ def test_AddCareMapForAbOta(self):
+ image_paths = self._test_AddCareMapForAbOta()
- AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+ AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
- care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
- with open(care_map_file, 'r') as verify_fp:
- care_map = verify_fp.read()
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.fingerprint",
+ "google/sailfish/12345:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.fingerprint",
+ "google/sailfish/678:user/dev-keys"]
- lines = care_map.split('\n')
- self.assertEqual(4, len(lines))
- self.assertEqual('system', lines[0])
- self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
- self.assertEqual('vendor', lines[2])
- self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+ self._verifyCareMap(expected, care_map_file)
- def test_AddCareMapTxtForAbOta_withNonCareMapPartitions(self):
+ def test_AddCareMapForAbOta_withNonCareMapPartitions(self):
"""Partitions without care_map should be ignored."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths = self._test_AddCareMapForAbOta()
- AddCareMapTxtForAbOta(
+ AddCareMapForAbOta(
None, ['boot', 'system', 'vendor', 'vbmeta'], image_paths)
- care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
- with open(care_map_file, 'r') as verify_fp:
- care_map = verify_fp.read()
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.fingerprint",
+ "google/sailfish/12345:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.fingerprint",
+ "google/sailfish/678:user/dev-keys"]
- lines = care_map.split('\n')
- self.assertEqual(4, len(lines))
- self.assertEqual('system', lines[0])
- self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
- self.assertEqual('vendor', lines[2])
- self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+ self._verifyCareMap(expected, care_map_file)
- def test_AddCareMapTxtForAbOta_withAvb(self):
+ def test_AddCareMapForAbOta_withAvb(self):
"""Tests the case for device using AVB."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths = self._test_AddCareMapForAbOta()
OPTIONS.info_dict = {
'avb_system_hashtree_enable' : 'true',
'avb_vendor_hashtree_enable' : 'true',
+ 'system.build.prop': {
+ 'ro.system.build.fingerprint':
+ 'google/sailfish/12345:user/dev-keys',
+ },
+ 'vendor.build.prop': {
+ 'ro.vendor.build.fingerprint': 'google/sailfish/678:user/dev-keys',
+ }
}
- AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+ AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.fingerprint",
+ "google/sailfish/12345:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.fingerprint",
+ "google/sailfish/678:user/dev-keys"]
+
+ self._verifyCareMap(expected, care_map_file)
+
+ def test_AddCareMapForAbOta_noFingerprint(self):
+ """Tests the case for partitions without fingerprint."""
+ image_paths = self._test_AddCareMapForAbOta()
+ OPTIONS.info_dict = {
+ 'system_verity_block_device': '/dev/block/system',
+ 'vendor_verity_block_device': '/dev/block/vendor',
+ }
- care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
- with open(care_map_file, 'r') as verify_fp:
- care_map = verify_fp.read()
+ AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
- lines = care_map.split('\n')
- self.assertEqual(4, len(lines))
- self.assertEqual('system', lines[0])
- self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
- self.assertEqual('vendor', lines[2])
- self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(), "unknown",
+ "unknown", 'vendor', RangeSet("0-9").to_string_raw(), "unknown",
+ "unknown"]
- def test_AddCareMapTxtForAbOta_verityNotEnabled(self):
- """No care_map.txt should be generated if verity not enabled."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ self._verifyCareMap(expected, care_map_file)
+
+ def test_AddCareMapForAbOta_withThumbprint(self):
+ """Tests the case for partitions with thumbprint."""
+ image_paths = self._test_AddCareMapForAbOta()
+ OPTIONS.info_dict = {
+ 'system_verity_block_device': '/dev/block/system',
+ 'vendor_verity_block_device': '/dev/block/vendor',
+ 'system.build.prop': {
+ 'ro.system.build.thumbprint': 'google/sailfish/123:user/dev-keys',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.thumbprint': 'google/sailfish/456:user/dev-keys',
+ }
+ }
+
+ AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.thumbprint",
+ "google/sailfish/123:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.thumbprint",
+ "google/sailfish/456:user/dev-keys"]
+
+ self._verifyCareMap(expected, care_map_file)
+
+ def test_AddCareMapForAbOta_verityNotEnabled(self):
+    """No care_map.pb should be generated if verity is not enabled."""
+ image_paths = self._test_AddCareMapForAbOta()
OPTIONS.info_dict = {}
- AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+ AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
- care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
self.assertFalse(os.path.exists(care_map_file))
- def test_AddCareMapTxtForAbOta_missingImageFile(self):
+ def test_AddCareMapForAbOta_missingImageFile(self):
"""Missing image file should be considered fatal."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths = self._test_AddCareMapForAbOta()
image_paths['vendor'] = ''
- self.assertRaises(AssertionError, AddCareMapTxtForAbOta, None,
+ self.assertRaises(AssertionError, AddCareMapForAbOta, None,
['system', 'vendor'], image_paths)
- def test_AddCareMapTxtForAbOta_zipOutput(self):
+ def test_AddCareMapForAbOta_zipOutput(self):
"""Tests the case with ZIP output."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths = self._test_AddCareMapForAbOta()
output_file = common.MakeTempFile(suffix='.zip')
with zipfile.ZipFile(output_file, 'w') as output_zip:
- AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+ AddCareMapForAbOta(output_zip, ['system', 'vendor'], image_paths)
+ care_map_name = "META/care_map.pb"
+ temp_dir = common.MakeTempDir()
with zipfile.ZipFile(output_file, 'r') as verify_zip:
- care_map = verify_zip.read('META/care_map.txt').decode('ascii')
-
- lines = care_map.split('\n')
- self.assertEqual(4, len(lines))
- self.assertEqual('system', lines[0])
- self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
- self.assertEqual('vendor', lines[2])
- self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
-
- def test_AddCareMapTxtForAbOta_zipOutput_careMapEntryExists(self):
+ self.assertTrue(care_map_name in verify_zip.namelist())
+ verify_zip.extract(care_map_name, path=temp_dir)
+
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.fingerprint",
+ "google/sailfish/12345:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.fingerprint",
+ "google/sailfish/678:user/dev-keys"]
+ self._verifyCareMap(expected, os.path.join(temp_dir, care_map_name))
+
+ def test_AddCareMapForAbOta_zipOutput_careMapEntryExists(self):
"""Tests the case with ZIP output which already has care_map entry."""
- image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths = self._test_AddCareMapForAbOta()
output_file = common.MakeTempFile(suffix='.zip')
with zipfile.ZipFile(output_file, 'w') as output_zip:
- # Create an existing META/care_map.txt entry.
- common.ZipWriteStr(output_zip, 'META/care_map.txt', 'dummy care_map.txt')
+ # Create an existing META/care_map.pb entry.
+ common.ZipWriteStr(output_zip, 'META/care_map.pb',
+ 'dummy care_map.pb')
- # Request to add META/care_map.txt again.
- AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+ # Request to add META/care_map.pb again.
+ AddCareMapForAbOta(output_zip, ['system', 'vendor'], image_paths)
# The one under OPTIONS.input_tmp must have been replaced.
- care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
- with open(care_map_file, 'r') as verify_fp:
- care_map = verify_fp.read()
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+ expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+ "ro.system.build.fingerprint",
+ "google/sailfish/12345:user/dev-keys",
+ 'vendor', RangeSet("0-9").to_string_raw(),
+ "ro.vendor.build.fingerprint",
+ "google/sailfish/678:user/dev-keys"]
- lines = care_map.split('\n')
- self.assertEqual(4, len(lines))
- self.assertEqual('system', lines[0])
- self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
- self.assertEqual('vendor', lines[2])
- self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+ self._verifyCareMap(expected, care_map_file)
# The existing entry should be scheduled to be replaced.
- self.assertIn('META/care_map.txt', OPTIONS.replace_updated_files_list)
+ self.assertIn('META/care_map.pb', OPTIONS.replace_updated_files_list)
+
+ def test_AppendVBMetaArgsForPartition(self):
+ OPTIONS.info_dict = {}
+ cmd = []
+ AppendVBMetaArgsForPartition(cmd, 'system', '/path/to/system.img')
+ self.assertEqual(
+ ['--include_descriptors_from_image', '/path/to/system.img'], cmd)
+
+ def test_AppendVBMetaArgsForPartition_vendorAsChainedPartition(self):
+ testdata_dir = test_utils.get_testdata_dir()
+ pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ OPTIONS.info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_vendor_key_path': pubkey,
+ 'avb_vendor_rollback_index_location': 5,
+ }
+ cmd = []
+ AppendVBMetaArgsForPartition(cmd, 'vendor', '/path/to/vendor.img')
+ self.assertEqual(2, len(cmd))
+ self.assertEqual('--chain_partition', cmd[0])
+ chained_partition_args = cmd[1].split(':')
+ self.assertEqual(3, len(chained_partition_args))
+ self.assertEqual('vendor', chained_partition_args[0])
+ self.assertEqual('5', chained_partition_args[1])
+ self.assertTrue(os.path.exists(chained_partition_args[2]))
def test_GetCareMap(self):
sparse_image = test_utils.construct_sparse_image([
@@ -319,7 +362,7 @@ class AddImagesToTargetFilesTest(unittest.TestCase):
(0xCAC3, 4),
(0xCAC1, 6)])
OPTIONS.info_dict = {
- 'system_adjusted_partition_size' : 12,
+ 'system_image_size' : 53248,
}
name, care_map = GetCareMap('system', sparse_image)
self.assertEqual('system', name)
@@ -334,6 +377,6 @@ class AddImagesToTargetFilesTest(unittest.TestCase):
(0xCAC3, 4),
(0xCAC1, 6)])
OPTIONS.info_dict = {
- 'system_adjusted_partition_size' : -12,
+ 'system_image_size' : -45056,
}
self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
new file mode 100644
index 0000000000..2f8ee49823
--- /dev/null
+++ b/tools/releasetools/test_apex_utils.py
@@ -0,0 +1,87 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+
+import apex_utils
+import common
+import test_utils
+
+
+class ApexUtilsTest(test_utils.ReleaseToolsTestCase):
+
+ # echo "foo" | sha256sum
+ SALT = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
+
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+ # The default payload signing key.
+ self.payload_key = os.path.join(self.testdata_dir, 'testkey.key')
+
+ @staticmethod
+ def _GetTestPayload():
+ payload_file = common.MakeTempFile(prefix='apex-', suffix='.img')
+ with open(payload_file, 'wb') as payload_fp:
+ payload_fp.write(os.urandom(8192))
+ return payload_file
+
+ def test_ParseApexPayloadInfo(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ payload_info = apex_utils.ParseApexPayloadInfo(payload_file)
+ self.assertEqual('SHA256_RSA2048', payload_info['Algorithm'])
+ self.assertEqual(self.SALT, payload_info['Salt'])
+ self.assertEqual('testkey', payload_info['apex.key'])
+
+ def test_SignApexPayload(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+
+ def test_SignApexPayload_withSignerHelper(self):
+ payload_file = self._GetTestPayload()
+ payload_signer_args = '--signing_helper_with_files {}'.format(
+ os.path.join(self.testdata_dir, 'signing_helper.sh'))
+ apex_utils.SignApexPayload(
+ payload_file,
+ self.payload_key,
+ 'testkey', 'SHA256_RSA2048', self.SALT,
+ payload_signer_args)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+
+ def test_SignApexPayload_invalidKey(self):
+ self.assertRaises(
+ apex_utils.ApexSigningError,
+ apex_utils.SignApexPayload,
+ self._GetTestPayload(),
+ os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+ 'testkey',
+ 'SHA256_RSA2048',
+ self.SALT)
+
+ def test_VerifyApexPayload_wrongKey(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+ self.assertRaises(
+ apex_utils.ApexSigningError,
+ apex_utils.VerifyApexPayload,
+ payload_file,
+ os.path.join(self.testdata_dir, 'testkey_with_passwd.key'))
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index ceada18ead..b6d47d4a0b 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -14,17 +14,18 @@
# limitations under the License.
#
-from __future__ import print_function
-
-import unittest
+import os
+from hashlib import sha1
import common
-from blockimgdiff import (BlockImageDiff, EmptyImage, HeapItem, ImgdiffStats,
- Transfer)
+from blockimgdiff import (
+ BlockImageDiff, DataImage, EmptyImage, FileImage, HeapItem, ImgdiffStats,
+ Transfer)
from rangelib import RangeSet
+from test_utils import ReleaseToolsTestCase
-class HealpItemTest(unittest.TestCase):
+class HealpItemTest(ReleaseToolsTestCase):
class Item(object):
def __init__(self, score):
@@ -54,7 +55,7 @@ class HealpItemTest(unittest.TestCase):
self.assertFalse(item)
-class BlockImageDiffTest(unittest.TestCase):
+class BlockImageDiffTest(ReleaseToolsTestCase):
def test_GenerateDigraphOrder(self):
"""Make sure GenerateDigraph preserves the order.
@@ -130,11 +131,11 @@ class BlockImageDiffTest(unittest.TestCase):
# Sufficient cache to stash 5 blocks (size * 0.8 >= 5).
common.OPTIONS.cache_size = 7 * 4096
- self.assertEqual(0, block_image_diff.ReviseStashSize())
+ self.assertEqual((0, 5), block_image_diff.ReviseStashSize())
# Insufficient cache to stash 5 blocks (size * 0.8 < 5).
common.OPTIONS.cache_size = 6 * 4096
- self.assertEqual(10, block_image_diff.ReviseStashSize())
+ self.assertEqual((10, 0), block_image_diff.ReviseStashSize())
def test_ReviseStashSize_bug_33687949(self):
"""ReviseStashSize() should "free" the used stash _after_ the command.
@@ -172,7 +173,7 @@ class BlockImageDiffTest(unittest.TestCase):
# Insufficient cache to stash 15 blocks (size * 0.8 < 15).
common.OPTIONS.cache_size = 15 * 4096
- self.assertEqual(15, block_image_diff.ReviseStashSize())
+ self.assertEqual((15, 5), block_image_diff.ReviseStashSize())
def test_FileTypeSupportedByImgdiff(self):
self.assertTrue(
@@ -203,8 +204,8 @@ class BlockImageDiffTest(unittest.TestCase):
self.assertDictEqual(
{
- ImgdiffStats.USED_IMGDIFF : {"/system/app/app1.apk"},
- ImgdiffStats.USED_IMGDIFF_LARGE_APK : {"/vendor/app/app2.apk"},
+ ImgdiffStats.USED_IMGDIFF: {"/system/app/app1.apk"},
+ ImgdiffStats.USED_IMGDIFF_LARGE_APK: {"/vendor/app/app2.apk"},
},
block_image_diff.imgdiff_stats.stats)
@@ -229,13 +230,6 @@ class BlockImageDiffTest(unittest.TestCase):
"/system/app/app2.apk", RangeSet("10-15"),
RangeSet("15-20 30 10-14")))
- # At least one of the ranges has been modified.
- src_ranges = RangeSet("0-5")
- src_ranges.extra['trimmed'] = True
- self.assertFalse(
- block_image_diff.CanUseImgdiff(
- "/vendor/app/app3.apk", RangeSet("10-15"), src_ranges))
-
# At least one of the ranges is incomplete.
src_ranges = RangeSet("0-5")
src_ranges.extra['incomplete'] = True
@@ -246,14 +240,13 @@ class BlockImageDiffTest(unittest.TestCase):
# The stats are correctly logged.
self.assertDictEqual(
{
- ImgdiffStats.SKIPPED_NONMONOTONIC : {'/system/app/app2.apk'},
- ImgdiffStats.SKIPPED_TRIMMED : {'/vendor/app/app3.apk'},
+ ImgdiffStats.SKIPPED_NONMONOTONIC: {'/system/app/app2.apk'},
ImgdiffStats.SKIPPED_INCOMPLETE: {'/vendor/app/app4.apk'},
},
block_image_diff.imgdiff_stats.stats)
-class ImgdiffStatsTest(unittest.TestCase):
+class ImgdiffStatsTest(ReleaseToolsTestCase):
def test_Log(self):
imgdiff_stats = ImgdiffStats()
@@ -272,3 +265,45 @@ class ImgdiffStatsTest(unittest.TestCase):
self.assertRaises(AssertionError, imgdiff_stats.Log, "/system/app/app1.apk",
"invalid reason")
+
+
+class DataImageTest(ReleaseToolsTestCase):
+ def test_read_range_set(self):
+ data = "file" + ('\0' * 4092)
+ image = DataImage(data)
+ self.assertEqual(data, "".join(image.ReadRangeSet(image.care_map)))
+
+
+class FileImageTest(ReleaseToolsTestCase):
+ def setUp(self):
+ self.file_path = common.MakeTempFile()
+ self.data = os.urandom(4096 * 4)
+ with open(self.file_path, 'w') as f:
+ f.write(self.data)
+ self.file = FileImage(self.file_path)
+
+ def test_totalsha1(self):
+ self.assertEqual(sha1(self.data).hexdigest(), self.file.TotalSha1())
+
+ def test_ranges(self):
+ blocksize = self.file.blocksize
+ for s in range(4):
+ for e in range(s, 4):
+ expected_data = self.data[s * blocksize : e * blocksize]
+
+ rs = RangeSet([s, e])
+ data = "".join(self.file.ReadRangeSet(rs))
+ self.assertEqual(expected_data, data)
+
+ sha1sum = self.file.RangeSha1(rs)
+ self.assertEqual(sha1(expected_data).hexdigest(), sha1sum)
+
+ tmpfile = common.MakeTempFile()
+ with open(tmpfile, 'w') as f:
+ self.file.WriteRangeDataToFd(rs, f)
+ with open(tmpfile, 'r') as f:
+ self.assertEqual(expected_data, f.read())
+
+ def test_read_all(self):
+ data = "".join(self.file.ReadRangeSet(self.file.care_map))
+ self.assertEqual(self.data, data)
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
index 161faff8c8..1cebd0c095 100644
--- a/tools/releasetools/test_build_image.py
+++ b/tools/releasetools/test_build_image.py
@@ -14,13 +14,16 @@
# limitations under the License.
#
-import unittest
+import filecmp
+import os.path
import common
-from build_image import CheckHeadroom, RunCommand
+from build_image import (
+    BuildImageError, CheckHeadroom, GetFilesystemCharacteristics,
+    SetUpInDirAndFsConfig)
+from test_utils import ReleaseToolsTestCase
-class BuildImageTest(unittest.TestCase):
+class BuildImageTest(ReleaseToolsTestCase):
# Available: 1000 blocks.
EXT4FS_OUTPUT = (
@@ -33,7 +36,7 @@ class BuildImageTest(unittest.TestCase):
'partition_headroom' : '4096000',
'mount_point' : 'system',
}
- self.assertTrue(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
+ CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict)
def test_CheckHeadroom_InsufficientHeadroom(self):
# Required headroom: 1001 blocks.
@@ -42,7 +45,8 @@ class BuildImageTest(unittest.TestCase):
'partition_headroom' : '4100096',
'mount_point' : 'system',
}
- self.assertFalse(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
+ self.assertRaises(
+ BuildImageError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
def test_CheckHeadroom_WrongFsType(self):
prop_dict = {
@@ -72,23 +76,125 @@ class BuildImageTest(unittest.TestCase):
"""Tests the result parsing from actual call to mke2fs."""
input_dir = common.MakeTempDir()
output_image = common.MakeTempFile(suffix='.img')
- command = ['mkuserimg_mke2fs.sh', input_dir, output_image, 'ext4',
+ command = ['mkuserimg_mke2fs', input_dir, output_image, 'ext4',
'/system', '409600', '-j', '0']
- ext4fs_output, exit_code = RunCommand(command)
- self.assertEqual(0, exit_code)
+ proc = common.Run(command)
+ ext4fs_output, _ = proc.communicate()
+ self.assertEqual(0, proc.returncode)
prop_dict = {
'fs_type' : 'ext4',
'partition_headroom' : '40960',
'mount_point' : 'system',
}
- self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
+ CheckHeadroom(ext4fs_output, prop_dict)
prop_dict = {
'fs_type' : 'ext4',
'partition_headroom' : '413696',
'mount_point' : 'system',
}
- self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
+ self.assertRaises(BuildImageError, CheckHeadroom, ext4fs_output, prop_dict)
+
+ def test_SetUpInDirAndFsConfig_SystemRootImageTrue_NonSystem(self):
+ prop_dict = {
+ 'fs_config': 'fs-config',
+ 'mount_point': 'vendor',
+ 'system_root_image': 'true',
+ }
+ in_dir, fs_config = SetUpInDirAndFsConfig('/path/to/in_dir', prop_dict)
+ self.assertEqual('/path/to/in_dir', in_dir)
+ self.assertEqual('fs-config', fs_config)
+ self.assertEqual('vendor', prop_dict['mount_point'])
+
+ @staticmethod
+ def _gen_fs_config(partition):
+ fs_config = common.MakeTempFile(suffix='.txt')
+ with open(fs_config, 'w') as fs_config_fp:
+ fs_config_fp.write('fs-config-{}\n'.format(partition))
+ return fs_config
+
+ def test_SetUpInDirAndFsConfig(self):
+ root_dir = common.MakeTempDir()
+ with open(os.path.join(root_dir, 'init'), 'w') as init_fp:
+ init_fp.write('init')
+
+ origin_in = common.MakeTempDir()
+ with open(os.path.join(origin_in, 'file'), 'w') as in_fp:
+ in_fp.write('system-file')
+ os.symlink('../etc', os.path.join(origin_in, 'symlink'))
+
+ fs_config_system = self._gen_fs_config('system')
+
+ prop_dict = {
+ 'fs_config': fs_config_system,
+ 'mount_point': 'system',
+ 'root_dir': root_dir,
+ }
+ in_dir, fs_config = SetUpInDirAndFsConfig(origin_in, prop_dict)
+
+ self.assertTrue(filecmp.cmp(
+ os.path.join(in_dir, 'init'), os.path.join(root_dir, 'init')))
+ self.assertTrue(filecmp.cmp(
+ os.path.join(in_dir, 'system', 'file'),
+ os.path.join(origin_in, 'file')))
+ self.assertTrue(os.path.islink(os.path.join(in_dir, 'system', 'symlink')))
+
+ self.assertTrue(filecmp.cmp(fs_config_system, fs_config))
+ self.assertEqual('/', prop_dict['mount_point'])
+
+ def test_SetUpInDirAndFsConfig_WithRootFsConfig(self):
+ root_dir = common.MakeTempDir()
+ with open(os.path.join(root_dir, 'init'), 'w') as init_fp:
+ init_fp.write('init')
+
+ origin_in = common.MakeTempDir()
+ with open(os.path.join(origin_in, 'file'), 'w') as in_fp:
+ in_fp.write('system-file')
+ os.symlink('../etc', os.path.join(origin_in, 'symlink'))
+
+ fs_config_system = self._gen_fs_config('system')
+ fs_config_root = self._gen_fs_config('root')
+
+ prop_dict = {
+ 'fs_config': fs_config_system,
+ 'mount_point': 'system',
+ 'root_dir': root_dir,
+ 'root_fs_config': fs_config_root,
+ }
+ in_dir, fs_config = SetUpInDirAndFsConfig(origin_in, prop_dict)
+
+ self.assertTrue(filecmp.cmp(
+ os.path.join(in_dir, 'init'), os.path.join(root_dir, 'init')))
+ self.assertTrue(filecmp.cmp(
+ os.path.join(in_dir, 'system', 'file'),
+ os.path.join(origin_in, 'file')))
+ self.assertTrue(os.path.islink(os.path.join(in_dir, 'system', 'symlink')))
+
+ with open(fs_config) as fs_config_fp:
+ fs_config_data = fs_config_fp.readlines()
+ self.assertIn('fs-config-system\n', fs_config_data)
+ self.assertIn('fs-config-root\n', fs_config_data)
+ self.assertEqual('/', prop_dict['mount_point'])
+
+ def test_GetFilesystemCharacteristics(self):
+ input_dir = common.MakeTempDir()
+ output_image = common.MakeTempFile(suffix='.img')
+ command = ['mkuserimg_mke2fs', input_dir, output_image, 'ext4',
+ '/system', '409600', '-j', '0']
+ proc = common.Run(command)
+ ext4fs_output, _ = proc.communicate()
+ self.assertEqual(0, proc.returncode)
+
+ output_file = common.MakeTempFile(suffix='.img')
+ cmd = ["img2simg", output_image, output_file]
+ p = common.Run(cmd)
+ p.communicate()
+ self.assertEqual(0, p.returncode)
- common.Cleanup()
+ fs_dict = GetFilesystemCharacteristics(output_file)
+ self.assertEqual(int(fs_dict['Block size']), 4096)
+ self.assertGreaterEqual(int(fs_dict['Free blocks']), 0) # expect ~88
+ self.assertGreater(int(fs_dict['Inode count']), 0) # expect ~64
+ self.assertGreaterEqual(int(fs_dict['Free inodes']), 0) # expect ~53
+ self.assertGreater(int(fs_dict['Inode count']), int(fs_dict['Free inodes']))
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index fb26b6660a..dce27fe1c3 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -14,11 +14,11 @@
# limitations under the License.
#
+import copy
import os
import subprocess
import tempfile
import time
-import unittest
import zipfile
from hashlib import sha1
@@ -27,6 +27,7 @@ import test_utils
import validate_target_files
from rangelib import RangeSet
+from blockimgdiff import EmptyImage, DataImage
KiB = 1024
MiB = 1024 * KiB
@@ -43,7 +44,8 @@ def get_2gb_string():
yield '\0' * (step_size - block_size)
-class CommonZipTest(unittest.TestCase):
+class CommonZipTest(test_utils.ReleaseToolsTestCase):
+
def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
test_file_name=None, expected_stat=None, expected_mode=0o644,
expected_compress_type=zipfile.ZIP_STORED):
@@ -333,8 +335,8 @@ class CommonZipTest(unittest.TestCase):
self.assertFalse('Test2' in entries)
self.assertTrue('Test3' in entries)
- self.assertRaises(AssertionError, common.ZipDelete, zip_file.name,
- 'Test2')
+ self.assertRaises(
+ common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
entries = check_zip.namelist()
self.assertTrue('Test1' in entries)
@@ -357,8 +359,92 @@ class CommonZipTest(unittest.TestCase):
finally:
os.remove(zip_file.name)
-
-class CommonApkUtilsTest(unittest.TestCase):
+ @staticmethod
+ def _test_UnzipTemp_createZipFile():
+ zip_file = common.MakeTempFile(suffix='.zip')
+ output_zip = zipfile.ZipFile(
+ zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
+ contents = os.urandom(1024)
+ with tempfile.NamedTemporaryFile() as entry_file:
+ entry_file.write(contents)
+ common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
+ common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
+ common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
+ common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
+ common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
+ common.ZipClose(output_zip)
+ common.ZipClose(output_zip)
+ return zip_file
+
+ def test_UnzipTemp(self):
+ zip_file = self._test_UnzipTemp_createZipFile()
+ unzipped_dir = common.UnzipTemp(zip_file)
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ def test_UnzipTemp_withPatterns(self):
+ zip_file = self._test_UnzipTemp_createZipFile()
+
+ unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ def test_UnzipTemp_withEmptyPatterns(self):
+ zip_file = self._test_UnzipTemp_createZipFile()
+ unzipped_dir = common.UnzipTemp(zip_file, [])
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ def test_UnzipTemp_withPartiallyMatchingPatterns(self):
+ zip_file = self._test_UnzipTemp_createZipFile()
+ unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+ def test_UnzipTemp_withNoMatchingPatterns(self):
+ zip_file = self._test_UnzipTemp_createZipFile()
+ unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
+ self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
+
+
+class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
"""Tests the APK utils related functions."""
APKCERTS_TXT1 = (
@@ -406,9 +492,6 @@ class CommonApkUtilsTest(unittest.TestCase):
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
- def tearDown(self):
- common.Cleanup()
-
@staticmethod
def _write_apkcerts_txt(apkcerts_txt, additional=None):
if additional is None:
@@ -492,6 +575,13 @@ class CommonApkUtilsTest(unittest.TestCase):
wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+ def test_ExtractAvbPublicKey(self):
+ privkey = os.path.join(self.testdata_dir, 'testkey.key')
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+ with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
+ open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
+ self.assertEqual(privkey_fp.read(), pubkey_fp.read())
+
def test_ParseCertificate(self):
cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
@@ -504,11 +594,28 @@ class CommonApkUtilsTest(unittest.TestCase):
actual = common.ParseCertificate(cert_fp.read())
self.assertEqual(expected, actual)
+ def test_GetMinSdkVersion(self):
+ test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
+ self.assertEqual('24', common.GetMinSdkVersion(test_app))
+
+ def test_GetMinSdkVersion_invalidInput(self):
+ self.assertRaises(
+ common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')
+
+ def test_GetMinSdkVersionInt(self):
+ test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
+ self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))
+
+ def test_GetMinSdkVersionInt_invalidInput(self):
+ self.assertRaises(
+ common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
+ {})
-class CommonUtilsTest(unittest.TestCase):
- def tearDown(self):
- common.Cleanup()
+class CommonUtilsTest(test_utils.ReleaseToolsTestCase):
+
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
def test_GetSparseImage_emptyBlockMapFile(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -534,11 +641,13 @@ class CommonUtilsTest(unittest.TestCase):
},
sparse_image.file_map)
- def test_GetSparseImage_invalidImageName(self):
+ def test_GetSparseImage_missingImageFile(self):
self.assertRaises(
- AssertionError, common.GetSparseImage, 'system2', None, None, False)
+ AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
+ None, False)
self.assertRaises(
- AssertionError, common.GetSparseImage, 'unknown', None, None, False)
+ AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
+ None, False)
def test_GetSparseImage_missingBlockMapFile(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -645,8 +754,276 @@ class CommonUtilsTest(unittest.TestCase):
self.assertFalse(sparse_image.file_map['/system/file1'].extra)
self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
+ def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '//system/file1 1-5 9-10',
+ '//system/file2 11-12',
+ '/system/app/file3 13-15']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ # '/system/file2' has less blocks listed (2) than actual (3).
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+ # '/system/app/file3' has less blocks listed (3) than actual (4).
+ target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+ self.assertFalse(sparse_image.file_map['//system/file1'].extra)
+ self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
+ self.assertTrue(
+ sparse_image.file_map['/system/app/file3'].extra['incomplete'])
+
+ def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '//system/file1 1-5 9-10',
+ '//init.rc 13-15']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      # '/init.rc' has fewer blocks listed (3) than actual (4).
+ target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+ self.assertFalse(sparse_image.file_map['//system/file1'].extra)
+ self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])
+
+ def test_GetSparseImage_fileNotFound(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '//system/file1 1-5 9-10',
+ '//system/file2 11-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+ False)
+
+ def test_GetAvbChainedPartitionArg(self):
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_system_key_path': pubkey,
+ 'avb_system_rollback_index_location': 2,
+ }
+ args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
+ self.assertEqual(3, len(args))
+ self.assertEqual('system', args[0])
+ self.assertEqual('2', args[1])
+ self.assertTrue(os.path.exists(args[2]))
+
+ def test_GetAvbChainedPartitionArg_withPrivateKey(self):
+ key = os.path.join(self.testdata_dir, 'testkey.key')
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_product_key_path': key,
+ 'avb_product_rollback_index_location': 2,
+ }
+ args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
+ self.assertEqual(3, len(args))
+ self.assertEqual('product', args[0])
+ self.assertEqual('2', args[1])
+ self.assertTrue(os.path.exists(args[2]))
+
+ def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_system_key_path': 'does-not-exist',
+ 'avb_system_rollback_index_location': 2,
+ }
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+ args = common.GetAvbChainedPartitionArg(
+ 'system', info_dict, pubkey).split(':')
+ self.assertEqual(3, len(args))
+ self.assertEqual('system', args[0])
+ self.assertEqual('2', args[1])
+ self.assertTrue(os.path.exists(args[2]))
+
+ def test_GetAvbChainedPartitionArg_invalidKey(self):
+ pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_system_key_path': pubkey,
+ 'avb_system_rollback_index_location': 2,
+ }
+ self.assertRaises(
+ common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
+ info_dict)
+
+ INFO_DICT_DEFAULT = {
+ 'recovery_api_version': 3,
+ 'fstab_version': 2,
+ 'system_root_image': 'true',
+ 'no_recovery' : 'true',
+ 'recovery_as_boot': 'true',
+ }
+
+ @staticmethod
+ def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ info_values = ''.join(
+ ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
+ common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
+
+ FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
+ if info_dict.get('system_root_image') == 'true':
+ fstab_values = FSTAB_TEMPLATE.format('/')
+ else:
+ fstab_values = FSTAB_TEMPLATE.format('/system')
+ common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)
+
+ common.ZipWriteStr(
+ target_files_zip, 'META/file_contexts', 'file-contexts')
+ return target_files
+
+ def test_LoadInfoDict(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ loaded_dict = common.LoadInfoDict(target_files_zip)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_legacyRecoveryFstabPath(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ loaded_dict = common.LoadInfoDict(target_files_zip)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_dirInput(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ unzipped = common.UnzipTemp(target_files)
+ loaded_dict = common.LoadInfoDict(unzipped)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ unzipped = common.UnzipTemp(target_files)
+ loaded_dict = common.LoadInfoDict(unzipped)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_systemRootImageFalse(self):
+    # Devices using neither system-as-root nor recovery-as-boot. Non-A/B
+    # devices launched before P will likely have this config.
+ info_dict = copy.copy(self.INFO_DICT_DEFAULT)
+ del info_dict['no_recovery']
+ del info_dict['system_root_image']
+ del info_dict['recovery_as_boot']
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ info_dict,
+ 'RECOVERY/RAMDISK/system/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ loaded_dict = common.LoadInfoDict(target_files_zip)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertNotIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_recoveryAsBootFalse(self):
+    # Devices using system-as-root, but with a standalone recovery image.
+    # Non-A/B devices launched since P will likely have this config.
+ info_dict = copy.copy(self.INFO_DICT_DEFAULT)
+ del info_dict['no_recovery']
+ del info_dict['recovery_as_boot']
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ info_dict,
+ 'RECOVERY/RAMDISK/system/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ loaded_dict = common.LoadInfoDict(target_files_zip)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+
+ def test_LoadInfoDict_noRecoveryTrue(self):
+ # Device doesn't have a recovery partition at all.
+ info_dict = copy.copy(self.INFO_DICT_DEFAULT)
+ del info_dict['recovery_as_boot']
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ info_dict,
+ 'RECOVERY/RAMDISK/system/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ loaded_dict = common.LoadInfoDict(target_files_zip)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIsNone(loaded_dict['fstab'])
+
+ def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ common.ZipDelete(target_files, 'META/misc_info.txt')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)
+
+ def test_LoadInfoDict_repacking(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ unzipped = common.UnzipTemp(target_files)
+ loaded_dict = common.LoadInfoDict(unzipped, True)
+ self.assertEqual(3, loaded_dict['recovery_api_version'])
+ self.assertEqual(2, loaded_dict['fstab_version'])
+ self.assertIn('/', loaded_dict['fstab'])
+ self.assertIn('/system', loaded_dict['fstab'])
+ self.assertEqual(
+ os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
+ self.assertEqual(
+ os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
+ loaded_dict['root_fs_config'])
+
+ def test_LoadInfoDict_repackingWithZipFileInput(self):
+ target_files = self._test_LoadInfoDict_createTargetFiles(
+ self.INFO_DICT_DEFAULT,
+ 'BOOT/RAMDISK/system/etc/recovery.fstab')
+ with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ self.assertRaises(
+ AssertionError, common.LoadInfoDict, target_files_zip, True)
+
-class InstallRecoveryScriptFormatTest(unittest.TestCase):
+class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
Its format should match between common.py and validate_target_files.py.
@@ -706,5 +1083,235 @@ class InstallRecoveryScriptFormatTest(unittest.TestCase):
validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
self._info)
- def tearDown(self):
- common.Cleanup()
+
+class MockScriptWriter(object):
+ """A class that mocks edify_generator.EdifyGenerator.
+ """
+ def __init__(self, enable_comments=False):
+ self.lines = []
+ self.enable_comments = enable_comments
+ def Comment(self, comment):
+ if self.enable_comments:
+ self.lines.append("# {}".format(comment))
+ def AppendExtra(self, extra):
+ self.lines.append(extra)
+ def __str__(self):
+ return "\n".join(self.lines)
+
+
+class MockBlockDifference(object):
+ def __init__(self, partition, tgt, src=None):
+ self.partition = partition
+ self.tgt = tgt
+ self.src = src
+ def WriteScript(self, script, _, progress=None,
+ write_verify_script=False):
+ if progress:
+ script.AppendExtra("progress({})".format(progress))
+ script.AppendExtra("patch({});".format(self.partition))
+ if write_verify_script:
+ self.WritePostInstallVerifyScript(script)
+ def WritePostInstallVerifyScript(self, script):
+ script.AppendExtra("verify({});".format(self.partition))
+
+
+class FakeSparseImage(object):
+ def __init__(self, size):
+ self.blocksize = 4096
+ self.total_blocks = size // 4096
+ assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)
+
+
+class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
+ @staticmethod
+ def get_op_list(output_path):
+ with zipfile.ZipFile(output_path, 'r') as output_zip:
+ with output_zip.open("dynamic_partitions_op_list") as op_list:
+ return [line.strip() for line in op_list.readlines()
+ if not line.startswith("#")]
+
+ def setUp(self):
+ self.script = MockScriptWriter()
+ self.output_path = common.MakeTempFile(suffix='.zip')
+
+ def test_full(self):
+ target_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor
+super_partition_groups=group_foo
+super_group_foo_group_size={group_size}
+super_group_foo_partition_list=system vendor
+""".format(group_size=4 * GiB).split("\n"))
+ block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
+ MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]
+
+ dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
+ with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ self.assertEqual(str(self.script).strip(), """
+assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
+patch(vendor);
+verify(vendor);
+unmap_partition("vendor");
+patch(system);
+verify(system);
+unmap_partition("system");
+""".strip())
+
+ lines = self.get_op_list(self.output_path)
+
+ remove_all_groups = lines.index("remove_all_groups")
+ add_group = lines.index("add_group group_foo 4294967296")
+ add_vendor = lines.index("add vendor group_foo")
+ add_system = lines.index("add system group_foo")
+ resize_vendor = lines.index("resize vendor 1073741824")
+ resize_system = lines.index("resize system 3221225472")
+
+ self.assertLess(remove_all_groups, add_group,
+ "Should add groups after removing all groups")
+ self.assertLess(add_group, min(add_vendor, add_system),
+ "Should add partitions after adding group")
+ self.assertLess(add_system, resize_system,
+ "Should resize system after adding it")
+ self.assertLess(add_vendor, resize_vendor,
+ "Should resize vendor after adding it")
+
+ def test_inc_groups(self):
+ source_info = common.LoadDictionaryFromLines("""
+super_partition_groups=group_foo group_bar group_baz
+super_group_foo_group_size={group_foo_size}
+super_group_bar_group_size={group_bar_size}
+""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+super_partition_groups=group_foo group_baz group_qux
+super_group_foo_group_size={group_foo_size}
+super_group_baz_group_size={group_baz_size}
+super_group_qux_group_size={group_qux_size}
+""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
+ group_qux_size=1 * GiB).split("\n"))
+
+ dp_diff = common.DynamicPartitionsDifference(target_info,
+ block_diffs=[],
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ lines = self.get_op_list(self.output_path)
+
+ removed = lines.index("remove_group group_bar")
+ shrunk = lines.index("resize_group group_foo 3221225472")
+ grown = lines.index("resize_group group_baz 4294967296")
+ added = lines.index("add_group group_qux 1073741824")
+
+    self.assertLess(max(removed, shrunk), min(grown, added),
+ "ops that remove / shrink partitions must precede ops that "
+ "grow / add partitions")
+
+ def test_incremental(self):
+ source_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor product product_services
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=system vendor product product_services
+""".format(group_foo_size=4 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor product odm
+super_partition_groups=group_foo group_bar
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=system vendor odm
+super_group_bar_group_size={group_bar_size}
+super_group_bar_partition_list=product
+""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))
+
+ block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("product", FakeSparseImage(1024 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("product_services", None,
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
+ src=None)]
+
+ dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ metadata_idx = self.script.lines.index(
+ 'assert(update_dynamic_partitions(package_extract_file('
+ '"dynamic_partitions_op_list")));')
+ self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
+ self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
+ for p in ("product", "system", "odm"):
+ patch_idx = self.script.lines.index("patch({});".format(p))
+ verify_idx = self.script.lines.index("verify({});".format(p))
+ self.assertLess(metadata_idx, patch_idx,
+ "Should patch {} after updating metadata".format(p))
+ self.assertLess(patch_idx, verify_idx,
+ "Should verify {} after patching".format(p))
+
+ self.assertNotIn("patch(product_services);", self.script.lines)
+
+ lines = self.get_op_list(self.output_path)
+
+ remove = lines.index("remove product_services")
+ move_product_out = lines.index("move product default")
+ shrink = lines.index("resize vendor 536870912")
+ shrink_group = lines.index("resize_group group_foo 3221225472")
+ add_group_bar = lines.index("add_group group_bar 1073741824")
+ add_odm = lines.index("add odm group_foo")
+ grow_existing = lines.index("resize system 1610612736")
+ grow_added = lines.index("resize odm 1073741824")
+ move_product_in = lines.index("move product group_bar")
+
+ max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
+ min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)
+
+ self.assertLess(max_idx_move_partition_out_foo, shrink_group,
+ "Must shrink group after partitions inside group are shrunk"
+ " / removed")
+
+ self.assertLess(add_group_bar, move_product_in,
+ "Must add partitions to group after group is added")
+
+ self.assertLess(max_idx_move_partition_out_foo,
+ min_idx_move_partition_in_foo,
+                    "Must shrink partitions / remove partitions from group "
+ "before adding / moving partitions into group")
+
+ def test_remove_partition(self):
+ source_info = common.LoadDictionaryFromLines("""
+blockimgdiff_versions=3,4
+use_dynamic_partitions=true
+dynamic_partition_list=foo
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=foo
+""".format(group_foo_size=4 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+blockimgdiff_versions=3,4
+use_dynamic_partitions=true
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+""".format(group_foo_size=4 * GiB).split("\n"))
+
+ common.OPTIONS.info_dict = target_info
+ common.OPTIONS.target_info_dict = target_info
+ common.OPTIONS.source_info_dict = source_info
+ common.OPTIONS.cache_size = 4 * 4096
+
+ block_diffs = [common.BlockDifference("foo", EmptyImage(),
+ src=DataImage("source", pad=True))]
+
+ dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ self.assertNotIn("block_image_update", str(self.script),
+ "Removed partition should not be patched.")
+
+ lines = self.get_op_list(self.output_path)
+ self.assertEqual(lines, ["remove foo"])
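The exact byte values asserted in the op lists above come straight from binary-unit arithmetic. A minimal sketch, assuming the GiB and MiB constants used by these tests are the binary units 1024**3 and 1024**2 (their definitions are not shown in this hunk):

    MiB = 1024 * 1024   # assumed binary mebibyte
    GiB = 1024 * MiB    # assumed binary gibibyte

    # Sizes asserted in the dynamic-partitions op lists above.
    assert 4 * GiB == 4294967296      # add_group group_foo 4294967296
    assert 3 * GiB == 3221225472      # resize system / resize_group group_foo
    assert 1 * GiB == 1073741824      # resize vendor, add_group group_bar
    assert 1536 * MiB == 1610612736   # resize system (incremental)
    assert 512 * MiB == 536870912     # resize vendor (incremental)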
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
new file mode 100644
index 0000000000..bb9ce8e276
--- /dev/null
+++ b/tools/releasetools/test_merge_target_files.py
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+    # Corrupt the last byte of the image.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os.path
+
+import common
+import test_utils
+from merge_target_files import (
+ read_config_list, validate_config_lists, default_system_item_list,
+ default_other_item_list, default_system_misc_info_keys)
+
+
+class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
+
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+
+ def test_read_config_list(self):
+ system_item_list_file = os.path.join(self.testdata_dir,
+ 'merge_config_system_item_list')
+ system_item_list = read_config_list(system_item_list_file)
+ expected_system_item_list = [
+ 'META/apkcerts.txt',
+ 'META/filesystem_config.txt',
+ 'META/root_filesystem_config.txt',
+ 'META/system_manifest.xml',
+ 'META/system_matrix.xml',
+ 'META/update_engine_config.txt',
+ 'PRODUCT/*',
+ 'ROOT/*',
+ 'SYSTEM/*',
+ ]
+ self.assertItemsEqual(system_item_list, expected_system_item_list)
+
+ def test_validate_config_lists_ReturnsFalseIfMissingDefaultItem(self):
+ system_item_list = default_system_item_list[:]
+ system_item_list.remove('SYSTEM/*')
+ self.assertFalse(
+ validate_config_lists(system_item_list, default_system_misc_info_keys,
+ default_other_item_list))
+
+ def test_validate_config_lists_ReturnsTrueIfDefaultItemInDifferentList(self):
+ system_item_list = default_system_item_list[:]
+ system_item_list.remove('ROOT/*')
+ other_item_list = default_other_item_list[:]
+ other_item_list.append('ROOT/*')
+ self.assertTrue(
+ validate_config_lists(system_item_list, default_system_misc_info_keys,
+ other_item_list))
+
+ def test_validate_config_lists_ReturnsTrueIfExtraItem(self):
+ system_item_list = default_system_item_list[:]
+ system_item_list.append('MY_NEW_PARTITION/*')
+ self.assertTrue(
+ validate_config_lists(system_item_list, default_system_misc_info_keys,
+ default_other_item_list))
+
+ def test_validate_config_lists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
+ for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
+ system_misc_info_keys = default_system_misc_info_keys[:]
+ system_misc_info_keys.append(bad_key)
+ self.assertFalse(
+ validate_config_lists(default_system_item_list, system_misc_info_keys,
+ default_other_item_list))
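For test_read_config_list above, the testdata file holds one item per line, mirroring expected_system_item_list. A minimal sketch of the behavior being exercised, assuming read_config_list simply returns the stripped non-empty lines (an illustration, not the actual merge_target_files implementation):

    def read_config_list_sketch(config_file_path):
      # Illustrative stand-in: each non-empty line ('META/apkcerts.txt',
      # 'SYSTEM/*', 'ROOT/*', ...) becomes one list entry.
      with open(config_file_path) as config_file:
        return [line.strip() for line in config_file if line.strip()]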
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 262e701b6d..466fde1af3 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -17,8 +17,6 @@
import copy
import os
import os.path
-import subprocess
-import unittest
import zipfile
import common
@@ -50,17 +48,25 @@ def construct_target_files(secondary=False):
"POSTINSTALL_OPTIONAL_system=true",
]))
+ ab_partitions = [
+ ('IMAGES', 'boot'),
+ ('IMAGES', 'system'),
+ ('IMAGES', 'vendor'),
+ ('RADIO', 'bootloader'),
+ ('RADIO', 'modem'),
+ ]
# META/ab_partitions.txt
- ab_partitions = ['boot', 'system', 'vendor']
target_files_zip.writestr(
'META/ab_partitions.txt',
- '\n'.join(ab_partitions))
+ '\n'.join([partition[1] for partition in ab_partitions]))
# Create dummy images for each of them.
- for partition in ab_partitions:
- target_files_zip.writestr('IMAGES/' + partition + '.img',
- os.urandom(len(partition)))
+ for path, partition in ab_partitions:
+ target_files_zip.writestr(
+ '{}/{}.img'.format(path, partition),
+ os.urandom(len(partition)))
+ # system_other shouldn't appear in META/ab_partitions.txt.
if secondary:
target_files_zip.writestr('IMAGES/system_other.img',
os.urandom(len("system_other")))
@@ -97,7 +103,7 @@ class MockScriptWriter(object):
self.script.append(('AssertSomeThumbprint',) + args)
-class BuildInfoTest(unittest.TestCase):
+class BuildInfoTest(test_utils.ReleaseToolsTestCase):
TEST_INFO_DICT = {
'build.prop' : {
@@ -182,6 +188,16 @@ class BuildInfoTest(unittest.TestCase):
self.assertRaises(KeyError,
lambda: target_info['build.prop']['ro.build.foo'])
+ def test___setitem__(self):
+ target_info = BuildInfo(copy.deepcopy(self.TEST_INFO_DICT), None)
+ self.assertEqual('value1', target_info['property1'])
+ target_info['property1'] = 'value2'
+ self.assertEqual('value2', target_info['property1'])
+
+ self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+ target_info['build.prop']['ro.build.foo'] = 'build-bar'
+ self.assertEqual('build-bar', target_info['build.prop']['ro.build.foo'])
+
def test_get(self):
target_info = BuildInfo(self.TEST_INFO_DICT, None)
self.assertEqual('value1', target_info.get('property1'))
@@ -201,6 +217,12 @@ class BuildInfoTest(unittest.TestCase):
self.assertRaises(KeyError,
lambda: target_info.get('build.prop')['ro.build.foo'])
+ def test_items(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ items = target_info.items()
+ self.assertIn(('property1', 'value1'), items)
+ self.assertIn(('property2', 4096), items)
+
def test_GetBuildProp(self):
target_info = BuildInfo(self.TEST_INFO_DICT, None)
self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
@@ -231,6 +253,23 @@ class BuildInfoTest(unittest.TestCase):
self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
'ro.build.nonexistent')
+ def test_vendor_fingerprint(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.vendor_fingerprint)
+
+ def test_vendor_fingerprint_blacklisted(self):
+ target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ del target_info_dict['vendor.build.prop']['ro.vendor.build.fingerprint']
+ target_info = BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
+ self.assertIsNone(target_info.vendor_fingerprint)
+
+ def test_vendor_fingerprint_without_vendor_build_prop(self):
+ target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ del target_info_dict['vendor.build.prop']
+ target_info = BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
+ self.assertIsNone(target_info.vendor_fingerprint)
+
def test_WriteMountOemScript(self):
target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
self.TEST_OEM_DICTS)
@@ -312,10 +351,7 @@ class BuildInfoTest(unittest.TestCase):
script_writer.script)
-class LoadOemDictsTest(unittest.TestCase):
-
- def tearDown(self):
- common.Cleanup()
+class LoadOemDictsTest(test_utils.ReleaseToolsTestCase):
def test_NoneDict(self):
self.assertIsNone(_LoadOemDicts(None))
@@ -348,7 +384,7 @@ class LoadOemDictsTest(unittest.TestCase):
self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
-class OtaFromTargetFilesTest(unittest.TestCase):
+class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
TEST_TARGET_INFO_DICT = {
'build.prop' : {
@@ -379,6 +415,7 @@ class OtaFromTargetFilesTest(unittest.TestCase):
# Reset the global options as in ota_from_target_files.py.
common.OPTIONS.incremental_source = None
common.OPTIONS.downgrade = False
+ common.OPTIONS.retrofit_dynamic_partitions = False
common.OPTIONS.timestamp = False
common.OPTIONS.wipe_user_data = False
common.OPTIONS.no_signing = False
@@ -390,9 +427,6 @@ class OtaFromTargetFilesTest(unittest.TestCase):
common.OPTIONS.search_path = test_utils.get_search_path()
self.assertIsNotNone(common.OPTIONS.search_path)
- def tearDown(self):
- common.Cleanup()
-
def test_GetPackageMetadata_abOta_full(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
@@ -484,6 +518,23 @@ class OtaFromTargetFilesTest(unittest.TestCase):
},
metadata)
+ def test_GetPackageMetadata_retrofitDynamicPartitions(self):
+ target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ common.OPTIONS.retrofit_dynamic_partitions = True
+ metadata = GetPackageMetadata(target_info)
+ self.assertDictEqual(
+ {
+ 'ota-retrofit-dynamic-partitions' : 'yes',
+ 'ota-type' : 'BLOCK',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-sdk-level' : '27',
+ 'post-security-patch-level' : '2017-12-01',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ },
+ metadata)
+
@staticmethod
def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
(target_info['build.prop']['ro.build.date.utc'],
@@ -542,6 +593,8 @@ class OtaFromTargetFilesTest(unittest.TestCase):
self.assertIn('IMAGES/boot.img', namelist)
self.assertIn('IMAGES/system.img', namelist)
self.assertIn('IMAGES/vendor.img', namelist)
+ self.assertIn('RADIO/bootloader.img', namelist)
+ self.assertIn('RADIO/modem.img', namelist)
self.assertIn(POSTINSTALL_CONFIG, namelist)
self.assertNotIn('IMAGES/system_other.img', namelist)
@@ -559,11 +612,33 @@ class OtaFromTargetFilesTest(unittest.TestCase):
self.assertIn('IMAGES/boot.img', namelist)
self.assertIn('IMAGES/system.img', namelist)
self.assertIn('IMAGES/vendor.img', namelist)
+ self.assertIn('RADIO/bootloader.img', namelist)
+ self.assertIn('RADIO/modem.img', namelist)
self.assertNotIn('IMAGES/system_other.img', namelist)
self.assertNotIn('IMAGES/system.map', namelist)
self.assertNotIn(POSTINSTALL_CONFIG, namelist)
+ def test_GetTargetFilesZipForSecondaryImages_withoutRadioImages(self):
+ input_file = construct_target_files(secondary=True)
+ common.ZipDelete(input_file, 'RADIO/bootloader.img')
+ common.ZipDelete(input_file, 'RADIO/modem.img')
+ target_file = GetTargetFilesZipForSecondaryImages(input_file)
+
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertIn('IMAGES/boot.img', namelist)
+ self.assertIn('IMAGES/system.img', namelist)
+ self.assertIn('IMAGES/vendor.img', namelist)
+ self.assertIn(POSTINSTALL_CONFIG, namelist)
+
+ self.assertNotIn('IMAGES/system_other.img', namelist)
+ self.assertNotIn('IMAGES/system.map', namelist)
+ self.assertNotIn('RADIO/bootloader.img', namelist)
+ self.assertNotIn('RADIO/modem.img', namelist)
+
def test_GetTargetFilesZipWithoutPostinstallConfig(self):
input_file = construct_target_files()
target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
@@ -656,14 +731,11 @@ class TestPropertyFiles(PropertyFiles):
)
-class PropertyFilesTest(unittest.TestCase):
+class PropertyFilesTest(test_utils.ReleaseToolsTestCase):
def setUp(self):
common.OPTIONS.no_signing = False
- def tearDown(self):
- common.Cleanup()
-
@staticmethod
def construct_zip_package(entries):
zip_file = common.MakeTempFile(suffix='.zip')
@@ -742,8 +814,7 @@ class PropertyFilesTest(unittest.TestCase):
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
@@ -766,8 +837,7 @@ class PropertyFilesTest(unittest.TestCase):
property_files = TestPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
# First get the raw metadata string (i.e. without padding space).
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
raw_length = len(raw_metadata)
@@ -801,8 +871,7 @@ class PropertyFilesTest(unittest.TestCase):
property_files = TestPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
# First get the raw metadata string (i.e. without padding space).
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
# Should pass the test if verification passes.
@@ -827,6 +896,7 @@ class StreamingPropertyFilesTest(PropertyFilesTest):
property_files.required)
self.assertEqual(
(
+ 'care_map.pb',
'care_map.txt',
'compatibility.zip',
),
@@ -859,8 +929,7 @@ class StreamingPropertyFilesTest(PropertyFilesTest):
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
@@ -883,8 +952,7 @@ class StreamingPropertyFilesTest(PropertyFilesTest):
property_files = StreamingPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
# First get the raw metadata string (i.e. without padding space).
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
# Should pass the test if verification passes.
@@ -924,6 +992,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest):
property_files.required)
self.assertEqual(
(
+ 'care_map.pb',
'care_map.txt',
'compatibility.zip',
),
@@ -962,11 +1031,11 @@ class AbOtaPropertyFilesTest(PropertyFilesTest):
'--signature_size', str(self.SIGNATURE_SIZE),
'--metadata_hash_file', metadata_sig_file,
'--payload_hash_file', payload_sig_file]
- proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = common.Run(cmd)
stdoutdata, _ = proc.communicate()
self.assertEqual(
0, proc.returncode,
- 'Failed to run brillo_update_payload: {}'.format(stdoutdata))
+ 'Failed to run brillo_update_payload:\n{}'.format(stdoutdata))
signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
@@ -1019,8 +1088,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest):
zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
property_files = AbOtaPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
@@ -1035,8 +1103,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest):
zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
property_files = AbOtaPropertyFiles()
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
property_files.Verify(zip_fp, raw_metadata)
@@ -1069,8 +1136,7 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTest):
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
with zipfile.ZipFile(zip_file) as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
@@ -1087,14 +1153,13 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTest):
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
with zipfile.ZipFile(zip_file) as zip_fp:
- # pylint: disable=protected-access
- raw_metadata = property_files._GetPropertyFilesString(
+ raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
property_files.Verify(zip_fp, raw_metadata)
-class PayloadSignerTest(unittest.TestCase):
+class PayloadSignerTest(test_utils.ReleaseToolsTestCase):
SIGFILE = 'sigfile.bin'
SIGNED_SIGFILE = 'signed-sigfile.bin'
@@ -1110,9 +1175,6 @@ class PayloadSignerTest(unittest.TestCase):
common.OPTIONS.package_key : None,
}
- def tearDown(self):
- common.Cleanup()
-
def _assertFilesEqual(self, file1, file2):
with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
self.assertEqual(fp1.read(), fp2.read())
@@ -1120,6 +1182,7 @@ class PayloadSignerTest(unittest.TestCase):
def test_init(self):
payload_signer = PayloadSigner()
self.assertEqual('openssl', payload_signer.signer)
+ self.assertEqual(256, payload_signer.key_size)
def test_init_withPassword(self):
common.OPTIONS.package_key = os.path.join(
@@ -1133,9 +1196,16 @@ class PayloadSignerTest(unittest.TestCase):
def test_init_withExternalSigner(self):
common.OPTIONS.payload_signer = 'abc'
common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
+ common.OPTIONS.payload_signer_key_size = '512'
payload_signer = PayloadSigner()
self.assertEqual('abc', payload_signer.signer)
self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
+ self.assertEqual(512, payload_signer.key_size)
+
+ def test_GetKeySizeInBytes_512Bytes(self):
+ signing_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+ key_size = PayloadSigner._GetKeySizeInBytes(signing_key)
+ self.assertEqual(512, key_size)
def test_Sign(self):
payload_signer = PayloadSigner()
@@ -1173,7 +1243,7 @@ class PayloadSignerTest(unittest.TestCase):
self._assertFilesEqual(verify_file, signed_file)
-class PayloadTest(unittest.TestCase):
+class PayloadTest(test_utils.ReleaseToolsTestCase):
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
@@ -1187,9 +1257,6 @@ class PayloadTest(unittest.TestCase):
common.OPTIONS.package_key : None,
}
- def tearDown(self):
- common.Cleanup()
-
@staticmethod
def _create_payload_full(secondary=False):
target_file = construct_target_files(secondary)
@@ -1227,7 +1294,7 @@ class PayloadTest(unittest.TestCase):
target_file = construct_target_files()
common.ZipDelete(target_file, 'IMAGES/vendor.img')
payload = Payload()
- self.assertRaises(AssertionError, payload.Generate, target_file)
+ self.assertRaises(common.ExternalError, payload.Generate, target_file)
def test_Sign_full(self):
payload = self._create_payload_full()
@@ -1275,7 +1342,7 @@ class PayloadTest(unittest.TestCase):
payload = self._create_payload_full()
payload_signer = PayloadSigner()
payload_signer.signer_args.append('bad-option')
- self.assertRaises(AssertionError, payload.Sign, payload_signer)
+ self.assertRaises(common.ExternalError, payload.Sign, payload_signer)
def test_WriteToZip(self):
payload = self._create_payload_full()
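On the key-size assertions added above (test_init expecting 256 and test_GetKeySizeInBytes_512Bytes expecting 512): an RSA key's size in bytes is its modulus length in bits divided by 8. A quick check of that arithmetic, assuming the default signing key is 2048-bit (which the asserted 256 implies) and testkey_RSA4096.key is 4096-bit:

    # Key/signature size in bytes == modulus bits // 8.
    assert 2048 // 8 == 256   # default signing key -> key_size 256
    assert 4096 // 8 == 512   # testkey_RSA4096.key -> key_size 512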
diff --git a/tools/releasetools/test_rangelib.py b/tools/releasetools/test_rangelib.py
index e1811870fe..1251e11f0d 100644
--- a/tools/releasetools/test_rangelib.py
+++ b/tools/releasetools/test_rangelib.py
@@ -14,11 +14,11 @@
# limitations under the License.
#
-import unittest
-
from rangelib import RangeSet
+from test_utils import ReleaseToolsTestCase
+
-class RangeSetTest(unittest.TestCase):
+class RangeSetTest(ReleaseToolsTestCase):
def test_union(self):
self.assertEqual(RangeSet("10-19 30-34").union(RangeSet("18-29")),
@@ -129,8 +129,8 @@ class RangeSetTest(unittest.TestCase):
self.assertEqual(
RangeSet.parse_raw(RangeSet("0-9").to_string_raw()),
RangeSet("0-9"))
- self.assertEqual(RangeSet.parse_raw(
- RangeSet("2-10 12").to_string_raw()),
+ self.assertEqual(
+ RangeSet.parse_raw(RangeSet("2-10 12").to_string_raw()),
RangeSet("2-10 12"))
self.assertEqual(
RangeSet.parse_raw(RangeSet("11 2-10 12 1 0").to_string_raw()),
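The reflowed assertions above all exercise the same to_string_raw / parse_raw round trip; a short usage sketch with values taken from the test:

    from rangelib import RangeSet

    ranges = RangeSet("2-10 12")
    raw = ranges.to_string_raw()              # raw encoding of the block ranges
    assert RangeSet.parse_raw(raw) == ranges  # lossless round trip, as asserted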
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 26f9e10e84..710fde5251 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -14,20 +14,18 @@
# limitations under the License.
#
-from __future__ import print_function
-
import base64
import os.path
-import unittest
import zipfile
import common
import test_utils
from sign_target_files_apks import (
- EditTags, ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
+ CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
+ ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
-class SignTargetFilesApksTest(unittest.TestCase):
+class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
MAC_PERMISSIONS_XML = """<?xml version="1.0" encoding="iso-8859-1"?>
<policy>
@@ -35,12 +33,14 @@ class SignTargetFilesApksTest(unittest.TestCase):
<signer signature="{}"><seinfo value="media"/></signer>
</policy>"""
+ # pylint: disable=line-too-long
+ APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+"""
+
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
- def tearDown(self):
- common.Cleanup()
-
def test_EditTags(self):
self.assertEqual(EditTags('dev-keys'), ('release-keys'))
self.assertEqual(EditTags('test-keys'), ('release-keys'))
@@ -53,36 +53,60 @@ class SignTargetFilesApksTest(unittest.TestCase):
def test_RewriteProps(self):
props = (
- ('', '\n'),
+ ('', ''),
('ro.build.fingerprint=foo/bar/dev-keys',
- 'ro.build.fingerprint=foo/bar/release-keys\n'),
+ 'ro.build.fingerprint=foo/bar/release-keys'),
('ro.build.thumbprint=foo/bar/dev-keys',
- 'ro.build.thumbprint=foo/bar/release-keys\n'),
+ 'ro.build.thumbprint=foo/bar/release-keys'),
('ro.vendor.build.fingerprint=foo/bar/dev-keys',
- 'ro.vendor.build.fingerprint=foo/bar/release-keys\n'),
+ 'ro.vendor.build.fingerprint=foo/bar/release-keys'),
('ro.vendor.build.thumbprint=foo/bar/dev-keys',
- 'ro.vendor.build.thumbprint=foo/bar/release-keys\n'),
- ('# comment line 1', '# comment line 1\n'),
+ 'ro.vendor.build.thumbprint=foo/bar/release-keys'),
+ ('ro.odm.build.fingerprint=foo/bar/test-keys',
+ 'ro.odm.build.fingerprint=foo/bar/release-keys'),
+ ('ro.odm.build.thumbprint=foo/bar/test-keys',
+ 'ro.odm.build.thumbprint=foo/bar/release-keys'),
+ ('ro.product.build.fingerprint=foo/bar/dev-keys',
+ 'ro.product.build.fingerprint=foo/bar/release-keys'),
+ ('ro.product.build.thumbprint=foo/bar/dev-keys',
+ 'ro.product.build.thumbprint=foo/bar/release-keys'),
+ ('ro.product_services.build.fingerprint=foo/bar/test-keys',
+ 'ro.product_services.build.fingerprint=foo/bar/release-keys'),
+ ('ro.product_services.build.thumbprint=foo/bar/test-keys',
+ 'ro.product_services.build.thumbprint=foo/bar/release-keys'),
+ ('# comment line 1', '# comment line 1'),
('ro.bootimage.build.fingerprint=foo/bar/dev-keys',
- 'ro.bootimage.build.fingerprint=foo/bar/release-keys\n'),
+ 'ro.bootimage.build.fingerprint=foo/bar/release-keys'),
('ro.build.description='
'sailfish-user 8.0.0 OPR6.170623.012 4283428 dev-keys',
'ro.build.description='
- 'sailfish-user 8.0.0 OPR6.170623.012 4283428 release-keys\n'),
- ('ro.build.tags=dev-keys', 'ro.build.tags=release-keys\n'),
- ('# comment line 2', '# comment line 2\n'),
+ 'sailfish-user 8.0.0 OPR6.170623.012 4283428 release-keys'),
+ ('ro.build.tags=dev-keys', 'ro.build.tags=release-keys'),
+ ('ro.build.tags=test-keys', 'ro.build.tags=release-keys'),
+ ('ro.system.build.tags=dev-keys',
+ 'ro.system.build.tags=release-keys'),
+ ('ro.vendor.build.tags=dev-keys',
+ 'ro.vendor.build.tags=release-keys'),
+ ('ro.odm.build.tags=dev-keys',
+ 'ro.odm.build.tags=release-keys'),
+ ('ro.product.build.tags=dev-keys',
+ 'ro.product.build.tags=release-keys'),
+ ('ro.product_services.build.tags=dev-keys',
+ 'ro.product_services.build.tags=release-keys'),
+ ('# comment line 2', '# comment line 2'),
('ro.build.display.id=OPR6.170623.012 dev-keys',
- 'ro.build.display.id=OPR6.170623.012\n'),
- ('# comment line 3', '# comment line 3\n'),
+ 'ro.build.display.id=OPR6.170623.012'),
+ ('# comment line 3', '# comment line 3'),
)
# Assert the case for each individual line.
- for prop, output in props:
- self.assertEqual(RewriteProps(prop), output)
+ for prop, expected in props:
+ self.assertEqual(expected + '\n', RewriteProps(prop))
# Concatenate all the input lines.
- self.assertEqual(RewriteProps('\n'.join([prop[0] for prop in props])),
- ''.join([prop[1] for prop in props]))
+ self.assertEqual(
+ '\n'.join([prop[1] for prop in props]) + '\n',
+ RewriteProps('\n'.join([prop[0] for prop in props])))
def test_ReplaceVerityKeyId(self):
BOOT_CMDLINE1 = (
@@ -211,3 +235,276 @@ class SignTargetFilesApksTest(unittest.TestCase):
cert2_path[:-9] : 'non-existent',
}
self.assertEqual(output_xml, ReplaceCerts(input_xml))
+
+ def test_CheckApkAndApexKeysAvailable(self):
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
+ input_zip.writestr('SYSTEM/app/App1.apk', "App1-content")
+ input_zip.writestr('SYSTEM/app/App2.apk.gz', "App2-content")
+
+ apk_key_map = {
+ 'App1.apk' : 'key1',
+ 'App2.apk' : 'key2',
+ 'App3.apk' : 'key3',
+ }
+ with zipfile.ZipFile(input_file) as input_zip:
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None, {})
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, '.gz', {})
+
+      # 'App2.apk.gz' won't be considered an APK.
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None, {})
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, '.xz', {})
+
+ del apk_key_map['App2.apk']
+ self.assertRaises(
+ AssertionError, CheckApkAndApexKeysAvailable, input_zip, apk_key_map,
+ '.gz', {})
+
+ def test_CheckApkAndApexKeysAvailable_invalidApexKeys(self):
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
+ input_zip.writestr('SYSTEM/apex/Apex1.apex', "Apex1-content")
+ input_zip.writestr('SYSTEM/apex/Apex2.apex', "Apex2-content")
+
+ apk_key_map = {
+ 'Apex1.apex' : 'key1',
+ 'Apex2.apex' : 'key2',
+ 'Apex3.apex' : 'key3',
+ }
+ apex_keys = {
+ 'Apex1.apex' : ('payload-key1', 'container-key1'),
+ 'Apex2.apex' : ('payload-key2', 'container-key2'),
+ }
+ with zipfile.ZipFile(input_file) as input_zip:
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None, apex_keys)
+
+ # Fine to have both keys as PRESIGNED.
+ apex_keys['Apex2.apex'] = ('PRESIGNED', 'PRESIGNED')
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None, apex_keys)
+
+ # Having only one of them as PRESIGNED is not allowed.
+ apex_keys['Apex2.apex'] = ('payload-key2', 'PRESIGNED')
+ self.assertRaises(
+ AssertionError, CheckApkAndApexKeysAvailable, input_zip, apk_key_map,
+ None, apex_keys)
+
+ apex_keys['Apex2.apex'] = ('PRESIGNED', 'container-key1')
+ self.assertRaises(
+ AssertionError, CheckApkAndApexKeysAvailable, input_zip, apk_key_map,
+ None, apex_keys)
+
+ def test_GetApkFileInfo(self):
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/apps/Chats.apk", None, [])
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/apps/Chats.apk", None, [])
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/apps/Chats.dat", None, [])
+ self.assertFalse(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ def test_GetApkFileInfo_withCompressedApks(self):
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/apps/Chats.apk.gz", ".gz", [])
+ self.assertTrue(is_apk)
+ self.assertTrue(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/apps/Chats.apk.gz", ".xz", [])
+ self.assertFalse(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ self.assertRaises(
+ AssertionError, GetApkFileInfo, "PRODUCT/apps/Chats.apk", "", [])
+
+ self.assertRaises(
+ AssertionError, GetApkFileInfo, "PRODUCT/apps/Chats.apk", "apk", [])
+
+ def test_GetApkFileInfo_withSkippedPrefixes(self):
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/preloads/apps/Chats.apk", None, set())
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "PRODUCT/preloads/apps/Chats.apk",
+ None,
+ set(["PRODUCT/preloads/"]))
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None,
+ set(["SYSTEM/preloads/", "SYSTEM_OTHER/preloads/"]))
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.apk.gz",
+ ".gz",
+ set(["PRODUCT/prebuilts/", "SYSTEM_OTHER/preloads/"]))
+ self.assertTrue(is_apk)
+ self.assertTrue(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.dat",
+ None,
+ set(["SYSTEM_OTHER/preloads/"]))
+ self.assertFalse(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertFalse(should_be_skipped)
+
+ def test_GetApkFileInfo_checkSkippedPrefixesInput(self):
+ # set
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None,
+ set(["SYSTEM_OTHER/preloads/"]))
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ # tuple
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None,
+ ("SYSTEM_OTHER/preloads/",))
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ # list
+ (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
+ "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None,
+ ["SYSTEM_OTHER/preloads/"])
+ self.assertTrue(is_apk)
+ self.assertFalse(is_compressed)
+ self.assertTrue(should_be_skipped)
+
+ # str is invalid.
+ self.assertRaises(
+ AssertionError, GetApkFileInfo, "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None, "SYSTEM_OTHER/preloads/")
+
+ # None is invalid.
+ self.assertRaises(
+ AssertionError, GetApkFileInfo, "SYSTEM_OTHER/preloads/apps/Chats.apk",
+ None, None)
+
+ def test_ReadApexKeysInfo(self):
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual({
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+ 'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+ 'build/target/product/security/testkey'),
+ }, keys_info)
+
+ def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
+    # Mismatching container certificate / private key.
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
+ 'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
+ 'container_certificate="build/target/product/security/testkey.x509.pem" '
+ 'container_private_key="build/target/product/security/testkey2.pk8"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ self.assertRaises(ValueError, ReadApexKeysInfo, target_files_zip)
+
+ def test_ReadApexKeysInfo_missingPayloadPrivateKey(self):
+ # Invalid lines will be skipped.
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
+ 'container_certificate="build/target/product/security/testkey.x509.pem" '
+ 'container_private_key="build/target/product/security/testkey.pk8"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual({
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+ 'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+ 'build/target/product/security/testkey'),
+ }, keys_info)
+
+ def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
+ # Invalid lines will be skipped.
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
+ 'container_certificate="build/target/product/security/testkey.x509.pem" '
+ 'container_private_key="build/target/product/security/testkey.pk8"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual({
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+ 'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+ 'build/target/product/security/testkey'),
+ }, keys_info)
+
+ def test_ReadApexKeysInfo_presignedKeys(self):
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'private_key="PRESIGNED" '
+ 'public_key="PRESIGNED" '
+ 'container_certificate="PRESIGNED" '
+ 'container_private_key="PRESIGNED"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual({
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+            'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+            'build/target/product/security/testkey'),
+ }, keys_info)
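The expected dicts in the ReadApexKeysInfo tests above pair each APEX's payload private key with a container key name obtained by dropping the '.x509.pem' suffix from the container certificate path. A minimal sketch of that derivation (an illustration only, not the actual ReadApexKeysInfo code):

    def container_key_name_sketch(container_cert):
      # 'build/target/product/security/testkey.x509.pem' -> '.../testkey'
      suffix = '.x509.pem'
      assert container_cert.endswith(suffix)
      return container_cert[:-len(suffix)]

    assert (container_key_name_sketch(
        'build/target/product/security/testkey.x509.pem') ==
            'build/target/product/security/testkey')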
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index a15ff5b2a5..edb3d41d29 100644
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -18,12 +18,18 @@
Utils for running unittests.
"""
+import logging
import os
import os.path
import struct
+import sys
+import unittest
import common
+# Some test runners don't like output on stderr.
+logging.basicConfig(stream=sys.stdout)
+
def get_testdata_dir():
"""Returns the testdata dir, in relative to the script dir."""
@@ -110,3 +116,10 @@ def construct_sparse_image(chunks):
fp.write(os.urandom(data_size))
return sparse_image
+
+
+class ReleaseToolsTestCase(unittest.TestCase):
+ """A common base class for all the releasetools unittests."""
+
+ def tearDown(self):
+ common.Cleanup()
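The new ReleaseToolsTestCase base class lets individual tests drop their own tearDown methods. A short usage sketch (a hypothetical test; only common.MakeTempFile, common.Cleanup and the base class itself come from the code above):

    import os

    import common
    import test_utils


    class ExampleTempFileTest(test_utils.ReleaseToolsTestCase):
      """Hypothetical test: files from common.MakeTempFile are removed by the
      inherited tearDown(), which calls common.Cleanup()."""

      def test_scratch_file(self):
        scratch = common.MakeTempFile(suffix='.txt')
        with open(scratch, 'w') as fp:
          fp.write('scratch data')
        self.assertTrue(os.path.exists(scratch))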
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
new file mode 100644
index 0000000000..5f619ec79a
--- /dev/null
+++ b/tools/releasetools/test_validate_target_files.py
@@ -0,0 +1,216 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Unittests for validate_target_files.py."""
+
+import os
+import os.path
+import shutil
+import zipfile
+
+import common
+import test_utils
+from rangelib import RangeSet
+from validate_target_files import (ValidateVerifiedBootImages,
+ ValidateFileConsistency)
+from verity_utils import CreateVerityImageBuilder
+
+
+class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase):
+
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+
+ def _generate_boot_image(self, output_file):
+ kernel = common.MakeTempFile(prefix='kernel-')
+ with open(kernel, 'wb') as kernel_fp:
+ kernel_fp.write(os.urandom(10))
+
+ cmd = ['mkbootimg', '--kernel', kernel, '-o', output_file]
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ self.assertEqual(
+ 0, proc.returncode,
+ "Failed to run mkbootimg: {}".format(stdoutdata))
+
+ cmd = ['boot_signer', '/boot', output_file,
+ os.path.join(self.testdata_dir, 'testkey.pk8'),
+ os.path.join(self.testdata_dir, 'testkey.x509.pem'), output_file]
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ self.assertEqual(
+ 0, proc.returncode,
+ "Failed to sign boot image with boot_signer: {}".format(stdoutdata))
+
+ def test_ValidateVerifiedBootImages_bootImage(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+ self._generate_boot_image(boot_image)
+
+ info_dict = {
+ 'boot_signer' : 'true',
+ }
+ options = {
+ 'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+ }
+ ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
+ def test_ValidateVerifiedBootImages_bootImage_wrongKey(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+ self._generate_boot_image(boot_image)
+
+ info_dict = {
+ 'boot_signer' : 'true',
+ }
+ options = {
+ 'verity_key' : os.path.join(self.testdata_dir, 'verity.x509.pem'),
+ }
+ self.assertRaises(
+ AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
+ options)
+
+ def test_ValidateVerifiedBootImages_bootImage_corrupted(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+ self._generate_boot_image(boot_image)
+
+ # Corrupt the last byte of the image.
+ with open(boot_image, 'r+b') as boot_fp:
+ boot_fp.seek(-1, os.SEEK_END)
+ last_byte = boot_fp.read(1)
+ last_byte = chr(255 - ord(last_byte))
+ boot_fp.seek(-1, os.SEEK_END)
+ boot_fp.write(last_byte)
+
+ info_dict = {
+ 'boot_signer' : 'true',
+ }
+ options = {
+ 'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+ }
+ self.assertRaises(
+ AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
+ options)
+
+ def _generate_system_image(self, output_file, system_root=None,
+ file_map=None):
+ prop_dict = {
+ 'partition_size': str(1024 * 1024),
+ 'verity': 'true',
+ 'verity_block_device': '/dev/block/system',
+ 'verity_key' : os.path.join(self.testdata_dir, 'testkey'),
+ 'verity_fec': "true",
+ 'verity_signer_cmd': 'verity_signer',
+ }
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ image_size = verity_image_builder.CalculateMaxImageSize()
+
+ # Use an empty root directory.
+ if not system_root:
+ system_root = common.MakeTempDir()
+ cmd = ['mkuserimg_mke2fs', '-s', system_root, output_file, 'ext4',
+ '/system', str(image_size), '-j', '0']
+ if file_map:
+ cmd.extend(['-B', file_map])
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ self.assertEqual(
+ 0, proc.returncode,
+ "Failed to create system image with mkuserimg_mke2fs: {}".format(
+ stdoutdata))
+
+ # Append the verity metadata.
+ verity_image_builder.Build(output_file)
+
+ def test_ValidateVerifiedBootImages_systemImage(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ system_image = os.path.join(input_tmp, 'IMAGES', 'system.img')
+ self._generate_system_image(system_image)
+
+ # Pack the verity key.
+ verity_key_mincrypt = os.path.join(
+ input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+ os.makedirs(os.path.dirname(verity_key_mincrypt))
+ shutil.copyfile(
+ os.path.join(self.testdata_dir, 'testkey_mincrypt'),
+ verity_key_mincrypt)
+
+ info_dict = {
+ 'verity' : 'true',
+ }
+ options = {
+ 'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+ 'verity_key_mincrypt' : verity_key_mincrypt,
+ }
+ ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
+ def test_ValidateFileConsistency_incompleteRange(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ system_image = os.path.join(input_tmp, 'IMAGES', 'system.img')
+ system_root = os.path.join(input_tmp, "SYSTEM")
+ os.mkdir(system_root)
+
+ # Write test files that contain multiple blocks of zeros; these zero blocks
+ # will be omitted by the kernel, so each test file occupies one block range
+ # in the final system image.
+ with open(os.path.join(system_root, 'a'), 'w') as f:
+ f.write("aaa")
+ f.write('\0' * 4096 * 3)
+ with open(os.path.join(system_root, 'b'), 'w') as f:
+ f.write("bbb")
+ f.write('\0' * 4096 * 3)
+
+ raw_file_map = os.path.join(input_tmp, 'IMAGES', 'raw_system.map')
+ self._generate_system_image(system_image, system_root, raw_file_map)
+
+ # Parse the generated file map and update the block ranges for each file.
+ file_map_list = {}
+ image_ranges = RangeSet()
+ with open(raw_file_map, 'r') as f:
+ for line in f.readlines():
+ info = line.split()
+ self.assertEqual(2, len(info))
+ image_ranges = image_ranges.union(RangeSet(info[1]))
+ file_map_list[info[0]] = RangeSet(info[1])
+
+ # Add one unoccupied block as the shared block for all test files.
+ mock_shared_block = RangeSet("10-20").subtract(image_ranges).first(1)
+ with open(os.path.join(input_tmp, 'IMAGES', 'system.map'), 'w') as f:
+ for key in sorted(file_map_list.keys()):
+ line = "{} {}\n".format(
+ key, file_map_list[key].union(mock_shared_block))
+ f.write(line)
+
+ # Prepare the target zip file.
+ input_file = common.MakeTempFile()
+ all_entries = ['SYSTEM/', 'SYSTEM/b', 'SYSTEM/a', 'IMAGES/',
+ 'IMAGES/system.map', 'IMAGES/system.img']
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
+ for name in all_entries:
+ input_zip.write(os.path.join(input_tmp, name), arcname=name)
+
+ input_zip = zipfile.ZipFile(input_file, 'r')
+ info_dict = {'extfs_sparse_flag': '-s'}
+
+ # Expect the validation to pass, with both files skipped due to their
+ # 'incomplete' block ranges.
+ ValidateFileConsistency(input_zip, input_tmp, info_dict)
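+
+
+# A standalone sketch of the RangeSet arithmetic used in the test above to
+# fabricate a shared block. The block numbers are arbitrary placeholders, not
+# values produced by an actual image.
+def _example_pick_mock_shared_block():
+  # Blocks already claimed by files in the (hypothetical) image.
+  image_ranges = RangeSet("0-9").union(RangeSet("12-15"))
+  # Pick the first block in a small window that no file occupies...
+  mock_shared_block = RangeSet("10-20").subtract(image_ranges).first(1)
+  # ...and union it into a file's own range, as the test does for each entry.
+  file_ranges = RangeSet("0-3").union(mock_shared_block)
+  return mock_shared_block, file_ranges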
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
new file mode 100644
index 0000000000..e0607c8831
--- /dev/null
+++ b/tools/releasetools/test_verity_utils.py
@@ -0,0 +1,380 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Unittests for verity_utils.py."""
+
+import copy
+import math
+import os.path
+import random
+
+import common
+import sparse_img
+from rangelib import RangeSet
+from test_utils import get_testdata_dir, ReleaseToolsTestCase
+from verity_utils import (
+ CreateHashtreeInfoGenerator, CreateVerityImageBuilder, HashtreeInfo,
+ VerifiedBootVersion1HashtreeInfoGenerator)
+
+BLOCK_SIZE = common.BLOCK_SIZE
+
+
+class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase):
+
+ def setUp(self):
+ self.testdata_dir = get_testdata_dir()
+
+ self.partition_size = 1024 * 1024
+ self.prop_dict = {
+ 'verity': 'true',
+ 'verity_fec': 'true',
+ 'system_verity_block_device': '/dev/block/system',
+ 'system_size': self.partition_size
+ }
+
+ self.hash_algorithm = "sha256"
+ self.fixed_salt = \
+ "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
+ self.expected_root_hash = \
+ "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d"
+
+ def _create_simg(self, raw_data):
+ output_file = common.MakeTempFile()
+ raw_image = common.MakeTempFile()
+ with open(raw_image, 'wb') as f:
+ f.write(raw_data)
+
+ cmd = ["img2simg", raw_image, output_file, '4096']
+ p = common.Run(cmd)
+ p.communicate()
+ self.assertEqual(0, p.returncode)
+
+ return output_file
+
+ def _generate_image(self):
+ partition_size = 1024 * 1024
+ prop_dict = {
+ 'partition_size': str(partition_size),
+ 'verity': 'true',
+ 'verity_block_device': '/dev/block/system',
+ 'verity_key': os.path.join(self.testdata_dir, 'testkey'),
+ 'verity_fec': 'true',
+ 'verity_signer_cmd': 'verity_signer',
+ }
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ self.assertIsNotNone(verity_image_builder)
+ adjusted_size = verity_image_builder.CalculateMaxImageSize()
+
+ raw_image = ""
+ for i in range(adjusted_size):
+ raw_image += str(i % 10)
+
+ output_file = self._create_simg(raw_image)
+
+ # Append the verity metadata.
+ verity_image_builder.Build(output_file)
+
+ return output_file
+
+ def test_CreateHashtreeInfoGenerator(self):
+ image_file = sparse_img.SparseImage(self._generate_image())
+
+ generator = CreateHashtreeInfoGenerator(
+ 'system', image_file, self.prop_dict)
+ self.assertEqual(
+ VerifiedBootVersion1HashtreeInfoGenerator, type(generator))
+ self.assertEqual(self.partition_size, generator.partition_size)
+ self.assertTrue(generator.fec_supported)
+
+ def test_DecomposeSparseImage(self):
+ image_file = sparse_img.SparseImage(self._generate_image())
+
+ generator = VerifiedBootVersion1HashtreeInfoGenerator(
+ self.partition_size, 4096, True)
+ generator.DecomposeSparseImage(image_file)
+ self.assertEqual(991232, generator.filesystem_size)
+ self.assertEqual(12288, generator.hashtree_size)
+ self.assertEqual(32768, generator.metadata_size)
+
+ def test_ParseHashtreeMetadata(self):
+ image_file = sparse_img.SparseImage(self._generate_image())
+ generator = VerifiedBootVersion1HashtreeInfoGenerator(
+ self.partition_size, 4096, True)
+ generator.DecomposeSparseImage(image_file)
+
+ # pylint: disable=protected-access
+ generator._ParseHashtreeMetadata()
+
+ self.assertEqual(
+ self.hash_algorithm, generator.hashtree_info.hash_algorithm)
+ self.assertEqual(self.fixed_salt, generator.hashtree_info.salt)
+ self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash)
+
+ def test_ValidateHashtree_smoke(self):
+ generator = VerifiedBootVersion1HashtreeInfoGenerator(
+ self.partition_size, 4096, True)
+ generator.image = sparse_img.SparseImage(self._generate_image())
+
+ generator.hashtree_info = info = HashtreeInfo()
+ info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+ info.hashtree_range = RangeSet(
+ data=[991232 / 4096, (991232 + 12288) / 4096])
+ info.hash_algorithm = self.hash_algorithm
+ info.salt = self.fixed_salt
+ info.root_hash = self.expected_root_hash
+
+ self.assertTrue(generator.ValidateHashtree())
+
+ def test_ValidateHashtree_failure(self):
+ generator = VerifiedBootVersion1HashtreeInfoGenerator(
+ self.partition_size, 4096, True)
+ generator.image = sparse_img.SparseImage(self._generate_image())
+
+ generator.hashtree_info = info = HashtreeInfo()
+ info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+ info.hashtree_range = RangeSet(
+ data=[991232 / 4096, (991232 + 12288) / 4096])
+ info.hash_algorithm = self.hash_algorithm
+ info.salt = self.fixed_salt
+ info.root_hash = "a" + self.expected_root_hash[1:]
+
+ self.assertFalse(generator.ValidateHashtree())
+
+ def test_Generate(self):
+ image_file = sparse_img.SparseImage(self._generate_image())
+ generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
+ info = generator.Generate(image_file)
+
+ self.assertEqual(RangeSet(data=[0, 991232 / 4096]), info.filesystem_range)
+ self.assertEqual(RangeSet(data=[991232 / 4096, (991232 + 12288) / 4096]),
+ info.hashtree_range)
+ self.assertEqual(self.hash_algorithm, info.hash_algorithm)
+ self.assertEqual(self.fixed_salt, info.salt)
+ self.assertEqual(self.expected_root_hash, info.root_hash)
+
+
+class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase):
+
+ DEFAULT_PARTITION_SIZE = 4096 * 1024
+ DEFAULT_PROP_DICT = {
+ 'partition_size': str(DEFAULT_PARTITION_SIZE),
+ 'verity': 'true',
+ 'verity_block_device': '/dev/block/system',
+ 'verity_key': os.path.join(get_testdata_dir(), 'testkey'),
+ 'verity_fec': 'true',
+ 'verity_signer_cmd': 'verity_signer',
+ }
+
+ def test_init(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ self.assertIsNotNone(verity_image_builder)
+ self.assertEqual(1, verity_image_builder.version)
+
+ def test_init_MissingProps(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ del prop_dict['verity']
+ self.assertIsNone(CreateVerityImageBuilder(prop_dict))
+
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ del prop_dict['verity_block_device']
+ self.assertIsNone(CreateVerityImageBuilder(prop_dict))
+
+ def test_CalculateMaxImageSize(self):
+ verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
+ size = verity_image_builder.CalculateMaxImageSize()
+ self.assertLess(size, self.DEFAULT_PARTITION_SIZE)
+
+ # Same result by explicitly passing the partition size.
+ self.assertEqual(
+ verity_image_builder.CalculateMaxImageSize(),
+ verity_image_builder.CalculateMaxImageSize(
+ self.DEFAULT_PARTITION_SIZE))
+
+ @staticmethod
+ def _BuildAndVerify(prop, verify_key):
+ verity_image_builder = CreateVerityImageBuilder(prop)
+ image_size = verity_image_builder.CalculateMaxImageSize()
+
+ # Build the sparse image with verity metadata.
+ input_dir = common.MakeTempDir()
+ image = common.MakeTempFile(suffix='.img')
+ cmd = ['mkuserimg_mke2fs', input_dir, image, 'ext4', '/system',
+ str(image_size), '-j', '0', '-s']
+ common.RunAndCheckOutput(cmd)
+ verity_image_builder.Build(image)
+
+ # Verify the verity metadata.
+ cmd = ['verity_verifier', image, '-mincrypt', verify_key]
+ common.RunAndCheckOutput(cmd)
+
+ def test_Build(self):
+ self._BuildAndVerify(
+ self.DEFAULT_PROP_DICT,
+ os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
+
+ def test_Build_SanityCheck(self):
+ # A sanity check for the test itself: the image shouldn't be verifiable
+ # with the wrong key.
+ self.assertRaises(
+ common.ExternalError,
+ self._BuildAndVerify,
+ self.DEFAULT_PROP_DICT,
+ os.path.join(get_testdata_dir(), 'verity_mincrypt'))
+
+ def test_Build_FecDisabled(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ del prop_dict['verity_fec']
+ self._BuildAndVerify(
+ prop_dict,
+ os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
+
+ def test_Build_SquashFs(self):
+ verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
+ verity_image_builder.CalculateMaxImageSize()
+
+ # Build the sparse image with verity metadata.
+ input_dir = common.MakeTempDir()
+ image = common.MakeTempFile(suffix='.img')
+ cmd = ['mksquashfsimage.sh', input_dir, image, '-s']
+ common.RunAndCheckOutput(cmd)
+ verity_image_builder.PadSparseImage(image)
+ verity_image_builder.Build(image)
+
+ # Verify the verity metadata.
+ cmd = ["verity_verifier", image, '-mincrypt',
+ os.path.join(get_testdata_dir(), 'testkey_mincrypt')]
+ common.RunAndCheckOutput(cmd)
+
+
+class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase):
+
+ DEFAULT_PROP_DICT = {
+ 'partition_size': str(4096 * 1024),
+ 'partition_name': 'system',
+ 'avb_avbtool': 'avbtool',
+ 'avb_hashtree_enable': 'true',
+ 'avb_add_hashtree_footer_args': '',
+ }
+
+ def test_init(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ self.assertIsNotNone(verity_image_builder)
+ self.assertEqual(2, verity_image_builder.version)
+
+ def test_init_MissingProps(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ del prop_dict['avb_hashtree_enable']
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ self.assertIsNone(verity_image_builder)
+
+ def test_Build(self):
+ prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+ verity_image_builder = CreateVerityImageBuilder(prop_dict)
+ self.assertIsNotNone(verity_image_builder)
+ self.assertEqual(2, verity_image_builder.version)
+
+ input_dir = common.MakeTempDir()
+ image_dir = common.MakeTempDir()
+ system_image = os.path.join(image_dir, 'system.img')
+ system_image_size = verity_image_builder.CalculateMaxImageSize()
+ cmd = ['mkuserimg_mke2fs', input_dir, system_image, 'ext4', '/system',
+ str(system_image_size), '-j', '0', '-s']
+ common.RunAndCheckOutput(cmd)
+ verity_image_builder.Build(system_image)
+
+ # Additionally make vbmeta image so that we can verify with avbtool.
+ vbmeta_image = os.path.join(image_dir, 'vbmeta.img')
+ cmd = ['avbtool', 'make_vbmeta_image', '--include_descriptors_from_image',
+ system_image, '--output', vbmeta_image]
+ common.RunAndCheckOutput(cmd)
+
+ # Verify the verity metadata.
+ cmd = ['avbtool', 'verify_image', '--image', vbmeta_image]
+ common.RunAndCheckOutput(cmd)
+
+ def _test_CalculateMinPartitionSize_SetUp(self):
+ # Test CalculateMinPartitionSize() with image sizes ranging from 200MB to 2GB.
+ # - 51200 = 200MB * 1024 * 1024 / 4096
+ # - 524288 = 2GB * 1024 * 1024 * 1024 / 4096
+ image_sizes = [BLOCK_SIZE * random.randint(51200, 524288) + offset
+ for offset in range(BLOCK_SIZE)]
+
+ prop_dict = {
+ 'partition_size': None,
+ 'partition_name': 'system',
+ 'avb_avbtool': 'avbtool',
+ 'avb_hashtree_enable': 'true',
+ 'avb_add_hashtree_footer_args': None,
+ }
+ builder = CreateVerityImageBuilder(prop_dict)
+ self.assertEqual(2, builder.version)
+ return image_sizes, builder
+
+ def test_CalculateMinPartitionSize_LinearFooterSize(self):
+ """Tests with footer size which is linear to partition size."""
+ image_sizes, builder = self._test_CalculateMinPartitionSize_SetUp()
+ for image_size in image_sizes:
+ for ratio in 0.95, 0.56, 0.22:
+ expected_size = common.RoundUpTo4K(int(math.ceil(image_size / ratio)))
+ self.assertEqual(
+ expected_size,
+ builder.CalculateMinPartitionSize(
+ image_size, lambda x, ratio=ratio: int(x * ratio)))
+
+ def test_CalculateMinPartitionSize_SlowerGrowthFooterSize(self):
+ """Tests with footer size which grows slower than partition size."""
+
+ def _SizeCalculator(partition_size):
+ """Footer size is the power of 0.95 of partition size."""
+ # Minus footer size to return max image size.
+ return partition_size - int(math.pow(partition_size, 0.95))
+
+ image_sizes, builder = self._test_CalculateMinPartitionSize_SetUp()
+ for image_size in image_sizes:
+ min_partition_size = builder.CalculateMinPartitionSize(
+ image_size, _SizeCalculator)
+ # Checks min_partition_size can accommodate image_size.
+ self.assertGreaterEqual(
+ _SizeCalculator(min_partition_size),
+ image_size)
+ # Checks min_partition_size (round to BLOCK_SIZE) is the minimum.
+ self.assertLess(
+ _SizeCalculator(min_partition_size - BLOCK_SIZE),
+ image_size)
+
+ def test_CalculateMinPartitionSize_FasterGrowthFooterSize(self):
+ """Tests with footer size which grows faster than partition size."""
+
+ def _SizeCalculator(partition_size):
+ """Max image size is the power of 0.95 of partition size."""
+ # Max image size grows less than partition size, which means
+ # footer size grows faster than partition size.
+ return int(math.pow(partition_size, 0.95))
+
+ image_sizes, builder = self._test_CalculateMinPartitionSize_SetUp()
+ for image_size in image_sizes:
+ min_partition_size = builder.CalculateMinPartitionSize(
+ image_size, _SizeCalculator)
+ # Checks min_partition_size can accommodate image_size.
+ self.assertGreaterEqual(
+ _SizeCalculator(min_partition_size),
+ image_size)
+ # Checks min_partition_size (round to BLOCK_SIZE) is the minimum.
+ self.assertLess(
+ _SizeCalculator(min_partition_size - BLOCK_SIZE),
+ image_size)
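+
+
+# Illustrative only: the invariant that both growth tests above check, factored
+# out for clarity. 'size_calculator' maps a partition size to the max image
+# size that fits in it; a minimal partition size p must satisfy
+# size_calculator(p) >= image_size > size_calculator(p - BLOCK_SIZE).
+def _example_assert_min_partition_size(image_size, min_partition_size,
+                                        size_calculator):
+  assert size_calculator(min_partition_size) >= image_size
+  assert size_calculator(min_partition_size - BLOCK_SIZE) < image_size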
diff --git a/tools/releasetools/testdata/TestApp.apk b/tools/releasetools/testdata/TestApp.apk
new file mode 100644
index 0000000000..a91160368c
--- /dev/null
+++ b/tools/releasetools/testdata/TestApp.apk
Binary files differ
diff --git a/tools/releasetools/testdata/merge_config_system_item_list b/tools/releasetools/testdata/merge_config_system_item_list
new file mode 100644
index 0000000000..36c26187d8
--- /dev/null
+++ b/tools/releasetools/testdata/merge_config_system_item_list
@@ -0,0 +1,9 @@
+META/apkcerts.txt
+META/filesystem_config.txt
+META/root_filesystem_config.txt
+META/system_manifest.xml
+META/system_matrix.xml
+META/update_engine_config.txt
+PRODUCT/*
+ROOT/*
+SYSTEM/*
diff --git a/target/board/generic_x86_a/BoardConfig.mk b/tools/releasetools/testdata/signing_helper.sh
index 67cb07d7f9..364e0238b6 100644..100755
--- a/target/board/generic_x86_a/BoardConfig.mk
+++ b/tools/releasetools/testdata/signing_helper.sh
@@ -1,5 +1,6 @@
+#!/bin/sh
#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +15,7 @@
# limitations under the License.
#
-include build/make/target/board/treble_common_32.mk
-
-TARGET_CPU_ABI := x86
-TARGET_ARCH := x86
-TARGET_ARCH_VARIANT := x86
+tmpfile=$(mktemp)
+cat $3 | openssl rsautl -sign -inkey $2 -raw > $tmpfile
+cat $tmpfile > $3
+rm $tmpfile
diff --git a/tools/releasetools/testdata/testkey.key b/tools/releasetools/testdata/testkey.key
new file mode 100644
index 0000000000..3a84607abe
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC+O/I7YvBaCZA3
+KrvP7ErTh6DS3cAvjLY2GkAA7NWcXIICsVwWMtMZAMO+Rtk/o3XY7r53Amg8ue2e
+b0D5Wc8gUVEeDQdRZJscz5CTwmC/b/YBWQBSPknWv23hf7ZYjR5HMk/XlmOfrylA
+oaZzJyKvLSBu+Zi9cvZlSZObnoOYR8JQJEhjYgYn8JwycV4i1VTQvEqxXkyW3kE7
+RW/8JXgRqI4vDIKehm5SFi2jt0eU7/ju/8f3OGQkLng4DV2QPfwQ+A7kad+EYVI1
+dBGYkNNesWB3o75A7jJQ1fyVg/XQzOKZSki1lrTm3rw0AOrBiXdPudbO+4L2vgip
+kPI9/bVNAgMBAAECggEABGjBSY0Wgw+7rvunlL8mUNbQ7HJFVRTO2FwtZZgXr2MZ
+hFR2DPGqoOa6ortjp6zzO071TS7aGaY5krWDbQQe3+Hinm6w37sUOUu6TyJvOaCv
+tAJLFpzo+zg+pL5gDJdgv0e0QAv1TSszKpNUl1Ct5h+Go+vXFXUHrvtQl4fKBwqA
+efxcd3R4z3p/3Cl2ZYIRz9I7UXUZZYwJE7bDNDz3aFZ1jUoELGmhe1O5w0hJY1q6
+PxuOM9bL60yDn0vu0eiCjaPlHeHyGe9pQ1aQLEuwQz9zpWC01dWPVkLmny7HDygC
+VBsdg8MNlzJQ1WV2en11BH72IqZ59U8pD0xEB7a4BQKBgQDxenfXyYZw4AKcaJlP
+ncJmsx/wcgEvWNxiI4etArXES4VIyP2OlSw+q9JbOOpaSk8TJP5PNfUkgTbC4B2y
+gh/AobJP5b7Wn5LrsHc3GY6CzF1i8T4xXQRxnaKWE86SOmZQlEmyCnpyCmfFVuaR
+E8p8CPW/gQLhpxSlQdGZ0bYLiwKBgQDJrJRDdyaI/Isusog/OwKHVGBU6CmRa5tM
+gx+GIlxheqhuDqnBkr0h1kL20Zi80JeG7vKWr+dwfqkEarfdTe+juwlIuQ3MEuXL
+AbsKNuaU1naOqOLm9rjZgRtR7oNLVH5AbkKMaJz1zM6YiMl54FEDX7ZVY8b6q1Kz
+YXT3sGi9hwKBgBsNa0ujagpPLjuzhCllNRgoTRW0z+kr/VSJQnPhb9eT1lS3H6DP
+mWtT+Hb7w1VmKcGtTUg2dUYnq6jdTrZm2YPNGZrV1DFbIyyAUnq7xDlnB7dD64HA
+N/U6gbJqeaPsIvY4BqGJhvorrEBxYdcy7mZC4rUXkOkSvL9exkqDMe/NAoGARaHU
+v0aQg5PO6pyx9kMFqHw1lptiXtdsk4pihAmxI+cZ6IYfjrp/mwNDs7zCo87RwsEV
++Xlay7iv2tqOCVczerDFj9p1LRUJSoKadfhmvNUfsjoVvfFJ+a9eI3fa1VOjE9P+
+HkSwjR3d50Sza+VLk4Kkje8ZcMtejpkDrdG3GFkCgYBXHqciwlFn5nMPFRe8v426
+6YBiUtzCQCZxDtMeeZYCJslFfjrqPXNUcU/flxWwaikjFsLJEtl7aT3Hpdi5I7T8
+yCYkUWqAAh7twEYTOeG6v/tEa/PmsBjZXPD2zkCp76EQmv3gbvsH4F/nA55gT/GR
+2in6XS/4rHBvjn5gF6MFyg==
+-----END PRIVATE KEY-----
diff --git a/tools/releasetools/testdata/testkey_RSA4096.key b/tools/releasetools/testdata/testkey_RSA4096.key
new file mode 100644
index 0000000000..07115e1ec8
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_RSA4096.key
@@ -0,0 +1,52 @@
+-----BEGIN PRIVATE KEY-----
+MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQC7y8EH4O8M9aA7
+UhaWLlW5ceQxZi0P7DNOgog/82SIZh1/Vv0S8KCu9LcngK60oIejPU3k9zb0Mpl2
+4OEtupbOq9SV0nyRIp33rs9EJ0zm8keZ2jEfwTubdhE4GumlwkbfYHlMPmpufqOq
+uxKfCOUZk3ZEC5RFDBqVLGrf9m22ITMQwNGj/u/mtAg2UXRGy534eU1evHOFH8tD
+IxXd378m+gRY+bPi6fOkrJgAw6N01NwwsHRxxQsVgp8m1EKpPJ+ARRtP6YYtNsNB
+KTOXqZw18OD7eG7yVONAf7oSOMNRK7qg6ZU6YN/y1k+YZF3D/1HU9THoDXIpBHoW
+R2SpEM63Ua8ilSmx9PfoaDn27VrpjIcq50HqKAXOvclxOCyVD0pRkrryPpevYCVu
+x9/InmW3K4dPiAy0KmmS4ZSLUJnr6Lnkt8C3VxXHqZ4T7MlgrjrO70YqCxeDvYE0
+KL/e/UeJ69nANybkDThhBkxkOC1vvik3VXO4ITPEKCnLHdvdj8rwkjqf3Ex5A0g8
+XaH0l6I72pqxXi8nnU9udLrUEdHUT1KHzjnZBVP6aomcDy5Gnbb6wXTBTkB7fdQB
+nGcqT0DH67PqJE/rCUguVSmX4KGyOVrr5S1GQTg1EmHbF2Kf6P0YJpcVU7PHRE8s
+BisP39wzoE5XsIpn/aKpWdpcfqxjTQIDAQABAoICAQCB/vtyLryLpgPyzFIiR5TD
+uBkUMPyEhybE9ArI6fzvhnBo05h/4d34/iFC0QsesfjygN9I3fBGfjhJWEXH19/I
+1J1l0Ly14taiu3lyXhoXzCLQV3+l0acnaEVnJwoR2jghLLEKnDIkprk42CJ9wDSG
+zdMSK0nJuiU0mfipa/ZqGvU0ZaU49qKuenUs1Jm+3/hMJfvu1ljJEEcuBD2Axv+V
+RYB47vEc5IHpvifCb6rYlviNI7iXgKS5kSAGSuySJgrrSessGCTva3chxhmWpKwj
+ksjKioWSbjyZS1FMh8p8h966wLayIJklikCy5tcZc8X7und/gL9DsXuprGX7uky8
+3ZS2cJjiVimkwoROq5VPa+0SBmNJWRBNvRfARiaKkyyidVxIsvjFNDFPQp1jYlzM
+fvGJwgnCiUQP56hvri3irriWN8Le2U8lqQQ7YaDLKcsf6iiMGwfxcK+6E4MUv797
+V5CZXSC7RrPd9wdj6UIqtgSGVUH3BV7kB+fYYfvV15kpj6IXYTxElfAZ2ak3g9sv
+JZ6moKbRN0xSufkDftMGv77cH13v97Iy3Whp8zEPMBnbsdpPp6DpIH8sl2R7O+zr
+uMty6vXw0Ux69LEpz4b4HyD7t65zTSwpou5YhfZt/yRzovawsQIRGlc4E1FJDFjf
+e9LvazMXo/us3T5LNv5pAQKCAQEA5Kq1RJgMlvKOfGaEah1xu65s3I1lGuz+9c1x
+geYFlta9H2vG1aADUtgmIBjjC+1z4KLD9jrjrwwbqKnisu7/qUOR6Qf8BHFbrMSs
+J8IMOD3Vw/UVc/8LCjoI4n1XaKYJtOyIxKJtWKAbgtvgVOAGSt47LEOOchXNnAKv
+C3Flak3ADYaUQFLoiwmp6WdSL+uiLisukKNjmYu8vxhg9255p31PB6xixd0raoF/
+oDTfgY1fG/OFXvQd+GcjrTJ2Lqk1GtZqau5MEkS5jsKKnPJ6+ozd2t+QVkMrIQER
+WeTtZ7gimJo6QF8uyyG8WqT1qxbO2zV4Nrwak6ozRFhEJdnJrQKCAQEA0j5hY6sr
+apIvEUFsK6k1rEb77+1p85eyCOSYZpHEIe0hy89MjMUFA5IKhsXnUqhkiuJURUrD
+VtccWWJt5DUgS6HzJUGjeXo07wkqVz+10l+l+RSHleNBYlbxSpZQtvkKQkISF56c
+bSjLzOGM4RE8NxBdFg6EijwlKlZ8kW1ZJaQv9fuR+QS9DFXSiYUJSDiwLF0F6ogQ
+i1h3RN3RIKYc9kizYqBKaksg8EfQEyJs2Rhl5JrPmdZvDTpSeGRqz66WbdL4gSNv
+ud64BYY+Uhec9yH2HDal1l/j1dFbh+Nzs2v4b5TYmCO/zX5GfucUrZaGHbZcovlo
+/abKhURKW/N0IQKCAQEAhz0PCAqFJ6E89AYNulS/tyhp6ecWLN6NzAI9Z34LQDKw
+l6y+ZAnG7XA43DLb1WoSZoDdNPuPPTAEC7SuBvWi7xCvcwrt2hLRDVUkHD9/yqOH
+keWZUok8lkfMiWdoEtRgWUireuA1m3zVyIcSHiCAmDbm+D7cOEz81ZAgxrvCJyTk
+uRsnAwQF1HVasFgTG5RYzsVrPM/lUCJ89ugMMUp9WLmbzAYARNWRn+QG/1FF/vEF
+lxpnfskSEJ+vUffOPbqFVeIJ/kQBaayLsgsMv9YJNbWqYJBoZRxEnbhr8qaaYgVd
+MLPGT9v7aNgC9fkp8o4CuVLeTkDh1wOKXpl1dI1h4QKCAQEAgfNExxI517lbllLV
+xXblUgLeHkKkxofw50ZEXMGkdUPZK9yJ+Eie/MH796nDfXfQDXgvllTLwJVdVHJe
+cjvUJmuHmnOj06YRqd4EacFbZRjxwa9Kzv6Un3AV3IBki3QLP0EPZcIH9gDNV2ni
+Zgr9KRvYLZXznm3mmvCyNkFcZMPDUUuZwk2HfGRfqditEBLZ8dHdokVP1JFtxwdE
+B+Yk6KWvGzrwRBsD1QDOP8V7egR2loKJ5xB/u7Fc4EVRL+U93cwVBd0dZcmf/Oop
+AxzNTIOVV4L/pi3G6ZZp+OhBz5jhCjb6Wa9fEmkGsdGrGlB7vUnGoIWAu6eobg7z
+1zn3gQKCAQBwyViGs7b5HuYQ8UNkvBK9MxUE6h/qHEshbw+QtD/wcdNNpwLTawoW
+JH3bWzD01p1DsbRx/bcV5yaiWDhuslSH2xB6+N1gx2ohg8lPmFhKQiR1OTQ3L603
+Y+3h8FNO/c3YPcNr/k4N+tVKPSJvz0NcbkNs9qGUUsiEppVtc17VFAv/yPicV/wP
+0vC4Qw4xitSlIzD1QtPl0HfhA3ZM6fBb0lYx3tpJjmkrWPVjwWhYIAVLD6j7Jark
+NtMW9wSG21atSgWX1jFOiVsu3qzDpMvLXbH2FpAO9t+9GSDcSzfUMQLymWyW1+Dl
+L0rBGoJyEopSbtycAkWEHb/YLolfaTRd
+-----END PRIVATE KEY-----
diff --git a/tools/releasetools/testdata/testkey_mincrypt b/tools/releasetools/testdata/testkey_mincrypt
new file mode 100644
index 0000000000..7f5d31b5c4
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_mincrypt
Binary files differ
diff --git a/tools/releasetools/testdata/testkey_with_passwd.key b/tools/releasetools/testdata/testkey_with_passwd.key
new file mode 100644
index 0000000000..2f0a199645
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCwaAOHPqgkCmqU
+AFRnJW6LrAuSfy9EzWSRHSkltp811ByMIE0N6/Nttu8ZCL456lzArHNKt/zdoBik
+eLB6gN9CTvQ8n4LMdSEmkRl3uXBtOPJuVObJ6ZUILz6L7WofWcr8DT81j2At7nHi
+Wg8SkCsFXbFfpjljOlpqUG3Szt+48X8rcgG82s97BuRwNxUgfK1/8QzOiH9fDbMU
+h6XI2jo2VwuBYOsJadJJWOf6oRRHZonrts0FXpV46CXykpLvLT2u5GXg1Pxd7i1K
+v1P8bxZOzVbEVfkL2DnUCtUBAnP98r9UyjQDd4blk4Mwl+mzB5otPTacNzEGhmNK
+Et+HB/cdAgMBAAECggEATsn2IXa7tHUuivHmwLb4O8vY01KY8xrleubSVPTPAUS+
+h1t57ujerbcR7VV5WPay/J9JUyr/9qClwPfioqRikwQek+EOk3ERIF+YR1/8tdvE
+c8DZ337DQIeRYP/l8SCyx4bHH43tADbKiLV+m+TmQhxJt5XPdeE/NtK7andZdwkv
+xEoG9l2aONE4z9pY1x+c1SdDSsq92/iLHLgSkQJmWo+lrfeh6gshXgQgDY8n6rgY
+GsCgSawLphvd8Tvo86CL04l0pWtY1gEW3s6sdYo1YDkpWQzSRCtGm0GlhEt2fyq5
+coTK2sLHguE7NL5VZo4zlGtM3QBdvRksTO1mJOt6JQKBgQDaT4oGjZp1rtKdObvn
+ElaUo5EOyJjmXkRBBndrbiG3078eOqTJHXx45DJUv8hj9+g6vSULiIeFk1FiiMQD
+vcnsBEaGaSc886wXY6TQgIIzvVfzDHGYTuQydiYQbLClH6S28HLqdlZjUIlHwxb9
+wBm8JwmTiVeAEvO8LTzeEqfkLwKBgQDO3He8Ei8XDeqtIK0lzcZ83yw9OGP23/gK
+8GDaf8J+cOtOyYkDlcV0rBNFvE8+TzIpIUlo47b2RSaART3iPSfRJTaySZjKWCVo
+s2A0/zQcrj7GgD2gaHRrgI9bmnWW1j95a9n/6AUEyEIJ6K8tYK819Vl4GAyhNHEQ
+sRbxa69qcwKBgQC5F8jxx2tXLdM6JLIQtzabLZcWTrN8Vh5Od3oWpriF0EzxB02h
+ipN3OBsISdZQE+dcrfNTtP0aHo5ZGZX/ihFCP1nAKjVvczXMWtppQRujXHzOABXr
+ya+mrQ+Wy2B1j7+qr3DvI0gZSjYqltjOaeon4X04DrEWUHtAZ6Z8rpqUVwKBgQCB
+o8mmI/8/A4m/Vmss9fke6P5gn6aGYXah5GPOi6Loevv9NHCZvpMwu2aYnZtMAXX+
+MM5A3fUcAdpPKRXPY2RAvoG42kbXCMbpBwGUNRwDnW/aFySIEu5jMP6m+fYXwc2l
+2uGUb2Q1ywsYCqs+VQl5V3nquaewn5z8SP+H7WTR4QKBgQCO5CRpyNOjEwMxTPR1
+GYUKAEiVtmzknHAxUE6drTgGEZSquAXiau0B5+7+/G5gwqxCLGpnstMByI+dhkR6
++ybAc/bzb2aoGK4pZf/PuwxQQsHBnG0oaSFU6RZlbVV20j7FZ04+cYnKHwCYkKjN
+DwA1Ae+H+u95raB4vYhk7IzD4A==
+-----END PRIVATE KEY-----
diff --git a/tools/releasetools/testdata/verity_mincrypt b/tools/releasetools/testdata/verity_mincrypt
new file mode 100644
index 0000000000..31982d95ad
--- /dev/null
+++ b/tools/releasetools/testdata/verity_mincrypt
Binary files differ
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 8c9e07c08b..1c856a8e14 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -17,16 +17,24 @@
"""
Validate a given (signed) target_files.zip.
-It performs checks to ensure the integrity of the input zip.
+It performs the following checks to assert the integrity of the input zip.
+
- It verifies the file consistency between the ones in IMAGES/system.img (read
via IMAGES/system.map) and the ones under unpacked folder of SYSTEM/. The
same check also applies to the vendor image if present.
+
+ - It verifies the install-recovery script consistency, by comparing the
+ checksums in the script against the ones of IMAGES/{boot,recovery}.img.
+
+ - It verifies the signed Verified Boot related images, for both of Verified
+ Boot 1.0 and 2.0 (aka AVB).
"""
+import argparse
+import filecmp
import logging
import os.path
import re
-import sys
import zipfile
import common
@@ -76,18 +84,25 @@ def ValidateFileConsistency(input_zip, input_tmp, info_dict):
# bytes past the file length, which is expected to be padded with '\0's.
ranges = image.file_map[entry]
- incomplete = ranges.extra.get('incomplete', False)
+ # Use the original RangeSet if applicable, which includes the shared
+ # blocks. This needs to happen before checking the monotonic flag.
+ if ranges.extra.get('uses_shared_blocks'):
+ file_ranges = ranges.extra['uses_shared_blocks']
+ else:
+ file_ranges = ranges
+
+ incomplete = file_ranges.extra.get('incomplete', False)
if incomplete:
logging.warning('Skipping %s that has incomplete block list', entry)
continue
# TODO(b/79951650): Handle files with non-monotonic ranges.
- if not ranges.monotonic:
+ if not file_ranges.monotonic:
logging.warning(
- 'Skipping %s that has non-monotonic ranges: %s', entry, ranges)
+ 'Skipping %s that has non-monotonic ranges: %s', entry, file_ranges)
continue
- blocks_sha1 = image.RangeSha1(ranges)
+ blocks_sha1 = image.RangeSha1(file_ranges)
# The filename under unpacked directory, such as SYSTEM/bin/sh.
unpacked_name = os.path.join(
@@ -96,7 +111,7 @@ def ValidateFileConsistency(input_zip, input_tmp, info_dict):
file_sha1 = unpacked_file.sha1
assert blocks_sha1 == file_sha1, \
'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
- entry, ranges, blocks_sha1, file_sha1)
+ entry, file_ranges, blocks_sha1, file_sha1)
logging.info('Validating file consistency.')
@@ -122,14 +137,18 @@ def ValidateInstallRecoveryScript(input_tmp, info_dict):
1. full recovery:
...
- if ! applypatch -c type:device:size:SHA-1; then
- applypatch /system/etc/recovery.img type:device sha1 size && ...
+ if ! applypatch --check type:device:size:sha1; then
+ applypatch --flash /system/etc/recovery.img \\
+ type:device:size:sha1 && \\
...
2. recovery from boot:
...
- applypatch [-b bonus_args] boot_info recovery_info recovery_sha1 \
- recovery_size patch_info && ...
+ if ! applypatch --check type:recovery_device:recovery_size:recovery_sha1; then
+ applypatch [--bonus bonus_args] \\
+ --patch /system/recovery-from-boot.p \\
+ --source type:boot_device:boot_size:boot_sha1 \\
+ --target type:recovery_device:recovery_size:recovery_sha1 && \\
...
For full recovery, we want to calculate the SHA-1 of /system/etc/recovery.img
@@ -146,75 +165,228 @@ def ValidateInstallRecoveryScript(input_tmp, info_dict):
logging.info('Checking %s', script_path)
with open(os.path.join(input_tmp, script_path), 'r') as script:
lines = script.read().strip().split('\n')
- assert len(lines) >= 6
- check_cmd = re.search(r'if ! applypatch -c \w+:.+:\w+:(\w+);',
+ assert len(lines) >= 10
+ check_cmd = re.search(r'if ! applypatch --check (\w+:.+:\w+:\w+);',
lines[1].strip())
- expected_recovery_check_sha1 = check_cmd.group(1)
- patch_cmd = re.search(r'(applypatch.+)&&', lines[2].strip())
- applypatch_argv = patch_cmd.group(1).strip().split()
+ check_partition = check_cmd.group(1)
+ assert len(check_partition.split(':')) == 4
full_recovery_image = info_dict.get("full_recovery_image") == "true"
if full_recovery_image:
- assert len(applypatch_argv) == 5
- # Check we have the same expected SHA-1 of recovery.img in both check mode
- # and patch mode.
- expected_recovery_sha1 = applypatch_argv[3].strip()
- assert expected_recovery_check_sha1 == expected_recovery_sha1
- ValidateFileAgainstSha1(input_tmp, 'recovery.img',
- 'SYSTEM/etc/recovery.img', expected_recovery_sha1)
+ assert len(lines) == 10, "Invalid line count: {}".format(lines)
+
+ # Expect something like "EMMC:/dev/block/recovery:28:5f9c..62e3".
+ target = re.search(r'--target (.+) &&', lines[4].strip())
+ assert target is not None, \
+ "Failed to parse target line \"{}\"".format(lines[4])
+ flash_partition = target.group(1)
+
+ # Check we have the same recovery target in the check and flash commands.
+ assert check_partition == flash_partition, \
+ "Mismatching targets: {} vs {}".format(check_partition, flash_partition)
+
+ # Validate the SHA-1 of the recovery image.
+ recovery_sha1 = flash_partition.split(':')[3]
+ ValidateFileAgainstSha1(
+ input_tmp, 'recovery.img', 'SYSTEM/etc/recovery.img', recovery_sha1)
else:
- # We're patching boot.img to get recovery.img where bonus_args is optional
- if applypatch_argv[1] == "-b":
- assert len(applypatch_argv) == 8
- boot_info_index = 3
- else:
- assert len(applypatch_argv) == 6
- boot_info_index = 1
+ assert len(lines) == 11, "Invalid line count: {}".format(lines)
+
+ # --source boot_type:boot_device:boot_size:boot_sha1
+ source = re.search(r'--source (\w+:.+:\w+:\w+) \\', lines[4].strip())
+ assert source is not None, \
+ "Failed to parse source line \"{}\"".format(lines[4])
- # boot_info: boot_type:boot_device:boot_size:boot_sha1
- boot_info = applypatch_argv[boot_info_index].strip().split(':')
- assert len(boot_info) == 4
+ source_partition = source.group(1)
+ source_info = source_partition.split(':')
+ assert len(source_info) == 4, \
+ "Invalid source partition: {}".format(source_partition)
ValidateFileAgainstSha1(input_tmp, file_name='boot.img',
file_path='IMAGES/boot.img',
- expected_sha1=boot_info[3])
-
- recovery_sha1_index = boot_info_index + 2
- expected_recovery_sha1 = applypatch_argv[recovery_sha1_index]
- assert expected_recovery_check_sha1 == expected_recovery_sha1
+ expected_sha1=source_info[3])
+
+ # --target recovery_type:recovery_device:recovery_size:recovery_sha1
+ target = re.search(r'--target (\w+:.+:\w+:\w+) && \\', lines[5].strip())
+ assert target is not None, \
+ "Failed to parse target line \"{}\"".format(lines[5])
+ target_partition = target.group(1)
+
+ # Check we have the same recovery target in the check and patch commands.
+ assert check_partition == target_partition, \
+ "Mismatching targets: {} vs {}".format(
+ check_partition, target_partition)
+
+ recovery_info = target_partition.split(':')
+ assert len(recovery_info) == 4, \
+ "Invalid target partition: {}".format(target_partition)
ValidateFileAgainstSha1(input_tmp, file_name='recovery.img',
file_path='IMAGES/recovery.img',
- expected_sha1=expected_recovery_sha1)
+ expected_sha1=recovery_info[3])
logging.info('Done checking %s', script_path)
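+
+
+# Illustrative only: how the --target argument parsed above decomposes into its
+# four fields. The device path, size and sha1 below are made-up placeholders.
+def _example_parse_flash_target():
+  line = '      --target EMMC:/dev/block/recovery:1024:0123abcd && \\'
+  target = re.search(r'--target (.+) &&', line.strip())
+  assert target is not None
+  partition_type, device, size, sha1 = target.group(1).split(':')
+  return partition_type, device, int(size), sha1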
-def main(argv):
- def option_handler():
- return True
+def ValidateVerifiedBootImages(input_tmp, info_dict, options):
+ """Validates the Verified Boot related images.
+
+ For Verified Boot 1.0, it verifies the signatures of the bootable images
+ (boot/recovery etc), as well as the dm-verity metadata in system images
+ (system/vendor/product). For Verified Boot 2.0, it calls avbtool to verify
+ vbmeta.img, which in turn verifies all the descriptors listed in vbmeta.
- args = common.ParseOptions(
- argv, __doc__, extra_opts="",
- extra_long_opts=[],
- extra_option_handler=option_handler)
+ Args:
+ input_tmp: The top-level directory of unpacked target-files.zip.
+ info_dict: The loaded info dict.
+ options: A dict that contains the user-supplied public keys to be used for
+ image verification. In particular, 'verity_key' is used to verify the
+ bootable images in VB 1.0, and the vbmeta image in VB 2.0, where
+ applicable. 'verity_key_mincrypt' will be used to verify the system
+ images in VB 1.0.
- if len(args) != 1:
- common.Usage(__doc__)
- sys.exit(1)
+ Raises:
+ AssertionError: On any verification failure.
+ """
+ # Verified boot 1.0 (images signed with boot_signer and verity_signer).
+ if info_dict.get('boot_signer') == 'true':
+ logging.info('Verifying Verified Boot images...')
+
+ # Verify the boot/recovery images (signed with boot_signer), against the
+ # given X.509 encoded pubkey (or falling back to the one in the info_dict if
+ # none given).
+ verity_key = options['verity_key']
+ if verity_key is None:
+ verity_key = info_dict['verity_key'] + '.x509.pem'
+ for image in ('boot.img', 'recovery.img', 'recovery-two-step.img'):
+ image_path = os.path.join(input_tmp, 'IMAGES', image)
+ if not os.path.exists(image_path):
+ continue
+
+ cmd = ['boot_signer', '-verify', image_path, '-certificate', verity_key]
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to verify {} with boot_signer:\n{}'.format(image, stdoutdata)
+ logging.info(
+ 'Verified %s with boot_signer (key: %s):\n%s', image, verity_key,
+ stdoutdata.rstrip())
+
+ # Verify the verity-signed system images in Verified Boot 1.0. Note that we
+ # don't use 'elif' here, since 'boot_signer' and 'verity' are not bundled
+ # together in VB 1.0.
+ if info_dict.get('verity') == 'true':
+ # First verify that the verity key that's built into the root image (as
+ # /verity_key) matches the one given via command line, if any.
+ if info_dict.get("system_root_image") == "true":
+ verity_key_mincrypt = os.path.join(input_tmp, 'ROOT', 'verity_key')
+ else:
+ verity_key_mincrypt = os.path.join(
+ input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+ assert os.path.exists(verity_key_mincrypt), 'Missing verity_key'
+
+ if options['verity_key_mincrypt'] is None:
+ logging.warn(
+ 'Skipped checking the content of /verity_key, as the key file is not '
+ 'provided. Use --verity_key_mincrypt to specify it.')
+ else:
+ expected_key = options['verity_key_mincrypt']
+ assert filecmp.cmp(expected_key, verity_key_mincrypt, shallow=False), \
+ "Mismatching mincrypt verity key files"
+ logging.info('Verified the content of /verity_key')
+
+ # Then verify the verity signed system/vendor/product images, against the
+ # verity pubkey in mincrypt format.
+ for image in ('system.img', 'vendor.img', 'product.img'):
+ image_path = os.path.join(input_tmp, 'IMAGES', image)
+
+ # We don't check whether the image is actually enabled via info_dict (e.g.
+ # 'system_verity_block_device=...'), because skipping the signing of some
+ # images in a signed target-files.zip while leaving the top-level verity
+ # flag enabled is most likely a bug.
+ if not os.path.exists(image_path):
+ continue
+
+ cmd = ['verity_verifier', image_path, '-mincrypt', verity_key_mincrypt]
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to verify {} with verity_verifier (key: {}):\n{}'.format(
+ image, verity_key_mincrypt, stdoutdata)
+ logging.info(
+ 'Verified %s with verity_verifier (key: %s):\n%s', image,
+ verity_key_mincrypt, stdoutdata.rstrip())
+
+ # Handle the case of Verified Boot 2.0 (AVB).
+ if info_dict.get("avb_enable") == "true":
+ logging.info('Verifying Verified Boot 2.0 (AVB) images...')
+
+ key = options['verity_key']
+ if key is None:
+ key = info_dict['avb_vbmeta_key_path']
+
+ # avbtool verifies all the images that have descriptors listed in vbmeta.
+ image = os.path.join(input_tmp, 'IMAGES', 'vbmeta.img')
+ cmd = ['avbtool', 'verify_image', '--image', image, '--key', key]
+
+ # Append the args for chained partitions if any.
+ for partition in common.AVB_PARTITIONS:
+ key_name = 'avb_' + partition + '_key_path'
+ if info_dict.get(key_name) is not None:
+ chained_partition_arg = common.GetAvbChainedPartitionArg(
+ partition, info_dict, options[key_name])
+ cmd.extend(["--expected_chain_partition", chained_partition_arg])
+
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to verify {} with avbtool (key: {}):\n{}'.format(
+ image, key, stdoutdata)
+
+ logging.info(
+ 'Verified %s with avbtool (key: %s):\n%s', image, key,
+ stdoutdata.rstrip())
+
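+
+# A minimal sketch of calling ValidateVerifiedBootImages() directly, mirroring
+# test_validate_target_files.py. The key path below is a placeholder; only the
+# VB 1.0 boot-image check is enabled.
+def _example_validate_boot_images(input_tmp):
+  info_dict = {'boot_signer': 'true'}
+  options = {'verity_key': '/path/to/testkey.x509.pem'}  # placeholder path
+  ValidateVerifiedBootImages(input_tmp, info_dict, options)
+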
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ 'target_files',
+ help='the input target_files.zip to be validated')
+ parser.add_argument(
+ '--verity_key',
+ help='the verity public key to verify the bootable images (Verified '
+ 'Boot 1.0), or the vbmeta image (Verified Boot 2.0, aka AVB), where '
+ 'applicable')
+ for partition in common.AVB_PARTITIONS:
+ parser.add_argument(
+ '--avb_' + partition + '_key_path',
+ help='the public or private key in PEM format to verify AVB chained '
+ 'partition of {}'.format(partition))
+ parser.add_argument(
+ '--verity_key_mincrypt',
+ help='the verity public key in mincrypt format to verify the system '
+ 'images, if the target uses Verified Boot 1.0')
+ args = parser.parse_args()
+
+ # Unprovided args will have 'None' as the value.
+ options = vars(args)
logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
date_format = '%Y/%m/%d %H:%M:%S'
logging.basicConfig(level=logging.INFO, format=logging_format,
datefmt=date_format)
- logging.info("Unzipping the input target_files.zip: %s", args[0])
- input_tmp = common.UnzipTemp(args[0])
+ logging.info("Unzipping the input target_files.zip: %s", args.target_files)
+ input_tmp = common.UnzipTemp(args.target_files)
info_dict = common.LoadInfoDict(input_tmp)
- with zipfile.ZipFile(args[0], 'r') as input_zip:
+ with zipfile.ZipFile(args.target_files, 'r') as input_zip:
ValidateFileConsistency(input_zip, input_tmp, info_dict)
ValidateInstallRecoveryScript(input_tmp, info_dict)
+ ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
# TODO: Check if the OTA keys have been properly updated (the ones on /system,
# in recovery image).
@@ -223,6 +395,6 @@ def main(argv):
if __name__ == '__main__':
try:
- main(sys.argv[1:])
+ main()
finally:
common.Cleanup()
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
new file mode 100644
index 0000000000..3a58755e47
--- /dev/null
+++ b/tools/releasetools/verity_utils.py
@@ -0,0 +1,693 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import logging
+import os.path
+import shlex
+import struct
+
+import common
+import sparse_img
+from rangelib import RangeSet
+
+logger = logging.getLogger(__name__)
+
+OPTIONS = common.OPTIONS
+BLOCK_SIZE = common.BLOCK_SIZE
+FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
+
+
+class BuildVerityImageError(Exception):
+ """An Exception raised during verity image building."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+def GetVerityFECSize(image_size):
+ cmd = ["fec", "-s", str(image_size)]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ return int(output)
+
+
+def GetVerityTreeSize(image_size):
+ cmd = ["build_verity_tree", "-s", str(image_size)]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ return int(output)
+
+
+def GetVerityMetadataSize(image_size):
+ cmd = ["build_verity_metadata.py", "size", str(image_size)]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ return int(output)
+
+
+def GetVeritySize(image_size, fec_supported):
+ verity_tree_size = GetVerityTreeSize(image_size)
+ verity_metadata_size = GetVerityMetadataSize(image_size)
+ verity_size = verity_tree_size + verity_metadata_size
+ if fec_supported:
+ fec_size = GetVerityFECSize(image_size + verity_size)
+ return verity_size + fec_size
+ return verity_size
+
+
+def GetSimgSize(image_file):
+ simg = sparse_img.SparseImage(image_file, build_map=False)
+ return simg.blocksize * simg.total_blocks
+
+
+def ZeroPadSimg(image_file, pad_size):
+ blocks = pad_size // BLOCK_SIZE
+ logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
+ simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
+ simg.AppendFillChunk(0, blocks)
+
+
+def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
+ padding_size):
+ cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
+ verity_path, verity_fec_path]
+ common.RunAndCheckOutput(cmd)
+
+
+def BuildVerityTree(sparse_image_path, verity_image_path):
+ cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
+ verity_image_path]
+ output = common.RunAndCheckOutput(cmd)
+ root, salt = output.split()
+ return root, salt
+
+
+def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
+ block_device, signer_path, key, signer_args,
+ verity_disable):
+ cmd = ["build_verity_metadata.py", "build", str(image_size),
+ verity_metadata_path, root_hash, salt, block_device, signer_path, key]
+ if signer_args:
+ cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
+ if verity_disable:
+ cmd.append("--verity_disable")
+ common.RunAndCheckOutput(cmd)
+
+
+def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
+ """Appends the unsparse image to the given sparse image.
+
+ Args:
+ sparse_image_path: the path to the (sparse) image
+ unsparse_image_path: the path to the (unsparse) image
+ error_message: the error message to log and raise on failure
+
+ Raises:
+ BuildVerityImageError: On error.
+ """
+ cmd = ["append2simg", sparse_image_path, unsparse_image_path]
+ try:
+ common.RunAndCheckOutput(cmd)
+ except Exception:
+ logger.exception(error_message)
+ raise BuildVerityImageError(error_message)
+
+
+def Append(target, file_to_append, error_message):
+ """Appends file_to_append to target.
+
+ Raises:
+ BuildVerityImageError: On error.
+ """
+ try:
+ with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
+ for line in input_file:
+ out_file.write(line)
+ except IOError:
+ logger.exception(error_message)
+ raise BuildVerityImageError(error_message)
+
+
+def CreateVerityImageBuilder(prop_dict):
+ """Returns a verity image builder based on the given build properties.
+
+ Args:
+ prop_dict: A dict that contains the build properties. In particular, the
+ verity-related properties determine which builder (if any) is returned.
+
+ Returns:
+ A VerityImageBuilder instance for Verified Boot 1.0 or Verified Boot 2.0; or
+ None if the given build doesn't support Verified Boot.
+ """
+ partition_size = prop_dict.get("partition_size")
+ # partition_size could be None at this point, if using dynamic partitions.
+ if partition_size:
+ partition_size = int(partition_size)
+
+ # Verified Boot 1.0
+ verity_supported = prop_dict.get("verity") == "true"
+ is_verity_partition = "verity_block_device" in prop_dict
+ if verity_supported and is_verity_partition:
+ if OPTIONS.verity_signer_path is not None:
+ signer_path = OPTIONS.verity_signer_path
+ else:
+ signer_path = prop_dict["verity_signer_cmd"]
+ return Version1VerityImageBuilder(
+ partition_size,
+ prop_dict["verity_block_device"],
+ prop_dict.get("verity_fec") == "true",
+ signer_path,
+ prop_dict["verity_key"] + ".pk8",
+ OPTIONS.verity_signer_args,
+ "verity_disable" in prop_dict)
+
+ # Verified Boot 2.0
+ if (prop_dict.get("avb_hash_enable") == "true" or
+ prop_dict.get("avb_hashtree_enable") == "true"):
+ # key_path and algorithm are only available when chain partition is used.
+ key_path = prop_dict.get("avb_key_path")
+ algorithm = prop_dict.get("avb_algorithm")
+ if prop_dict.get("avb_hash_enable") == "true":
+ return VerifiedBootVersion2VerityImageBuilder(
+ prop_dict["partition_name"],
+ partition_size,
+ VerifiedBootVersion2VerityImageBuilder.AVB_HASH_FOOTER,
+ prop_dict["avb_avbtool"],
+ key_path,
+ algorithm,
+ prop_dict.get("avb_salt"),
+ prop_dict["avb_add_hash_footer_args"])
+ else:
+ return VerifiedBootVersion2VerityImageBuilder(
+ prop_dict["partition_name"],
+ partition_size,
+ VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
+ prop_dict["avb_avbtool"],
+ key_path,
+ algorithm,
+ prop_dict.get("avb_salt"),
+ prop_dict["avb_add_hashtree_footer_args"])
+
+ return None
+
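+
+# Minimal property dicts that select each builder above, mirroring the defaults
+# used in test_verity_utils.py. The key path is a placeholder.
+_EXAMPLE_VB1_PROP_DICT = {
+    'partition_size': str(4096 * 1024),
+    'verity': 'true',
+    'verity_block_device': '/dev/block/system',
+    'verity_key': '/path/to/testkey',  # placeholder; '.pk8' is appended
+    'verity_fec': 'true',
+    'verity_signer_cmd': 'verity_signer',
+}
+_EXAMPLE_VB2_PROP_DICT = {
+    'partition_size': str(4096 * 1024),
+    'partition_name': 'system',
+    'avb_avbtool': 'avbtool',
+    'avb_hashtree_enable': 'true',
+    'avb_add_hashtree_footer_args': '',
+}
+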
+
+class VerityImageBuilder(object):
+ """A builder that generates an image with verity metadata for Verified Boot.
+
+ A VerityImageBuilder instance handles the work of building an image with
+ verity metadata for supporting Android Verified Boot. This class defines the
+ common interface between Verified Boot 1.0 and Verified Boot 2.0. A matching
+ builder will be returned based on the given build properties.
+
+ More info on the verity image generation can be found at the following link.
+ https://source.android.com/security/verifiedboot/dm-verity#implementation
+ """
+
+ def CalculateMaxImageSize(self, partition_size):
+ """Calculates the filesystem image size for the given partition size."""
+ raise NotImplementedError
+
+ def CalculateDynamicPartitionSize(self, image_size):
+ """Calculates and sets the partition size for a dynamic partition."""
+ raise NotImplementedError
+
+ def PadSparseImage(self, out_file):
+ """Adds padding to the generated sparse image."""
+ raise NotImplementedError
+
+ def Build(self, out_file):
+ """Builds the verity image and writes it to the given file."""
+ raise NotImplementedError
+
+
+class Version1VerityImageBuilder(VerityImageBuilder):
+ """A VerityImageBuilder for Verified Boot 1.0."""
+
+ def __init__(self, partition_size, block_dev, fec_supported, signer_path,
+ signer_key, signer_args, verity_disable):
+ self.version = 1
+ self.partition_size = partition_size
+ self.block_device = block_dev
+ self.fec_supported = fec_supported
+ self.signer_path = signer_path
+ self.signer_key = signer_key
+ self.signer_args = signer_args
+ self.verity_disable = verity_disable
+ self.image_size = None
+ self.verity_size = None
+
+ def CalculateDynamicPartitionSize(self, image_size):
+ # This needs to be implemented. Note that returning the given image size as
+ # the partition size doesn't make sense, as it will fail later.
+ raise NotImplementedError
+
+ def CalculateMaxImageSize(self, partition_size=None):
+ """Calculates the max image size by accounting for the verity metadata.
+
+ Args:
+ partition_size: The partition size, which defaults to self.partition_size
+ if unspecified.
+
+ Returns:
+ The size of the image adjusted for verity metadata.
+ """
+ if partition_size is None:
+ partition_size = self.partition_size
+ assert partition_size > 0, \
+ "Invalid partition size: {}".format(partition_size)
+
+ hi = partition_size
+ if hi % BLOCK_SIZE != 0:
+ hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
+
+ # verity tree and fec sizes depend on the partition size, which
+ # means this estimate is always going to be unnecessarily small
+ verity_size = GetVeritySize(hi, self.fec_supported)
+ lo = partition_size - verity_size
+ result = lo
+
+ # do a binary search for the optimal size
+ while lo < hi:
+ i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+ v = GetVeritySize(i, self.fec_supported)
+ if i + v <= partition_size:
+ if result < i:
+ result = i
+ verity_size = v
+ lo = i + BLOCK_SIZE
+ else:
+ hi = i
+
+ self.image_size = result
+ self.verity_size = verity_size
+
+ logger.info(
+ "Calculated image size for verity: partition_size %d, image_size %d, "
+ "verity_size %d", partition_size, result, verity_size)
+ return result
+
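+  # A standalone illustration of the search above (not used by the build): find
+  # the largest BLOCK_SIZE-aligned image size whose image-plus-overhead still
+  # fits in the partition. 'overhead' is a stand-in for GetVeritySize().
+  @staticmethod
+  def _ExampleMaxImageSizeSearch(partition_size, overhead):
+    lo = 0
+    hi = (partition_size // BLOCK_SIZE) * BLOCK_SIZE
+    result = 0
+    while lo < hi:
+      i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+      if i + overhead(i) <= partition_size:
+        result = max(result, i)
+        lo = i + BLOCK_SIZE
+      else:
+        hi = i
+    return result
+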
+ def Build(self, out_file):
+ """Creates an image that is verifiable using dm-verity.
+
+ Args:
+ out_file: the output image.
+
+ Raises:
+ AssertionError: On invalid partition sizes.
+ BuildVerityImageError: On other errors.
+ """
+ image_size = int(self.image_size)
+ tempdir_name = common.MakeTempDir(suffix="_verity_images")
+
+ # Get partial image paths.
+ verity_image_path = os.path.join(tempdir_name, "verity.img")
+ verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
+
+ # Build the verity tree and get the root hash and salt.
+ root_hash, salt = BuildVerityTree(out_file, verity_image_path)
+
+ # Build the metadata blocks.
+ BuildVerityMetadata(
+ image_size, verity_metadata_path, root_hash, salt, self.block_device,
+ self.signer_path, self.signer_key, self.signer_args,
+ self.verity_disable)
+
+ padding_size = self.partition_size - self.image_size - self.verity_size
+ assert padding_size >= 0
+
+ # Build the full verified image.
+ Append(
+ verity_image_path, verity_metadata_path,
+ "Failed to append verity metadata")
+
+ if self.fec_supported:
+ # Build FEC for the entire partition, including metadata.
+ verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
+ BuildVerityFEC(
+ out_file, verity_image_path, verity_fec_path, padding_size)
+ Append(verity_image_path, verity_fec_path, "Failed to append FEC")
+
+ Append2Simg(
+ out_file, verity_image_path, "Failed to append verity data")
+
+ def PadSparseImage(self, out_file):
+ sparse_image_size = GetSimgSize(out_file)
+ if sparse_image_size > self.image_size:
+ raise BuildVerityImageError(
+ "Error: image size of {} is larger than partition size of "
+ "{}".format(sparse_image_size, self.image_size))
+ ZeroPadSimg(out_file, self.image_size - sparse_image_size)
+
+
+class VerifiedBootVersion2VerityImageBuilder(VerityImageBuilder):
+ """A VerityImageBuilder for Verified Boot 2.0."""
+
+ AVB_HASH_FOOTER = 1
+ AVB_HASHTREE_FOOTER = 2
+
+ def __init__(self, partition_name, partition_size, footer_type, avbtool,
+ key_path, algorithm, salt, signing_args):
+ self.version = 2
+ self.partition_name = partition_name
+ self.partition_size = partition_size
+ self.footer_type = footer_type
+ self.avbtool = avbtool
+ self.algorithm = algorithm
+ self.key_path = key_path
+ self.salt = salt
+ self.signing_args = signing_args
+ self.image_size = None
+
+ def CalculateMinPartitionSize(self, image_size, size_calculator=None):
+ """Calculates min partition size for a given image size.
+
+ This is used when determining the partition size for a dynamic partition,
+ which should cover the given image size (for filesystem files) as well as
+ the verity metadata size.
+
+ Args:
+ image_size: The size of the image in question.
+ size_calculator: The function to calculate max image size
+ for a given partition size.
+
+ Returns:
+ The minimum partition size required to accommodate the image size.
+ """
+ if size_calculator is None:
+ size_calculator = self.CalculateMaxImageSize
+
+ # Use image size as partition size to approximate final partition size.
+ image_ratio = size_calculator(image_size) / float(image_size)
+
+ # Prepare a binary search for the optimal partition size.
+ lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - BLOCK_SIZE
+
+ # Ensure lo is small enough: max_image_size should <= image_size.
+ delta = BLOCK_SIZE
+ max_image_size = size_calculator(lo)
+ while max_image_size > image_size:
+ image_ratio = max_image_size / float(lo)
+ lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - delta
+ delta *= 2
+ max_image_size = size_calculator(lo)
+
+ hi = lo + BLOCK_SIZE
+
+ # Ensure hi is large enough: max_image_size should be >= image_size.
+ delta = BLOCK_SIZE
+ max_image_size = size_calculator(hi)
+ while max_image_size < image_size:
+ image_ratio = max_image_size / float(hi)
+ hi = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE + delta
+ delta *= 2
+ max_image_size = size_calculator(hi)
+
+ partition_size = hi
+
+ # Start to binary search.
+ while lo < hi:
+ mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+ max_image_size = size_calculator(mid)
+ if max_image_size >= image_size: # if mid can accommodate image_size
+ if mid < partition_size: # if a smaller partition size is found
+ partition_size = mid
+ hi = mid
+ else:
+ lo = mid + BLOCK_SIZE
+
+ logger.info(
+ "CalculateMinPartitionSize(%d): partition_size %d.", image_size,
+ partition_size)
+
+ return partition_size
+
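CalculateMinPartitionSize() above first brackets the answer from below and above, then binary-searches over BLOCK_SIZE-aligned candidates until it finds the smallest partition size whose usable image size still covers the filesystem image. A minimal standalone sketch of that lower-bound search, assuming a hypothetical size_calculator that models a flat 1 MiB of verity/AVB overhead (the real calculator shells out to avbtool):

    BLOCK_SIZE = 4096

    def min_partition_size(image_size, size_calculator):
        # Smallest BLOCK_SIZE-aligned partition size whose usable image size,
        # as reported by size_calculator, still covers image_size.
        lo = (image_size // BLOCK_SIZE) * BLOCK_SIZE
        hi = lo
        while size_calculator(hi) < image_size:
            hi += BLOCK_SIZE
        while lo < hi:
            mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
            if size_calculator(mid) >= image_size:
                hi = mid                  # mid is big enough; try smaller
            else:
                lo = mid + BLOCK_SIZE
        return hi

    # Hypothetical overhead model: 1 MiB of verity/AVB data per partition.
    calc = lambda partition_size: partition_size - (1 << 20)
    print(min_partition_size(50 << 20, calc))  # -> 53477376 (51 MiB)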
+ def CalculateDynamicPartitionSize(self, image_size):
+ self.partition_size = self.CalculateMinPartitionSize(image_size)
+ return self.partition_size
+
+ def CalculateMaxImageSize(self, partition_size=None):
+ """Calculates max image size for a given partition size.
+
+ Args:
+ partition_size: The partition size, which defaults to self.partition_size
+ if unspecified.
+
+ Returns:
+ The maximum image size.
+
+ Raises:
+ BuildVerityImageError: On error or an invalid image size.
+ """
+ if partition_size is None:
+ partition_size = self.partition_size
+ assert partition_size > 0, \
+ "Invalid partition size: {}".format(partition_size)
+
+ add_footer = ("add_hash_footer" if self.footer_type == self.AVB_HASH_FOOTER
+ else "add_hashtree_footer")
+ cmd = [self.avbtool, add_footer, "--partition_size",
+ str(partition_size), "--calc_max_image_size"]
+ cmd.extend(shlex.split(self.signing_args))
+
+ proc = common.Run(cmd)
+ output, _ = proc.communicate()
+ if proc.returncode != 0:
+ raise BuildVerityImageError(
+ "Failed to calculate max image size:\n{}".format(output))
+ image_size = int(output)
+ if image_size <= 0:
+ raise BuildVerityImageError(
+ "Invalid max image size: {}".format(output))
+ self.image_size = image_size
+ return image_size
+
+ def PadSparseImage(self, out_file):
+ # No-op as the padding is taken care of by avbtool.
+ pass
+
+ def Build(self, out_file):
+ """Adds dm-verity hashtree and AVB metadata to an image.
+
+ Args:
+ out_file: Path to image to modify.
+ """
+ add_footer = ("add_hash_footer" if self.footer_type == self.AVB_HASH_FOOTER
+ else "add_hashtree_footer")
+ cmd = [self.avbtool, add_footer,
+ "--partition_size", str(self.partition_size),
+ "--partition_name", self.partition_name,
+ "--image", out_file]
+ if self.key_path and self.algorithm:
+ cmd.extend(["--key", self.key_path, "--algorithm", self.algorithm])
+ if self.salt:
+ cmd.extend(["--salt", self.salt])
+ cmd.extend(shlex.split(self.signing_args))
+
+ proc = common.Run(cmd)
+ output, _ = proc.communicate()
+ if proc.returncode != 0:
+ raise BuildVerityImageError("Failed to add AVB footer: {}".format(output))
+
+
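A hedged usage sketch of the Verified Boot 2.0 builder defined above; the image path, key path, and algorithm name below are placeholders, and both calls ultimately shell out to the avbtool binary named in the constructor:

    # Placeholder values for illustration only.
    builder = VerifiedBootVersion2VerityImageBuilder(
        partition_name="system",
        partition_size=256 * 1024 * 1024,
        footer_type=VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
        avbtool="avbtool",
        key_path="path/to/key_rsa4096.pem",
        algorithm="SHA256_RSA4096",
        salt=None,
        signing_args="")
    max_image_size = builder.CalculateMaxImageSize()  # usable bytes for the filesystem
    builder.Build("out/system.img")  # appends the hashtree and AVB footer in place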
+class HashtreeInfoGenerationError(Exception):
+ """An Exception raised during hashtree info generation."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+class HashtreeInfo(object):
+ def __init__(self):
+ self.hashtree_range = None
+ self.filesystem_range = None
+ self.hash_algorithm = None
+ self.salt = None
+ self.root_hash = None
+
+
+def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
+ generator = None
+ if (info_dict.get("verity") == "true" and
+ info_dict.get("{}_verity_block_device".format(partition_name))):
+ partition_size = info_dict["{}_size".format(partition_name)]
+ fec_supported = info_dict.get("verity_fec") == "true"
+ generator = VerifiedBootVersion1HashtreeInfoGenerator(
+ partition_size, block_size, fec_supported)
+
+ return generator
+
+
+class HashtreeInfoGenerator(object):
+ def Generate(self, image):
+ raise NotImplementedError
+
+ def DecomposeSparseImage(self, image):
+ raise NotImplementedError
+
+ def ValidateHashtree(self):
+ raise NotImplementedError
+
+
+class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
+ """A class that parses the hashtree metadata for a given partition."""
+
+ def __init__(self, partition_size, block_size, fec_supported):
+ """Initializes the hashtree info generator with the partition layout properties.
+
+ Arguments:
+ partition_size: The whole size in bytes of a partition, including the
+ filesystem size, padding size, and verity size.
+ block_size: Expected size in bytes of each block for the sparse image.
+ fec_supported: True if the verity section contains fec data.
+ """
+
+ self.block_size = block_size
+ self.partition_size = partition_size
+ self.fec_supported = fec_supported
+
+ self.image = None
+ self.filesystem_size = None
+ self.hashtree_size = None
+ self.metadata_size = None
+
+ prop_dict = {
+ 'partition_size': str(partition_size),
+ 'verity': 'true',
+ 'verity_fec': 'true' if fec_supported else None,
+ # 'verity_block_device' needs to be present to indicate a verity-enabled
+ # partition.
+ 'verity_block_device': '',
+ # The following properties are only needed for signing the verity
+ # metadata, which this generator doesn't do.
+ 'verity_key': '',
+ 'verity_signer_cmd': None,
+ }
+ self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
+
+ self.hashtree_info = HashtreeInfo()
+
+ def DecomposeSparseImage(self, image):
+ """Calculates the verity size and offsets based on the input image size.
+
+ Since we already know the structure of a verity-enabled image to be
+ [filesystem, verity_hashtree, verity_metadata, fec_data], we can
+ calculate the size and offset of each section.
+ """
+
+ self.image = image
+ assert self.block_size == image.blocksize
+ assert self.partition_size == image.total_blocks * self.block_size, \
+ "partition size {} doesn't match the calculated image size." \
+ " total_blocks: {}".format(self.partition_size, image.total_blocks)
+
+ adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
+ assert adjusted_size % self.block_size == 0
+
+ verity_tree_size = GetVerityTreeSize(adjusted_size)
+ assert verity_tree_size % self.block_size == 0
+
+ metadata_size = GetVerityMetadataSize(adjusted_size)
+ assert metadata_size % self.block_size == 0
+
+ self.filesystem_size = adjusted_size
+ self.hashtree_size = verity_tree_size
+ self.metadata_size = metadata_size
+
+ self.hashtree_info.filesystem_range = RangeSet(
+ data=[0, adjusted_size / self.block_size])
+ self.hashtree_info.hashtree_range = RangeSet(
+ data=[adjusted_size / self.block_size,
+ (adjusted_size + verity_tree_size) / self.block_size])
+
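The RangeSet bookkeeping above maps byte sizes onto block indices (the Python 2 style / division is integer division for these block-aligned sizes). A toy version of the same arithmetic with small, made-up sizes:

    # Hypothetical sizes, all multiples of the block size.
    block_size = 4096
    filesystem_size = 1024 * block_size
    hashtree_size = 16 * block_size

    filesystem_blocks = (0, filesystem_size // block_size)  # blocks [0, 1024)
    hashtree_blocks = (filesystem_size // block_size,
                       (filesystem_size + hashtree_size) // block_size)  # [1024, 1040)
    print(filesystem_blocks, hashtree_blocks)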
+ def _ParseHashtreeMetadata(self):
+ """Parses the hash_algorithm, root_hash, and salt from the metadata block."""
+
+ metadata_start = self.filesystem_size + self.hashtree_size
+ metadata_range = RangeSet(
+ data=[metadata_start / self.block_size,
+ (metadata_start + self.metadata_size) / self.block_size])
+ meta_data = ''.join(self.image.ReadRangeSet(metadata_range))
+
+ # More info about the metadata structure is available in:
+ # system/extras/verity/build_verity_metadata.py
+ META_HEADER_SIZE = 268
+ header_bin = meta_data[0:META_HEADER_SIZE]
+ header = struct.unpack("II256sI", header_bin)
+
+ # header: magic_number, version, signature, table_len
+ assert header[0] == 0xb001b001, header[0]
+ table_len = header[3]
+ verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
+ table_entries = verity_table.rstrip().split()
+
+ # Expected verity table format: "1 block_device block_device block_size
+ # block_size data_blocks data_blocks hash_algorithm root_hash salt"
+ assert len(table_entries) == 10, "Unexpected verity table size {}".format(
+ len(table_entries))
+ assert (int(table_entries[3]) == self.block_size and
+ int(table_entries[4]) == self.block_size)
+ assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
+ int(table_entries[6]) * self.block_size == self.filesystem_size)
+
+ self.hashtree_info.hash_algorithm = table_entries[7]
+ self.hashtree_info.root_hash = table_entries[8]
+ self.hashtree_info.salt = table_entries[9]
+
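A small sketch of the 268-byte metadata header parsed above: pack a synthetic header with the same "II256sI" layout (magic number, version, 256-byte signature, table length) and unpack it again. All of the values below are made up for illustration:

    import struct

    META_HEADER_SIZE = 268  # 4 + 4 + 256 + 4 bytes, no padding needed
    table = b"1 /dev/blk /dev/blk 4096 4096 262144 262144 sha256 <root_hash> <salt>"
    header = struct.pack("II256sI", 0xb001b001, 0, b"\x00" * 256, len(table))
    assert len(header) == META_HEADER_SIZE

    magic, version, signature, table_len = struct.unpack("II256sI", header)
    assert magic == 0xb001b001
    assert table_len == len(table)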
+ def ValidateHashtree(self):
+ """Checks that we can reconstruct the verity hash tree."""
+
+ # Writes the filesystem section to a temp file, and calls the
+ # build_verity_tree executable to construct the hash tree.
+ adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
+ with open(adjusted_partition, "wb") as fd:
+ self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
+
+ generated_verity_tree = common.MakeTempFile(prefix="verity")
+ root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
+
+ # The salt should always be identical, as we use a fixed value.
+ assert salt == self.hashtree_info.salt, \
+ "Calculated salt {} doesn't match the one in metadata {}".format(
+ salt, self.hashtree_info.salt)
+
+ if root_hash != self.hashtree_info.root_hash:
+ logger.warning(
+ "Calculated root hash %s doesn't match the one in metadata %s",
+ root_hash, self.hashtree_info.root_hash)
+ return False
+
+ # Reads the generated hash tree and checks if it has the exact same bytes
+ # as the one in the sparse image.
+ with open(generated_verity_tree, "rb") as fd:
+ return fd.read() == ''.join(self.image.ReadRangeSet(
+ self.hashtree_info.hashtree_range))
+
+ def Generate(self, image):
+ """Parses and validates the hashtree info in a sparse image.
+
+ Returns:
+ hashtree_info: The information needed to reconstruct the hashtree.
+
+ Raises:
+ HashtreeInfoGenerationError: If we fail to generate the exact bytes of
+ the hashtree.
+ """
+
+ self.DecomposeSparseImage(image)
+ self._ParseHashtreeMetadata()
+
+ if not self.ValidateHashtree():
+ raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
+
+ return self.hashtree_info
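To tie the pieces above together, a hedged sketch of how the generator is meant to be driven. Here sparse_img stands for an already-loaded SparseImage-like object exposing blocksize, total_blocks, ReadRangeSet, and WriteRangeDataToFd as used above, and the dict values are placeholders shaped the way CreateHashtreeInfoGenerator expects:

    # Hypothetical inputs; 'sparse_img' is assumed to already exist.
    info_dict = {
        "verity": "true",
        "verity_fec": "true",
        "system_verity_block_device": "/dev/block/by-name/system",
        "system_size": 3221225472,
    }
    generator = CreateHashtreeInfoGenerator("system", 4096, info_dict)
    if generator:
        # Decomposes the image, parses the metadata block, and verifies that
        # the hashtree bytes can be rebuilt from the filesystem data.
        hashtree_info = generator.Generate(sparse_img)
        print(hashtree_info.hash_algorithm, hashtree_info.root_hash)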
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index 4954cf5744..ad9d957dde 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -22,10 +22,16 @@ java_library_host {
manifest: "SignApk.mf",
static_libs: [
"apksig",
- "bouncycastle",
- "bouncycastle-bcpkix",
- "conscrypt",
+ "bouncycastle-unbundled",
+ "bouncycastle-bcpkix-unbundled",
+ "conscrypt-nojarjar",
],
required: ["libconscrypt_openjdk_jni"],
+
+ // The post-build signing tools need signapk.jar (and its shared libraries,
+ // handled in their own Android.bp files)
+ dist: {
+ targets: ["droidcore"],
+ },
}
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
deleted file mode 100644
index ff54d6d84b..0000000000
--- a/tools/signapk/Android.mk
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-LOCAL_PATH := $(call my-dir)
-
-ifeq ($(TARGET_BUILD_APPS),)
-# The post-build signing tools need signapk.jar and its shared libraries,
-# but we don't need this if we're just doing unbundled apps.
-my_dist_files := $(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
- $(HOST_OUT_SHARED_LIBRARIES)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX)
-
-$(call dist-for-goals,droidcore,$(my_dist_files))
-my_dist_files :=
-endif
diff --git a/tools/signapk/src/com/android/signapk/CountingOutputStream.java b/tools/signapk/src/com/android/signapk/CountingOutputStream.java
new file mode 100644
index 0000000000..893a780440
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/CountingOutputStream.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.signapk;
+import java.io.OutputStream;
+import java.io.IOException;
+
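+/**
+ * An OutputStream wrapper that forwards every write to the wrapped stream
+ * while counting the total number of bytes written so far.
+ */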
+class CountingOutputStream extends OutputStream {
+ private final OutputStream mBase;
+ private long mWrittenBytes;
+
+ public CountingOutputStream(OutputStream base) {
+ mBase = base;
+ }
+
+ @Override
+ public void close() throws IOException {
+ mBase.close();
+ }
+
+ @Override
+ public void flush() throws IOException {
+ mBase.flush();
+ }
+
+ @Override
+ public void write(byte[] b) throws IOException {
+ mBase.write(b);
+ mWrittenBytes += b.length;
+ }
+
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ mBase.write(b, off, len);
+ mWrittenBytes += len;
+ }
+
+ @Override
+ public void write(int b) throws IOException {
+ mBase.write(b);
+ mWrittenBytes += 1;
+ }
+
+ public long getWrittenBytes() {
+ return mWrittenBytes;
+ }
+}
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index fdf6283a03..57973ec044 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -36,6 +36,7 @@ import org.conscrypt.OpenSSLProvider;
import com.android.apksig.ApkSignerEngine;
import com.android.apksig.DefaultApkSignerEngine;
+import com.android.apksig.Hints;
import com.android.apksig.apk.ApkUtils;
import com.android.apksig.apk.MinSdkVersionException;
import com.android.apksig.util.DataSink;
@@ -73,6 +74,7 @@ import java.security.spec.PKCS8EncodedKeySpec;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
+import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
@@ -80,6 +82,7 @@ import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.regex.Pattern;
+import java.util.zip.ZipEntry;
import javax.crypto.Cipher;
import javax.crypto.EncryptedPrivateKeyInfo;
@@ -372,11 +375,16 @@ class SignApk {
Pattern ignoredFilenamePattern,
ApkSignerEngine apkSigner,
JarOutputStream out,
+ CountingOutputStream outCounter,
long timestamp,
int defaultAlignment) throws IOException {
byte[] buffer = new byte[4096];
int num;
+ List<Pattern> pinPatterns = extractPinPatterns(in);
+ ArrayList<Hints.ByteRange> pinByteRanges = pinPatterns == null ? null : new ArrayList<>();
+ HashSet<String> namesToPin = new HashSet<>();
+
ArrayList<String> names = new ArrayList<String>();
for (Enumeration<JarEntry> e = in.entries(); e.hasMoreElements();) {
JarEntry entry = e.nextElement();
@@ -388,6 +396,16 @@ class SignApk {
&& (ignoredFilenamePattern.matcher(entryName).matches())) {
continue;
}
+ if (Hints.PIN_BYTE_RANGE_ZIP_ENTRY_NAME.equals(entryName)) {
+ continue; // We regenerate it below.
+ }
+ if (pinPatterns != null) {
+ for (Pattern pinPattern : pinPatterns) {
+ if (pinPattern.matcher(entryName).matches()) {
+ namesToPin.add(entryName);
+ }
+ }
+ }
names.add(entryName);
}
Collections.sort(names);
@@ -460,6 +478,7 @@ class SignApk {
outEntry.setExtra(extra);
offset += extra.length;
+ long entryHeaderStart = outCounter.getWrittenBytes();
out.putNextEntry(outEntry);
ApkSignerEngine.InspectJarEntryRequest inspectEntryRequest =
(apkSigner != null) ? apkSigner.outputJarEntry(name) : null;
@@ -475,10 +494,18 @@ class SignApk {
offset += num;
}
}
+ out.closeEntry();
out.flush();
if (inspectEntryRequest != null) {
inspectEntryRequest.done();
}
+
+ if (namesToPin.contains(name)) {
+ pinByteRanges.add(
+ new Hints.ByteRange(
+ entryHeaderStart,
+ outCounter.getWrittenBytes()));
+ }
}
// Copy all the non-STORED entries. We don't attempt to
@@ -494,6 +521,7 @@ class SignApk {
// Create a new entry so that the compressed len is recomputed.
JarEntry outEntry = new JarEntry(name);
outEntry.setTime(timestamp);
+ long entryHeaderStart = outCounter.getWrittenBytes();
out.putNextEntry(outEntry);
ApkSignerEngine.InspectJarEntryRequest inspectEntryRequest =
(apkSigner != null) ? apkSigner.outputJarEntry(name) : null;
@@ -507,11 +535,47 @@ class SignApk {
entryDataSink.consume(buffer, 0, num);
}
}
+ out.closeEntry();
out.flush();
if (inspectEntryRequest != null) {
inspectEntryRequest.done();
}
+
+ if (namesToPin.contains(name)) {
+ pinByteRanges.add(
+ new Hints.ByteRange(
+ entryHeaderStart,
+ outCounter.getWrittenBytes()));
+ }
}
+
+ if (pinByteRanges != null) {
+ // Cover central directory
+ pinByteRanges.add(
+ new Hints.ByteRange(outCounter.getWrittenBytes(),
+ Long.MAX_VALUE));
+ addPinByteRanges(out, pinByteRanges, timestamp);
+ }
+ }
+
+ private static List<Pattern> extractPinPatterns(JarFile in) throws IOException {
+ ZipEntry pinMetaEntry = in.getEntry(Hints.PIN_HINT_ASSET_ZIP_ENTRY_NAME);
+ if (pinMetaEntry == null) {
+ return null;
+ }
+ InputStream pinMetaStream = in.getInputStream(pinMetaEntry);
+ byte[] patternBlob = new byte[(int) pinMetaEntry.getSize()];
+ pinMetaStream.read(patternBlob);
+ return Hints.parsePinPatterns(patternBlob);
+ }
+
+ private static void addPinByteRanges(JarOutputStream outputJar,
+ ArrayList<Hints.ByteRange> pinByteRanges,
+ long timestamp) throws IOException {
+ JarEntry je = new JarEntry(Hints.PIN_BYTE_RANGE_ZIP_ENTRY_NAME);
+ je.setTime(timestamp);
+ outputJar.putNextEntry(je);
+ outputJar.write(Hints.encodeByteRangeList(pinByteRanges));
}
private static boolean shouldOutputApkEntry(
@@ -679,9 +743,11 @@ class SignApk {
public void write(OutputStream out) throws IOException {
try {
signer = new WholeFileSignerOutputStream(out, outputStream);
- JarOutputStream outputJar = new JarOutputStream(signer);
+ CountingOutputStream outputJarCounter = new CountingOutputStream(signer);
+ JarOutputStream outputJar = new JarOutputStream(outputJarCounter);
- copyFiles(inputJar, STRIP_PATTERN, null, outputJar, timestamp, 0);
+ copyFiles(inputJar, STRIP_PATTERN, null, outputJar,
+ outputJarCounter, timestamp, 0);
addOtacert(outputJar, publicKeyFile, timestamp);
signer.notifyClosing();
@@ -1065,11 +1131,14 @@ class SignApk {
// Build the output APK in memory, by copying input APK's ZIP entries across
// and then signing the output APK.
ByteArrayOutputStream v1SignedApkBuf = new ByteArrayOutputStream();
- JarOutputStream outputJar = new JarOutputStream(v1SignedApkBuf);
+ CountingOutputStream outputJarCounter =
+ new CountingOutputStream(v1SignedApkBuf);
+ JarOutputStream outputJar = new JarOutputStream(outputJarCounter);
// Use maximum compression for compressed entries because the APK lives forever
// on the system partition.
outputJar.setLevel(9);
- copyFiles(inputJar, null, apkSigner, outputJar, timestamp, alignment);
+ copyFiles(inputJar, null, apkSigner, outputJar,
+ outputJarCounter, timestamp, alignment);
ApkSignerEngine.OutputJarSignatureRequest addV1SignatureRequest =
apkSigner.outputJarEntries();
if (addV1SignatureRequest != null) {
diff --git a/tools/signtos/Android.bp b/tools/signtos/Android.bp
index b26631f1e8..688e7b8e71 100644
--- a/tools/signtos/Android.bp
+++ b/tools/signtos/Android.bp
@@ -21,7 +21,7 @@ java_library_host {
srcs: ["SignTos.java"],
manifest: "SignTos.mf",
static_libs: [
- "bouncycastle",
- "bouncycastle-bcpkix",
+ "bouncycastle-unbundled",
+ "bouncycastle-bcpkix-unbundled",
],
}
diff --git a/tools/test_extract_kernel.py b/tools/test_extract_kernel.py
new file mode 100644
index 0000000000..1a1cfcbdad
--- /dev/null
+++ b/tools/test_extract_kernel.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from extract_kernel import get_version, dump_version
+
+class ExtractKernelTest(unittest.TestCase):
+ def test_extract_version(self):
+ self.assertEqual("4.9.100", get_version(
+ b'Linux version 4.9.100-a123 (a@a) (a) a\n\x00', 0))
+ self.assertEqual("4.9.123", get_version(
+ b'Linux version 4.9.123 (@) () \n\x00', 0))
+
+ def test_dump_self(self):
+ self.assertEqual("4.9.1", dump_version(
+ b"trash\x00Linux version 4.8.8\x00trash\x00"
+ b"other trash Linux version 4.9.1-g3 (2@s) (2) a\n\x00"))
diff --git a/tools/vendor_buildinfo.sh b/tools/vendor_buildinfo.sh
deleted file mode 100755
index c12f7cbf52..0000000000
--- a/tools/vendor_buildinfo.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-echo "# begin build properties"
-echo "# autogenerated by vendor_buildinfo.sh"
-
-echo "ro.product.board=$TARGET_BOOTLOADER_BOARD_NAME"
-echo "ro.board.platform=$TARGET_BOARD_PLATFORM"
-
-echo "ro.product.vendor.manufacturer=$PRODUCT_MANUFACTURER"
-echo "ro.product.vendor.model=$PRODUCT_MODEL"
-echo "ro.product.vendor.brand=$PRODUCT_BRAND"
-echo "ro.product.vendor.name=$PRODUCT_NAME"
-echo "ro.product.vendor.device=$TARGET_DEVICE"
-
-echo "# end build properties"
diff --git a/tools/warn.py b/tools/warn.py
index 01398be550..c710164ff3 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -75,6 +75,7 @@ Use option --gencsv to output warning counts in CSV format.
# emit_js_data():
import argparse
+import cgi
import csv
import multiprocessing
import os
@@ -177,6 +178,9 @@ warn_patterns = [
{'category': 'make', 'severity': Severity.MEDIUM,
'description': 'Invalid SDK/NDK linking',
'patterns': [r".*: warning: .+ \(.+\) should not link to .+ \(.+\)"]},
+ {'category': 'make', 'severity': Severity.MEDIUM,
+ 'description': 'Duplicate header copy',
+ 'patterns': [r".*: warning: Duplicate header copy: .+"]},
{'category': 'C/C++', 'severity': Severity.HIGH, 'option': '-Wimplicit-function-declaration',
'description': 'Implicit function declaration',
'patterns': [r".*: warning: implicit declaration of function .+",
@@ -238,9 +242,11 @@ warn_patterns = [
'description': 'Unused parameter',
'patterns': [r".*: warning: unused parameter '.*'"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wunused',
- 'description': 'Unused function, variable or label',
+ 'description': 'Unused function, variable, label, comparison, etc.',
'patterns': [r".*: warning: '.+' defined but not used",
r".*: warning: unused function '.+'",
+ r".*: warning: unused label '.+'",
+ r".*: warning: relational comparison result unused",
r".*: warning: lambda capture .* is not used",
r".*: warning: private field '.+' is not used",
r".*: warning: unused variable '.+'"]},
@@ -494,48 +500,43 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: @Multibinds is a more efficient and declarative mechanism for ensuring that a set multibinding is present in the graph.',
- 'patterns': [r".*: warning: \[EmptySetMultibindingContributions\] .+"]},
- {'category': 'java',
- 'severity': Severity.LOW,
- 'description':
- 'Java: Add a private constructor to modules that will not be instantiated by Dagger.',
- 'patterns': [r".*: warning: \[PrivateConstructorForNoninstantiableModuleTest\] .+"]},
+ 'Java: Use parameter comments to document ambiguous literals',
+ 'patterns': [r".*: warning: \[BooleanParameter\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: @Binds is a more efficient and declarative mechanism for delegating a binding.',
- 'patterns': [r".*: warning: \[UseBinds\] .+"]},
+ 'Java: This class\'s name looks like a Type Parameter.',
+ 'patterns': [r".*: warning: \[ClassNamedLikeTypeParameter\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Fields that can be null should be annotated @Nullable',
- 'patterns': [r".*: warning: \[FieldMissingNullable\] .+"]},
+ 'Java: Field name is CONSTANT_CASE, but field is not static and final',
+ 'patterns': [r".*: warning: \[ConstantField\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Method parameters that aren\'t checked for null shouldn\'t be annotated @Nullable',
- 'patterns': [r".*: warning: \[ParameterNotNullable\] .+"]},
+ 'Java: @Multibinds is a more efficient and declarative mechanism for ensuring that a set multibinding is present in the graph.',
+ 'patterns': [r".*: warning: \[EmptySetMultibindingContributions\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Methods that can return null should be annotated @Nullable',
- 'patterns': [r".*: warning: \[ReturnMissingNullable\] .+"]},
+ 'Java: Prefer assertThrows to ExpectedException',
+ 'patterns': [r".*: warning: \[ExpectedExceptionRefactoring\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Use parameter comments to document ambiguous literals',
- 'patterns': [r".*: warning: \[BooleanParameter\] .+"]},
+ 'Java: This field is only assigned during initialization; consider making it final',
+ 'patterns': [r".*: warning: \[FieldCanBeFinal\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Field name is CONSTANT CASE, but field is not static and final',
- 'patterns': [r".*: warning: \[ConstantField\] .+"]},
+ 'Java: Fields that can be null should be annotated @Nullable',
+ 'patterns': [r".*: warning: \[FieldMissingNullable\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Deprecated item is not annotated with @Deprecated',
- 'patterns': [r".*: warning: \[DepAnn\] .+"]},
+ 'Java: Refactors uses of the JSR 305 @Immutable to Error Prone\'s annotation',
+ 'patterns': [r".*: warning: \[ImmutableRefactoring\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
@@ -544,11 +545,6 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Prefer \'L\' to \'l\' for the suffix to long literals',
- 'patterns': [r".*: warning: \[LongLiteralLowerCaseSuffix\] .+"]},
- {'category': 'java',
- 'severity': Severity.LOW,
- 'description':
'Java: A private method that does not reference the enclosing instance can be static',
'patterns': [r".*: warning: \[MethodCanBeStatic\] .+"]},
{'category': 'java',
@@ -579,123 +575,103 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Non-standard parameter comment; prefer `/*paramName=*/ arg`',
+ 'Java: Non-standard parameter comment; prefer `/* paramName= */ arg`',
'patterns': [r".*: warning: \[ParameterComment\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Utility classes (only static members) are not designed to be instantiated and should be made noninstantiable with a default constructor.',
- 'patterns': [r".*: warning: \[PrivateConstructorForUtilityClass\] .+"]},
+ 'Java: Method parameters that aren\'t checked for null shouldn\'t be annotated @Nullable',
+ 'patterns': [r".*: warning: \[ParameterNotNullable\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Unused imports',
- 'patterns': [r".*: warning: \[RemoveUnusedImports\] .+"]},
+ 'Java: Add a private constructor to modules that will not be instantiated by Dagger.',
+ 'patterns': [r".*: warning: \[PrivateConstructorForNoninstantiableModule\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: The default case of a switch should appear at the end of the last statement group',
- 'patterns': [r".*: warning: \[SwitchDefault\] .+"]},
+ 'Java: Utility classes (only static members) are not designed to be instantiated and should be made noninstantiable with a default constructor.',
+ 'patterns': [r".*: warning: \[PrivateConstructorForUtilityClass\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Unchecked exceptions do not need to be declared in the method signature.',
- 'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
+ 'Java: Unused imports',
+ 'patterns': [r".*: warning: \[RemoveUnusedImports\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
- 'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
+ 'Java: Methods that can return null should be annotated @Nullable',
+ 'patterns': [r".*: warning: \[ReturnMissingNullable\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Constructors and methods with the same name should appear sequentially with no other code in between',
- 'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
+ 'Java: Scopes on modules have no function and will soon be an error.',
+ 'patterns': [r".*: warning: \[ScopeOnModule\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Unnecessary call to NullPointerTester#setDefault',
- 'patterns': [r".*: warning: \[UnnecessarySetDefault\] .+"]},
+ 'Java: The default case of a switch should appear at the end of the last statement group',
+ 'patterns': [r".*: warning: \[SwitchDefault\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Using static imports for types is unnecessary',
- 'patterns': [r".*: warning: \[UnnecessaryStaticImport\] .+"]},
+ 'Java: Prefer assertThrows to @Test(expected=...)',
+ 'patterns': [r".*: warning: \[TestExceptionRefactoring\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Wildcard imports, static or otherwise, should not be used',
- 'patterns': [r".*: warning: \[WildcardImport\] .+"]},
+ 'Java: Unchecked exceptions do not need to be declared in the method signature.',
+ 'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: ',
- 'patterns': [r".*: warning: \[RemoveFieldPrefixes\] .+"]},
+ 'Java: Prefer assertThrows to try/fail',
+ 'patterns': [r".*: warning: \[TryFailRefactoring\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Prefer assertThrows to ExpectedException',
- 'patterns': [r".*: warning: \[ExpectedExceptionMigration\] .+"]},
+ 'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
+ 'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Logger instances are not constants -- they are mutable and have side effects -- and should not be named using CONSTANT CASE',
- 'patterns': [r".*: warning: \[LoggerVariableCase\] .+"]},
+ 'Java: Constructors and methods with the same name should appear sequentially with no other code in between. Please re-order or re-name methods.',
+ 'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Prefer assertThrows to @Test(expected=...)',
- 'patterns': [r".*: warning: \[TestExceptionMigration\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Public fields must be final.',
- 'patterns': [r".*: warning: \[NonFinalPublicFields\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Private fields that are only assigned in the initializer should be made final.',
- 'patterns': [r".*: warning: \[PrivateFieldsNotAssigned\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Lists returned by methods should be immutable.',
- 'patterns': [r".*: warning: \[ReturnedListNotImmutable\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Parameters to log methods should not be generated by a call to String.format() or MessageFormat.format().',
- 'patterns': [r".*: warning: \[SaferLoggerFormat\] .+"]},
+ 'Java: Unnecessary call to NullPointerTester#setDefault',
+ 'patterns': [r".*: warning: \[UnnecessarySetDefault\] .+"]},
{'category': 'java',
- 'severity': Severity.MEDIUM,
+ 'severity': Severity.LOW,
'description':
- 'Java: Parameters to log methods should not be generated by a call to toString(); see b/22986665.',
- 'patterns': [r".*: warning: \[SaferLoggerToString\] .+"]},
+ 'Java: Using static imports for types is unnecessary',
+ 'patterns': [r".*: warning: \[UnnecessaryStaticImport\] .+"]},
{'category': 'java',
- 'severity': Severity.MEDIUM,
+ 'severity': Severity.LOW,
'description':
- 'Java: A call to Binder.clearCallingIdentity() should be followed by Binder.restoreCallingIdentity() in a finally block. Otherwise the wrong Binder identity may be used by subsequent code.',
- 'patterns': [r".*: warning: \[BinderIdentityRestoredDangerously\] .+"]},
+ 'Java: @Binds is a more efficient and declarative mechanism for delegating a binding.',
+ 'patterns': [r".*: warning: \[UseBinds\] .+"]},
{'category': 'java',
- 'severity': Severity.MEDIUM,
+ 'severity': Severity.LOW,
'description':
- 'Java: Classes extending PreferenceActivity must implement isValidFragment such that it does not unconditionally return true to prevent vulnerability to fragment injection attacks.',
- 'patterns': [r".*: warning: \[FragmentInjection\] .+"]},
+ 'Java: Wildcard imports, static or otherwise, should not be used',
+ 'patterns': [r".*: warning: \[WildcardImport\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Subclasses of Fragment must be instantiable via Class#newInstance(): the class must be public, static and have a public nullary constructor',
- 'patterns': [r".*: warning: \[FragmentNotInstantiable\] .+"]},
+ 'Java: Method reference is ambiguous',
+ 'patterns': [r".*: warning: \[AmbiguousMethodReference\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Hardcoded reference to /sdcard',
- 'patterns': [r".*: warning: \[HardCodedSdCardPath\] .+"]},
+ 'Java: This method passes a pair of parameters through to String.format, but the enclosing method wasn\'t annotated @FormatMethod. Doing so gives compile-time rather than run-time protection against malformed format strings.',
+ 'patterns': [r".*: warning: \[AnnotateFormatMethod\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: A wakelock acquired with a timeout may be released by the system before calling `release`, even after checking `isHeld()`. If so, it will throw a RuntimeException. Please wrap in a try/catch block.',
- 'patterns': [r".*: warning: \[WakelockReleasedDangerously\] .+"]},
+ 'Java: Annotations should be positioned after Javadocs, but before modifiers.',
+ 'patterns': [r".*: warning: \[AnnotationPosition\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
@@ -704,103 +680,88 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Arguments are swapped in assertEquals-like call',
- 'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: An equality test between objects with incompatible types always returns false',
- 'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+ 'Java: Arrays do not override equals() or hashCode, so comparisons will be done on reference equality only. If neither deduplication nor lookup are needed, consider using a List instead. Otherwise, use IdentityHashMap/Set, a Map from a library that handles object arrays, or an Iterable/List of pairs.',
+ 'patterns': [r".*: warning: \[ArrayAsKeyOfSetOrMap\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: @AssistedInject and @Inject should not be used on different constructors in the same class.',
- 'patterns': [r".*: warning: \[AssistedInjectAndInjectOnConstructors\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Constructors on abstract classes are never directly @Injected, only the constructors of their subclasses can be @Inject\'ed.',
- 'patterns': [r".*: warning: \[InjectOnConstructorOfAbstractClass\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Injection frameworks currently don\'t understand Qualifiers in TYPE PARAMETER or TYPE USE contexts.',
- 'patterns': [r".*: warning: \[QualifierWithTypeUse\] .+"]},
+ 'Java: Arguments are swapped in assertEquals-like call',
+ 'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: This code declares a binding for a common value type without a Qualifier annotation.',
- 'patterns': [r".*: warning: \[BindingToUnqualifiedCommonType\] .+"]},
+ 'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
+ 'patterns': [r".*: warning: \[AssertFalse\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: This method is not annotated with @Inject, but it overrides a method that is annotated with @com.google.inject.Inject. Guice will inject this method, and it is recommended to annotate it explicitly.',
- 'patterns': [r".*: warning: \[OverridesGuiceInjectableMethod\] .+"]},
+ 'Java: The lambda passed to assertThrows should contain exactly one statement',
+ 'patterns': [r".*: warning: \[AssertThrowsMultipleStatements\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
- 'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
+ 'Java: This assertion throws an AssertionError if it fails, which will be caught by an enclosing try block.',
+ 'patterns': [r".*: warning: \[AssertionFailureIgnored\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Double-checked locking on non-volatile fields is unsafe',
- 'patterns': [r".*: warning: \[DoubleCheckedLocking\] .+"]},
+ 'Java: @AssistedInject and @Inject should not be used on different constructors in the same class.',
+ 'patterns': [r".*: warning: \[AssistedInjectAndInjectOnConstructors\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Annotations should always be immutable',
- 'patterns': [r".*: warning: \[ImmutableAnnotationChecker\] .+"]},
+ 'Java: Make toString(), hashCode() and equals() final in AutoValue classes, so it is clear to readers that AutoValue is not overriding them',
+ 'patterns': [r".*: warning: \[AutoValueFinalMethods\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Enums should always be immutable',
- 'patterns': [r".*: warning: \[ImmutableEnumChecker\] .+"]},
+ 'Java: Classes that implement Annotation must override equals and hashCode. Consider using AutoAnnotation instead of implementing Annotation by hand.',
+ 'patterns': [r".*: warning: \[BadAnnotationImplementation\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Writes to static fields should not be guarded by instance locks',
- 'patterns': [r".*: warning: \[StaticGuardedByInstance\] .+"]},
+ 'Java: Possible sign flip from narrowing conversion',
+ 'patterns': [r".*: warning: \[BadComparable\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Synchronizing on non-final fields is not safe: if the field is ever updated, different threads may end up locking on different objects.',
- 'patterns': [r".*: warning: \[SynchronizeOnNonFinalField\] .+"]},
+ 'Java: Importing nested classes/static methods/static fields with commonly-used names can make code harder to read, because it may not be clear from the context exactly which type is being referred to. Qualifying the name with that of the containing class can make the code clearer.',
+ 'patterns': [r".*: warning: \[BadImport\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Method reference is ambiguous',
- 'patterns': [r".*: warning: \[AmbiguousMethodReference\] .+"]},
+ 'Java: instanceof used in a way that is equivalent to a null check.',
+ 'patterns': [r".*: warning: \[BadInstanceof\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
- 'patterns': [r".*: warning: \[AssertFalse\] .+"]},
+ 'Java: BigDecimal#equals has surprising behavior: it also compares scale.',
+ 'patterns': [r".*: warning: \[BigDecimalEquals\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: This assertion throws an AssertionError if it fails, which will be caught by an enclosing try block.',
- 'patterns': [r".*: warning: \[AssertionFailureIgnored\] .+"]},
+ 'Java: new BigDecimal(double) loses precision in this case.',
+ 'patterns': [r".*: warning: \[BigDecimalLiteralDouble\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Classes that implement Annotation must override equals and hashCode. Consider using AutoAnnotation instead of implementing Annotation by hand.',
- 'patterns': [r".*: warning: \[BadAnnotationImplementation\] .+"]},
+ 'Java: A call to Binder.clearCallingIdentity() should be followed by Binder.restoreCallingIdentity() in a finally block. Otherwise the wrong Binder identity may be used by subsequent code.',
+ 'patterns': [r".*: warning: \[BinderIdentityRestoredDangerously\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Possible sign flip from narrowing conversion',
- 'patterns': [r".*: warning: \[BadComparable\] .+"]},
+ 'Java: This code declares a binding for a common value type without a Qualifier annotation.',
+ 'patterns': [r".*: warning: \[BindingToUnqualifiedCommonType\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: BigDecimal(double) and BigDecimal.valueOf(double) may lose precision, prefer BigDecimal(String) or BigDecimal(long)',
- 'patterns': [r".*: warning: \[BigDecimalLiteralDouble\] .+"]},
+ 'Java: valueOf or autoboxing provides better time and space performance',
+ 'patterns': [r".*: warning: \[BoxedPrimitiveConstructor\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: valueOf or autoboxing provides better time and space performance',
- 'patterns': [r".*: warning: \[BoxedPrimitiveConstructor\] .+"]},
+ 'Java: ByteBuffer.array() shouldn\'t be called unless ByteBuffer.arrayOffset() is used or if the ByteBuffer was initialized using ByteBuffer.wrap() or ByteBuffer.allocate().',
+ 'patterns': [r".*: warning: \[ByteBufferBackingArray\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
@@ -834,6 +795,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Providing Closeable resources makes their lifecycle unclear',
+ 'patterns': [r".*: warning: \[CloseableProvides\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: The type of the array parameter of Collection.toArray needs to be compatible with the array type',
'patterns': [r".*: warning: \[CollectionToArraySafeParameter\] .+"]},
{'category': 'java',
@@ -864,26 +830,71 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Implicit use of the platform default charset, which can result in differing behavior between JVM executions or incorrect behavior if the encoding of the data source doesn\'t match expectations.',
+ 'Java: Implicit use of the platform default charset, which can result in differing behaviour between JVM executions or incorrect behavior if the encoding of the data source doesn\'t match expectations.',
'patterns': [r".*: warning: \[DefaultCharset\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Avoid deprecated Thread methods; read the method\'s javadoc for details.',
+ 'patterns': [r".*: warning: \[DeprecatedThreadMethods\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Prefer collection factory methods or builders to the double-brace initialization pattern.',
+ 'patterns': [r".*: warning: \[DoubleBraceInitialization\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Double-checked locking on non-volatile fields is unsafe',
+ 'patterns': [r".*: warning: \[DoubleCheckedLocking\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Empty top-level type declaration',
'patterns': [r".*: warning: \[EmptyTopLevelDeclaration\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: equals() implementation may throw NullPointerException when given null',
+ 'patterns': [r".*: warning: \[EqualsBrokenForNull\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Overriding Object#equals in a non-final class by using getClass rather than instanceof breaks substitutability of subclasses.',
+ 'patterns': [r".*: warning: \[EqualsGetClass\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Classes that override equals should also override hashCode.',
'patterns': [r".*: warning: \[EqualsHashCode\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: An equality test between objects with incompatible types always returns false',
+ 'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: The contract of #equals states that it should return false for incompatible types, while this implementation may throw ClassCastException.',
+ 'patterns': [r".*: warning: \[EqualsUnsafeCast\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Implementing #equals by just comparing hashCodes is fragile. Hashes collide frequently, and this will lead to false positives in #equals.',
+ 'patterns': [r".*: warning: \[EqualsUsingHashCode\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Calls to ExpectedException#expect should always be followed by exactly one statement.',
'patterns': [r".*: warning: \[ExpectedExceptionChecker\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: When only using JUnit Assert\'s static methods, you should import statically instead of extending.',
+ 'patterns': [r".*: warning: \[ExtendingJUnitAssert\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Switch case may fall through',
'patterns': [r".*: warning: \[FallThrough\] .+"]},
{'category': 'java',
@@ -899,11 +910,26 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This fuzzy equality check is using a tolerance less than the gap to the next number. You may want a less restrictive tolerance, or to assert equality.',
+ 'patterns': [r".*: warning: \[FloatingPointAssertionWithinEpsilon\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Floating point literal loses precision',
'patterns': [r".*: warning: \[FloatingPointLiteralPrecision\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Classes extending PreferenceActivity must implement isValidFragment such that it does not unconditionally return true to prevent vulnerability to fragment injection attacks.',
+ 'patterns': [r".*: warning: \[FragmentInjection\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Subclasses of Fragment must be instantiable via Class#newInstance(): the class must be public, static and have a public nullary constructor',
+ 'patterns': [r".*: warning: \[FragmentNotInstantiable\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Overloads will be ambiguous when passing lambda arguments',
'patterns': [r".*: warning: \[FunctionalInterfaceClash\] .+"]},
{'category': 'java',
@@ -919,21 +945,56 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Hardcoded reference to /sdcard',
+ 'patterns': [r".*: warning: \[HardCodedSdCardPath\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Hiding fields of superclasses may cause confusion and errors',
'patterns': [r".*: warning: \[HidingField\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Annotations should always be immutable',
+ 'patterns': [r".*: warning: \[ImmutableAnnotationChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Enums should always be immutable',
+ 'patterns': [r".*: warning: \[ImmutableEnumChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: This annotation has incompatible modifiers as specified by its @IncompatibleModifiers annotation',
'patterns': [r".*: warning: \[IncompatibleModifiers\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: It is confusing to have a field and a parameter under the same scope that differ only in capitalization.',
+ 'patterns': [r".*: warning: \[InconsistentCapitalization\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Including fields in hashCode which are not compared in equals violates the contract of hashCode.',
+ 'patterns': [r".*: warning: \[InconsistentHashCode\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
+ 'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: This for loop increments the same variable in the header and in the body',
'patterns': [r".*: warning: \[IncrementInForLoopAndHeader\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Constructors on abstract classes are never directly @Injected, only the constructors of their subclasses can be @Inject\'ed.',
+ 'patterns': [r".*: warning: \[InjectOnConstructorOfAbstractClass\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Please also override int read(byte[], int, int), otherwise multi-byte reads from this input stream are likely to be slow.',
'patterns': [r".*: warning: \[InputStreamSlowMultibyteRead\] .+"]},
{'category': 'java',
@@ -949,6 +1010,21 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This @param tag doesn\'t refer to a parameter of the method.',
+ 'patterns': [r".*: warning: \[InvalidParam\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: This tag is invalid.',
+ 'patterns': [r".*: warning: \[InvalidTag\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: The documented method doesn\'t actually throw this checked exception.',
+ 'patterns': [r".*: warning: \[InvalidThrows\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Class should not implement both `Iterable` and `Iterator`',
'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
{'category': 'java',
@@ -979,11 +1055,21 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Calls to Lock#lock should be immediately followed by a try block which releases the lock.',
+ 'patterns': [r".*: warning: \[LockNotBeforeTry\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Assignment where a boolean expression was expected; use == if this assignment wasn\'t expected or add parentheses for clarity.',
'patterns': [r".*: warning: \[LogicalAssignment\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Math.abs does not always give a positive result. Please consider other methods for positive random numbers.',
+ 'patterns': [r".*: warning: \[MathAbsoluteRandom\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Switches on enum types should either handle all values, or have a default case.',
'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
{'category': 'java',
@@ -1004,6 +1090,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: A collection or proto builder was created, but its values were never accessed.',
+ 'patterns': [r".*: warning: \[ModifiedButNotUsed\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Modifying a collection while iterating over it in a loop may cause a ConcurrentModificationException to be thrown.',
'patterns': [r".*: warning: \[ModifyCollectionInEnhancedForLoop\] .+"]},
{'category': 'java',
@@ -1034,6 +1125,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Instead of returning a functional type, return the actual type that the returned function would return and use lambdas at use site.',
+ 'patterns': [r".*: warning: \[NoFunctionalReturnType\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: This update of a volatile variable is non-atomic',
'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
{'category': 'java',
@@ -1054,6 +1150,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Dereference of possibly-null value',
+ 'patterns': [r".*: warning: \[NullableDereference\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: @Nullable should not be used for primitive types since they cannot be null',
'patterns': [r".*: warning: \[NullablePrimitive\] .+"]},
{'category': 'java',
@@ -1064,6 +1165,16 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Calling toString on Objects that don\'t override toString() doesn\'t provide useful information',
+ 'patterns': [r".*: warning: \[ObjectToString\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Objects.hashCode(Object o) should not be passed a primitive value',
+ 'patterns': [r".*: warning: \[ObjectsHashCodePrimitive\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Use grouping parenthesis to make the operator precedence explicit',
'patterns': [r".*: warning: \[OperatorPrecedence\] .+"]},
{'category': 'java',
@@ -1089,6 +1200,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This method is not annotated with @Inject, but it overrides a method that is annotated with @com.google.inject.Inject. Guice will inject this method, and it is recommended to annotate it explicitly.',
+ 'patterns': [r".*: warning: \[OverridesGuiceInjectableMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Detects `/* name= */`-style comments on actual parameters where the name doesn\'t match the formal parameter',
'patterns': [r".*: warning: \[ParameterName\] .+"]},
{'category': 'java',
@@ -1104,8 +1220,13 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Protobuf fields cannot be null, so this check is redundant',
- 'patterns': [r".*: warning: \[ProtoFieldPreconditionsCheckNotNull\] .+"]},
+ 'Java: A field on a protocol buffer was set twice in the same chained expression.',
+ 'patterns': [r".*: warning: \[ProtoRedundantSet\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Protos should not be used as a key to a map, in a set, or in a contains method on a descendant of a collection. Protos have non deterministic ordering and proto equality is deep, which is a performance issue.',
+ 'patterns': [r".*: warning: \[ProtosAsKeyOfSetOrMap\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
@@ -1114,6 +1235,16 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Qualifiers/Scope annotations on @Inject methods don\'t have any effect. Move the qualifier annotation to the binding location.',
+ 'patterns': [r".*: warning: \[QualifierOrScopeOnInjectMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Injection frameworks currently don\'t understand Qualifiers in TYPE_PARAMETER or TYPE_USE contexts.',
+ 'patterns': [r".*: warning: \[QualifierWithTypeUse\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: reachabilityFence should always be called inside a finally block',
'patterns': [r".*: warning: \[ReachabilityFenceUsage\] .+"]},
{'category': 'java',
@@ -1134,11 +1265,21 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Void methods should not have a @return tag.',
+ 'patterns': [r".*: warning: \[ReturnFromVoid\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Writes to static fields should not be guarded by instance locks',
+ 'patterns': [r".*: warning: \[StaticGuardedByInstance\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: A static variable or method should be qualified with a class name, not expression',
'patterns': [r".*: warning: \[StaticQualifiedUsingExpression\] .+"]},
{'category': 'java',
@@ -1154,223 +1295,218 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: String.split should never take only a single argument; it has surprising behavior',
- 'patterns': [r".*: warning: \[StringSplit\] .+"]},
- {'category': 'java',
- 'severity': Severity.MEDIUM,
- 'description':
- 'Java: Prefer Splitter to String.split',
+ 'Java: String.split(String) has surprising behavior',
'patterns': [r".*: warning: \[StringSplitter\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
- 'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
+ 'Java: SWIG generated code that can\'t call a C++ destructor will leak memory',
+ 'patterns': [r".*: warning: \[SwigMemoryLeak\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Thread.join needs to be surrounded by a loop until it succeeds, as in Uninterruptibles.joinUninterruptibly.',
- 'patterns': [r".*: warning: \[ThreadJoinLoop\] .+"]},
+ 'Java: Synchronizing on non-final fields is not safe: if the field is ever updated, different threads may end up locking on different objects.',
+ 'patterns': [r".*: warning: \[SynchronizeOnNonFinalField\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: ThreadLocals should be stored in static fields',
- 'patterns': [r".*: warning: \[ThreadLocalUsage\] .+"]},
+ 'Java: Code that contains System.exit() is untestable.',
+ 'patterns': [r".*: warning: \[SystemExitOutsideMain\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
- 'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
+ 'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
+ 'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Truth Library assert is called on a constant.',
- 'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
+ 'Java: Thread.join needs to be surrounded by a loop until it succeeds, as in Uninterruptibles.joinUninterruptibly.',
+ 'patterns': [r".*: warning: \[ThreadJoinLoop\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Type parameter declaration overrides another type parameter already declared',
- 'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
+ 'Java: ThreadLocals should be stored in static fields',
+ 'patterns': [r".*: warning: \[ThreadLocalUsage\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Declaring a type parameter that is only used in the return type is a misuse of generics: operations on the type parameter are unchecked, it hides unsafe casts at invocations of the method, and it interacts badly with method overload resolution.',
- 'patterns': [r".*: warning: \[TypeParameterUnusedInFormals\] .+"]},
+ 'Java: Relying on the thread scheduler is discouraged; see Effective Java Item 72 (2nd edition) / 84 (3rd edition).',
+ 'patterns': [r".*: warning: \[ThreadPriorityCheck\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Creation of a Set/HashSet/HashMap of java.net.URL. equals() and hashCode() of java.net.URL class make blocking internet connections.',
- 'patterns': [r".*: warning: \[URLEqualsHashCode\] .+"]},
+ 'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
+ 'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Switch handles all enum values; an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
- 'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
+ 'Java: An implementation of Object.toString() should never return null.',
+ 'patterns': [r".*: warning: \[ToStringReturnsNull\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Finalizer may run before native code finishes execution',
- 'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
+ 'Java: The actual and expected values appear to be swapped, which results in poor assertion failure messages. The actual value should come first.',
+ 'patterns': [r".*: warning: \[TruthAssertExpected\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Unsynchronized method overrides a synchronized method.',
- 'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
+ 'Java: Truth Library assert is called on a constant.',
+ 'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
- 'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
+ 'Java: Argument is not compatible with the subject\'s type.',
+ 'patterns': [r".*: warning: \[TruthIncompatibleType\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Non-constant variable missing @Var annotation',
- 'patterns': [r".*: warning: \[Var\] .+"]},
+ 'Java: Type parameter declaration shadows another named type',
+ 'patterns': [r".*: warning: \[TypeNameShadowing\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
- 'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
+ 'Java: Type parameter declaration overrides another type parameter already declared',
+ 'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Pluggable Type checker internal error',
- 'patterns': [r".*: warning: \[PluggableTypeChecker\] .+"]},
+ 'Java: Declaring a type parameter that is only used in the return type is a misuse of generics: operations on the type parameter are unchecked, it hides unsafe casts at invocations of the method, and it interacts badly with method overload resolution.',
+ 'patterns': [r".*: warning: \[TypeParameterUnusedInFormals\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Invalid message format-style format specifier ({0}), expected printf-style (%s)',
- 'patterns': [r".*: warning: \[FloggerMessageFormat\] .+"]},
+ 'Java: Avoid hash-based containers of java.net.URL--the containers rely on equals() and hashCode(), which cause java.net.URL to make blocking internet connections.',
+ 'patterns': [r".*: warning: \[URLEqualsHashCode\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Logger level check is already implied in the log() call. An explicit at[Level]().isEnabled() check is redundant.',
- 'patterns': [r".*: warning: \[FloggerRedundantIsEnabled\] .+"]},
+ 'Java: Collection, Iterable, Multimap, and Queue do not have well-defined equals behavior',
+ 'patterns': [r".*: warning: \[UndefinedEquals\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Calling withCause(Throwable) with an inline allocated Throwable is discouraged. Consider using withStackTrace(StackSize) instead, and specifying a reduced stack size (e.g. SMALL, MEDIUM or LARGE) instead of FULL, to improve performance.',
- 'patterns': [r".*: warning: \[FloggerWithCause\] .+"]},
+ 'Java: Switch handles all enum values: an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
+ 'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Use withCause to associate Exceptions with log statements',
- 'patterns': [r".*: warning: \[FloggerWithoutCause\] .+"]},
+ 'Java: Unnecessary use of grouping parentheses',
+ 'patterns': [r".*: warning: \[UnnecessaryParentheses\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: No bug exists to track an ignored test',
- 'patterns': [r".*: warning: \[IgnoredTestWithoutBug\] .+"]},
+ 'Java: Finalizer may run before native code finishes execution',
+ 'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: @Ignore is preferred to @Suppress for JUnit4 tests. @Suppress may silently fail in JUnit4 (that is, tests may run anyway.)',
- 'patterns': [r".*: warning: \[JUnit4SuppressWithoutIgnore\] .+"]},
+ 'Java: Prefer `asSubclass` instead of casting the result of `newInstance`, to detect classes of incorrect type before invoking their constructors. This way, if the class is of the incorrect type, it will throw an exception before invoking its constructor.',
+ 'patterns': [r".*: warning: \[UnsafeReflectiveConstructionCast\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Medium and large test classes should document why they are medium or large',
- 'patterns': [r".*: warning: \[JUnit4TestAttributeMissing\] .+"]},
+ 'Java: Unsynchronized method overrides a synchronized method.',
+ 'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: java.net.IDN implements the older IDNA2003 standard. Prefer com.google.i18n.Idn, which implements the newer UTS #46 standard',
- 'patterns': [r".*: warning: \[JavaNetIdn\] .+"]},
+ 'Java: Unused.',
+ 'patterns': [r".*: warning: \[Unused\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Consider requiring strict parsing on JodaDurationFlag instances. Before adjusting existing flags, check the documentation and your existing configuration to avoid crashes!',
- 'patterns': [r".*: warning: \[JodaDurationFlagStrictParsing\] .+"]},
+ 'Java: This catch block catches an exception and re-throws another, but swallows the caught exception rather than setting it as a cause. This can make debugging harder.',
+ 'patterns': [r".*: warning: \[UnusedException\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Logging an exception and throwing it (or a new exception) for the same exceptional situation is an anti-pattern.',
- 'patterns': [r".*: warning: \[LogAndThrow\] .+"]},
+ 'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
+ 'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: FormattingLogger uses wrong or mismatched format string',
- 'patterns': [r".*: warning: \[MisusedFormattingLogger\] .+"]},
+ 'Java: Non-constant variable missing @Var annotation',
+ 'patterns': [r".*: warning: \[Var\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Flags should be final',
- 'patterns': [r".*: warning: \[NonFinalFlag\] .+"]},
+ 'Java: variableName and type with the same name would refer to the static field instead of the class',
+ 'patterns': [r".*: warning: \[VariableNameSameAsType\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Reading a flag from a static field or initializer block will cause it to always receive the default value and will cause an IllegalFlagStateException if the flag is ever set.',
- 'patterns': [r".*: warning: \[StaticFlagUsage\] .+"]},
+ 'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
+ 'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Apps must use BuildCompat.isAtLeastO to check whether they\'re running on Android O',
- 'patterns': [r".*: warning: \[UnsafeSdkVersionCheck\] .+"]},
+ 'Java: A wakelock acquired with a timeout may be released by the system before calling `release`, even after checking `isHeld()`. If so, it will throw a RuntimeException. Please wrap in a try/catch block.',
+ 'patterns': [r".*: warning: \[WakelockReleasedDangerously\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Logging tag cannot be longer than 23 characters.',
- 'patterns': [r".*: warning: \[LogTagLength\] .+"]},
+ 'Java: AndroidInjection.inject() should always be invoked before calling super.lifecycleMethod()',
+ 'patterns': [r".*: warning: \[AndroidInjectionBeforeSuper\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Relative class name passed to ComponentName constructor',
- 'patterns': [r".*: warning: \[RelativeComponentName\] .+"]},
+ 'Java: Use of class, field, or method that is not compatible with legacy Android devices',
+ 'patterns': [r".*: warning: \[AndroidJdkLibsChecker\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Explicitly enumerate all cases in switch statements for certain enum types.',
- 'patterns': [r".*: warning: \[EnumerateAllCasesInEnumSwitch\] .+"]},
+ 'Java: Reference equality used to compare arrays',
+ 'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Do not call assumeTrue(tester.getExperimentValueFor(...)). Use @RequireEndToEndTestExperiment instead.',
- 'patterns': [r".*: warning: \[JUnitAssumeExperiment\] .+"]},
+ 'Java: Arrays.fill(Object[], Object) called with incompatible types.',
+ 'patterns': [r".*: warning: \[ArrayFillIncompatibleType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: The accessed field or method is not visible here. Note that the default production visibility for @VisibleForTesting is Visibility.PRIVATE.',
- 'patterns': [r".*: warning: \[VisibleForTestingChecker\] .+"]},
+ 'Java: hashcode method on array does not hash array contents',
+ 'patterns': [r".*: warning: \[ArrayHashCode\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Detects errors encountered building Error Prone plugins',
- 'patterns': [r".*: warning: \[ErrorPronePluginCorrectness\] .+"]},
+ 'Java: Calling toString on an array does not provide useful information',
+ 'patterns': [r".*: warning: \[ArrayToString\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Parcelable CREATOR fields should be Creator\u003cT>',
- 'patterns': [r".*: warning: \[ParcelableCreatorType\] .+"]},
+ 'Java: Arrays.asList does not autobox primitive arrays, as one might expect.',
+ 'patterns': [r".*: warning: \[ArraysAsListPrimitiveArray\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Enforce reflected Parcelables are kept by Proguard',
- 'patterns': [r".*: warning: \[ReflectedParcelable\] .+"]},
+ 'Java: @AssistedInject and @Inject cannot be used on the same constructor.',
+ 'patterns': [r".*: warning: \[AssistedInjectAndInjectOnSameConstructor\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Any class that extends IntentService should have @Nullable notation on method onHandleIntent(@Nullable Intent intent) and handle the case if intent is null.',
- 'patterns': [r".*: warning: \[OnHandleIntentNullableChecker\] .+"]},
+ 'Java: AsyncCallable should not return a null Future, only a Future whose result is null.',
+ 'patterns': [r".*: warning: \[AsyncCallableReturnsNull\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: In many cases, randomUUID is not necessary, and it slows the performance, which can be quite severe especially when this operation happens at start up time. Consider replacing it with cheaper alternatives, like object.hashCode() or IdGenerator.INSTANCE.getRandomId()',
- 'patterns': [r".*: warning: \[UUIDChecker\] .+"]},
+ 'Java: AsyncFunction should not return a null Future, only a Future whose result is null.',
+ 'patterns': [r".*: warning: \[AsyncFunctionReturnsNull\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: DynamicActivity.findViewById(int) is slow and should not be used inside View.onDraw(Canvas)!',
- 'patterns': [r".*: warning: \[NoFindViewByIdInOnDrawChecker\] .+"]},
+ 'Java: @AutoFactory and @Inject should not be used in the same type.',
+ 'patterns': [r".*: warning: \[AutoFactoryAtInject\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Passing Throwable/Exception argument to the message format L.x(). Calling L.w(tag, message, ex) instead of L.w(tag, ex, message)',
- 'patterns': [r".*: warning: \[WrongThrowableArgumentInLogChecker\] .+"]},
+ 'Java: Arguments to AutoValue constructor are in the wrong order',
+ 'patterns': [r".*: warning: \[AutoValueConstructorOrderChecker\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: New splicers are disallowed on paths that are being Libsearched',
- 'patterns': [r".*: warning: \[BlacklistedSplicerPathChecker\] .+"]},
+ 'Java: Shift by an amount that is out of range',
+ 'patterns': [r".*: warning: \[BadShiftAmount\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
@@ -1379,18 +1515,18 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Log tag too long, cannot exceed 23 characters.',
- 'patterns': [r".*: warning: \[IsLoggableTagLength\] .+"]},
+ 'Java: The called constructor accepts a parameter with the same name and type as one of its caller\'s parameters, but its caller doesn\'t pass that parameter to it. It\'s likely that it was intended to.',
+ 'patterns': [r".*: warning: \[ChainingConstructorIgnoresParameter\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Certain resources in `android.R.string` have names that do not match their content',
- 'patterns': [r".*: warning: \[MislabeledAndroidString\] .+"]},
+ 'Java: Ignored return value of method that is annotated with @CheckReturnValue',
+ 'patterns': [r".*: warning: \[CheckReturnValue\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Return value of android.graphics.Rect.intersect() must be checked',
- 'patterns': [r".*: warning: \[RectIntersectReturnValueIgnored\] .+"]},
+ 'Java: The source file name should match the name of the top-level class it contains',
+ 'patterns': [r".*: warning: \[ClassName\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
@@ -1399,228 +1535,233 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: @CompatibleWith\'s value is not a type argument.',
- 'patterns': [r".*: warning: \[CompatibleWithAnnotationMisuse\] .+"]},
+ 'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+ 'patterns': [r".*: warning: \[ComparableType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Passing argument to a generic method with an incompatible type.',
- 'patterns': [r".*: warning: \[IncompatibleArgumentType\] .+"]},
+ 'Java: this == null is always false, this != null is always true',
+ 'patterns': [r".*: warning: \[ComparingThisWithNull\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Invalid printf-style format string',
- 'patterns': [r".*: warning: \[FormatString\] .+"]},
+ 'Java: This comparison method violates the contract',
+ 'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Invalid format string passed to formatting method.',
- 'patterns': [r".*: warning: \[FormatStringAnnotation\] .+"]},
+ 'Java: Comparison to value that is out of range for the compared type',
+ 'patterns': [r".*: warning: \[ComparisonOutOfRange\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
- 'patterns': [r".*: warning: \[GuardedBy\] .+"]},
+ 'Java: @CompatibleWith\'s value is not a type argument.',
+ 'patterns': [r".*: warning: \[CompatibleWithAnnotationMisuse\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Type declaration annotated with @Immutable is not immutable',
- 'patterns': [r".*: warning: \[Immutable\] .+"]},
+ 'Java: Non-compile-time constant expression passed to parameter with @CompileTimeConstant type annotation.',
+ 'patterns': [r".*: warning: \[CompileTimeConstant\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This method does not acquire the locks specified by its @LockMethod annotation',
- 'patterns': [r".*: warning: \[LockMethodChecker\] .+"]},
+ 'Java: Non-trivial compile time constant boolean expressions shouldn\'t be used.',
+ 'patterns': [r".*: warning: \[ComplexBooleanConstant\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This method does not acquire the locks specified by its @UnlockMethod annotation',
- 'patterns': [r".*: warning: \[UnlockMethod\] .+"]},
+ 'Java: A conditional expression with numeric operands of differing types will perform binary numeric promotion of the operands; when these operands are of reference types, the expression\'s result may not be of the expected type.',
+ 'patterns': [r".*: warning: \[ConditionalExpressionNumericPromotion\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Reference equality used to compare arrays',
- 'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
+ 'Java: Compile-time constant expression overflows',
+ 'patterns': [r".*: warning: \[ConstantOverflow\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Arrays.fill(Object[], Object) called with incompatible types.',
- 'patterns': [r".*: warning: \[ArrayFillIncompatibleType\] .+"]},
+ 'Java: Dagger @Provides methods may not return null unless annotated with @Nullable',
+ 'patterns': [r".*: warning: \[DaggerProvidesNull\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: hashcode method on array does not hash array contents',
- 'patterns': [r".*: warning: \[ArrayHashCode\] .+"]},
+ 'Java: Exception created but not thrown',
+ 'patterns': [r".*: warning: \[DeadException\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Calling toString on an array does not provide useful information',
- 'patterns': [r".*: warning: \[ArrayToString\] .+"]},
+ 'Java: Thread created but not started',
+ 'patterns': [r".*: warning: \[DeadThread\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Arrays.asList does not autobox primitive arrays, as one might expect.',
- 'patterns': [r".*: warning: \[ArraysAsListPrimitiveArray\] .+"]},
+ 'Java: Deprecated item is not annotated with @Deprecated',
+ 'patterns': [r".*: warning: \[DepAnn\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: AsyncCallable should not return a null Future, only a Future whose result is null.',
- 'patterns': [r".*: warning: \[AsyncCallableReturnsNull\] .+"]},
+ 'Java: Division by integer literal zero',
+ 'patterns': [r".*: warning: \[DivZero\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: AsyncFunction should not return a null Future, only a Future whose result is null.',
- 'patterns': [r".*: warning: \[AsyncFunctionReturnsNull\] .+"]},
+ 'Java: This method should not be called.',
+ 'patterns': [r".*: warning: \[DoNotCall\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Shift by an amount that is out of range',
- 'patterns': [r".*: warning: \[BadShiftAmount\] .+"]},
+ 'Java: Empty statement after if',
+ 'patterns': [r".*: warning: \[EmptyIf\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: The called constructor accepts a parameter with the same name and type as one of its caller\'s parameters, but its caller doesn\'t pass that parameter to it. It\'s likely that it was intended to.',
- 'patterns': [r".*: warning: \[ChainingConstructorIgnoresParameter\] .+"]},
+ 'Java: == NaN always returns false; use the isNaN methods instead',
+ 'patterns': [r".*: warning: \[EqualsNaN\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Ignored return value of method that is annotated with @CheckReturnValue',
- 'patterns': [r".*: warning: \[CheckReturnValue\] .+"]},
+ 'Java: == must be used in equals method to check equality to itself or an infinite loop will occur.',
+ 'patterns': [r".*: warning: \[EqualsReference\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: The source file name should match the name of the top-level class it contains',
- 'patterns': [r".*: warning: \[ClassName\] .+"]},
+ 'Java: Comparing different pairs of fields/getters in an equals implementation is probably a mistake.',
+ 'patterns': [r".*: warning: \[EqualsWrongThing\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
- 'patterns': [r".*: warning: \[ComparableType\] .+"]},
+ 'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
+ 'patterns': [r".*: warning: \[ForOverride\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This comparison method violates the contract',
- 'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
+ 'Java: Invalid printf-style format string',
+ 'patterns': [r".*: warning: \[FormatString\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Comparison to value that is out of range for the compared type',
- 'patterns': [r".*: warning: \[ComparisonOutOfRange\] .+"]},
+ 'Java: Invalid format string passed to formatting method.',
+ 'patterns': [r".*: warning: \[FormatStringAnnotation\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Non-compile-time constant expression passed to parameter with @CompileTimeConstant type annotation.',
- 'patterns': [r".*: warning: \[CompileTimeConstant\] .+"]},
+ 'Java: Casting a lambda to this @FunctionalInterface can cause a behavior change from casting to a functional superinterface, which is surprising to users. Prefer decorator methods to this surprising behavior.',
+ 'patterns': [r".*: warning: \[FunctionalInterfaceMethodChanged\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Non-trivial compile time constant boolean expressions shouldn\'t be used.',
- 'patterns': [r".*: warning: \[ComplexBooleanConstant\] .+"]},
+ 'Java: Futures.getChecked requires a checked exception type with a standard constructor.',
+ 'patterns': [r".*: warning: \[FuturesGetCheckedIllegalExceptionType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: A conditional expression with numeric operands of differing types will perform binary numeric promotion of the operands; when these operands are of reference types, the expression\'s result may not be of the expected type.',
- 'patterns': [r".*: warning: \[ConditionalExpressionNumericPromotion\] .+"]},
+ 'Java: DoubleMath.fuzzyEquals should never be used in an Object.equals() method',
+ 'patterns': [r".*: warning: \[FuzzyEqualsShouldNotBeUsedInEqualsMethod\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Compile-time constant expression overflows',
- 'patterns': [r".*: warning: \[ConstantOverflow\] .+"]},
+ 'Java: Calling getClass() on an annotation may return a proxy class',
+ 'patterns': [r".*: warning: \[GetClassOnAnnotation\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Exception created but not thrown',
- 'patterns': [r".*: warning: \[DeadException\] .+"]},
+ 'Java: Calling getClass() on an object of type Class returns the Class object for java.lang.Class; you probably meant to operate on the object directly',
+ 'patterns': [r".*: warning: \[GetClassOnClass\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Thread created but not started',
- 'patterns': [r".*: warning: \[DeadThread\] .+"]},
+ 'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
+ 'patterns': [r".*: warning: \[GuardedBy\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Division by integer literal zero',
- 'patterns': [r".*: warning: \[DivZero\] .+"]},
+ 'Java: Scope annotation on implementation class of AssistedInject factory is not allowed',
+ 'patterns': [r".*: warning: \[GuiceAssistedInjectScoping\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This method should not be called.',
- 'patterns': [r".*: warning: \[DoNotCall\] .+"]},
+ 'Java: A constructor cannot have two @Assisted parameters of the same type unless they are disambiguated with named @Assisted annotations.',
+ 'patterns': [r".*: warning: \[GuiceAssistedParameters\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Empty statement after if',
- 'patterns': [r".*: warning: \[EmptyIf\] .+"]},
+ 'Java: Although Guice allows injecting final fields, doing so is disallowed because the injected value may not be visible to other threads.',
+ 'patterns': [r".*: warning: \[GuiceInjectOnFinalField\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: == NaN always returns false; use the isNaN methods instead',
- 'patterns': [r".*: warning: \[EqualsNaN\] .+"]},
+ 'Java: contains() is a legacy method that is equivalent to containsValue()',
+ 'patterns': [r".*: warning: \[HashtableContains\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: == must be used in equals method to check equality to itself or an infinite loop will occur.',
- 'patterns': [r".*: warning: \[EqualsReference\] .+"]},
+ 'Java: A binary expression where both operands are the same is usually incorrect.',
+ 'patterns': [r".*: warning: \[IdentityBinaryExpression\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
- 'patterns': [r".*: warning: \[ForOverride\] .+"]},
+ 'Java: Type declaration annotated with @Immutable is not immutable',
+ 'patterns': [r".*: warning: \[Immutable\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Casting a lambda to this @FunctionalInterface can cause a behavior change from casting to a functional superinterface, which is surprising to users. Prefer decorator methods to this surprising behavior.',
- 'patterns': [r".*: warning: \[FunctionalInterfaceMethodChanged\] .+"]},
+ 'Java: Modifying an immutable collection is guaranteed to throw an exception and leave the collection unmodified',
+ 'patterns': [r".*: warning: \[ImmutableModification\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Futures.getChecked requires a checked exception type with a standard constructor.',
- 'patterns': [r".*: warning: \[FuturesGetCheckedIllegalExceptionType\] .+"]},
+ 'Java: Passing argument to a generic method with an incompatible type.',
+ 'patterns': [r".*: warning: \[IncompatibleArgumentType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: DoubleMath.fuzzyEquals should never be used in an Object.equals() method',
- 'patterns': [r".*: warning: \[FuzzyEqualsShouldNotBeUsedInEqualsMethod\] .+"]},
+ 'Java: The first argument to indexOf is a Unicode code point, and the second is the index to start the search from',
+ 'patterns': [r".*: warning: \[IndexOfChar\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Calling getClass() on an annotation may return a proxy class',
- 'patterns': [r".*: warning: \[GetClassOnAnnotation\] .+"]},
+ 'Java: Conditional expression in varargs call contains array and non-array arguments',
+ 'patterns': [r".*: warning: \[InexactVarargsConditional\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Calling getClass() on an object of type Class returns the Class object for java.lang.Class; you probably meant to operate on the object directly',
- 'patterns': [r".*: warning: \[GetClassOnClass\] .+"]},
+ 'Java: This method always recurses, and will cause a StackOverflowError',
+ 'patterns': [r".*: warning: \[InfiniteRecursion\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: contains() is a legacy method that is equivalent to containsValue()',
- 'patterns': [r".*: warning: \[HashtableContains\] .+"]},
+ 'Java: A scoping annotation\'s Target should include TYPE and METHOD.',
+ 'patterns': [r".*: warning: \[InjectInvalidTargetingOnScopingAnnotation\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: A binary expression where both operands are the same is usually incorrect.',
- 'patterns': [r".*: warning: \[IdentityBinaryExpression\] .+"]},
+ 'Java: Using more than one qualifier annotation on the same element is not allowed.',
+ 'patterns': [r".*: warning: \[InjectMoreThanOneQualifier\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Modifying an immutable collection is guaranteed to throw an exception and leave the collection unmodified',
- 'patterns': [r".*: warning: \[ImmutableModification\] .+"]},
+ 'Java: A class can be annotated with at most one scope annotation.',
+ 'patterns': [r".*: warning: \[InjectMoreThanOneScopeAnnotationOnClass\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: The first argument to indexOf is a Unicode code point, and the second is the index to start the search from',
- 'patterns': [r".*: warning: \[IndexOfChar\] .+"]},
+ 'Java: Members shouldn\'t be annotated with @Inject if constructor is already annotated @Inject',
+ 'patterns': [r".*: warning: \[InjectOnMemberAndConstructor\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Conditional expression in varargs call contains array and non-array arguments',
- 'patterns': [r".*: warning: \[InexactVarargsConditional\] .+"]},
+ 'Java: Scope annotation on an interface or abstract class is not allowed',
+ 'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This method always recurses, and will cause a StackOverflowError',
- 'patterns': [r".*: warning: \[InfiniteRecursion\] .+"]},
+ 'Java: Scoping and qualifier annotations must have runtime retention.',
+ 'patterns': [r".*: warning: \[InjectScopeOrQualifierAnnotationRetention\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Injected constructors cannot be optional nor have binding annotations',
+ 'patterns': [r".*: warning: \[InjectedConstructorAnnotations\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
@@ -1644,6 +1785,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Log tag too long, cannot exceed 23 characters.',
+ 'patterns': [r".*: warning: \[IsLoggableTagLength\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
{'category': 'java',
@@ -1674,7 +1820,7 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: This looks like a test method but is not run; please add @Test or @Ignore, or, if this is a helper method, reduce its visibility.',
+ 'Java: This looks like a test method but is not run; please add @Test and @Ignore, or, if this is a helper method, reduce its visibility.',
'patterns': [r".*: warning: \[JUnit4TestNotRun\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1684,21 +1830,61 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Use of class, field, or method that is not compatible with JDK 7',
+ 'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Abstract and default methods are not injectable with javax.inject.Inject',
+ 'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: @javax.inject.Inject cannot be put on a final field.',
+ 'patterns': [r".*: warning: \[JavaxInjectOnFinalField\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: This pattern will silently corrupt certain byte sequences from the serialized protocol message. Use ByteString or byte[] directly',
'patterns': [r".*: warning: \[LiteByteStringUtf8\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: This method does not acquire the locks specified by its @LockMethod annotation',
+ 'patterns': [r".*: warning: \[LockMethodChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Prefer \'L\' to \'l\' for the suffix to long literals',
+ 'patterns': [r".*: warning: \[LongLiteralLowerCaseSuffix\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Loop condition is never modified in loop body.',
'patterns': [r".*: warning: \[LoopConditionChecker\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Math.round(Integer) results in truncation',
+ 'patterns': [r".*: warning: \[MathRoundIntLong\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Certain resources in `android.R.string` have names that do not match their content',
+ 'patterns': [r".*: warning: \[MislabeledAndroidString\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Overriding method is missing a call to overridden super method',
'patterns': [r".*: warning: \[MissingSuperCall\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: A terminating method call is required for a test helper to have any effect.',
+ 'patterns': [r".*: warning: \[MissingTestCall\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
{'category': 'java',
@@ -1719,6 +1905,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: This class has more than one @Inject-annotated constructor. Please remove the @Inject annotation from all but one of them.',
+ 'patterns': [r".*: warning: \[MoreThanOneInjectableConstructor\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: The result of this method must be closed.',
'patterns': [r".*: warning: \[MustBeClosedChecker\] .+"]},
{'category': 'java',
@@ -1764,11 +1955,31 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Annotations cannot be both Scope annotations and Qualifier annotations: this causes confusion when trying to use them.',
+ 'patterns': [r".*: warning: \[OverlappingQualifierAndScopeAnnotation\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: This method is not annotated with @Inject, but it overrides a method that is annotated with @javax.inject.Inject. The method will not be Injected.',
+ 'patterns': [r".*: warning: \[OverridesJavaxInjectableMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Declaring types inside package-info.java files is very bad form',
'patterns': [r".*: warning: \[PackageInfo\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Method parameter has wrong package',
+ 'patterns': [r".*: warning: \[ParameterPackage\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Detects classes which implement Parcelable but don\'t have CREATOR',
+ 'patterns': [r".*: warning: \[ParcelableCreator\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Literal passed as first argument to Preconditions.checkNotNull() can never be null',
'patterns': [r".*: warning: \[PreconditionsCheckNotNull\] .+"]},
{'category': 'java',
@@ -1779,7 +1990,7 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Using ::equals as an incompatible Predicate; the predicate will always return false',
+ 'Java: Using ::equals or ::isInstance as an incompatible Predicate; the predicate will always return false',
'patterns': [r".*: warning: \[PredicateIncompatibleType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1789,7 +2000,7 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Protobuf fields cannot be null',
+ 'Java: Protobuf fields cannot be null.',
'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1804,6 +2015,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: @Provides methods need to be declared in a Module to have any effect.',
+ 'patterns': [r".*: warning: \[ProvidesMethodOutsideOfModule\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Casting a random number in the range [0.0, 1.0) to an integer or long always results in 0.',
'patterns': [r".*: warning: \[RandomCast\] .+"]},
{'category': 'java',
@@ -1814,6 +2030,16 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Return value of android.graphics.Rect.intersect() must be checked',
+ 'patterns': [r".*: warning: \[RectIntersectReturnValueIgnored\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
+ 'Java: Use of method or class annotated with @RestrictTo',
+ 'patterns': [r".*: warning: \[RestrictTo\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Check for non-whitelisted callers to RestrictedApiChecker.',
'patterns': [r".*: warning: \[RestrictedApiChecker\] .+"]},
{'category': 'java',
@@ -1849,6 +2075,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Static and default interface methods are not natively supported on older Android devices. ',
+ 'patterns': [r".*: warning: \[StaticOrDefaultInterfaceMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Calling toString on a Stream does not provide useful information',
'patterns': [r".*: warning: \[StreamToString\] .+"]},
{'category': 'java',
@@ -1859,6 +2090,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: String.substring(0) returns the original String',
+ 'patterns': [r".*: warning: \[SubstringOfZero\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Suppressing "deprecated" is probably a typo for "deprecation"',
'patterns': [r".*: warning: \[SuppressWarningsDeprecated\] .+"]},
{'category': 'java',
@@ -1889,6 +2125,11 @@ warn_patterns = [
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: This method does not acquire the locks specified by its @UnlockMethod annotation',
+ 'patterns': [r".*: warning: \[UnlockMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Non-generic methods should not be invoked with type arguments',
'patterns': [r".*: warning: \[UnnecessaryTypeArgument\] .+"]},
{'category': 'java',
@@ -1906,191 +2147,6 @@ warn_patterns = [
'description':
'Java: `var` should not be used as a type name.',
'patterns': [r".*: warning: \[VarTypeName\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Method parameter has wrong package',
- 'patterns': [r".*: warning: \[ParameterPackage\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Type declaration annotated with @ThreadSafe is not thread safe',
- 'patterns': [r".*: warning: \[ThreadSafe\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of class, field, or method that is not compatible with legacy Android devices',
- 'patterns': [r".*: warning: \[AndroidApiChecker\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Invalid use of Flogger format string',
- 'patterns': [r".*: warning: \[AndroidFloggerFormatString\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use TunnelException.getCauseAs(Class) instead of casting the result of TunnelException.getCause().',
- 'patterns': [r".*: warning: \[DoNotCastTunnelExceptionCause\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Identifies undesirable mocks.',
- 'patterns': [r".*: warning: \[DoNotMock_ForJavaBuilder\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Duration Flag should NOT have units in the variable name or the @FlagSpec\'s name or altName field.',
- 'patterns': [r".*: warning: \[DurationFlagWithUnits\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Duration.get() only works with SECONDS or NANOS.',
- 'patterns': [r".*: warning: \[DurationGetTemporalUnit\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Invalid printf-style format string',
- 'patterns': [r".*: warning: \[FloggerFormatString\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Test class may not be run because it is missing a @RunWith annotation',
- 'patterns': [r".*: warning: \[JUnit4RunWithMissing\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of class, field, or method that is not compatible with JDK 7',
- 'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of java.time.Duration.withNanos(int) is not allowed.',
- 'patterns': [r".*: warning: \[JavaDurationWithNanos\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of java.time.Duration.withSeconds(long) is not allowed.',
- 'patterns': [r".*: warning: \[JavaDurationWithSeconds\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: java.time APIs that silently use the default system time-zone are not allowed.',
- 'patterns': [r".*: warning: \[JavaTimeDefaultTimeZone\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of new Duration(long) is not allowed. Please use Duration.millis(long) instead.',
- 'patterns': [r".*: warning: \[JodaDurationConstructor\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of duration.withMillis(long) is not allowed. Please use Duration.millis(long) instead.',
- 'patterns': [r".*: warning: \[JodaDurationWithMillis\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of instant.withMillis(long) is not allowed. Please use new Instant(long) instead.',
- 'patterns': [r".*: warning: \[JodaInstantWithMillis\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of JodaTime\'s type.plus(long) or type.minus(long) is not allowed (where \u003ctype> = {Duration,Instant,DateTime,DateMidnight}). Please use type.plus(Duration.millis(long)) or type.minus(Duration.millis(long)) instead.',
- 'patterns': [r".*: warning: \[JodaPlusMinusLong\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Changing JodaTime\'s current time is not allowed in non-testonly code.',
- 'patterns': [r".*: warning: \[JodaSetCurrentMillis\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of Joda-Time\'s DateTime.toDateTime(), Duration.toDuration(), Instant.toInstant(), Interval.toInterval(), and Period.toPeriod() are not allowed.',
- 'patterns': [r".*: warning: \[JodaToSelf\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Use of JodaTime\'s type.withDurationAdded(long, int) (where \u003ctype> = {Duration,Instant,DateTime}). Please use type.withDurationAdded(Duration.millis(long), int) instead.',
- 'patterns': [r".*: warning: \[JodaWithDurationAddedLong\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: LanguageCode comparison using reference equality instead of value equality',
- 'patterns': [r".*: warning: \[LanguageCodeEquality\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: The zero argument toString is not part of the Localizable interface and likely is just the java Object toString. You probably want to call toString(Locale).',
- 'patterns': [r".*: warning: \[LocalizableWrongToString\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Period.get() only works with YEARS, MONTHS, or DAYS.',
- 'patterns': [r".*: warning: \[PeriodGetTemporalUnit\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Return value of methods returning Promise must be checked. Ignoring returned Promises suppresses exceptions thrown from the code that completes the Promises.',
- 'patterns': [r".*: warning: \[PromiseReturnValueIgnored\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: When returning a Promise, use thenChain() instead of then()',
- 'patterns': [r".*: warning: \[PromiseThenReturningPromise\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Streams.iterating() is unsafe for use except in the header of a for-each loop; please see its Javadoc for details.',
- 'patterns': [r".*: warning: \[StreamsIteratingNotInLoop\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: TemporalAccessor.get() only works for certain values of ChronoField.',
- 'patterns': [r".*: warning: \[TemporalAccessorGetChronoField\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Try-with-resources is not supported in this code, use try/finally instead',
- 'patterns': [r".*: warning: \[TryWithResources\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Adds checkOrThrow calls where needed',
- 'patterns': [r".*: warning: \[AddCheckOrThrow\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Equality on Nano protos (== or .equals) might not be the same in Lite',
- 'patterns': [r".*: warning: \[ForbidNanoEquality\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Submessages of a proto cannot be mutated',
- 'patterns': [r".*: warning: \[ForbidSubmessageMutation\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Repeated fields on proto messages cannot be directly referenced',
- 'patterns': [r".*: warning: \[NanoUnsafeRepeatedFieldUsage\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Requires that non-@enum int assignments to @enum ints is wrapped in a checkOrThrow',
- 'patterns': [r".*: warning: \[RequireCheckOrThrow\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Assignments into repeated field elements must be sequential',
- 'patterns': [r".*: warning: \[RequireSequentialRepeatedFields\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Future.get in Google Now Producers code',
- 'patterns': [r".*: warning: \[FutureGetInNowProducers\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: @SimpleEnum applied to non-enum type',
- 'patterns': [r".*: warning: \[SimpleEnumUsage\] .+"]},
# End warnings generated by Error Prone
@@ -2472,6 +2528,9 @@ warn_patterns = [
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wunnamed-type-template-args',
'description': 'Unnamed template type argument',
'patterns': [r".*: warning: template argument.+Wunnamed-type-template-args"]},
+ {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wimplicit-fallthrough',
+ 'description': 'Unannotated fall-through between switch labels',
+ 'patterns': [r".*: warning: unannotated fall-through between switch labels.+Wimplicit-fallthrough"]},
{'category': 'C/C++', 'severity': Severity.HARMLESS,
'description': 'Discarded qualifier from pointer target type',
@@ -2513,6 +2572,29 @@ warn_patterns = [
# warnings from clang-tidy
group_tidy_warn_pattern('android'),
+ simple_tidy_warn_pattern('bugprone-argument-comment'),
+ simple_tidy_warn_pattern('bugprone-copy-constructor-init'),
+ simple_tidy_warn_pattern('bugprone-fold-init-type'),
+ simple_tidy_warn_pattern('bugprone-forward-declaration-namespace'),
+ simple_tidy_warn_pattern('bugprone-forwarding-reference-overload'),
+ simple_tidy_warn_pattern('bugprone-inaccurate-erase'),
+ simple_tidy_warn_pattern('bugprone-incorrect-roundings'),
+ simple_tidy_warn_pattern('bugprone-integer-division'),
+ simple_tidy_warn_pattern('bugprone-lambda-function-name'),
+ simple_tidy_warn_pattern('bugprone-macro-parentheses'),
+ simple_tidy_warn_pattern('bugprone-misplaced-widening-cast'),
+ simple_tidy_warn_pattern('bugprone-move-forwarding-reference'),
+ simple_tidy_warn_pattern('bugprone-sizeof-expression'),
+ simple_tidy_warn_pattern('bugprone-string-constructor'),
+ simple_tidy_warn_pattern('bugprone-string-integer-assignment'),
+ simple_tidy_warn_pattern('bugprone-suspicious-enum-usage'),
+ simple_tidy_warn_pattern('bugprone-suspicious-missing-comma'),
+ simple_tidy_warn_pattern('bugprone-suspicious-string-compare'),
+ simple_tidy_warn_pattern('bugprone-suspicious-semicolon'),
+ simple_tidy_warn_pattern('bugprone-undefined-memory-manipulation'),
+ simple_tidy_warn_pattern('bugprone-unused-raii'),
+ simple_tidy_warn_pattern('bugprone-use-after-move'),
+ group_tidy_warn_pattern('bugprone'),
group_tidy_warn_pattern('cert'),
group_tidy_warn_pattern('clang-diagnostic'),
group_tidy_warn_pattern('cppcoreguidelines'),
@@ -2635,7 +2717,6 @@ project_list = [
simple_project_pattern('frameworks/av/cmds'),
simple_project_pattern('frameworks/av/drm'),
simple_project_pattern('frameworks/av/include'),
- simple_project_pattern('frameworks/av/media/common_time'),
simple_project_pattern('frameworks/av/media/img_utils'),
simple_project_pattern('frameworks/av/media/libcpustats'),
simple_project_pattern('frameworks/av/media/libeffects'),
@@ -2731,7 +2812,6 @@ project_list = [
simple_project_pattern('system/extras/iotop'),
simple_project_pattern('system/extras/libfec'),
simple_project_pattern('system/extras/memory_replay'),
- simple_project_pattern('system/extras/micro_bench'),
simple_project_pattern('system/extras/mmap-perf'),
simple_project_pattern('system/extras/multinetwork'),
simple_project_pattern('system/extras/perfprofd'),
@@ -3006,6 +3086,7 @@ def find_project_index(line):
def classify_one_warning(line, results):
+ """Classify one warning line."""
for i in range(len(warn_patterns)):
w = warn_patterns[i]
for cpat in w['compiled_patterns']:
@@ -3331,6 +3412,14 @@ def emit_const_string_array(name, array):
print '];'
+# Emit a JavaScript const string array for HTML.
+def emit_const_html_string_array(name, array):
+ print 'const ' + name + ' = ['
+ for s in array:
+ print '"' + cgi.escape(strip_escape_string(s)) + '",'
+ print '];'
+
+
# Emit a JavaScript const object array.
def emit_const_object_array(name, array):
print 'const ' + name + ' = ['
@@ -3349,11 +3438,11 @@ def emit_js_data():
emit_const_string_array('ProjectNames', project_names)
emit_const_int_array('WarnPatternsSeverity',
[w['severity'] for w in warn_patterns])
- emit_const_string_array('WarnPatternsDescription',
- [w['description'] for w in warn_patterns])
- emit_const_string_array('WarnPatternsOption',
- [w['option'] for w in warn_patterns])
- emit_const_string_array('WarningMessages', warning_messages)
+ emit_const_html_string_array('WarnPatternsDescription',
+ [w['description'] for w in warn_patterns])
+ emit_const_html_string_array('WarnPatternsOption',
+ [w['option'] for w in warn_patterns])
+ emit_const_html_string_array('WarningMessages', warning_messages)
emit_const_object_array('Warnings', warning_records)
draw_table_javascript = """
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index d56ac291ac..eea1749970 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -102,7 +102,7 @@ static int copyAndAlign(ZipFile* pZin, ZipFile* pZout, int alignment, bool zopfl
* file position in the new file will be equal to the file
* position in the original.
*/
- long newOffset = pEntry->getFileOffset() + bias;
+ off_t newOffset = pEntry->getFileOffset() + bias;
padding = (alignTo - (newOffset % alignTo)) % alignTo;
//printf("--- %s: orig at %ld(+%d) len=%ld, adding pad=%d\n",
@@ -111,7 +111,7 @@ static int copyAndAlign(ZipFile* pZin, ZipFile* pZout, int alignment, bool zopfl
status = pZout->add(pZin, pEntry, padding, &pNewEntry);
}
- if (status != NO_ERROR)
+ if (status != OK)
return 1;
bias += padding;
//printf(" added '%s' at %ld (pad=%d)\n",
@@ -146,13 +146,13 @@ static int process(const char* inFileName, const char* outFileName,
return 1;
}
- if (zin.open(inFileName, ZipFile::kOpenReadOnly) != NO_ERROR) {
+ if (zin.open(inFileName, ZipFile::kOpenReadOnly) != OK) {
fprintf(stderr, "Unable to open '%s' as zip archive\n", inFileName);
return 1;
}
if (zout.open(outFileName,
ZipFile::kOpenReadWrite|ZipFile::kOpenCreate|ZipFile::kOpenTruncate)
- != NO_ERROR)
+ != OK)
{
fprintf(stderr, "Unable to open '%s' as zip archive\n", outFileName);
return 1;
@@ -178,7 +178,7 @@ static int verify(const char* fileName, int alignment, bool verbose,
if (verbose)
printf("Verifying alignment of %s (%d)...\n", fileName, alignment);
- if (zipFile.open(fileName, ZipFile::kOpenReadOnly) != NO_ERROR) {
+ if (zipFile.open(fileName, ZipFile::kOpenReadOnly) != OK) {
fprintf(stderr, "Unable to open '%s' for verification\n", fileName);
return 1;
}
@@ -190,23 +190,23 @@ static int verify(const char* fileName, int alignment, bool verbose,
pEntry = zipFile.getEntryByIndex(i);
if (pEntry->isCompressed()) {
if (verbose) {
- printf("%8ld %s (OK - compressed)\n",
- (long) pEntry->getFileOffset(), pEntry->getFileName());
+ printf("%8jd %s (OK - compressed)\n",
+ (intmax_t) pEntry->getFileOffset(), pEntry->getFileName());
}
} else {
- long offset = pEntry->getFileOffset();
+ off_t offset = pEntry->getFileOffset();
const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
if ((offset % alignTo) != 0) {
if (verbose) {
- printf("%8ld %s (BAD - %ld)\n",
- (long) offset, pEntry->getFileName(),
- offset % alignTo);
+ printf("%8jd %s (BAD - %jd)\n",
+ (intmax_t) offset, pEntry->getFileName(),
+ (intmax_t) (offset % alignTo));
}
foundBad = true;
} else {
if (verbose) {
- printf("%8ld %s (OK)\n",
- (long) offset, pEntry->getFileName());
+ printf("%8jd %s (OK)\n",
+ (intmax_t) offset, pEntry->getFileName());
}
}
}
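The ZipAlign.cpp hunks above move entry offsets from long to off_t and print them with %jd after a cast to intmax_t, since off_t has no printf conversion of its own and can be wider than long. A minimal sketch of that pattern, with a name and values that are illustrative rather than taken from the zipalign sources:

#include <cstdint>     // intmax_t
#include <cstdio>
#include <sys/types.h> // off_t

// Print an off_t portably: cast to intmax_t, the widest signed integer
// type, and use the matching %jd conversion.
static void printOffset(const char* name, off_t offset) {
    printf("%8jd %s\n", (intmax_t) offset, name);
}

int main() {
    printOffset("classes.dex", (off_t) 4096);  // illustrative entry and offset
    return 0;
}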
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index 63d75d1057..810d74a622 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -41,14 +41,14 @@ using namespace android;
status_t ZipEntry::initFromCDE(FILE* fp)
{
status_t result;
- long posn;
+ long posn; // NOLINT(google-runtime-int), for ftell/fseek
bool hasDD;
//ALOGV("initFromCDE ---\n");
/* read the CDE */
result = mCDE.read(fp);
- if (result != NO_ERROR) {
+ if (result != OK) {
ALOGD("mCDE.read failed\n");
return result;
}
@@ -64,7 +64,7 @@ status_t ZipEntry::initFromCDE(FILE* fp)
}
result = mLFH.read(fp);
- if (result != NO_ERROR) {
+ if (result != OK) {
ALOGD("mLFH.read failed\n");
return result;
}
@@ -103,7 +103,7 @@ status_t ZipEntry::initFromCDE(FILE* fp)
* can defer worrying about that to when we're extracting data.
*/
- return NO_ERROR;
+ return OK;
}
/*
@@ -189,7 +189,7 @@ status_t ZipEntry::initFromExternal(const ZipEntry* pEntry)
mLFH.mExtraFieldLength+1);
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -225,7 +225,7 @@ status_t ZipEntry::addPadding(int padding)
mLFH.mExtraFieldLength = padding;
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -258,8 +258,8 @@ void ZipEntry::copyCDEtoLFH(void)
/*
* Set some information about a file after we add it.
*/
-void ZipEntry::setDataInfo(long uncompLen, long compLen, uint32_t crc32,
- int compressionMethod)
+void ZipEntry::setDataInfo(uint32_t uncompLen, uint32_t compLen, uint32_t crc32,
+ uint32_t compressionMethod)
{
mCDE.mCompressionMethod = compressionMethod;
mCDE.mCRC32 = crc32;
@@ -367,7 +367,7 @@ void ZipEntry::setModWhen(time_t when)
struct tm* ptm;
/* round up to an even number of seconds */
- even = (time_t)(((unsigned long)(when) + 1) & (~1));
+ even = (when & 1) ? (when + 1) : when;
/* expand */
#if !defined(_WIN32)
@@ -403,7 +403,7 @@ void ZipEntry::setModWhen(time_t when)
*/
status_t ZipEntry::LocalFileHeader::read(FILE* fp)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
uint8_t buf[kLFHLen];
assert(mFileName == NULL);
@@ -499,7 +499,7 @@ status_t ZipEntry::LocalFileHeader::write(FILE* fp)
return UNKNOWN_ERROR;
}
- return NO_ERROR;
+ return OK;
}
@@ -537,7 +537,7 @@ void ZipEntry::LocalFileHeader::dump(void) const
*/
status_t ZipEntry::CentralDirEntry::read(FILE* fp)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
uint8_t buf[kCDELen];
/* no re-use */
@@ -669,7 +669,7 @@ status_t ZipEntry::CentralDirEntry::write(FILE* fp)
return UNKNOWN_ERROR;
}
- return NO_ERROR;
+ return OK;
}
/*
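The setModWhen() hunk replaces the unsigned-long cast with a plain conditional; the rounding itself is unchanged, because zip headers carry modification times in MS-DOS format, which only has 2-second resolution. A small sketch of the same rounding, with a helper name that is illustrative rather than part of ZipEntry:

#include <cstdio>
#include <ctime>

// Round a timestamp up to an even number of seconds, matching the
// 2-second granularity of the MS-DOS time field in zip headers. For
// non-negative times this yields the same value as ((when + 1) & ~1).
static time_t roundUpToEvenSecond(time_t when) {
    return (when & 1) ? (when + 1) : when;
}

int main() {
    printf("%ld\n", (long) roundUpToEvenSecond((time_t) 1234567891));  // prints 1234567892
    return 0;
}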
diff --git a/tools/zipalign/ZipEntry.h b/tools/zipalign/ZipEntry.h
index 247cf69f29..4c7e18c2c7 100644
--- a/tools/zipalign/ZipEntry.h
+++ b/tools/zipalign/ZipEntry.h
@@ -179,8 +179,8 @@ protected:
/*
* Set information about the data for this entry.
*/
- void setDataInfo(long uncompLen, long compLen, uint32_t crc32,
- int compressionMethod);
+ void setDataInfo(uint32_t uncompLen, uint32_t compLen, uint32_t crc32,
+ uint32_t compressionMethod);
/*
* Set the modification date.
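The setDataInfo() declaration changes here and in ZipEntry.cpp swap long for fixed-width unsigned parameters. A hedged sketch of the widths these correspond to in a (non-zip64) central directory record; the field names below are illustrative, not the ZipEntry members:

#include <cstdint>

// CRC-32 and both lengths are unsigned 32-bit fields on disk, and the
// compression method is an unsigned 16-bit field, so fixed-width types
// describe the record exactly where a signed long may be 64 bits wide.
struct EntryDataInfo {
    uint32_t crc32;
    uint32_t compressedLen;
    uint32_t uncompressedLen;
    uint16_t compressionMethod;  // passed as uint32_t in the new signature
};

int main() {
    EntryDataInfo info{};  // zero-initialized placeholder, illustration only
    (void) info;
    return 0;
}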
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 43bc9bfb59..63fb962767 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -120,7 +120,7 @@ status_t ZipFile::open(const char* zipFileName, int flags)
* have a need for empty zip files.)
*/
mNeedCDRewrite = true;
- result = NO_ERROR;
+ result = OK;
}
if (flags & kOpenReadOnly)
@@ -205,7 +205,7 @@ void ZipFile::discardEntries(void)
*/
status_t ZipFile::readCentralDir(void)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
uint8_t* buf = NULL;
off_t fileLength, seekStart;
long readAmount;
@@ -267,7 +267,7 @@ status_t ZipFile::readCentralDir(void)
/* extract eocd values */
result = mEOCD.readBuf(buf + i, readAmount - i);
- if (result != NO_ERROR) {
+ if (result != OK) {
ALOGD("Failure reading %ld bytes of EOCD values", readAmount - i);
goto bail;
}
@@ -311,7 +311,7 @@ status_t ZipFile::readCentralDir(void)
ZipEntry* pEntry = new ZipEntry;
result = pEntry->initFromCDE(mZipFp);
- if (result != NO_ERROR) {
+ if (result != OK) {
ALOGD("initFromCDE failed\n");
delete pEntry;
goto bail;
@@ -361,7 +361,7 @@ status_t ZipFile::addCommon(const char* fileName, const void* data, size_t size,
const char* storageName, int compressionMethod, ZipEntry** ppEntry)
{
ZipEntry* pEntry = NULL;
- status_t result = NO_ERROR;
+ status_t result = OK;
long lfhPosn, startPosn, endPosn, uncompressedLen;
FILE* inputFp = NULL;
uint32_t crc;
@@ -415,7 +415,7 @@ status_t ZipFile::addCommon(const char* fileName, const void* data, size_t size,
if (compressionMethod == ZipEntry::kCompressDeflated) {
bool failed = false;
result = compressFpToFp(mZipFp, inputFp, data, size, &crc);
- if (result != NO_ERROR) {
+ if (result != OK) {
ALOGD("compression failed, storing\n");
failed = true;
} else {
@@ -447,7 +447,7 @@ status_t ZipFile::addCommon(const char* fileName, const void* data, size_t size,
} else {
result = copyDataToFp(mZipFp, data, size, &crc);
}
- if (result != NO_ERROR) {
+ if (result != OK) {
// don't need to truncate; happens in CDE rewrite
ALOGD("failed copying data in\n");
goto bail;
@@ -535,11 +535,11 @@ status_t ZipFile::add(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
}
result = pEntry->initFromExternal(pSourceEntry);
- if (result != NO_ERROR)
+ if (result != OK)
goto bail;
if (padding != 0) {
result = pEntry->addPadding(padding);
- if (result != NO_ERROR)
+ if (result != OK)
goto bail;
}
@@ -574,7 +574,7 @@ status_t ZipFile::add(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
copyLen += ZipEntry::kDataDescriptorLen;
if (copyPartialFpToFp(mZipFp, pSourceZip->mZipFp, copyLen, NULL)
- != NO_ERROR)
+ != OK)
{
ALOGW("copy of '%s' failed\n", pEntry->mCDE.mFileName);
result = UNKNOWN_ERROR;
@@ -603,7 +603,7 @@ status_t ZipFile::add(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
*ppEntry = pEntry;
pEntry = NULL;
- result = NO_ERROR;
+ result = OK;
bail:
delete pEntry;
@@ -642,7 +642,7 @@ status_t ZipFile::addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSour
}
result = pEntry->initFromExternal(pSourceEntry);
- if (result != NO_ERROR)
+ if (result != OK)
goto bail;
/*
@@ -682,7 +682,7 @@ status_t ZipFile::addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSour
}
long startPosn = ftell(mZipFp);
uint32_t crc;
- if (compressFpToFp(mZipFp, NULL, buf, uncompressedLen, &crc) != NO_ERROR) {
+ if (compressFpToFp(mZipFp, NULL, buf, uncompressedLen, &crc) != OK) {
ALOGW("recompress of '%s' failed\n", pEntry->mCDE.mFileName);
result = UNKNOWN_ERROR;
free(buf);
@@ -699,7 +699,7 @@ status_t ZipFile::addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSour
copyLen += ZipEntry::kDataDescriptorLen;
if (copyPartialFpToFp(mZipFp, pSourceZip->mZipFp, copyLen, NULL)
- != NO_ERROR)
+ != OK)
{
ALOGW("copy of '%s' failed\n", pEntry->mCDE.mFileName);
result = UNKNOWN_ERROR;
@@ -738,7 +738,7 @@ status_t ZipFile::addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSour
*ppEntry = pEntry;
pEntry = NULL;
- result = NO_ERROR;
+ result = OK;
bail:
delete pEntry;
@@ -773,7 +773,7 @@ status_t ZipFile::copyFpToFp(FILE* dstFp, FILE* srcFp, uint32_t* pCRC32)
}
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -793,7 +793,7 @@ status_t ZipFile::copyDataToFp(FILE* dstFp,
}
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -804,7 +804,7 @@ status_t ZipFile::copyDataToFp(FILE* dstFp,
* On exit, "srcFp" will be seeked to the end of the file, and "dstFp"
* will be seeked immediately past the data just written.
*/
-status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
+status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, size_t length,
uint32_t* pCRC32)
{
uint8_t tmpBuf[32768];
@@ -814,14 +814,14 @@ status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
*pCRC32 = crc32(0L, Z_NULL, 0);
while (length) {
- long readSize;
+ size_t readSize;
readSize = sizeof(tmpBuf);
if (readSize > length)
readSize = length;
count = fread(tmpBuf, 1, readSize, srcFp);
- if ((long) count != readSize) { // error or unexpected EOF
+ if (count != readSize) { // error or unexpected EOF
ALOGD("fread %d bytes failed\n", (int) readSize);
return UNKNOWN_ERROR;
}
@@ -837,7 +837,7 @@ status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
length -= readSize;
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -849,7 +849,7 @@ status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
status_t ZipFile::compressFpToFp(FILE* dstFp, FILE* srcFp,
const void* data, size_t size, uint32_t* pCRC32)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
const size_t kBufSize = 1024 * 1024;
uint8_t* inBuf = NULL;
uint8_t* outBuf = NULL;
@@ -933,7 +933,7 @@ status_t ZipFile::remove(ZipEntry* pEntry)
/* mark entry as deleted, and mark archive as dirty */
pEntry->setDeleted();
mNeedCDRewrite = true;
- return NO_ERROR;
+ return OK;
}
/*
@@ -944,19 +944,19 @@ status_t ZipFile::remove(ZipEntry* pEntry)
*/
status_t ZipFile::flush(void)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
long eocdPosn;
int i, count;
if (mReadOnly)
return INVALID_OPERATION;
if (!mNeedCDRewrite)
- return NO_ERROR;
+ return OK;
assert(mZipFp != NULL);
result = crunchArchive();
- if (result != NO_ERROR)
+ if (result != OK)
return result;
if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0)
@@ -986,7 +986,7 @@ status_t ZipFile::flush(void)
/* should we clear the "newly added" flag in all entries now? */
mNeedCDRewrite = false;
- return NO_ERROR;
+ return OK;
}
/*
@@ -997,7 +997,7 @@ status_t ZipFile::flush(void)
*/
status_t ZipFile::crunchArchive(void)
{
- status_t result = NO_ERROR;
+ status_t result = OK;
int i, count;
long delCount, adjust;
@@ -1065,7 +1065,7 @@ status_t ZipFile::crunchArchive(void)
// pEntry->getFileName(), adjust);
result = filemove(mZipFp, pEntry->getLFHOffset() - adjust,
pEntry->getLFHOffset(), span);
- if (result != NO_ERROR) {
+ if (result != OK) {
/* this is why you use a temp file */
ALOGE("error during crunch - archive is toast\n");
return result;
@@ -1097,7 +1097,7 @@ status_t ZipFile::crunchArchive(void)
status_t ZipFile::filemove(FILE* fp, off_t dst, off_t src, size_t n)
{
if (dst == src || n <= 0)
- return NO_ERROR;
+ return OK;
uint8_t readBuf[32768];
@@ -1140,7 +1140,7 @@ status_t ZipFile::filemove(FILE* fp, off_t dst, off_t src, size_t n)
return UNKNOWN_ERROR;
}
- return NO_ERROR;
+ return OK;
}
@@ -1355,7 +1355,7 @@ status_t ZipFile::EndOfCentralDir::readBuf(const uint8_t* buf, int len)
memcpy(mComment, buf + kEOCDLen, mCommentLen);
}
- return NO_ERROR;
+ return OK;
}
/*
@@ -1382,7 +1382,7 @@ status_t ZipFile::EndOfCentralDir::write(FILE* fp)
return UNKNOWN_ERROR;
}
- return NO_ERROR;
+ return OK;
}
/*
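copyPartialFpToFp() now takes its length as size_t, the type fread() and fwrite() already use, so the short-read check compares counts without casts. A minimal sketch of the same loop shape, using a copyBytes() helper name of my own; it is not the zipalign implementation (the CRC-32 update and logging are omitted):

#include <cstdio>

// Copy "length" bytes from src to dst through a fixed-size buffer.
// Keeping every byte count as size_t matches fread/fwrite, so the
// short-read and short-write checks need no casts.
static bool copyBytes(FILE* dst, FILE* src, size_t length) {
    unsigned char buf[32768];
    while (length > 0) {
        size_t chunk = sizeof(buf);
        if (chunk > length) chunk = length;
        if (fread(buf, 1, chunk, src) != chunk) return false;   // error or EOF
        if (fwrite(buf, 1, chunk, dst) != chunk) return false;  // short write
        length -= chunk;
    }
    return true;
}

int main() { return 0; }  // helper shown for illustration; no file I/O run here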
diff --git a/tools/zipalign/ZipFile.h b/tools/zipalign/ZipFile.h
index d5ace7c492..11d20c5a2a 100644
--- a/tools/zipalign/ZipFile.h
+++ b/tools/zipalign/ZipFile.h
@@ -224,7 +224,7 @@ private:
status_t copyDataToFp(FILE* dstFp,
const void* data, size_t size, uint32_t* pCRC32);
/* copy some of "srcFp" into "dstFp" */
- status_t copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
+ status_t copyPartialFpToFp(FILE* dstFp, FILE* srcFp, size_t length,
uint32_t* pCRC32);
/* like memmove(), but on parts of a single file */
status_t filemove(FILE* fp, off_t dest, off_t src, size_t n);
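Throughout these files the success constant NO_ERROR becomes OK. Both name the zero success value of status_t in libutils' Errors.h, so the substitution is mechanical; a simplified stand-in (not the real header) showing the call-site pattern:

// Simplified stand-in for the libutils status codes, not the real
// Errors.h: OK and NO_ERROR both denote success (zero), which is why
// every "!= NO_ERROR" check can become "!= OK" with no behavior change.
typedef int status_t;
enum { OK = 0, NO_ERROR = OK };

static status_t doWork() { return OK; }  // hypothetical helper

int main() {
    return (doWork() != OK) ? 1 : 0;  // the check pattern used in zipalign
}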