author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2023-07-07 04:41:17 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2023-07-07 04:41:17 +0000
commit     1613022e1a36fc78f8f714d997d48839bba78bc0 (patch)
tree       b29c0cdf22352472acebd6223deb592ac4f2e7ec
parent     9850fee71151e8e244e202597d6e1920ee3a71c3 (diff)
parent     fc5f0418611c0dfb4950f7f7200556f62f49be32 (diff)
download   bazel-1613022e1a36fc78f8f714d997d48839bba78bc0.tar.gz
Snap for 10453563 from fc5f0418611c0dfb4950f7f7200556f62f49be32 to mainline-conscrypt-release
Change-Id: I6afd8b9e677ad645e0d2c302dff33d1dc2e35721
-rw-r--r--.gitignore4
-rw-r--r--PREUPLOAD.cfg5
-rw-r--r--api_surfaces/BUILD0
-rw-r--r--api_surfaces/README.md10
-rw-r--r--api_surfaces/WORKSPACE0
-rw-r--r--bazel.BUILD5
-rw-r--r--bazel.WORKSPACE169
-rwxr-xr-xbazel.sh198
-rwxr-xr-xbin/b80
-rwxr-xr-xbin/bazel238
-rwxr-xr-xbuildifier_hook.sh2
-rwxr-xr-xci/bp2build.sh164
-rw-r--r--ci/build_with_bazel.sh95
-rwxr-xr-xci/determinism_test.sh106
-rwxr-xr-xci/diffs.sh88
-rw-r--r--ci/dist/BUILD6
-rwxr-xr-xci/mixed_droid.sh25
-rwxr-xr-xci/mixed_e2e.sh25
-rwxr-xr-xci/mixed_libc.sh46
-rwxr-xr-xci/mixed_mode_toggle.sh124
-rwxr-xr-xci/multiproduct_analysis.sh95
-rwxr-xr-xci/rbc_dashboard.py109
-rwxr-xr-xci/rbc_regression_test.sh12
-rw-r--r--ci/target_lists.sh91
-rw-r--r--common.bazelrc84
-rw-r--r--compliance/Android.bp24
-rw-r--r--compliance/cmd/BUILD.bazel22
-rw-r--r--compliance/cmd/bazel_notice_gen/bazel_notice_gen.go174
-rw-r--r--compliance/cmd/bazel_notice_gen/bazel_notice_gen_test.go128
-rw-r--r--compliance/go.mod5
-rw-r--r--compliance/testdata/NOTICE_LICENSE2
-rw-r--r--darwin.bazelrc2
-rw-r--r--examples/android_app/java/com/app/BUILD42
-rw-r--r--examples/android_app/java/com/app/example_lib.aarbin0 -> 4228 bytes
-rw-r--r--examples/android_app/java/com/app/some_kotlin.kt3
-rw-r--r--examples/apex/certificate_name/Android.bp11
-rw-r--r--examples/apex/certificate_name/file_contexts0
-rw-r--r--examples/apex/certificate_name/manifest.json4
-rw-r--r--examples/apex/minimal/Android.bp70
-rw-r--r--examples/apex/minimal/README.md41
-rw-r--r--examples/apex/minimal/dummy_cc_lib.cc23
-rw-r--r--examples/apex/minimal/dummy_cc_lib_2.cc18
-rw-r--r--examples/apex/minimal/dummy_cc_lib_3.cc18
-rw-r--r--examples/apex/minimal/main.cc12
-rw-r--r--examples/cc/BUILD22
-rw-r--r--examples/cc/aidl/BUILD27
-rw-r--r--examples/cc/aidl/a/b/c/d/ABCD.aidl3
-rw-r--r--examples/cc/aidl/foo/IFoo.aidl5
-rw-r--r--examples/cc/aidl_library/BUILD51
-rw-r--r--examples/cc/aidl_library/foo.cpp9
-rw-r--r--examples/cc/aidl_library/program.cpp8
-rw-r--r--examples/cc/foo.cpp0
-rw-r--r--examples/cc/proto/deps/BUILD6
-rw-r--r--examples/cc/proto/deps/src/enums/proto_deps.proto0
-rw-r--r--examples/cc/proto/external/BUILD6
-rw-r--r--examples/cc/proto/external/src/enums/proto_external.proto0
-rw-r--r--examples/gensrcs/Android.bp28
-rw-r--r--examples/gensrcs/data/data2.txt1
-rw-r--r--examples/gensrcs/data1.txt1
-rw-r--r--examples/gensrcs/generated_headers/bar/Android.bp46
-rw-r--r--examples/gensrcs/generated_headers/bar/bar.proto2
-rw-r--r--examples/gensrcs/generated_headers/foo/Android.bp57
-rw-r--r--examples/gensrcs/generated_headers/foo/foo.cpp2
-rw-r--r--examples/gensrcs/generated_headers/foo/foo.proto2
-rw-r--r--examples/java/com/bazel/BUILD.bazel50
-rw-r--r--examples/java/com/bazel/example_lib/SomeKotlin.kt18
-rwxr-xr-xexamples/java/com/bazel/hello_java_import.jarbin0 -> 1094 bytes
-rw-r--r--examples/partitions/BUILD12
-rw-r--r--examples/python/library/Android.bp34
-rw-r--r--examples/python/library/lib.py16
-rw-r--r--examples/python/library/main.py19
-rw-r--r--examples/python/library/main2.py16
-rw-r--r--examples/python/protobuf/Android.bp33
-rw-r--r--examples/python/protobuf/main.py22
-rw-r--r--examples/python/protobuf/test.proto26
-rw-r--r--examples/soong_config_variables/Android.bp8
-rw-r--r--flags/BUILD.bazel0
-rw-r--r--flags/cc/abi/BUILD.bazel27
-rw-r--r--flags/cc/tidy/BUILD.bazel54
-rw-r--r--flags/common.bzl23
-rw-r--r--json_module_graph/README.md37
-rw-r--r--json_module_graph/distanceFromLeaves.jq4
-rw-r--r--json_module_graph/findModulesWithTestSuitesValue.jq11
-rw-r--r--json_module_graph/fullTransitiveDeps.jq4
-rw-r--r--json_module_graph/fullTransitiveDepsProperties.jq4
-rw-r--r--json_module_graph/library.jq4
-rw-r--r--json_module_graph/reverseDeps.jq5
-rw-r--r--json_module_graph/transitiveDeps.jq4
-rw-r--r--linux.bazelrc6
-rw-r--r--mkcompare/README.md145
-rw-r--r--mkcompare/cmd/mkcompare.go253
-rw-r--r--mkcompare/elfdiff/elfdiff.go237
-rw-r--r--mkcompare/go.mod4
-rw-r--r--mkcompare/go.work8
-rw-r--r--mkcompare/mkdiff.go200
-rw-r--r--mkcompare/mkdiff_test.go122
-rw-r--r--mkcompare/mkfile.go175
-rw-r--r--mkcompare/mkfile_test.go62
-rw-r--r--platforms/BUILD.bazel121
-rw-r--r--platforms/arch/BUILD36
-rw-r--r--platforms/arch/variants/BUILD17
-rw-r--r--platforms/arch/variants/constants.bzl23
-rw-r--r--platforms/arch/variants/constants_test.bzl85
-rw-r--r--platforms/os_arch/BUILD.bazel24
-rw-r--r--platforms/platform_utils.bzl166
-rw-r--r--platforms/product_variables/product_platform.bzl148
-rw-r--r--platforms/rule_utilities.bzl47
-rw-r--r--product_config/BUILD43
-rw-r--r--product_config/android_product.bzl433
-rw-r--r--product_config/product_variables_providing_rule.bzl141
-rw-r--r--product_variables/constants.bzl11
-rw-r--r--rules/BUILD.bazel49
-rw-r--r--rules/README.md12
-rw-r--r--rules/abi/BUILD.bazel18
-rw-r--r--rules/abi/abi-dumps/platform/32/64/x86_64/source-based/libabi_diff_action.so.lsdump0
-rw-r--r--rules/abi/abi-dumps/platform/33/64/x86_64/source-based/libabi_diff_action.so.lsdump0
-rw-r--r--rules/abi/abi-dumps/platform/34/64/x86_64/source-based/libabi_diff_action.so.lsdump0
-rw-r--r--rules/abi/abi-dumps/platform/BUILD.bazel10
-rw-r--r--rules/abi/abi-dumps/platform/current/64/x86_64/source-based/libabi_diff_action.so.lsdump0
-rw-r--r--rules/abi/abi_dump.bzl412
-rw-r--r--rules/abi/abi_dump_test.bzl677
-rw-r--r--rules/aidl/BUILD22
-rw-r--r--rules/aidl/aidl_interface.bzl331
-rw-r--r--rules/aidl/aidl_interface_test.bzl471
-rw-r--r--rules/aidl/aidl_library.bzl241
-rw-r--r--rules/aidl/aidl_library_test.bzl260
-rw-r--r--rules/aidl/testing/BUILD141
-rw-r--r--rules/aidl/testing/Test.aidl0
-rw-r--r--rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test.aidl0
-rw-r--r--rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test2.aidl0
-rw-r--r--rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test3.aidl0
-rw-r--r--rules/aidl/testing/aidl_api/aidl_interface_test/2/Test2Only.aidl0
-rwxr-xr-xrules/aidl/testing/generated_targets_have_correct_srcs.sh55
-rwxr-xr-xrules/aidl/testing/interface_macro_produces_all_targets.sh56
-rw-r--r--rules/android/BUILD.bazel1
-rw-r--r--rules/android/aar_import.bzl75
-rw-r--r--rules/android/aar_import_aosp_internal/BUILD.bazel0
-rw-r--r--rules/android/aar_import_aosp_internal/attrs.bzl40
-rw-r--r--rules/android/aar_import_aosp_internal/rule.bzl33
-rw-r--r--rules/android/android_app_certificate.bzl162
-rw-r--r--rules/android/android_app_keystore.bzl52
-rw-r--r--rules/android/android_binary.bzl150
-rw-r--r--rules/android/android_binary_aosp_internal/BUILD.bazel0
-rw-r--r--rules/android/android_binary_aosp_internal/impl.bzl62
-rw-r--r--rules/android/android_binary_aosp_internal/rule.bzl28
-rw-r--r--rules/android/android_library.bzl110
-rw-r--r--rules/android/android_library_aosp_internal/BUILD.bazel0
-rw-r--r--rules/android/android_library_aosp_internal/attrs.bzl128
-rw-r--r--rules/android/android_library_aosp_internal/impl.bzl131
-rw-r--r--rules/android/android_library_aosp_internal/rule.bzl41
-rw-r--r--rules/android/rules.bzl62
-rw-r--r--rules/apex.bzl432
-rw-r--r--rules/apex/BUILD205
-rw-r--r--rules/apex/METADATA1
-rw-r--r--rules/apex/README.md73
-rw-r--r--rules/apex/apex.bzl1171
-rw-r--r--rules/apex/apex_aab.bzl434
-rw-r--r--rules/apex/apex_aab_test.bzl151
-rw-r--r--rules/apex/apex_available.bzl159
-rw-r--r--rules/apex/apex_deps_validation.bzl261
-rw-r--r--rules/apex/apex_info.bzl41
-rw-r--r--rules/apex/apex_key.bzl95
-rw-r--r--rules/apex/apex_key_test.bzl163
-rw-r--r--rules/apex/apex_mk_test.bzl199
-rw-r--r--rules/apex/apex_test.bzl2830
-rw-r--r--rules/apex/apex_test_helpers.bzl81
-rw-r--r--rules/apex/bazel_apexer_wrapper.py207
-rw-r--r--rules/apex/bundle.bzl157
-rw-r--r--rules/apex/cc.bzl332
-rw-r--r--rules/apex/mainline_modules.bzl260
-rw-r--r--rules/apex/sdk_versions.bzl47
-rw-r--r--rules/apex/testdata/BUILD20
-rw-r--r--rules/apex/testdata/another.pk80
-rw-r--r--rules/apex/testdata/another.x509.pem0
-rw-r--r--rules/apex/testdata/devkey.avbpubkey0
-rw-r--r--rules/apex/testdata/devkey.keystore0
-rw-r--r--rules/apex/testdata/devkey.pem0
-rw-r--r--rules/apex/testdata/devkey.priv0
-rw-r--r--rules/apex/testdata/devkey.pub0
-rw-r--r--rules/apex/toolchain.bzl79
-rw-r--r--rules/apex/transition.bzl173
-rw-r--r--rules/apex_key.bzl36
-rw-r--r--rules/api_surfaces_injection.bzl26
-rw-r--r--rules/apis/BUILD21
-rw-r--r--rules/apis/README.md9
-rw-r--r--rules/apis/api_domain.bzl56
-rw-r--r--rules/apis/api_domain_test.bzl161
-rw-r--r--rules/apis/api_surface.bzl44
-rw-r--r--rules/apis/cc_api_contribution.bzl235
-rw-r--r--rules/apis/cc_api_contribution_test.bzl294
-rw-r--r--rules/apis/java_api_contribution.bzl95
-rw-r--r--rules/bpf/BUILD19
-rw-r--r--rules/bpf/bpf.bzl151
-rw-r--r--rules/bpf/bpf_test.bzl157
-rw-r--r--rules/cc/BUILD.bazel92
-rw-r--r--rules/cc/cc_aidl_code_gen.bzl211
-rw-r--r--rules/cc/cc_aidl_library.bzl66
-rw-r--r--rules/cc/cc_aidl_library_test.bzl192
-rw-r--r--rules/cc/cc_binary.bzl146
-rw-r--r--rules/cc/cc_binary_test.bzl295
-rw-r--r--rules/cc/cc_constants.bzl34
-rw-r--r--rules/cc/cc_hidl_library.bzl145
-rw-r--r--rules/cc/cc_hidl_library_test.bzl343
-rw-r--r--rules/cc/cc_library_common.bzl409
-rw-r--r--rules/cc/cc_library_common_test.bzl157
-rw-r--r--rules/cc/cc_library_headers.bzl32
-rw-r--r--rules/cc/cc_library_shared.bzl508
-rw-r--r--rules/cc/cc_library_shared_test.bzl785
-rw-r--r--rules/cc/cc_library_static.bzl499
-rw-r--r--rules/cc/cc_library_static_test.bzl602
-rw-r--r--rules/cc/cc_object.bzl103
-rw-r--r--rules/cc/cc_object_test.bzl128
-rw-r--r--rules/cc/cc_prebuilt_binary.bzl51
-rw-r--r--rules/cc/cc_prebuilt_binary_test.bzl222
-rw-r--r--rules/cc/cc_prebuilt_library_shared.bzl50
-rw-r--r--rules/cc/cc_prebuilt_library_shared_test.bzl140
-rw-r--r--rules/cc/cc_prebuilt_library_static.bzl52
-rw-r--r--rules/cc/cc_prebuilt_library_static_test.bzl208
-rw-r--r--rules/cc/cc_prebuilt_object.bzl35
-rw-r--r--rules/cc/cc_proto.bzl138
-rw-r--r--rules/cc/cc_proto_test.bzl449
-rw-r--r--rules/cc/cc_stub_library.bzl239
-rw-r--r--rules/cc/cc_sysprop_library.bzl162
-rw-r--r--rules/cc/cc_sysprop_library_test.bzl267
-rw-r--r--rules/cc/cc_test.bzl180
-rw-r--r--rules/cc/cc_test_test.bzl80
-rw-r--r--rules/cc/clang_tidy.bzl354
-rw-r--r--rules/cc/clang_tidy_test.bzl770
-rw-r--r--rules/cc/composed_transitions.bzl49
-rw-r--r--rules/cc/fdo_profile_transitions.bzl62
-rw-r--r--rules/cc/flex.bzl121
-rw-r--r--rules/cc/flex_test.bzl346
-rw-r--r--rules/cc/generate_toc.bzl70
-rw-r--r--rules/cc/lto_transitions.bzl62
-rw-r--r--rules/cc/lto_transitions_test.bzl247
-rw-r--r--rules/cc/prebuilt_library_shared.bzl36
-rw-r--r--rules/cc/prebuilt_library_static.bzl40
-rw-r--r--rules/cc/static_libc.bzl28
-rw-r--r--rules/cc/stl.bzl154
-rw-r--r--rules/cc/stl_test.bzl994
-rw-r--r--rules/cc/stripped_cc_common.bzl206
-rw-r--r--rules/cc/testing/BUILD.bazel33
-rw-r--r--rules/cc/testing/gunit_test.cpp5
-rw-r--r--rules/cc/testing/test.cpp5
-rw-r--r--rules/cc/testing/transitions.bzl58
-rw-r--r--rules/cc/versioned_cc_common.bzl32
-rw-r--r--rules/cc/yasm.bzl102
-rw-r--r--rules/cc/yasm_test.bzl126
-rw-r--r--rules/common.bzl53
-rw-r--r--rules/common/BUILD.bazel6
-rw-r--r--rules/common/api.bzl129
-rw-r--r--rules/common/api_test.bzl77
-rw-r--r--rules/common/sdk_version.bzl84
-rw-r--r--rules/common/sdk_version_test.bzl150
-rw-r--r--rules/coverage/remote_coverage_tools/BUILD9
-rw-r--r--rules/coverage/remote_coverage_tools/WORKSPACE2
-rw-r--r--rules/env.bzl70
-rw-r--r--rules/filegroup.bzl40
-rw-r--r--rules/gensrcs.bzl100
-rw-r--r--rules/gensrcs_test.bzl209
-rw-r--r--rules/hidl/BUILD18
-rw-r--r--rules/hidl/hidl_interface.bzl48
-rw-r--r--rules/hidl/hidl_library.bzl77
-rw-r--r--rules/hidl/hidl_library_test.bzl163
-rw-r--r--rules/hidl_file_utils.bzl119
-rw-r--r--rules/java/BUILD112
-rw-r--r--rules/java/bootclasspath.bzl49
-rw-r--r--rules/java/bootclasspath_test.bzl67
-rw-r--r--rules/java/event_log_tags.bzl49
-rw-r--r--rules/java/host_for_device.bzl40
-rw-r--r--rules/java/host_for_device_test.bzl78
-rw-r--r--rules/java/import.bzl67
-rw-r--r--rules/java/java_aidl_library.bzl91
-rw-r--r--rules/java/java_system_modules.bzl177
-rw-r--r--rules/java/java_system_modules_test.bzl59
-rw-r--r--rules/java/library.bzl80
-rw-r--r--rules/java/merged_txts.bzl84
-rw-r--r--rules/java/merged_txts_test.bzl156
-rw-r--r--rules/java/proto.bzl97
-rw-r--r--rules/java/rules.bzl30
-rw-r--r--rules/java/sdk/BUILD.bazel112
-rw-r--r--rules/java/sdk/config_setting_names.bzl35
-rw-r--r--rules/java/sdk_library.bzl61
-rw-r--r--rules/java/sdk_library_test.bzl115
-rw-r--r--rules/java/sdk_transition.bzl68
-rw-r--r--rules/java/sdk_transition_test.bzl173
-rw-r--r--rules/java/stub_local_jdk/BUILD.bazel8
-rw-r--r--rules/java/stub_local_jdk/WORKSPACE0
-rw-r--r--rules/java/versions.bzl104
-rw-r--r--rules/java/versions_test.bzl44
-rw-r--r--rules/java/wrapper_test.sh38
-rw-r--r--rules/kotlin/BUILD0
-rw-r--r--rules/kotlin/kotlinc.BUILD96
-rw-r--r--rules/kotlin/kt_jvm_library.bzl140
-rw-r--r--rules/kotlin/maven_interface/BUILD29
-rw-r--r--rules/kotlin/maven_interface/WORKSPACE0
-rw-r--r--rules/kotlin/rules.bzl25
-rw-r--r--rules/license/BUILD17
-rw-r--r--rules/license/license.bzl79
-rw-r--r--rules/license/license_aspect.bzl156
-rw-r--r--rules/linker_config.bzl56
-rw-r--r--rules/linker_config_test.bzl126
-rw-r--r--rules/make_injection.bzl29
-rw-r--r--rules/metadata.bzl29
-rw-r--r--rules/partitions/BUILD24
-rw-r--r--rules/partitions/installable_info.bzl43
-rw-r--r--rules/partitions/partition.bzl377
-rw-r--r--rules/partitions/toolchain.bzl38
-rw-r--r--rules/prebuilt_file.bzl44
-rw-r--r--rules/prebuilt_file_test.bzl80
-rw-r--r--rules/prebuilt_xml.bzl103
-rw-r--r--rules/prebuilt_xml_test.bzl249
-rw-r--r--rules/proto_file_utils.bzl66
-rw-r--r--rules/python/library.bzl15
-rw-r--r--rules/python/py_proto.bzl103
-rw-r--r--rules/sh_binary.bzl3
-rw-r--r--rules/soong_injection.bzl32
-rw-r--r--rules/staging_dir_builder.py133
-rwxr-xr-xrules/staging_dir_builder_test.sh (renamed from rules/apex/bazel_apexer_wrapper_test.sh)94
-rw-r--r--rules/sysprop/BUILD.bazel19
-rw-r--r--rules/sysprop/sysprop_library.bzl36
-rw-r--r--rules/sysprop/sysprop_library_test.bzl54
-rw-r--r--rules/test.pem (renamed from rules/apex/test.pem)0
-rw-r--r--rules/test_common/BUILD.bazel15
-rw-r--r--rules/test_common/args.bzl98
-rw-r--r--rules/test_common/asserts.bzl34
-rw-r--r--rules/test_common/flags.bzl159
-rw-r--r--rules/test_common/paths.bzl31
-rw-r--r--rules/test_common/rules.bzl38
-rw-r--r--rules/toolchain_utils.bzl23
-rw-r--r--rules/tradefed/BUILD.bazel47
-rw-r--r--rules/tradefed/test/BUILD.bazel5
-rw-r--r--rules/tradefed/test/example_config.xml7
-rw-r--r--rules/tradefed/tradefed.bzl184
-rw-r--r--rules/tradefed/tradefed.sh.tpl55
-rw-r--r--rules/tradefed/tradefed_test.bzl159
-rw-r--r--rules_cc/README.md13
-rw-r--r--rules_cc/WORKSPACE76
-rw-r--r--rules_cc/cc/BUILD6
-rw-r--r--rules_cc/cc/defs.bzl175
-rw-r--r--rules_cc/cc/find_cc_toolchain.bzl89
-rw-r--r--rules_cc/examples/BUILD43
-rw-r--r--rules_cc/examples/experimental_cc_shared_library.bzl514
-rwxr-xr-xscripts/apex_compare.sh208
-rw-r--r--scripts/bp2build-progress/README.md40
-rwxr-xr-xscripts/bp2build-progress/bp2build-module-dep-infos.py177
-rwxr-xr-xscripts/bp2build-progress/bp2build-progress.py428
-rw-r--r--scripts/bp2build-progress/dependency_analysis.py135
-rw-r--r--scripts/bp2build_progress/BUILD.bazel73
-rw-r--r--scripts/bp2build_progress/README.md48
-rwxr-xr-xscripts/bp2build_progress/bp2build_module_dep_infos.py167
-rwxr-xr-xscripts/bp2build_progress/bp2build_progress.py530
-rw-r--r--scripts/bp2build_progress/bp2build_progress_test.py414
-rw-r--r--scripts/bp2build_progress/dependency_analysis.py466
-rwxr-xr-xscripts/bp2build_progress/dependency_analysis_test.py558
-rw-r--r--scripts/bp2build_progress/queryview_xml.py63
-rw-r--r--scripts/bp2build_progress/soong_module_json.py61
-rw-r--r--scripts/difftool/BUILD.bazel43
-rw-r--r--scripts/difftool/README.md2
-rwxr-xr-xscripts/difftool/action_diff_notebook.py199
-rw-r--r--scripts/difftool/clangcompile.py112
-rw-r--r--scripts/difftool/commands.py102
-rw-r--r--scripts/difftool/diffs/BUILD.bazel (renamed from scripts/bp2build-progress/BUILD.bazel)25
-rw-r--r--scripts/difftool/diffs/bloaty.py133
-rw-r--r--scripts/difftool/diffs/context.py38
-rw-r--r--scripts/difftool/diffs/diff.py30
-rw-r--r--scripts/difftool/diffs/nm.py93
-rwxr-xr-xscripts/difftool/difftool.py227
-rwxr-xr-xscripts/elf_compare.sh126
-rwxr-xr-xscripts/elf_input_files.sh68
-rwxr-xr-xscripts/gen_build_number.sh10
-rw-r--r--scripts/incremental_build/BUILD.bazel52
-rw-r--r--scripts/incremental_build/README.md16
-rwxr-xr-xscripts/incremental_build/canonical_perf.sh61
-rw-r--r--scripts/incremental_build/cuj_catalog.py494
-rw-r--r--scripts/incremental_build/incremental_build.py247
-rwxr-xr-xscripts/incremental_build/incremental_build.sh12
-rw-r--r--scripts/incremental_build/perf_metrics.py249
-rw-r--r--scripts/incremental_build/perf_metrics_test.py67
-rw-r--r--scripts/incremental_build/pretty.py112
-rw-r--r--scripts/incremental_build/ui.py210
-rw-r--r--scripts/incremental_build/util.py278
-rw-r--r--scripts/incremental_build/util_test.py107
-rwxr-xr-xscripts/milestone-2/demo.sh2
-rwxr-xr-xscripts/mkmodules_diff.sh35
-rwxr-xr-xscripts/print_analysis_metrics.py197
-rwxr-xr-xscripts/queryview-bottom-up.sh2
-rwxr-xr-xscripts/run_apex_tests.sh45
-rw-r--r--tests/apex/BUILD224
-rw-r--r--tests/apex/adb_apks_arches.golden14
-rw-r--r--tests/apex/adbd_aab_zipinfo.golden18
-rw-r--r--tests/apex/adbd_apks_zipinfo.golden7
-rw-r--r--tests/apex/apex_aab_test.bzl43
-rwxr-xr-xtests/apex/apex_aab_test.sh105
-rw-r--r--tests/apex/apex_compression_test.bzl41
-rwxr-xr-xtests/apex/apex_compression_test.sh (renamed from tests/apex/apex_test.sh)1
-rw-r--r--tests/apex/apex_diff_test.bzl21
-rw-r--r--tests/apex/apex_package_name_test.bzl38
-rw-r--r--tests/apex/apex_test.bzl42
-rw-r--r--tests/apex/apks_binary_arches.py77
-rw-r--r--tests/apex/build.bazel.examples.apex.minimal.aabbin954389 -> 0 bytes
-rw-r--r--tests/apex/com.android.adbd_backing.txt.golden1
-rw-r--r--tests/apex/com.android.adbd_installed-files.txt.golden11
-rw-r--r--tests/apex/com.android.adbd_manifest.json.golden19
-rw-r--r--tests/apex/minimal_apex_android_manifest.golden13
-rw-r--r--tests/apex/minimal_apex_using.txt.golden151
-rw-r--r--tests/apex/minimal_apex_using.xml.golden2
-rw-r--r--tests/bionic/BUILD6
-rwxr-xr-xtests/bionic/verify_bionic_outputs.sh4
-rw-r--r--tests/partitions/BUILD9
-rw-r--r--tests/partitions/image_contents_test.bzl31
-rw-r--r--tests/products/BUILD88
-rw-r--r--tests/products/aosp_arm.variables.bzl242
-rw-r--r--tests/products/aosp_arm64.variables.bzl243
-rw-r--r--tests/products/aosp_x86.variables.bzl242
-rw-r--r--tests/products/aosp_x86_64.variables.bzl243
-rw-r--r--tests/products/product_labels.bzl11
-rw-r--r--vendor/google/BUILD34
-rwxr-xr-xvendor/google/build_mainline_modules.sh4
419 files changed, 40075 insertions, 5305 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..618478d4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+/.idea
+*.ipr
+*.iml
+__pycache__
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
new file mode 100644
index 00000000..0fb4360c
--- /dev/null
+++ b/PREUPLOAD.cfg
@@ -0,0 +1,5 @@
+[Builtin Hooks]
+bpfmt = true
+
+[Hook Scripts]
+buildifier_hook = ./buildifier_hook.sh ${PREUPLOAD_FILES}
diff --git a/api_surfaces/BUILD b/api_surfaces/BUILD
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/api_surfaces/BUILD
diff --git a/api_surfaces/README.md b/api_surfaces/README.md
new file mode 100644
index 00000000..5146aed9
--- /dev/null
+++ b/api_surfaces/README.md
@@ -0,0 +1,10 @@
+# Aliases to API surface stubs
+
+This directory contains aliases to stub libraries (C, Java, ...). This directory
+will be used as an external repository in single-tree platform builds.
+
+## Warning
+1. build/bazel/api_surfaces is a temporary placeholder for these aliases. This
+   directory will be reorganized into a separate git project eventually.
+2. The internal source code layout of this directory is under development and
+ will likely change in the final design.
diff --git a/api_surfaces/WORKSPACE b/api_surfaces/WORKSPACE
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/api_surfaces/WORKSPACE
diff --git a/bazel.BUILD b/bazel.BUILD
index f9289357..61c8e6ab 100644
--- a/bazel.BUILD
+++ b/bazel.BUILD
@@ -11,4 +11,7 @@
# dependencies of the cc_toolchain. Then the root directory of the repository
# becomes part of said transitive dependencies and thus every file is in a
# subdirectory of it.
-filegroup(name="empty", visibility=["//visibility:public"])
+filegroup(
+ name = "empty",
+ visibility = ["//visibility:public"],
+)
diff --git a/bazel.WORKSPACE b/bazel.WORKSPACE
index 4b889d10..dc96892f 100644
--- a/bazel.WORKSPACE
+++ b/bazel.WORKSPACE
@@ -1,84 +1,95 @@
-load("//build/bazel/rules:soong_injection.bzl", "soong_injection_repository")
-load("//build/bazel/rules:make_injection.bzl", "make_injection_repository")
-
-register_toolchains(
- "//prebuilts/build-tools:py_toolchain",
- "//prebuilts/clang/host/linux-x86:all",
-)
-
# This repository provides files that Soong emits during bp2build (other than
# converted BUILD files), mostly .bzl files containing constants to support the
# converted BUILD files.
-soong_injection_repository(name="soong_injection")
+load("//build/bazel/rules:soong_injection.bzl", "soong_injection_repository")
+
+soong_injection_repository(name = "soong_injection")
+
+# ! WARNING ! WARNING ! WARNING !
+# make_injection is a repository rule to allow Bazel builds to depend on
+# Soong-built prebuilts for experimental purposes. It is fragile, slow, and
+# works for very limited use cases. Do not add a dependency that will cause
+# make_injection to run for any prod builds or tests.
+#
+# If you need to add something to this list, please contact the Roboleaf
+# team and ask jingwen@ for a review.
+load("//build/bazel/rules:make_injection.bzl", "make_injection_repository")
-# This is a repository rule to allow Bazel builds to depend on Soong-built
-# prebuilts for migration purposes.
make_injection_repository(
name = "make_injection",
binaries = [
- # APEX tools
- "apex_compression_tool",
- "apexer",
- "conv_apex_manifest",
- "deapexer",
- "sefcontext_compile",
+ "build_image",
+ "mkuserimg_mke2fs",
],
- target_module_files = {
- # For APEX comparisons
- "com.android.tzdata": ["system/apex/com.android.tzdata.apex"],
- "com.android.runtime": ["system/apex/com.android.runtime.apex"],
- "com.android.adbd": ["system/apex/com.android.adbd.capex"],
- "build.bazel.examples.apex.minimal": ["system/product/apex/build.bazel.examples.apex.minimal.apex"],
- },
+ target_module_files = {},
watch_android_bp_files = [
- "//:build/bazel/examples/apex/minimal/Android.bp", # for build.bazel.examples.apex.minimal
- "//:packages/modules/adbd/apex/Android.bp", # for com.android.adbd
- # TODO(b/210399979) - add the other .bp files to watch for the other modules built in these rule
+ "//:build/make/tools/releasetools/Android.bp", # for build_image
+ "//:system/extras/ext4_utils/Android.bp", # for mkuserimg_mke2fs
],
)
+# ! WARNING ! WARNING ! WARNING !
-local_repository(
- name = "rules_cc",
- path = "build/bazel/rules_cc",
+load("//build/bazel/rules:env.bzl", "env_repository")
+
+env_repository(
+ name = "env",
)
-local_repository(
- name = "bazel_skylib",
- path = "external/bazel-skylib",
+# This repository is a container for API surface stub libraries.
+load("//build/bazel/rules:api_surfaces_injection.bzl", "api_surfaces_repository")
+
+# TODO: Once BUILD files for stubs are checked-in, this should be converted to a local_repository.
+api_surfaces_repository(name = "api_surfaces")
+
+load("//build/bazel_common_rules/workspace:external.bzl", "import_external_repositories")
+
+import_external_repositories(
+ bazel_skylib = True,
+ io_abseil_py = True,
)
+load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
+
+bazel_skylib_workspace()
+
local_repository(
name = "rules_android",
path = "external/bazelbuild-rules_android",
)
+local_repository(
+ name = "rules_license",
+ path = "external/bazelbuild-rules_license",
+)
+
register_toolchains(
- # For Starlark Android rules
- "//prebuilts/sdk:android_default_toolchain",
- "//prebuilts/sdk:android_sdk_tools",
+ "//prebuilts/build-tools:py_toolchain",
+
+ # For Android rules
+ "//prebuilts/sdk:all",
- # For native android_binary
- "//prebuilts/sdk:android_sdk_tools_for_native_android_binary",
+ # For APEX rules
+ "//build/bazel/rules/apex:all",
- # For APEX rules
- "//build/bazel/rules/apex:all"
+ # For partition rules
+ "//build/bazel/rules/partitions:all",
)
bind(
- name = "databinding_annotation_processor",
- actual = "//prebuilts/sdk:compiler_annotation_processor",
+ name = "databinding_annotation_processor",
+ actual = "//prebuilts/sdk:compiler_annotation_processor",
)
bind(
- name = "android/dx_jar_import",
- actual = "//prebuilts/sdk:dx_jar_import",
+ name = "android/dx_jar_import",
+ actual = "//prebuilts/sdk:dx_jar_import",
)
# The r8.jar in prebuilts/r8 happens to have the d8 classes needed
# for Android app building, whereas the d8.jar in prebuilts/sdk/tools doesn't.
bind(
- name = "android/d8_jar_import",
- actual = "//prebuilts/r8:r8_jar_import",
+ name = "android/d8_jar_import",
+ actual = "//prebuilts/bazel/common/r8:r8_jar_import",
)
# TODO(b/201242197): Avoid downloading remote_coverage_tools (on CI) by creating
@@ -87,5 +98,69 @@ bind(
# cut the dependency from test rules to the external repo.
local_repository(
name = "remote_coverage_tools",
- path = "build/bazel/rules/coverage/remote_coverage_tools",
+ path = "build/bazel_common_rules/rules/coverage/remote_coverage_tools",
+)
+
+# Stubbing the local_jdk both ensures that we don't accidentally download remote
+# repositories and allows us to let the Kotlin rules continue to access
+# @local_jdk//jar.
+local_repository(
+ name = "local_jdk",
+ path = "build/bazel/rules/java/stub_local_jdk",
)
+
+# The following 2 repositories contain prebuilts that are necessary to the Java Rules.
+# They are vendored locally to avoid the need for CI bots to download them.
+local_repository(
+ name = "remote_java_tools",
+ path = "prebuilts/bazel/common/remote_java_tools",
+)
+
+local_repository(
+ name = "remote_java_tools_linux",
+ path = "prebuilts/bazel/linux-x86_64/remote_java_tools_linux",
+)
+
+# The following repository contains android_tools prebuilts.
+local_repository(
+ name = "android_tools",
+ path = "prebuilts/bazel/common/android_tools",
+)
+
+# The rules_java repository is stubbed and points to the native Java rules until
+# it can be properly vendored.
+local_repository(
+ name = "rules_java",
+ path = "build/bazel_common_rules/rules/java/rules_java",
+)
+
+register_toolchains(
+ "//prebuilts/jdk/jdk17:runtime_toolchain_definition",
+ "//build/bazel/rules/java:jdk17_host_toolchain_java_definition",
+)
+
+local_repository(
+ name = "kotlin_maven_interface",
+ path = "build/bazel/rules/kotlin/maven_interface",
+)
+
+local_repository(
+ name = "rules_kotlin",
+ path = "external/bazelbuild-kotlin-rules",
+ repo_mapping = {
+ "@maven": "@kotlin_maven_interface",
+ "@bazel_platforms": "@platforms",
+ },
+)
+
+new_local_repository(
+ name = "kotlinc",
+ build_file = "//build/bazel/rules/kotlin:kotlinc.BUILD",
+ path = "external/kotlinc",
+)
+
+register_toolchains("@rules_kotlin//toolchains/kotlin_jvm:kt_jvm_toolchain")
+
+load("//prebuilts/clang/host/linux-x86:cc_toolchain_config.bzl", "cc_register_toolchains")
+
+cc_register_toolchains()
diff --git a/bazel.sh b/bazel.sh
deleted file mode 100755
index c30a8f5f..00000000
--- a/bazel.sh
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-# TODO: Refactor build/make/envsetup.sh to make gettop() available elsewhere
-function gettop
-{
- local TOPFILE=build/bazel/bazel.sh
- if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
- # The following circumlocution ensures we remove symlinks from TOP.
- (cd "$TOP"; PWD= /bin/pwd)
- else
- if [ -f $TOPFILE ] ; then
- # The following circumlocution (repeated below as well) ensures
- # that we record the true directory name and not one that is
- # faked up with symlink names.
- PWD= /bin/pwd
- else
- local HERE=$PWD
- local T=
- while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
- \cd ..
- T=`PWD= /bin/pwd -P`
- done
- \cd "$HERE"
- if [ -f "$T/$TOPFILE" ]; then
- echo "$T"
- fi
- fi
- fi
-}
-
-# TODO: Refactor build/soong/scripts/microfactory.bash to make getoutdir() available elsewhere
-function getoutdir
-{
- local out_dir="${OUT_DIR-}"
- if [ -z "${out_dir}" ]; then
- if [ "${OUT_DIR_COMMON_BASE-}" ]; then
- out_dir="${OUT_DIR_COMMON_BASE}/$(basename ${TOP})"
- else
- out_dir="out"
- fi
- fi
- if [[ "${out_dir}" != /* ]]; then
- out_dir="${TOP}/${out_dir}"
- fi
- echo "${out_dir}"
-}
-
-TOP="$(gettop)"
-if [ ! "$TOP" ]; then
- >&2 echo "Couldn't locate the top of the tree. Try setting TOP."
- exit 1
-fi
-
-case $(uname -s) in
- Darwin)
- ANDROID_BAZEL_PATH="${TOP}/prebuilts/bazel/darwin-x86_64/bazel"
- ANDROID_BAZELRC_NAME="darwin.bazelrc"
- ANDROID_BAZEL_JDK_PATH="${TOP}/prebuilts/jdk/jdk11/darwin-x86"
-
- # Lock down PATH in action execution environment, thereby removing
- # Bazel's default /bin, /usr/bin, /usr/local/bin and ensuring
- # hermeticity from the system.
- #
- # The new PATH components are:
- #
- # - prebuilts/build-tools/path: contains checked-in tools that can be
- # used as executables in actions.
- #
- # - out/.path: a special directory created by path_interposer with
- # config from ui/build/paths/config.go for allowlisting specific
- # binaries not in prebuilts/build-tools/path, but on the host system.
- # If one runs Bazel without soong_ui, then this directory wouldn't
- # exist, making standalone Bazel execution's PATH variable stricter than
- # Bazel execution within soong_ui.
- RESTRICTED_PATH="${TOP}/prebuilts/build-tools/path/darwin-x86:${TOP}/out/.path"
- ;;
- Linux)
- ANDROID_BAZEL_PATH="${TOP}/prebuilts/bazel/linux-x86_64/bazel"
- ANDROID_BAZELRC_NAME="linux.bazelrc"
- ANDROID_BAZEL_JDK_PATH="${TOP}/prebuilts/jdk/jdk11/linux-x86"
- RESTRICTED_PATH="${TOP}/prebuilts/build-tools/path/linux-x86:${TOP}/out/.path"
- ;;
- *)
- >&2 echo "Bazel is supported on Linux and Darwin only. Your OS is not supported for Bazel usage, based on 'uname -s': $(uname -s)"
- exit 1
- ;;
-esac
-
-function verify_soong_outputs_exist() {
- local to_check="${ABSOLUTE_OUT_DIR}/.path"
- local no_soong=0
- if [[ ! -d "${to_check}" ]]; then
- no_soong=1
- fi
-
- local bazel_configs=(
- "bp2build"
- "queryview"
- )
- local valid_bazel_config=0
- for c in "${bazel_configs[@]}"
- do
- if [[ -d "${ABSOLUTE_OUT_DIR}/soong/""${c}" ]]; then
- valid_bazel_config=1
- fi
- done
-
- if [[ "${no_soong}" -eq "1" || "${valid_bazel_config}" -eq "0" ]]; then
- >&2 echo "Error: missing generated Bazel files. Have you run bp2build or queryview?"
- >&2 echo "Run bp2build with the command: m bp2build"
- >&2 echo "Run queryview with the command: m queryview"
- >&2 echo "Alternatively, for non-queryview applications, invoke Bazel using 'b' with the command: source envsetup.sh; b query/build/test <targets>"
- exit 1
- fi
-}
-
-function create_bazelrc() {
- cat > "${ABSOLUTE_OUT_DIR}/bazel/path.bazelrc" <<EOF
- # This file is generated by tools/bazel. Do not edit manually.
-build --action_env=PATH=${RESTRICTED_PATH}
-EOF
-}
-
-case "x${ANDROID_BAZELRC_PATH}" in
- x)
- # Path not provided, use default.
- ANDROID_BAZELRC_PATH="${TOP}/build/bazel"
- ;;
- x/*)
- # Absolute path, take it as-is.
- ANDROID_BAZELRC_PATH="${ANDROID_BAZELRC_PATH}"
- ;;
- x*)
- # Relative path, consider it relative to TOP.
- ANDROID_BAZELRC_PATH="${TOP}/${ANDROID_BAZELRC_PATH}"
- ;;
-esac
-
-if [ -d "${ANDROID_BAZELRC_PATH}" ]; then
- # If we're given a directory, find the correct bazelrc file there.
- ANDROID_BAZELRC_PATH="${ANDROID_BAZELRC_PATH}/${ANDROID_BAZELRC_NAME}"
-fi
-
-
-if [ -n "$ANDROID_BAZEL_PATH" -a -f "$ANDROID_BAZEL_PATH" ]; then
- export ANDROID_BAZEL_PATH
-else
- >&2 echo "Couldn't locate Bazel binary"
- exit 1
-fi
-
-if [ -n "$ANDROID_BAZELRC_PATH" -a -f "$ANDROID_BAZELRC_PATH" ]; then
- export ANDROID_BAZELRC_PATH
-else
- >&2 echo "Couldn't locate bazelrc file for Bazel"
- exit 1
-fi
-
-if [ -n "$ANDROID_BAZEL_JDK_PATH" -a -d "$ANDROID_BAZEL_JDK_PATH" ]; then
- export ANDROID_BAZEL_JDK_PATH
-else
- >&2 echo "Couldn't locate JDK to use for Bazel"
- exit 1
-fi
-
-ABSOLUTE_OUT_DIR="$(getoutdir)"
-
-# In order to be able to load JNI libraries, this directory needs to exist
-mkdir -p "${ABSOLUTE_OUT_DIR}/bazel/javatmp"
-
-ADDITIONAL_FLAGS=()
-if [[ "${STANDALONE_BAZEL}" =~ ^(true|TRUE|1)$ ]]; then
- # STANDALONE_BAZEL is set.
- >&2 echo "WARNING: Using Bazel in standalone mode. This mode is not integrated with Soong and Make, and is not supported"
- >&2 echo "for Android Platform builds. Use this mode at your own risk."
- >&2 echo
-else
- # STANDALONE_BAZEL is not set.
- >&2 echo "WARNING: Bazel support for the Android Platform is experimental and is undergoing development."
- >&2 echo "WARNING: Currently, build stability is not guaranteed. Thank you."
- >&2 echo
-
- # Generate a bazelrc with dynamic content, like the absolute path to PATH variable values.
- create_bazelrc
- # Check that the Bazel synthetic workspace and other required inputs exist before handing over control to Bazel.
- verify_soong_outputs_exist
- ADDITIONAL_FLAGS+=("--bazelrc=${ABSOLUTE_OUT_DIR}/bazel/path.bazelrc")
-fi
-
-JAVA_HOME="${ANDROID_BAZEL_JDK_PATH}" "${ANDROID_BAZEL_PATH}" \
- --server_javabase="${ANDROID_BAZEL_JDK_PATH}" \
- --output_user_root="${ABSOLUTE_OUT_DIR}/bazel/output_user_root" \
- --host_jvm_args=-Djava.io.tmpdir="${ABSOLUTE_OUT_DIR}/bazel/javatmp" \
- --bazelrc="${ANDROID_BAZELRC_PATH}" \
- "${ADDITIONAL_FLAGS[@]}" \
- "$@"
diff --git a/bin/b b/bin/b
new file mode 100755
index 00000000..c9a6da90
--- /dev/null
+++ b/bin/b
@@ -0,0 +1,80 @@
+#!/bin/bash -e
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Common script utilities
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../make/shell_utils.sh
+require_top
+
+if [[ -z ${OUT_DIR+x} ]]; then
+ PROFILE_OUT=$TOP/out
+else
+ PROFILE_OUT=$OUT_DIR
+fi
+mkdir -p $PROFILE_OUT
+
+# Look for the --run-soong-tests flag and skip passing --skip-soong-tests to Soong if present
+bazel_args=""
+skip_tests="--skip-soong-tests"
+for i in $@; do
+ if [[ $i != "--run-soong-tests" ]]; then
+ bazel_args+="$i "
+ else
+ skip_tests=""
+ fi
+done
+
+# Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
+# RBE is disabled because it's not used with b builds and adds overhead: b/251441524
+# TODO(b/262904551) - this is Darwin incompatible and should eventually be updated.
+BUILD_STARTED_TIME=`date +%s%3N`
+B_ARGS=$*
+USE_RBE=false "$TOP/build/soong/soong_ui.bash" --build-mode --all-modules --dir="$(pwd)" $skip_tests bp2build USE_BAZEL_ANALYSIS= --build-command="b ${B_ARGS}" --skip-metrics-upload --build-started-time-unix-millis=$BUILD_STARTED_TIME || exit 1
+
+
+# Then, run Bazel using the synthetic workspace as the --package_path.
+if [[ -z "$bazel_args" ]]; then
+ # If there are no args, show help and exit.
+ "$TOP/build/bazel/bin/bazel" help
+else
+ # Else, always run with the bp2build configuration, which sets Bazel's package path to
+ # the synthetic workspace.
+ # Add the --config=bp2build after the first argument that doesn't start with a dash. That
+ # should be the bazel
+ # command (build, test, run, etc.). If the --config was added at the end, it wouldn't work
+ # with commands like: b run //foo -- --args-for-foo
+ config_set=0
+
+ # Represent the args as an array, not a string.
+ bazel_args_with_config=()
+ for arg in $bazel_args; do
+ if [[ $arg == "--" && $config_set -ne 1 ]]; # if we find --, insert config argument here
+ then
+ bazel_args_with_config+=("--profile=$PROFILE_OUT/bazel_metrics-profile --config=bp2build -- ")
+ config_set=1
+ else
+ bazel_args_with_config+=("$arg ")
+ fi
+ done
+ if [[ $config_set -ne 1 ]]; then
+ bazel_args_with_config+=("--profile=$PROFILE_OUT/bazel_metrics-profile --config=bp2build ")
+ fi
+
+ # Call Bazel.
+ "$TOP/build/bazel/bin/bazel" ${bazel_args_with_config[@]}
+ "$TOP/build/bazel/bin/bazel" analyze-profile $PROFILE_OUT/bazel_metrics-profile > $PROFILE_OUT/analyzed_bazel_profile.txt
+ rm $PROFILE_OUT/bazel_metrics-profile
+ "$TOP/build/soong/soong_ui.bash" --upload-metrics-only
+fi
diff --git a/bin/bazel b/bin/bazel
new file mode 100755
index 00000000..d2c14ccc
--- /dev/null
+++ b/bin/bazel
@@ -0,0 +1,238 @@
+#!/bin/bash
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../make/shell_utils.sh
+require_top
+
+
+ABSOLUTE_OUT_DIR="$(getoutdir)"
+# Store all bazel-related metadata files in this subdir of OUT_DIR.
+BAZEL_METADATA_OUT="${ABSOLUTE_OUT_DIR}/bazel"
+mkdir -p "${BAZEL_METADATA_OUT}"
+
+case $(uname -s) in
+ Darwin)
+ ANDROID_BAZEL_PATH="${TOP}/prebuilts/bazel/darwin-x86_64/bazel"
+ ANDROID_BAZELRC_NAME="darwin.bazelrc"
+ ANDROID_BAZEL_JDK_PATH="${TOP}/prebuilts/jdk/jdk11/darwin-x86"
+
+ # Lock down PATH in action execution environment, thereby removing
+ # Bazel's default /bin, /usr/bin, /usr/local/bin and ensuring
+ # hermeticity from the system.
+ #
+ # The new PATH components are:
+ #
+ # - prebuilts/build-tools/path: contains checked-in tools that can be
+ # used as executables in actions.
+ #
+ # - out/.path: a special directory created by path_interposer with
+ # config from ui/build/paths/config.go for allowlisting specific
+ # binaries not in prebuilts/build-tools/path, but on the host system.
+ # If one runs Bazel without soong_ui, then this directory wouldn't
+ # exist, making standalone Bazel execution's PATH variable stricter than
+ # Bazel execution within soong_ui.
+ RESTRICTED_PATH="${TOP}/prebuilts/build-tools/path/darwin-x86:${ABSOLUTE_OUT_DIR}/.path"
+ ;;
+ Linux)
+ ANDROID_BAZEL_PATH="${TOP}/prebuilts/bazel/linux-x86_64/bazel"
+ ANDROID_BAZELRC_NAME="linux.bazelrc"
+ ANDROID_BAZEL_JDK_PATH="${TOP}/prebuilts/jdk/jdk11/linux-x86"
+ RESTRICTED_PATH="${TOP}/prebuilts/build-tools/path/linux-x86:${ABSOLUTE_OUT_DIR}/.path"
+ ;;
+ *)
+ >&2 echo "Bazel is supported on Linux and Darwin only. Your OS is not supported for Bazel usage, based on 'uname -s': $(uname -s)"
+ exit 1
+ ;;
+esac
+
+function verify_soong_outputs_exist() {
+ local to_check="${ABSOLUTE_OUT_DIR}/.path"
+ local no_soong=0
+ if [[ ! -d "${to_check}" ]]; then
+ no_soong=1
+ fi
+
+ local bazel_configs=(
+ "bp2build"
+ "api_bp2build"
+ "queryview"
+ )
+ local valid_bazel_config=0
+ for c in "${bazel_configs[@]}"
+ do
+ if [[ -d "${ABSOLUTE_OUT_DIR}/soong/""${c}" ]]; then
+ valid_bazel_config=1
+ fi
+ done
+
+ if [[ "${no_soong}" -eq "1" || "${valid_bazel_config}" -eq "0" ]]; then
+ >&2 echo "Error: missing generated Bazel files. Have you run bp2build or queryview?"
+ >&2 echo "Run bp2build with the command: m bp2build"
+ >&2 echo "Run queryview with the command: m queryview"
+ >&2 echo "Alternatively, for non-queryview applications, invoke Bazel using 'b' with the command: source envsetup.sh; b query/build/test <targets>"
+ exit 1
+ fi
+}
+
+function create_bazelrc() {
+ cat > "${BAZEL_METADATA_OUT}/generated.bazelrc" <<EOF
+# This file is generated by build/bazel/bin/bazel. Do not edit manually.
+build --action_env=PATH=${RESTRICTED_PATH}
+
+# The --package_path option needs to be added to several different
+# bazel subcommands, because they don't inherit from each other, and
+# if we put it on "common", then it would break other commands like
+# shutdown that don't support --package_path. In addition, we have
+# to set at least one option on the "common" target so that bazel
+# won't complain that bp2build doesn't exist when using
+# --config=bp2build. We'll use --noannounce_rc for that, because
+# --noannounce_rc is the default.
+
+
+# Run bazel query from the workspace, without cd'ing into \$OUT_DIR/soong/queryview
+common:queryview --noannounce_rc
+build:queryview --package_path=${ABSOLUTE_OUT_DIR}/soong/queryview
+fetch:queryview --package_path=${ABSOLUTE_OUT_DIR}/soong/queryview
+modquery:queryview --package_path=${ABSOLUTE_OUT_DIR}/soong/queryview
+query:queryview --package_path=${ABSOLUTE_OUT_DIR}/soong/queryview
+sync:queryview --package_path=${ABSOLUTE_OUT_DIR}/soong/queryview
+
+# Run bazel build from the workspace, without cd'ing into \$OUT_DIR/soong/api_bp2build
+common:api_bp2build --noannounce_rc
+build:api_bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/api_bp2build
+fetch:api_bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/api_bp2build
+modquery:api_bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/api_bp2build
+query:api_bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/api_bp2build
+sync:api_bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/api_bp2build
+
+# Run bazel build from the workspace, without cd'ing into \$OUT_DIR/soong/workspace
+common:bp2build --noannounce_rc
+build:bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/workspace
+fetch:bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/workspace
+modquery:bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/workspace
+query:bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/workspace
+sync:bp2build --package_path=${ABSOLUTE_OUT_DIR}/soong/workspace
+
+EOF
+}
+
+# Returns success (exit status 0) if STANDALONE_BAZEL is truthy
+function is_standalone_bazel() {
+ [[ ${STANDALONE_BAZEL} =~ ^(true|TRUE|1)$ ]]
+}
+
+case "x${ANDROID_BAZELRC_PATH}" in
+ x)
+ # Path not provided, use default.
+ if is_standalone_bazel; then
+ # Standalone bazel uses the empty /dev/null bazelrc
+ # This is necessary since some configs in common.bazelrc depend on soong_injection
+ ANDROID_BAZELRC_PATH=/dev/null
+ else
+ ANDROID_BAZELRC_PATH="${TOP}/build/bazel"
+ fi
+ ;;
+ x/*)
+ # Absolute path, take it as-is.
+ ANDROID_BAZELRC_PATH="${ANDROID_BAZELRC_PATH}"
+ ;;
+ x*)
+ # Relative path, consider it relative to TOP.
+ ANDROID_BAZELRC_PATH="${TOP}/${ANDROID_BAZELRC_PATH}"
+ ;;
+esac
+
+if [ -d "${ANDROID_BAZELRC_PATH}" ]; then
+ # If we're given a directory, find the correct bazelrc file there.
+ ANDROID_BAZELRC_PATH="${ANDROID_BAZELRC_PATH}/${ANDROID_BAZELRC_NAME}"
+fi
+
+
+if [ -n "$ANDROID_BAZEL_PATH" -a -f "$ANDROID_BAZEL_PATH" ]; then
+ export ANDROID_BAZEL_PATH
+else
+ >&2 echo "Couldn't locate Bazel binary"
+ exit 1
+fi
+
+if [ "$ANDROID_BAZELRC_PATH" == "/dev/null" ] || [ -n "$ANDROID_BAZELRC_PATH" -a -f "$ANDROID_BAZELRC_PATH" ]; then
+ export ANDROID_BAZELRC_PATH
+else
+ >&2 echo "Couldn't locate bazelrc file for Bazel"
+ exit 1
+fi
+
+if [ -n "$ANDROID_BAZEL_JDK_PATH" -a -d "$ANDROID_BAZEL_JDK_PATH" ]; then
+ export ANDROID_BAZEL_JDK_PATH
+else
+ >&2 echo "Couldn't locate JDK to use for Bazel"
+ exit 1
+fi
+
+# In order to be able to load JNI libraries, this directory needs to exist
+mkdir -p "${BAZEL_METADATA_OUT}/javatmp"
+
+# Output a deps file. Soong will read these as dependencies for mixed builds
+MIXED_BUILDS_DOTD="${BAZEL_METADATA_OUT}/bazel.list"
+touch "${MIXED_BUILDS_DOTD}"
+echo $ANDROID_BAZEL_PATH > "${MIXED_BUILDS_DOTD}"
+echo $ANDROID_BAZELRC_PATH >> "${MIXED_BUILDS_DOTD}"
+echo $ANDROID_BAZEL_JDK_PATH >> "${MIXED_BUILDS_DOTD}"
+
+
+ADDITIONAL_FLAGS=()
+if is_standalone_bazel; then
+ # STANDALONE_BAZEL is set.
+ >&2 echo "WARNING: Using Bazel in standalone mode. This mode is not integrated with Soong and Make, and is not supported"
+ >&2 echo "for Android Platform builds. Use this mode at your own risk."
+ >&2 echo
+else
+ # Generate a bazelrc with dynamic content, like the absolute path to PATH variable values.
+ create_bazelrc
+ # Check that the Bazel synthetic workspace and other required inputs exist before handing over control to Bazel.
+ verify_soong_outputs_exist
+ ADDITIONAL_FLAGS+=("--bazelrc=${BAZEL_METADATA_OUT}/generated.bazelrc")
+
+ # These bazelrc files are only available when bp2build has been run.
+ # Standalone bazel and queryview don't run bp2build.
+ if [[ -f "${ABSOLUTE_OUT_DIR}/soong/soong_injection/product_config_platforms/common.bazelrc" ]]; then
+ ADDITIONAL_FLAGS+=("--bazelrc=${ABSOLUTE_OUT_DIR}/soong/soong_injection/product_config_platforms/common.bazelrc")
+ case $(uname -s) in
+ Darwin)
+ ADDITIONAL_FLAGS+=("--bazelrc=${ABSOLUTE_OUT_DIR}/soong/soong_injection/product_config_platforms/darwin.bazelrc")
+ ;;
+ Linux)
+ ADDITIONAL_FLAGS+=("--bazelrc=${ABSOLUTE_OUT_DIR}/soong/soong_injection/product_config_platforms/linux.bazelrc")
+ ;;
+ *)
+ >&2 echo "Bazel is supported on Linux and Darwin only. Your OS is not supported for Bazel usage, based on 'uname -s': $(uname -s)"
+ exit 1
+ ;;
+ esac
+ fi
+fi
+
+# TODO(b/240354506): Re-enable hsperfdata file creation without causing SIGBUS errors
+JAVA_HOME="${ANDROID_BAZEL_JDK_PATH}" "${ANDROID_BAZEL_PATH}" \
+ --server_javabase="${ANDROID_BAZEL_JDK_PATH}" \
+ --output_user_root="${BAZEL_METADATA_OUT}/output_user_root" \
+ --host_jvm_args=-Djava.io.tmpdir="${BAZEL_METADATA_OUT}/javatmp" \
+ --nohome_rc --nosystem_rc \
+ --bazelrc="${ANDROID_BAZELRC_PATH}" \
+ "${ADDITIONAL_FLAGS[@]}" \
+ "$@"
diff --git a/buildifier_hook.sh b/buildifier_hook.sh
new file mode 100755
index 00000000..2f883459
--- /dev/null
+++ b/buildifier_hook.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+[[ ! $(command -v buildifier) ]] || buildifier -mode=check -lint=warn -warnings="out-of-order-load,load-on-top,load,unused-variable,list-append" `printf "%s\n" $@ | grep -E "^(.*/)?(BUILD|BUILD.bazel|bazel.WORKSPACE|.*\\.bzl)$"` < /dev/null
diff --git a/ci/bp2build.sh b/ci/bp2build.sh
index 8f9e7ac9..24b3d10f 100755
--- a/ci/bp2build.sh
+++ b/ci/bp2build.sh
@@ -9,122 +9,110 @@
# Setup
#######
+# Set the test output directories.
+AOSP_ROOT="$(dirname $0)/../../.."
+OUT_DIR=$(realpath ${OUT_DIR:-${AOSP_ROOT}/out})
if [[ -z ${DIST_DIR+x} ]]; then
- echo "DIST_DIR not set. Using out/dist. This should only be used for manual developer testing."
- DIST_DIR="out/dist"
+ DIST_DIR="${OUT_DIR}/dist"
+ echo "DIST_DIR not set. Using ${OUT_DIR}/dist. This should only be used for manual developer testing."
fi
-# Generate BUILD files into out/soong/bp2build
-AOSP_ROOT="$(dirname $0)/../../.."
-"${AOSP_ROOT}/build/soong/soong_ui.bash" --make-mode BP2BUILD_VERBOSE=1 --skip-soong-tests bp2build dist
-
-# Dist the entire workspace of generated BUILD files, rooted from
-# out/soong/bp2build. This is done early so it's available even if builds/tests
-# fail.
-tar -czf "${DIST_DIR}/bp2build_generated_workspace.tar.gz" -C out/soong/bp2build .
-
-# Remove the ninja_build output marker file to communicate to buildbot that this is not a regular Ninja build, and its
-# output should not be parsed as such.
-rm -f out/ninja_build
-
# Before you add flags to this list, consider adding it to the "ci" bazelrc
# config instead of this list so that flags are not duplicated between scripts
# and bazelrc, and bazelrc is the Bazel-native way of organizing flags.
-FLAGS_LIST=(
+FLAGS=(
--config=bp2build
--config=ci
)
-FLAGS="${FLAGS_LIST[@]}"
+FLAGS="${FLAGS[@]}"
+
+source "$(dirname $0)/build_with_bazel.sh"
+source "$(dirname $0)/target_lists.sh"
###############
-# Build targets
+# Build and test targets for device target platform.
###############
-BUILD_TARGETS_LIST=(
- //art/...
- //bionic/...
- //bootable/recovery/tools/recovery_l10n/...
- //build/...
- //cts/...
- //development/...
- //external/...
- //frameworks/...
- //libnativehelper/...
- //packages/...
- //prebuilts/clang/host/linux-x86:all
- //system/...
- //tools/apksig/...
- //tools/platform-compat/...
-
- # These tools only build for host currently
- -//external/e2fsprogs/misc:all
- -//external/e2fsprogs/resize:all
- -//external/e2fsprogs/debugfs:all
- -//external/e2fsprogs/e2fsck:all
-)
-BUILD_TARGETS="${BUILD_TARGETS_LIST[@]}"
-# Iterate over various architectures supported in the platform build.
-tools/bazel --max_idle_secs=5 build ${FLAGS} --platforms //build/bazel/platforms:android_x86 -k -- ${BUILD_TARGETS}
-tools/bazel --max_idle_secs=5 build ${FLAGS} --platforms //build/bazel/platforms:android_x86_64 -k -- ${BUILD_TARGETS}
-tools/bazel --max_idle_secs=5 build ${FLAGS} --platforms //build/bazel/platforms:android_arm -k -- ${BUILD_TARGETS}
-tools/bazel --max_idle_secs=5 build ${FLAGS} --platforms //build/bazel/platforms:android_arm64 -k -- ${BUILD_TARGETS}
-
-HOST_INCOMPATIBLE_TARGETS=(
- # TODO(b/217756861): Apex toolchain is incompatible with host arches but apex modules do
- # not have this restriction
- -//build/bazel/examples/apex/...
- -//packages/modules/adb/apex:com.android.adbd
- -//system/timezone/apex:com.android.tzdata
- -//build/bazel/tests/apex/...
- -//build/bazel/ci/dist/...
-
- # TODO(b/217927043): Determine how to address targets that are device only
- -//system/core/libpackagelistparser:all
- -//external/icu/libicu:all
- //external/icu/libicu:libicu
- -//external/icu/icu4c/source/tools/ctestfw:all
-
- # TODO(b/217926427): determine why these host_supported modules do not build on host
- -//packages/modules/adb:all
- -//packages/modules/adb/pairing_connection:all
-)
-# build for host
-tools/bazel --max_idle_secs=5 build ${FLAGS} \
- --platforms //build/bazel/platforms:linux_x86_64 \
- -- ${BUILD_TARGETS} "${HOST_INCOMPATIBLE_TARGETS[@]}"
+build_for_device BUILD_TARGETS TEST_TARGETS
-###########
-# Run tests
-###########
-tools/bazel --max_idle_secs=5 test ${FLAGS} //build/bazel/tests/... //build/bazel/rules/apex/... //build/bazel/scripts/...
+declare -a host_targets
+host_targets+=( "${BUILD_TARGETS[@]}" )
+host_targets+=( "${TEST_TARGETS[@]}" )
+host_targets+=( "${HOST_INCOMPATIBLE_TARGETS[@]}" )
+host_targets+=( "${HOST_ONLY_TEST_TARGETS[@]}" )
-###########
-# Dist mainline modules
-###########
-tools/bazel --max_idle_secs=5 run //build/bazel/ci/dist:mainline_modules ${FLAGS} --platforms=//build/bazel/platforms:android_x86 -- --dist_dir="${DIST_DIR}/mainline_modules_x86"
-tools/bazel --max_idle_secs=5 run //build/bazel/ci/dist:mainline_modules ${FLAGS} --platforms=//build/bazel/platforms:android_x86_64 -- --dist_dir="${DIST_DIR}/mainline_modules_x86_64"
-tools/bazel --max_idle_secs=5 run //build/bazel/ci/dist:mainline_modules ${FLAGS} --platforms=//build/bazel/platforms:android_arm -- --dist_dir="${DIST_DIR}/mainline_modules_arm"
-tools/bazel --max_idle_secs=5 run //build/bazel/ci/dist:mainline_modules ${FLAGS} --platforms=//build/bazel/platforms:android_arm64 -- --dist_dir="${DIST_DIR}/mainline_modules_arm64"
+build_for_host ${host_targets[@]}
+
+#########################################################################
+# Check that rule wrappers have the same providers as the rules they wrap
+#########################################################################
+
+source "$(dirname $0)/../rules/java/wrapper_test.sh"
+test_wrapper_providers
###################
-# bp2build-progress
+# bp2build progress
###################
+function get_soong_names_from_queryview() {
+ names=$( build/bazel/bin/bazel query --config=ci --config=queryview --output=xml "${@}" \
+ | awk -F'"' '$2 ~ /soong_module_name/ { print $4 }' \
+ | sort -u )
+ echo "${names[@]}"
+}
+
# Generate bp2build progress reports and graphs for these modules into the dist
# dir so that they can be downloaded from the CI artifact list.
BP2BUILD_PROGRESS_MODULES=(
- com.android.runtime
- com.android.neuralnetworks
- com.android.media.swcodec
+ NetworkStackNext # not updatable but will be
+ android_module_lib_stubs_current
+ android_stubs_current
+ android_system_server_stubs_current
+ android_system_stubs_current
+ android_test_stubs_current
+ build-tools # host sdk
+ com.android.runtime # not updatable but will be
+ core-lambda-stubs # DefaultLambdaStubsPath, StableCorePlatformBootclasspathLibraries
+ core-public-stubs-system-modules
+ ext # FrameworkLibraries
+ framework # FrameworkLibraries
+ framework-minus-apex
+ framework-res # sdk dep Framework Res Module
+ legacy-core-platform-api-stubs-system-modules
+ legacy.core.platform.api.stubs
+ platform-tools # host sdk
+ sdk
+ stable-core-platform-api-stubs-system-modules # StableCorePlatformSystemModules
+ stable.core.platform.api.stubs # StableCorePlatformBootclasspathLibraries
)
-bp2build_progress_script="${AOSP_ROOT}/build/bazel/scripts/bp2build-progress/bp2build-progress.py"
+
+# Query for some module types of interest so that we don't have to hardcode the
+# lists
+"${AOSP_ROOT}/build/soong/soong_ui.bash" --make-mode BP2BUILD_VERBOSE=1 --skip-soong-tests queryview
+rm -f out/ninja_build
+
+# Only apexes/apps that specify updatable=1 are mainline modules; the others are
+# "just" apexes/apps. Often this is not specified in the process of becoming a
+# mainline module, as it enables a number of validations.
+# Ignore defaults and test rules.
+APEX_QUERY='attr(updatable, 1, //...) - kind("_defaults rule", //...) - kind("apex_test_ rule", //...)'
+APEX_VNDK_QUERY="kind(\"apex_vndk rule\", //...)"
+
+BP2BUILD_PROGRESS_MODULES+=( $(get_soong_names_from_queryview "${APEX_QUERY}"" + ""${APEX_VNDK_QUERY}" ) )
+
+bp2build_progress_script="//build/bazel/scripts/bp2build_progress:bp2build_progress"
bp2build_progress_output_dir="${DIST_DIR}/bp2build-progress"
mkdir -p "${bp2build_progress_output_dir}"
report_args=""
for m in "${BP2BUILD_PROGRESS_MODULES[@]}"; do
report_args="$report_args -m ""${m}"
- "${bp2build_progress_script}" graph -m "${m}" --use_queryview=true > "${bp2build_progress_output_dir}/${m}_graph.dot"
+ if [[ "${m}" =~ (media.swcodec|neuralnetworks)$ ]]; then
+ build/bazel/bin/bazel run ${FLAGS} --config=linux_x86_64 "${bp2build_progress_script}" -- graph -m "${m}" --out-file=$( realpath "${bp2build_progress_output_dir}" )"/${m}_graph.dot"
+ fi
done
-"${bp2build_progress_script}" report ${report_args} --use_queryview=true > "${bp2build_progress_output_dir}/progress_report.txt"
+build/bazel/bin/bazel run ${FLAGS} --config=linux_x86_64 "${bp2build_progress_script}" -- \
+ report ${report_args} \
+ --proto-file=$( realpath "${bp2build_progress_output_dir}" )"/bp2build-progress.pb" \
+ --out-file=$( realpath "${bp2build_progress_output_dir}" )"/progress_report.txt" \
diff --git a/ci/build_with_bazel.sh b/ci/build_with_bazel.sh
new file mode 100644
index 00000000..b93a4b8f
--- /dev/null
+++ b/ci/build_with_bazel.sh
@@ -0,0 +1,95 @@
+#!/bin/bash -eux
+
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Verifies that mixed builds do not run if neither --bazel-mode-dev nor --bazel-mode
+# is set.
+# This verification script is designed to be used for continuous integration
+# tests, though it may also be used for manual developer verification.
+
+STARTUP_FLAGS=(
+ --max_idle_secs=5
+)
+
+# Before you add flags to this list, consider adding it to the "ci" bazelrc
+# config instead of this list so that flags are not duplicated between scripts
+# and bazelrc, and bazelrc is the Bazel-native way of organizing flags.
+FLAGS=(
+ --config=bp2build
+ --config=ci
+ --keep_going
+)
+
+function build_for_device() {
+ local -n build_targets=$1
+ local -n test_targets=$2
+ ###########
+ # Iterate over various products supported in the platform build.
+ ###########
+ product_prefix="aosp_"
+ for arch in arm arm64 x86 x86_64; do
+ # Re-run product config and bp2build for every TARGET_PRODUCT.
+ product=${product_prefix}${arch}
+ "${AOSP_ROOT}/build/soong/soong_ui.bash" --make-mode BP2BUILD_VERBOSE=1 TARGET_PRODUCT=${product} --skip-soong-tests bp2build dist
+ # Remove the ninja_build output marker file to communicate to buildbot that this is not a regular Ninja build, and its
+ # output should not be parsed as such.
+ rm -f out/ninja_build
+
+ # Dist the entire workspace of generated BUILD files, rooted from
+ # out/soong/bp2build. This is done early so it's available even if
+    # builds/tests fail. Currently the generated BUILD files can differ
+    # between products due to Soong plugins and non-deterministic code generation.
+ # We tar and gzip in separate steps because when using tar -z, you can't tell it to not include
+ # a timestamp in the gzip header.
+ tar c --mtime='1970-01-01' -C out/soong/bp2build . | gzip -n > "${DIST_DIR}/bp2build_generated_workspace_${product}.tar.gz"
+
+ local device_startup_flags=(
+ # Unique output bases per product to help with incremental builds across
+ # invocations of this script.
+ # e.g. the second invocation of this script for aosp_x86 would use the output_base
+ # of aosp_x86 from the first invocation.
+ --output_base="${OUT_DIR}/bazel/test_output_bases/${product}"
+ )
+ device_startup_flags+=( "${STARTUP_FLAGS[@]}" )
+
+ # Use a loop to prevent unnecessarily switching --platforms because that drops
+ # the Bazel analysis cache.
+ #
+ # 1. Build every target in $BUILD_TARGETS
+ build/bazel/bin/bazel ${device_startup_flags[@]} \
+ build ${FLAGS[@]} --config=android -- \
+ ${build_targets[@]}
+
+ # 2. Test every target that is compatible with an android target platform (e.g. analysis_tests, sh_tests, diff_tests).
+ build/bazel/bin/bazel ${device_startup_flags[@]} \
+ test ${FLAGS[@]} --build_tests_only --config=android -- \
+ ${test_targets[@]}
+
+ # 3. Dist mainline modules.
+ build/bazel/bin/bazel ${device_startup_flags[@]} \
+ run //build/bazel/ci/dist:mainline_modules ${FLAGS[@]} \
+ --config=android -- \
+ --dist_dir="${DIST_DIR}/mainline_modules_${arch}"
+ done
+}
+
+function build_for_host() {
+ targets=("$@")
+ # We can safely build and test all targets on the host linux config, and rely on
+ # incompatible target skipping for tests that cannot run on the host.
+ build/bazel/bin/bazel \
+ "${STARTUP_FLAGS[@]}" test ${FLAGS[@]} --build_tests_only=false \
+ -- ${targets[@]}
+}
diff --git a/ci/determinism_test.sh b/ci/determinism_test.sh
new file mode 100755
index 00000000..622bc0e3
--- /dev/null
+++ b/ci/determinism_test.sh
@@ -0,0 +1,106 @@
+#!/bin/bash -eu
+
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Verifies that various intermediate outputs of the build have deterministic
+# outputs. Nondeterministic intermediate outputs have incremental performance
+# implications, so this is a critical test even if the determinism of the final
+# outputs is not in question.
+#
+# Determinism is verified by running several builds and comparing checksums of
+# outputs. This may provide confidence in determinism, but does not guarantee
+# it. "Flakiness" in this test should thus be treated as indicative of a
+# failure, and investigated promptly.
+if [[ -z ${OUT_DIR+x} ]]; then
+ OUT_DIR="out"
+fi
+
+if [[ -z ${DIST_DIR+x} ]]; then
+ echo "DIST_DIR not set. Using ${OUT_DIR}/dist. This should only be used for manual developer testing."
+ DIST_DIR="${OUT_DIR}/dist"
+fi
+
+if [[ -z ${TARGET_PRODUCT+x} ]]; then
+ echo "TARGET_PRODUCT not set. Using aosp_arm64"
+ TARGET_PRODUCT=aosp_arm64
+fi
+
+if [[ -z ${TARGET_BUILD_VARIANT+x} ]]; then
+ echo "TARGET_BUILD_VARIANT not set. Using userdebug"
+ TARGET_BUILD_VARIANT=userdebug
+fi
+
+UNAME="$(uname)"
+case "$UNAME" in
+Linux)
+ PREBUILTS="prebuilts/build-tools/path/linux-x86"
+ ;;
+Darwin)
+ PREBUILTS="prebuilts/build-tools/path/darwin-x86"
+ ;;
+*)
+ exit 1
+ ;;
+esac
+
+function clean_build {
+ build/soong/soong_ui.bash --make-mode clean
+
+ # Generate the ninja file with default setting. We expect Bazel to be enabled by
+ # default.
+ build/soong/soong_ui.bash --make-mode \
+ --mk-metrics \
+ BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
+ BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
+ TARGET_PRODUCT=${TARGET_PRODUCT} \
+ TARGET_BUILD_VARIANT=${TARGET_BUILD_VARIANT} \
+ nothing \
+ dist DIST_DIR=$DIST_DIR
+}
+
+function save_hash {
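+  # Record md5 checksums of the generated Bazel workspace files
+  # (BUILD.bazel/*.bzl), the soong_injection files, the product makefile, and
+  # (unless SKIP_NINJA_CHECK is set) the main ninja file.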
+ local -r filepath="$1"
+ find $OUT_DIR/soong/workspace -type f,l -iname "BUILD.bazel" -o -iname "*.bzl" | xargs "${PREBUILTS}"/md5sum > $filepath
+ find $OUT_DIR/soong/soong_injection -type f,l | xargs "${PREBUILTS}"/md5sum >> $filepath
+ "${PREBUILTS}"/md5sum $OUT_DIR/soong/Android-${TARGET_PRODUCT}.mk >> $filepath
+ if [[ -z ${SKIP_NINJA_CHECK+x} ]]; then
+ "${PREBUILTS}"/md5sum $OUT_DIR/soong/build.ninja >> $filepath
+ fi
+}
+
+TESTDIR=$(mktemp -t testdir.XXXXXX -d)
+FIRST_FILE=$TESTDIR/first_hashes
+TEST_FILE=$TESTDIR/hashes_to_test
+
+clean_build
+save_hash $FIRST_FILE
+
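+# Rebuild from clean four more times; each rerun must produce hashes identical
+# to the first build, otherwise the build outputs are nondeterministic.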
+for i in {1..4} ; do
+ clean_build
+ save_hash $TEST_FILE
+ if cmp -s "$FIRST_FILE" "$TEST_FILE"
+ then
+ echo "Comparison $i succeeded."
+ else
+ cp $FIRST_FILE $TEST_FILE $DIST_DIR
+ >&2 echo "Comparison $i failed. This likely indicates nondeterminism in the differing files."
+ >&2 echo "\n\nFirst file hashes:\n"
+ >&2 cat $FIRST_FILE
+ >&2 echo "\n\nRerun $i:\n"
+ >&2 cat $TEST_FILE
+ exit 1
+ fi
+done
diff --git a/ci/diffs.sh b/ci/diffs.sh
deleted file mode 100755
index 210b61b5..00000000
--- a/ci/diffs.sh
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/bash -eu
-# checks the diff between legacy Soong built artifacts and their counterparts
-# built with bazel/mixed build
-export TARGET_PRODUCT=aosp_arm64
-export TARGET_BUILD_VARIANT=userdebug
-
-build/soong/soong_ui.bash \
- --build-mode \
- --all-modules \
- --dir="$(pwd)" \
- bp2build
-tools/bazel build --config=bp2build //build/bazel/scripts/difftool:collect_zip
-tools/bazel build --config=bp2build //build/bazel/scripts/difftool:difftool_zip
-
-# the following 2 arrays must be of the same size
-MODULES=(
- libnativehelper
-)
-OUTPUTS=(
- JNIHelp.o
-)
-PATH_FILTERS=(
- "linux_glibc_x86_shared/\|linux_x86-fastbuild"
- "linux_glibc_x86_64_shared/\|linux_x86_64-fastbuild"
- "android_arm64[-_]"
-# "android_arm[-_]" TODO(usta) investigate why there is a diff for this
-)
-readonly AOSP_ROOT="$(readlink -f "$(dirname "$0")"/../../..)"
-#TODO(usta): absolute path isn't compatible with collect.py and ninja
-readonly LEGACY_OUTPUT_SEARCH_TREE="out/soong/.intermediates/libnativehelper"
-readonly MIXED_OUTPUT_SEARCH_TREE="out/bazel/output/execroot/__main__/bazel-out"
-readonly NINJA_FILE="$AOSP_ROOT/out/combined-$TARGET_PRODUCT.ninja"
-# python is expected in PATH but used only to start a zipped python archive,
-# which bundles its own interpreter. We could also simply use `tools/bazel run`
-# instead however that sets the working directly differently and collect.py
-# won't work because it expects paths relative to $OUT_DIR
-# TODO(usta) make collect.py work with absolute paths and maybe consider
-# using `tools/bazel run` on the `py_binary` target directly instead of using
-# the python_zip_file filegroup's output
-readonly stub_python=python3
-readonly LEGACY_COLLECTION="$AOSP_ROOT/out/diff_metadata/legacy"
-readonly MIXED_COLLECTION="$AOSP_ROOT/out/diff_metadata/mixed"
-mkdir -p "$LEGACY_COLLECTION"
-mkdir -p "$MIXED_COLLECTION"
-
-function findIn() {
- result=$(find "$1" -name "$3" | grep "$2")
- count=$(echo "$result" | wc -l)
- if [ "$count" != 1 ]; then
- printf "multiple files found instead of exactly ONE:\n%s\n" "$result" 1>&2
- exit 1
- fi
- echo "$result"
-}
-
-for ((i = 0; i < ${#MODULES[@]}; i++)); do
- MODULE=${MODULES[$i]}
- echo "Building $MODULE for comparison"
- build/soong/soong_ui.bash --make-mode "$MODULE"
- $stub_python "bazel-bin/build/bazel/scripts/difftool/collect.zip" \
- "$NINJA_FILE" "$LEGACY_COLLECTION"
- build/soong/soong_ui.bash \
- --make-mode \
- USE_BAZEL_ANALYSIS=1 \
- BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
- BAZEL_BUILD_ARGS="--color=no --curses=no --noshow_progress" \
- "$MODULE"
- $stub_python "bazel-bin/build/bazel/scripts/difftool/collect.zip" \
- "$NINJA_FILE" "$MIXED_COLLECTION"
- OUTPUT=${OUTPUTS[$i]}
- for ((j = 0; j < ${#PATH_FILTERS[@]}; j++)); do
- PATH_FILTER=${PATH_FILTERS[$j]}
- LEGACY_OUTPUT=$(findIn "$LEGACY_OUTPUT_SEARCH_TREE" "$PATH_FILTER" "$OUTPUT")
- MIXED_OUTPUT=$(findIn "$MIXED_OUTPUT_SEARCH_TREE" "$PATH_FILTER" "$OUTPUT")
-
- LEGACY_COLLECTION_DIR=$(dirname "$LEGACY_COLLECTION/$LEGACY_OUTPUT")
- mkdir -p "$LEGACY_COLLECTION_DIR"
- cp "$LEGACY_OUTPUT" "$LEGACY_COLLECTION_DIR"
- MIXED_COLLECTION_DIR=$(dirname "$MIXED_COLLECTION/$MIXED_OUTPUT")
- mkdir -p "$MIXED_COLLECTION_DIR"
- cp "$MIXED_OUTPUT" "$MIXED_COLLECTION_DIR"
-
- $stub_python "bazel-bin/build/bazel/scripts/difftool/difftool.zip" \
- --level=SEVERE -v "$LEGACY_COLLECTION" "$MIXED_COLLECTION" \
- -l="$LEGACY_OUTPUT" -r="$MIXED_OUTPUT"
- done
-done
-
diff --git a/ci/dist/BUILD b/ci/dist/BUILD
index c4cd15ee..0ae05549 100644
--- a/ci/dist/BUILD
+++ b/ci/dist/BUILD
@@ -5,8 +5,10 @@ load("//build/bazel_common_rules/dist:dist.bzl", "copy_to_dist_dir")
copy_to_dist_dir(
name = "mainline_modules",
data = [
- "//system/timezone/apex:com.android.tzdata.apex",
- "//packages/modules/adb/apex:com.android.adbd.apex",
+ "//frameworks/av/apex:com.android.media.swcodec",
+ "//packages/modules/NeuralNetworks/apex:com.android.neuralnetworks",
+ "//packages/modules/adb/apex:com.android.adbd",
+ "//system/timezone/apex:com.android.tzdata",
],
flat = True,
)
diff --git a/ci/mixed_droid.sh b/ci/mixed_droid.sh
index 06dcf39d..1e13e4ac 100755
--- a/ci/mixed_droid.sh
+++ b/ci/mixed_droid.sh
@@ -7,16 +7,35 @@ if [[ -z ${DIST_DIR+x} ]]; then
echo "DIST_DIR not set. Using out/dist. This should only be used for manual developer testing."
DIST_DIR="out/dist"
fi
+if [[ -z ${MIXED_DROID_MODE+x} ]]; then
+ echo "MIXED_DROID_MODE not set. Using 'dev'."
+ MIXED_DROID_MODE="dev"
+fi
+if [[ -z ${TARGET_PRODUCT+x} ]]; then
+ echo "TARGET_PRODUCT not set. Have you run lunch?"
+ exit 1
+fi
+
+if [ "$MIXED_DROID_MODE" == "dev" ]; then
+ MIXED_BUILD_FLAG="--bazel-mode-dev"
+elif [ "$MIXED_DROID_MODE" == "prod" ]; then
+ MIXED_BUILD_FLAG="--bazel-mode"
+elif [ "$MIXED_DROID_MODE" == "staging" ]; then
+ MIXED_BUILD_FLAG="--bazel-mode-staging"
+else
+ echo "MIXED_DROID_MODE value \"$MIXED_DROID_MODE\" invalid. Must be either 'dev', 'prod', or 'staging'"
+ exit 1
+fi
# Run a mixed build of "droid"
+# TODO(b/254572169): Remove DISABLE_ARTIFACT_PATH_REQUIREMENTS before launching --bazel-mode.
build/soong/soong_ui.bash --make-mode \
--mk-metrics \
+ DISABLE_ARTIFACT_PATH_REQUIREMENTS=true \
+ ${MIXED_BUILD_FLAG} \
BP2BUILD_VERBOSE=1 \
- USE_BAZEL_ANALYSIS=1 \
BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
- TARGET_PRODUCT=aosp_arm64 \
- TARGET_BUILD_VARIANT=userdebug \
droid platform_tests \
dist DIST_DIR=$DIST_DIR
diff --git a/ci/mixed_e2e.sh b/ci/mixed_e2e.sh
new file mode 100755
index 00000000..2da26ed4
--- /dev/null
+++ b/ci/mixed_e2e.sh
@@ -0,0 +1,25 @@
+#!/bin/bash -eu
+
+set -o pipefail
+
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This test suite contains a number of end-to-end tests verifying Bazel's integration
+# with Soong in Android builds.
+
+TOP="$(readlink -f "$(dirname "$0")"/../../..)"
+"$TOP/build/bazel/ci/determinism_test.sh"
+"$TOP/build/bazel/ci/mixed_mode_toggle.sh"
diff --git a/ci/mixed_libc.sh b/ci/mixed_libc.sh
index e939a4ed..804eebdd 100755
--- a/ci/mixed_libc.sh
+++ b/ci/mixed_libc.sh
@@ -7,35 +7,51 @@ if [[ -z ${DIST_DIR+x} ]]; then
echo "DIST_DIR not set. Using out/dist. This should only be used for manual developer testing."
DIST_DIR="out/dist"
fi
+if [[ -z ${TARGET_PRODUCT+x} ]]; then
+ echo "TARGET_PRODUCT not set. Have you run lunch?"
+ exit 1
+fi
TARGETS=(
- libbacktrace
- libfdtrack
- libsimpleperf
- com.android.adbd
- com.android.runtime
- bluetoothtbd
+ CaptivePortalLogin
+ com.android.neuralnetworks
framework-minus-apex
+ libsimpleperf
+
+
+ # TODO(b/266459895): uncomment these after re-enabling libunwindstack
+ # com.android.media
+ # com.android.media.swcodec
+ # com.android.runtime
)
# Run a mixed build of "libc"
+# TODO(b/254572169): Remove DISABLE_ARTIFACT_PATH_REQUIREMENTS before launching --bazel-mode.
build/soong/soong_ui.bash --make-mode \
--mk-metrics \
+ --bazel-mode-dev \
+ DISABLE_ARTIFACT_PATH_REQUIREMENTS=true \
BP2BUILD_VERBOSE=1 \
- USE_BAZEL_ANALYSIS=1 \
BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
- TARGET_PRODUCT=aosp_arm64 \
- TARGET_BUILD_VARIANT=userdebug \
"${TARGETS[@]}" \
dist DIST_DIR=$DIST_DIR
-# Verify there are artifacts under the out directory that originated from bazel.
-echo "Verifying OUT_DIR contains bazel-out..."
-if find out/ -type d -name bazel-out &>/dev/null; then
- echo "bazel-out found."
-else
- echo "bazel-out not found. This may indicate that mixed builds are silently not running."
+echo "Verifying libc.so..."
+LIBC_OUTPUT_FILE="$(find out/ -regex '.*/bazel-out/[^/]*android_arm64.*-opt.*/bin/bionic/libc/libc.so' || echo '')"
+LIBC_STUB_OUTPUT_FILE="$(find out/ -regex '.*/bazel-out/[^/]*android_arm64.*-opt.*/bin/bionic/libc/liblibc_stub_libs-current_so.so' || echo '')"
+
+if [ -z "$LIBC_OUTPUT_FILE" -a -z "$LIBC_STUB_OUTPUT_FILE" ]; then
+ echo "Could not find libc.so or its stub lib at expected path."
+ exit 1
+fi
+
+if [ -L "$LIBC_OUTPUT_FILE" ]; then
+ # It's problematic to have libc.so be a symlink, as it means that installed
+ # libc.so in an Android system image will be a symlink to a location outside
+ # of that system image.
+  echo "$LIBC_OUTPUT_FILE is expected to be a file, not a symlink"
exit 1
fi
+echo "libc.so verified."
diff --git a/ci/mixed_mode_toggle.sh b/ci/mixed_mode_toggle.sh
new file mode 100755
index 00000000..894cc4cb
--- /dev/null
+++ b/ci/mixed_mode_toggle.sh
@@ -0,0 +1,124 @@
+#!/bin/bash -eux
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Verifies that mixed builds do not run if neither --bazel-mode-dev nor --bazel-mode
+# is set.
+# This verification script is designed to be used for continuous integration
+# tests, though it may also be used for manual developer verification.
+
+if [[ -z ${OUT_DIR+x} ]]; then
+ OUT_DIR="out"
+fi
+
+if [[ -z ${DIST_DIR+x} ]]; then
+ echo "DIST_DIR not set. Using ${OUT_DIR}/dist. This should only be used for manual developer testing."
+ DIST_DIR="${OUT_DIR}/dist"
+fi
+
+# Generate the ninja file with default setting. We expect Bazel to be enabled by
+# default.
+build/soong/soong_ui.bash --make-mode \
+ --mk-metrics \
+ BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
+ BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
+ TARGET_PRODUCT=aosp_arm64 \
+ TARGET_BUILD_VARIANT=userdebug \
+ com.android.tzdata \
+ dist DIST_DIR=$DIST_DIR
+
+
+# PLEASE NOTE - IF TZDATA IS EVER REMOVED FROM THE PROD ALLOWLIST, THIS _WILL_ FAIL
+# Should that happen, look into reverting to the assertions on bazel-out or switching
+
+if [[ ! $(ls out/bazel/output/execroot/__main__/bazel-out/aosp_arm64-userdebug-opt-ST-743b56eaae08/bin/system/timezone/apex/com.android.tzdata_staging_dir/etc/tz/tzdata) ]] ; then
+ echo "Expected tzdata files under bazel-out"
+ exit 1
+fi
+
+# Default setting should contain bazel-out, as *at least* tzdata is allowlisted for
+# default prod mode.
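+# Note: grep -L prints the file name only when the pattern is absent, so a
+# non-empty result means build.ninja does not reference bazel-out.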
+if [[ $(grep -L "bazel-out" ${OUT_DIR}/soong/build.ninja) ]]; then
+ echo "Expected default build to reference bazel-out"
+ exit 1
+fi
+
+# Regenerate the ninja file with BUILD_BROKEN override. This should have mixed builds
+# disabled.
+build/soong/soong_ui.bash --make-mode \
+ --mk-metrics \
+ DISABLE_ARTIFACT_PATH_REQUIREMENTS=true \
+ BUILD_BROKEN_DISABLE_BAZEL=true \
+ BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
+ BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
+ TARGET_PRODUCT=aosp_arm64 \
+ TARGET_BUILD_VARIANT=userdebug \
+ nothing \
+ dist DIST_DIR=$DIST_DIR
+
+# Note - we could run `m clean` and assert that the bazel output no longer exists, but
+# checking the generated ninja file instead is a better use of time.
+if [[ ! $(grep -L "bazel-out" ${OUT_DIR}/soong/build.ninja) ]]; then
+ echo "Expected BUILD_BROKEN override to not reference bazel-out"
+ exit 1
+fi
+
+build/soong/soong_ui.bash --make-mode clean
+
+# Rerun default setting. This verifies that removing BUILD_BROKEN_DISABLE_BAZEL
+# causes analysis to be rerun.
+build/soong/soong_ui.bash --make-mode \
+ --mk-metrics \
+ DISABLE_ARTIFACT_PATH_REQUIREMENTS=true \
+ BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
+ BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
+ TARGET_PRODUCT=aosp_arm64 \
+ TARGET_BUILD_VARIANT=userdebug \
+ com.android.tzdata \
+ dist DIST_DIR=$DIST_DIR
+
+if [[ ! $(ls out/bazel/output/execroot/__main__/bazel-out/aosp_arm64-userdebug-opt-ST-743b56eaae08/bin/system/timezone/apex/com.android.tzdata_staging_dir/etc/tz/tzdata) ]] ; then
+ echo "Expected tzdata files under bazel-out"
+ exit 1
+fi
+
+if [[ $(grep -L "bazel-out" ${OUT_DIR}/soong/build.ninja) ]]; then
+ echo "Expected default build rerun to reference bazel-out"
+ exit 1
+fi
+
+build/soong/soong_ui.bash --make-mode clean
+
+# Regen ninja file with mixed builds dev mode.
+build/soong/soong_ui.bash --make-mode \
+ --mk-metrics \
+ --bazel-mode-dev \
+ DISABLE_ARTIFACT_PATH_REQUIREMENTS=true \
+ BAZEL_STARTUP_ARGS="--max_idle_secs=5" \
+ BAZEL_BUILD_ARGS="--color=no --curses=no --show_progress_rate_limit=5" \
+ TARGET_PRODUCT=aosp_arm64 \
+ TARGET_BUILD_VARIANT=userdebug \
+ com.android.tzdata \
+ dist DIST_DIR=$DIST_DIR
+
+if [[ ! $(ls out/bazel/output/execroot/__main__/bazel-out/aosp_arm64-userdebug-opt-ST-743b56eaae08/bin/system/timezone/apex/com.android.tzdata_staging_dir/etc/tz/tzdata) ]] ; then
+ echo "Expected tzdata files under bazel-out"
+ exit 1
+fi
+
+if [[ $(grep -L "bazel-out" ${OUT_DIR}/soong/build.ninja) ]]; then
+ echo "Expected dev mode build to reference bazel-out"
+ exit 1
+fi
diff --git a/ci/multiproduct_analysis.sh b/ci/multiproduct_analysis.sh
new file mode 100755
index 00000000..cbeee556
--- /dev/null
+++ b/ci/multiproduct_analysis.sh
@@ -0,0 +1,95 @@
+#!/bin/bash -eux
+
+source "$(dirname $0)/target_lists.sh"
+cd "$(dirname $0)/../../.."
+OUT_DIR=$(realpath ${OUT_DIR:-out})
+DIST_DIR=$(realpath ${DIST_DIR:-out/dist})
+
+
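+# all_named_products dumps a space-separated list of every named product known
+# to the build, which read -ra splits into the PRODUCTS array.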
+read -ra PRODUCTS <<<"$(build/soong/soong_ui.bash --dumpvar-mode all_named_products)"
+
+FAILED_PRODUCTS=()
+PRODUCTS_WITH_BP2BUILD_DIFFS=()
+
+function report {
+ # Turn off -x so that we can see the printfs more clearly
+ set +x
+ # check if FAILED_PRODUCTS is not empty
+ if (( ${#FAILED_PRODUCTS[@]} )); then
+ printf "Failed products:\n"
+ printf '%s\n' "${FAILED_PRODUCTS[@]}"
+
+ # TODO(b/262192655): Support riscv64 products in Bazel.
+    # TODO(b/261023967): Don't fail the build until every product is OK; once it is, re-enable this exit to prevent backsliding.
+ # exit 1
+ fi
+ if (( ${#PRODUCTS_WITH_BP2BUILD_DIFFS[@]} )); then
+ printf "\n\nProducts that produced different bp2build files from aosp_arm64:\n"
+ printf '%s\n' "${PRODUCTS_WITH_BP2BUILD_DIFFS[@]}"
+
+    # TODO(b/261023967): Don't fail the build until every product is OK; once it is, re-enable this exit to prevent backsliding.
+ # exit 1
+ fi
+}
+
+trap report EXIT
+
+rm -rf "${DIST_DIR}/multiproduct_analysis"
+mkdir -p "${DIST_DIR}/multiproduct_analysis"
+
+# Create zip of the bp2build files for aosp_arm64. We'll check that all other products produce
+# identical bp2build files.
+# We have to run tar and gzip as separate commands because tar with -z doesn't provide an option
+# to not include a timestamp in the gzip header. (--mtime is only for the tar parts, not gzip)
+export TARGET_PRODUCT="aosp_arm64"
+build/soong/soong_ui.bash --make-mode --skip-soong-tests bp2build
+tar c --mtime='1970-01-01' -C out/soong/bp2build . | gzip -n > "${DIST_DIR}/multiproduct_analysis/reference_bp2build_files_aosp_arm64.tar.gz"
+
+total=${#PRODUCTS[@]}
+count=1
+
+for product in "${PRODUCTS[@]}"; do
+ echo "Product ${count}/${total}: ${product}"
+
+ # Ensure that all processes later use the same TARGET_PRODUCT.
+ export TARGET_PRODUCT="${product}"
+
+ # Re-run product config and bp2build for every TARGET_PRODUCT.
+ build/soong/soong_ui.bash --make-mode --skip-soong-tests bp2build
+ # Remove the ninja_build output marker file to communicate to buildbot that this is not a regular Ninja build, and its
+ # output should not be parsed as such.
+ rm -f out/ninja_build
+
+ rm -f out/multiproduct_analysis_current_bp2build_files.tar.gz
+ tar c --mtime='1970-01-01' -C out/soong/bp2build . | gzip -n > "${DIST_DIR}/multiproduct_analysis/bp2build_files_${product}.tar.gz"
+ if diff -q "${DIST_DIR}/multiproduct_analysis/bp2build_files_${product}.tar.gz" "${DIST_DIR}/multiproduct_analysis/reference_bp2build_files_aosp_arm64.tar.gz"; then
+ rm -f "${DIST_DIR}/multiproduct_analysis/bp2build_files_${product}.tar.gz"
+ else
+ PRODUCTS_WITH_BP2BUILD_DIFFS+=("${product}")
+ fi
+
+ STARTUP_FLAGS=(
+ # Keep the Bazel server alive, package cache hot and reduce excessive I/O
+ # and wall time by ensuring that max_idle_secs is longer than bp2build which
+ # runs in every loop. bp2build takes ~20 seconds to run, so set this to a
+ # minute to account for resource contention, but still ensure that the bazel
+ # server doesn't stick around after.
+ --max_idle_secs=60
+ )
+
+ FLAGS=(
+ --config=bp2build
+ --config=ci
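+    # --nobuild stops after the loading and analysis phases, which is enough to
+    # surface per-product configuration and analysis errors without executing
+    # any actions.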
+ --nobuild
+ --keep_going
+ )
+
+ build/bazel/bin/bazel ${STARTUP_FLAGS[@]} build ${FLAGS[@]} --config=linux_x86_64 -- ${BUILD_TARGETS} || \
+ FAILED_PRODUCTS+=("${product} --config=linux_x86_64")
+
+ build/bazel/bin/bazel ${STARTUP_FLAGS[@]} build ${FLAGS[@]} --config=android -- ${BUILD_TARGETS} || \
+ FAILED_PRODUCTS+=("${product} --config=android")
+
+ count=$((count+1))
+done
+
diff --git a/ci/rbc_dashboard.py b/ci/rbc_dashboard.py
index 2e3ef1b9..515770c5 100755
--- a/ci/rbc_dashboard.py
+++ b/ci/rbc_dashboard.py
@@ -6,6 +6,7 @@ import argparse
import asyncio
import dataclasses
import datetime
+import itertools
import os
import re
import shutil
@@ -35,16 +36,15 @@ class Product:
@dataclasses.dataclass(frozen=True)
class ProductResult:
+ product: Product
baseline_success: bool
product_success: bool
- board_success: bool
product_has_diffs: bool
- board_has_diffs: bool
def success(self) -> bool:
return not self.baseline_success or (
- self.product_success and self.board_success
- and not self.product_has_diffs and not self.board_has_diffs)
+ self.product_success
+ and not self.product_has_diffs)
@dataclasses.dataclass(frozen=True)
@@ -52,7 +52,6 @@ class Directories:
out: str
out_baseline: str
out_product: str
- out_board: str
results: str
@@ -126,7 +125,7 @@ async def run_build(flags: List[str], out_dir: str) -> bool:
], out_dir)
-async def run_config(product: Product, rbc_product: bool, rbc_board: bool, out_dir: str) -> bool:
+async def run_config(product: Product, rbc_product: bool, out_dir: str) -> bool:
"""Runs config.mk and saves results to out/rbc_variable_dump.txt."""
env = {
'OUT_DIR': 'out',
@@ -136,7 +135,6 @@ async def run_config(product: Product, rbc_product: bool, rbc_board: bool, out_d
'TARGET_PRODUCT': product.product,
'TARGET_BUILD_VARIANT': product.variant,
'RBC_PRODUCT_CONFIG': 'true' if rbc_product else '',
- 'RBC_BOARD_CONFIG': 'true' if rbc_board else '',
'RBC_DUMP_CONFIG_FILE': 'out/rbc_variable_dump.txt',
}
return await run_jailed_command([
@@ -166,7 +164,7 @@ async def has_diffs(success: bool, file_pairs: List[Tuple[str]], results_folder:
return False
-def generate_html_row(num: int, product: Product, results: ProductResult):
+def generate_html_row(num: int, results: ProductResult):
def generate_status_cell(success: bool, diffs: bool) -> str:
message = 'Success'
if diffs:
@@ -175,13 +173,13 @@ def generate_html_row(num: int, product: Product, results: ProductResult):
message = 'Build failed'
return f'<td style="background-color: {"lightgreen" if success and not diffs else "salmon"}">{message}</td>'
+ product = results.product
return f'''
<tr>
<td>{num}</td>
<td>{product if results.success() and results.baseline_success else f'<a href="{product}/">{product}</a>'}</td>
{generate_status_cell(results.baseline_success, False)}
{generate_status_cell(results.product_success, results.product_has_diffs)}
- {generate_status_cell(results.board_success, results.board_has_diffs)}
</tr>
'''
@@ -192,7 +190,7 @@ def get_branch() -> str:
default_tag = tree.getroot().find('default')
return default_tag.get('remote') + '/' + default_tag.get('revision')
except Exception as e: # pylint: disable=broad-except
- print(str(e), file=sys.stderr)
+ # Most likely happens due to .repo not existing on CI
return 'Unknown'
@@ -207,9 +205,22 @@ def cleanup_empty_files(path):
os.rmdir(path)
+def dump_files_to_stderr(path):
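+  """Dumps the file at path (or every file under it) to stderr, truncating each at 200 lines."""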
+ if os.path.isfile(path):
+ with open(path, 'r') as f:
+ print(f'{path}:', file=sys.stderr)
+ for line in itertools.islice(f, 200):
+ print(line.rstrip('\r\n'), file=sys.stderr)
+ if next(f, None) != None:
+ print('... Remaining lines skipped ...', file=sys.stderr)
+ elif os.path.isdir(path):
+ for subfile in os.listdir(path):
+ dump_files_to_stderr(os.path.join(path, subfile))
+
+
async def test_one_product(product: Product, dirs: Directories) -> ProductResult:
"""Runs the builds and tests for differences for a single product."""
- baseline_success, product_success, board_success = await asyncio.gather(
+ baseline_success, product_success = await asyncio.gather(
run_build([
f'TARGET_PRODUCT={product.product}',
f'TARGET_BUILD_VARIANT={product.variant}',
@@ -219,18 +230,12 @@ async def test_one_product(product: Product, dirs: Directories) -> ProductResult
f'TARGET_BUILD_VARIANT={product.variant}',
'RBC_PRODUCT_CONFIG=1',
], dirs.out_product),
- run_build([
- f'TARGET_PRODUCT={product.product}',
- f'TARGET_BUILD_VARIANT={product.variant}',
- 'RBC_BOARD_CONFIG=1',
- ], dirs.out_board),
)
product_dashboard_folder = os.path.join(dirs.results, str(product))
os.mkdir(product_dashboard_folder)
os.mkdir(product_dashboard_folder+'/baseline')
os.mkdir(product_dashboard_folder+'/product')
- os.mkdir(product_dashboard_folder+'/board')
if not baseline_success:
shutil.copy2(os.path.join(dirs.out_baseline, 'build.log'),
@@ -238,20 +243,21 @@ async def test_one_product(product: Product, dirs: Directories) -> ProductResult
if not product_success:
shutil.copy2(os.path.join(dirs.out_product, 'build.log'),
f'{product_dashboard_folder}/product/build.log')
- if not board_success:
- shutil.copy2(os.path.join(dirs.out_board, 'build.log'),
- f'{product_dashboard_folder}/board/build.log')
+ add_message = False
+ with open(f'{product_dashboard_folder}/product/build.log', 'r') as f:
+ if '/out/rbc/' in f.read():
+ add_message = True
+ if add_message:
+ with open(f'{product_dashboard_folder}/product/build.log', 'a') as f:
+ f.write(f'\nPaths involving out/rbc are actually under {dirs.out_product}\n')
files = [f'build-{product.product}.ninja', f'build-{product.product}-package.ninja', 'soong/build.ninja']
product_files = [(os.path.join(dirs.out_baseline, x), os.path.join(dirs.out_product, x)) for x in files]
- board_files = [(os.path.join(dirs.out_baseline, x), os.path.join(dirs.out_board, x)) for x in files]
- product_has_diffs, board_has_diffs = await asyncio.gather(
- has_diffs(baseline_success and product_success, product_files, product_dashboard_folder+'/product'),
- has_diffs(baseline_success and board_success, board_files, product_dashboard_folder+'/board'))
+ product_has_diffs = await has_diffs(baseline_success and product_success, product_files, product_dashboard_folder+'/product')
# delete files that contain the product name in them to save space,
# otherwise the ninja files end up filling up the whole harddrive
- for out_folder in [dirs.out_baseline, dirs.out_product, dirs.out_board]:
+ for out_folder in [dirs.out_baseline, dirs.out_product]:
for subfolder in ['', 'soong']:
folder = os.path.join(out_folder, subfolder)
for file in os.listdir(folder):
@@ -260,34 +266,26 @@ async def test_one_product(product: Product, dirs: Directories) -> ProductResult
cleanup_empty_files(product_dashboard_folder)
- return ProductResult(baseline_success, product_success, board_success, product_has_diffs, board_has_diffs)
+ return ProductResult(product, baseline_success, product_success, product_has_diffs)
async def test_one_product_quick(product: Product, dirs: Directories) -> ProductResult:
"""Runs the builds and tests for differences for a single product."""
- baseline_success, product_success, board_success = await asyncio.gather(
+ baseline_success, product_success = await asyncio.gather(
run_config(
product,
False,
- False,
dirs.out_baseline),
run_config(
product,
True,
- False,
dirs.out_product),
- run_config(
- product,
- False,
- True,
- dirs.out_board),
)
product_dashboard_folder = os.path.join(dirs.results, str(product))
os.mkdir(product_dashboard_folder)
os.mkdir(product_dashboard_folder+'/baseline')
os.mkdir(product_dashboard_folder+'/product')
- os.mkdir(product_dashboard_folder+'/board')
if not baseline_success:
shutil.copy2(os.path.join(dirs.out_baseline, 'build.log'),
@@ -295,20 +293,21 @@ async def test_one_product_quick(product: Product, dirs: Directories) -> Product
if not product_success:
shutil.copy2(os.path.join(dirs.out_product, 'build.log'),
f'{product_dashboard_folder}/product/build.log')
- if not board_success:
- shutil.copy2(os.path.join(dirs.out_board, 'build.log'),
- f'{product_dashboard_folder}/board/build.log')
+ add_message = False
+ with open(f'{product_dashboard_folder}/product/build.log', 'r') as f:
+ if '/out/rbc/' in f.read():
+ add_message = True
+ if add_message:
+ with open(f'{product_dashboard_folder}/product/build.log', 'a') as f:
+ f.write(f'\nPaths involving out/rbc are actually under {dirs.out_product}\n')
files = ['rbc_variable_dump.txt']
product_files = [(os.path.join(dirs.out_baseline, x), os.path.join(dirs.out_product, x)) for x in files]
- board_files = [(os.path.join(dirs.out_baseline, x), os.path.join(dirs.out_board, x)) for x in files]
- product_has_diffs, board_has_diffs = await asyncio.gather(
- has_diffs(baseline_success and product_success, product_files, product_dashboard_folder+'/product'),
- has_diffs(baseline_success and board_success, board_files, product_dashboard_folder+'/board'))
+ product_has_diffs = await has_diffs(baseline_success and product_success, product_files, product_dashboard_folder+'/product')
cleanup_empty_files(product_dashboard_folder)
- return ProductResult(baseline_success, product_success, board_success, product_has_diffs, board_has_diffs)
+ return ProductResult(product, baseline_success, product_success, product_has_diffs)
async def main():
@@ -328,6 +327,8 @@ async def main():
parser.add_argument('--results-directory',
help='Directory to store results in. Defaults to $(OUT_DIR)/rbc_dashboard. '
+ 'Warning: will be cleared!')
+ parser.add_argument('--failure-message',
+ help='Additional message to append to stderr on failure.')
args = parser.parse_args()
if args.results_directory:
@@ -361,10 +362,9 @@ async def main():
out=out_dir,
out_baseline=os.path.join(out_dir, 'rbc_out_baseline'),
out_product=os.path.join(out_dir, 'rbc_out_product'),
- out_board=os.path.join(out_dir, 'rbc_out_board'),
results=args.results_directory if args.results_directory else os.path.join(out_dir, 'rbc_dashboard'))
- for folder in [dirs.out_baseline, dirs.out_product, dirs.out_board, dirs.results]:
+ for folder in [dirs.out_baseline, dirs.out_product, dirs.results]:
# delete and recreate the out directories. You can't reuse them for
# a particular product, because after we delete some product-specific
# files inside the out dir to save space, the build will fail if you
@@ -377,14 +377,16 @@ async def main():
# that in each folder.
if args.quick:
commands = []
- for folder in [dirs.out_baseline, dirs.out_product, dirs.out_board]:
+ folders = [dirs.out_baseline, dirs.out_product]
+ for folder in folders:
commands.append(run_jailed_command([
'build/soong/soong_ui.bash',
'--dumpvar-mode',
'TARGET_PRODUCT'
], folder))
- for success in await asyncio.gather(*commands):
+ for i, success in enumerate(await asyncio.gather(*commands)):
if not success:
+ dump_files_to_stderr(os.path.join(folders[i], 'build.log'))
sys.exit('Failed to setup output directories')
with open(os.path.join(dirs.results, 'index.html'), 'w') as f:
@@ -398,7 +400,6 @@ async def main():
<th>product</th>
<th>baseline</th>
<th>RBC product config</th>
- <th>RBC board config</th>
</tr>\n''')
f.flush()
@@ -430,26 +431,23 @@ async def main():
else:
print('Failure')
- f.write(generate_html_row(i+1, product, result))
+ f.write(generate_html_row(i+1, result))
f.flush()
baseline_successes = len([x for x in all_results if x.baseline_success])
product_successes = len([x for x in all_results if x.product_success and not x.product_has_diffs])
- board_successes = len([x for x in all_results if x.board_success and not x.board_has_diffs])
f.write(f'''
<tr>
<td></td>
<td># Successful</td>
<td>{baseline_successes}</td>
<td>{product_successes}</td>
- <td>{board_successes}</td>
</tr>
<tr>
<td></td>
<td># Failed</td>
<td>N/A</td>
<td>{baseline_successes - product_successes}</td>
- <td>{baseline_successes - board_successes}</td>
</tr>
</table>
Finished running successfully.
@@ -459,8 +457,11 @@ async def main():
print('file://'+os.path.abspath(os.path.join(dirs.results, 'index.html')))
for result in all_results:
- if result.baseline_success and not result.success():
- print('There were one or more failing products. See the html report for details.')
+ if not result.success():
+ print('There were one or more failing products. First failure:', file=sys.stderr)
+ dump_files_to_stderr(os.path.join(dirs.results, str(result.product)))
+ if args.failure_message:
+ print(args.failure_message, file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
diff --git a/ci/rbc_regression_test.sh b/ci/rbc_regression_test.sh
index b1f4e575..16a14fb1 100755
--- a/ci/rbc_regression_test.sh
+++ b/ci/rbc_regression_test.sh
@@ -13,9 +13,7 @@ function die() {
function usage() {
cat <<EOF >&2
-Usage: $myname [-p] [-b] [-q] [-r] <product-variant> [product-variant ...]
- -p: Test RBC product configuration. This is implied if -b is not supplied
- -b: Test RBC board configuration. This is implied if -p is not supplied
+Usage: $myname [-q] [-r] <product-variant> [product-variant ...]
-q: Quiet. Suppress all output other than a failure message
-r: Retain Ninja files
EOF
@@ -38,21 +36,17 @@ mypath=$(realpath "$0")
declare -r mydir=${mypath%/*/*/*/*}
declare -r myname=${mypath#${mydir}/}
-flags_rbc=()
+flags_rbc=(RBC_PRODUCT_CONFIG=true)
quiet=
-while getopts "bkpqr" o; do
+while getopts "qr" o; do
case "${o}" in
- k) ;; # backward compatibility to be removed later
q) quiet=true ;;
- b) flags_rbc+=(RBC_BOARD_CONFIG=true) ;;
- p) flags_rbc+=(RBC_PRODUCT_CONFIG=true) ;;
r) retain_files=t ;;
*) usage ;;
esac
done
shift $((OPTIND-1))
[[ $# -gt 0 ]] || usage
-((${#flags_rbc[@]})) || flags_rbc+=(RBC_PRODUCT_CONFIG=true RBC_BOARD_CONFIG=true)
cd $mydir
rc=0
diff --git a/ci/target_lists.sh b/ci/target_lists.sh
new file mode 100644
index 00000000..ecfbe6e6
--- /dev/null
+++ b/ci/target_lists.sh
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+
+###############
+# Build and test targets for device target platform.
+###############
+BUILD_TARGETS=(
+ //art/...
+ //bionic/...
+ //bootable/recovery/tools/recovery_l10n/...
+ //build/...
+ //cts/...
+ //development/...
+ //external/...
+ //frameworks/...
+ //libnativehelper/...
+ //packages/...
+ //prebuilts/clang/host/linux-x86:all
+ //prebuilts/build-tools/tests/...
+ //prebuilts/runtime/...
+ //prebuilts/tools/...
+ //platform_testing/...
+ //system/...
+ //tools/apksig/...
+ //tools/asuite/...
+ //tools/platform-compat/...
+
+ # These tools only build for host currently
+ -//external/e2fsprogs/misc:all
+ -//external/e2fsprogs/resize:all
+ -//external/e2fsprogs/debugfs:all
+ -//external/e2fsprogs/e2fsck:all
+ # TODO(b/277616982): These modules depend on private java APIs, but maybe they don't need to.
+ -//external/ow2-asm:all
+
+ # TODO(b/266459895): remove these after re-enabling libunwindstack
+ -//bionic/libc/malloc_debug:libc_malloc_debug
+ -//bionic/libfdtrack:libfdtrack
+ -//frameworks/av/media/codec2/hidl/1.0/utils:libcodec2_hidl@1.0
+ -//frameworks/av/media/codec2/hidl/1.1/utils:libcodec2_hidl@1.1
+ -//frameworks/av/media/codec2/hidl/1.2/utils:libcodec2_hidl@1.2
+ -//frameworks/av/media/module/bqhelper:libstagefright_bufferqueue_helper_novndk
+ -//frameworks/av/media/module/codecserviceregistrant:libmedia_codecserviceregistrant
+ -//frameworks/av/services/mediacodec:mediaswcodec
+ -//frameworks/native/libs/gui:libgui
+ -//frameworks/native/libs/gui:libgui_bufferqueue_static
+ -//frameworks/native/opengl/libs:libEGL
+ -//frameworks/native/opengl/libs:libGLESv2
+ -//system/core/libutils:all
+ -//system/unwinding/libunwindstack:all
+)
+
+TEST_TARGETS=(
+ //build/bazel/...
+ //prebuilts/clang/host/linux-x86:all
+ //prebuilts/sdk:toolchains_have_all_prebuilts
+)
+
+HOST_ONLY_TEST_TARGETS=(
+ //tools/trebuchet:AnalyzerKt
+ //tools/metalava:metalava
+ # Test both unstripped and stripped versions of a host native unit test
+ //system/core/libcutils:libcutils_test
+ //system/core/libcutils:libcutils_test__test_binary_unstripped
+ # TODO(b/268186228): adb_test fails only on CI
+ -//packages/modules/adb:adb_test
+ # TODO(b/268185249): libbase_test asserts on the Soong basename of the test
+ -//system/libbase:libbase_test
+)
+
+HOST_INCOMPATIBLE_TARGETS=(
+ # TODO(b/216626461): add support for host_ldlibs
+ -//packages/modules/adb:all
+ -//packages/modules/adb/pairing_connection:all
+)
+
+# These targets are used to ensure that the aosp-specific rule wrappers forward
+# all providers of the underlying rule.
+EXAMPLE_WRAPPER_TARGETS=(
+ # java_import wrapper
+ //build/bazel/examples/java/com/bazel:hello_java_import
+ # java_library wrapper
+ //build/bazel/examples/java/com/bazel:hello_java_lib
+ # kt_jvm_library wrapper
+ //build/bazel/examples/java/com/bazel:some_kotlin_lib
+ # android_library wrapper
+ //build/bazel/examples/android_app/java/com/app:applib
+ # android_binary wrapper
+ //build/bazel/examples/android_app/java/com/app:app
+ # aar_import wrapper
+ //build/bazel/examples/android_app/java/com/app:import
+)
diff --git a/common.bazelrc b/common.bazelrc
index 368efa21..cef887af 100644
--- a/common.bazelrc
+++ b/common.bazelrc
@@ -1,8 +1,3 @@
-# Platforms and toolchains for AOSP.
-#
-# Set default target platform for builds to rely on product config's arch and os variables
-build --platforms //build/bazel/platforms:android_target
-
# Use the target platform (android_x86, android_arm) in the bazel-out/ output
# directory name fragment instead of the CPU (darwin, k8). This avoids
# thrashing the output directory when switching between top level target
@@ -12,9 +7,16 @@ build --experimental_platform_in_output_dir
# Use toolchain resolution to find the cc toolchain.
build --incompatible_enable_cc_toolchain_resolution
-# Ensure that the host_javabase always use @local_jdk, the checked-in JDK.
-build --tool_java_runtime_version=local_jdk
-build --java_runtime_version=local_jdk
+# Ensure that the host_javabase always use the checked-in JDK.
+build --tool_java_runtime_version=jdk17
+build --java_runtime_version=jdk17
+
+# Disable errorprone to continue making progress on java and android conversion
+# while we determine how errorprone will be handled in Bazel-only builds (b/227504307).
+# TODO(b/227504307): Remove --javacopt="-XepDisableAllChecks" after implementing
+# the correct solution around errorprone.
+build --javacopt="-XepDisableAllChecks"
+build --host_javacopt="-XepDisableAllChecks"
# Lock down the PATH variable in actions to /usr/bin and /usr/local/bin.
build --experimental_strict_action_env
@@ -23,7 +25,7 @@ build --experimental_strict_action_env
build --experimental_allow_unresolved_symlinks
# Enable usage of experimental cc-related build APIs
-build --experimental_cc_shared_library
+common --experimental_cc_shared_library
build --experimental_starlark_cc_import
# Do not tokenize copts, other than strings that consist of a single Make
@@ -44,13 +46,16 @@ build --strategy=CppCompile=standalone
# Enable use of the implementation_deps attribute in native cc rules
build --experimental_cc_implementation_deps
+# Use the llvm_coverage_map_format feature to turn on native code coverage.
+build --experimental_use_llvm_covmap
+
+# Always emit the stdout of failing tests as they should be emphasized. (b/247516541)
+test --test_output=errors
+
# Enable building targets in //external:__subpackages__.
common --experimental_sibling_repository_layout
common --experimental_disable_external_package
-# Enable toplevel_output_directories and Ninja executor in Bazel
-common --experimental_ninja_actions
-
# Increase refresh rate of command line UI for improved perceived responsiveness.
common --show_progress_rate_limit=0.05
@@ -58,52 +63,53 @@ common --show_progress_rate_limit=0.05
common --color=yes
common --curses=yes
-# Show the full set of flags for observability and debuggability.
-common --announce_rc
-
-# Run bazel query from the workspace, without cd'ing into out/soong/queryview
-# Note that this hardcodes the output dir. It will not work if $OUT_DIR != out.
-common:queryview --package_path=%workspace%/out/soong/queryview
-
-# Run bazel query from the workspace, without cd'ing into out/soong/workspace
-# Note that this hardcodes the output dir. It will not work if $OUT_DIR != out.
-common:bp2build --package_path=%workspace%/out/soong/workspace
-
# Configurations specific to CI builds, generally to improve signal-to-noise ratio in server logs.
common:ci --color=no
common:ci --curses=no
common:ci --show_progress_rate_limit=5
common:ci --noshow_loading_progress
+# Show the full set of flags for observability and debuggability.
+common:ci --announce_rc
+build:ci --verbose_failures
+# Log apexer verbosely on CI (it has a noisy but useful output)
+build:ci --//build/bazel/rules/apex:apexer_verbose
test:ci --keep_going
-test:ci --test_output=errors
# Support a local user-specific bazelrc file.
try-import %workspace%/user.bazelrc
+# Get rid of the no-op "affected by Starlark transition" config when a label-based flag
+# (i.e. //command_line_option:fdo_profile) is set back to None.
+build --experimental_output_directory_naming_scheme=diff_against_baseline
+common --experimental_google_legacy_api
build --android_sdk=//prebuilts/sdk:android_sdk
-build --experimental_enable_android_migration_apis
-build --experimental_google_legacy_api
-build --incompatible_java_common_parameters
+build --incompatible_enable_android_toolchain_resolution
+common --experimental_enable_android_migration_apis
+common --incompatible_java_common_parameters
build --android_databinding_use_v3_4_args
build --experimental_android_databinding_v2
build --define=android_incremental_dexing_tool=d8_dexbuilder
build --define=android_dexmerger_tool=d8_dexmerger
build --nouse_workers_with_dexbuilder
-build --fat_apk_cpu=k8
# TODO(b/199038020): Use a python_toolchain when we have Starlark rules_python.
# This also means all python scripts are using py3 runtime.
build --python_top=//prebuilts/build-tools:python3
build --noincompatible_use_python_toolchains
-# Developer instance for result storage. This only works if you have access
-# to the Bazel GCP project. Follow the GCP gcloud client's auth instructions to
-# use --google_default_credentials.
-build:results --remote_instance_name=projects/bazel-untrusted/instances/default_instance
-build:results --project_id=bazel-untrusted
-build:results --remote_timeout=600
-build:results --google_default_credentials
-build:results --test_summary=detailed
-build:results --bes_backend=buildeventservice.googleapis.com
-build:results --bes_results_url=https://source.cloud.google.com/results/invocations
-build:results --show_progress_rate_limit=5
+# Enable optimization compilation mode to build cc with fdo
+# https://github.com/bazelbuild/bazel/blob/de7b26acfdfcd9c36ec957305a889ac29f0da30e/src/main/java/com/google/devtools/build/lib/rules/cpp/FdoHelper.java#L55
+build --compilation_mode=opt
+
+common --incompatible_remove_rule_name_parameter=false
+
+# TODO(b/264304464): Remove when bugs with java_binary starlark impl are fixed.
+
+common --experimental_builtins_injection_override=-java_binary
+
+# Set writable bit for all bazel outputs. This makes it easy to remove
+# the output directory and its contents after a build (using `rm`).
+build --experimental_writable_outputs
+
+# Make the Linux sandbox work under /tmp
+build --incompatible_sandbox_hermetic_tmp
diff --git a/compliance/Android.bp b/compliance/Android.bp
new file mode 100644
index 00000000..0e709429
--- /dev/null
+++ b/compliance/Android.bp
@@ -0,0 +1,24 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+blueprint_go_binary {
+ name: "bazel_notice_gen",
+ srcs: ["cmd/bazel_notice_gen/bazel_notice_gen.go"],
+ testSrcs: ["cmd/bazel_notice_gen/bazel_notice_gen_test.go"],
+}
diff --git a/compliance/cmd/BUILD.bazel b/compliance/cmd/BUILD.bazel
new file mode 100644
index 00000000..6ca94f1d
--- /dev/null
+++ b/compliance/cmd/BUILD.bazel
@@ -0,0 +1,22 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//visibility:public"])
+
+# TODO(b/194644518): Switch to the source version when Bazel can build go
+# binaries.
+alias(
+ name = "bazel_notice_gen",
+ actual = "//prebuilts/build-tools:linux-x86/bin/bazel_notice_gen",
+)
diff --git a/compliance/cmd/bazel_notice_gen/bazel_notice_gen.go b/compliance/cmd/bazel_notice_gen/bazel_notice_gen.go
new file mode 100644
index 00000000..75a206cc
--- /dev/null
+++ b/compliance/cmd/bazel_notice_gen/bazel_notice_gen.go
@@ -0,0 +1,174 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "compress/gzip"
+ "crypto/md5"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "html/template"
+ "io"
+ "os"
+ "strings"
+)
+
+var (
+ inputFile string
+ outputFile = flag.String("o", "", "output file")
+ listTargets = flag.Bool("list_targets", false, "list targets using each license")
+)
+
+type LicenseKind struct {
+ Target string `json:"target"`
+ Name string `json:"name"`
+ Conditions []string `json:"conditions"`
+}
+
+type License struct {
+ Rule string `json:"rule"`
+ CopyrightNotice string `json:"copyright_notice"`
+ PackageName string `json:"package_name"`
+ PackageUrl string `json:"package_url"`
+ PackageVersion string `json:"package_version"`
+ LicenseFile string `json:"license_text"`
+ LicenseKinds []LicenseKind `json:"license_kinds"`
+ Licensees []string `json:"licensees"`
+}
+
+type LicenseTextHash string
+
+// generator generates the notices for the given set of licenses read from the JSON-encoded string.
+// As the contents of the license files are often the same, they are read into the map by their hash.
+type generator struct {
+ Licenses []License
+ LicenseTextHash map[string]LicenseTextHash // License.rule->hash of license text contents
+ LicenseTextIndex map[LicenseTextHash]string
+}
+
+func newGenerator(in string) *generator {
+ g := generator{}
+ decoder := json.NewDecoder(strings.NewReader(in))
+ decoder.DisallowUnknownFields() //useful to detect typos, e.g. in unit tests
+ err := decoder.Decode(&g.Licenses)
+ maybeQuit(err)
+ return &g
+}
+
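+// buildLicenseTextIndex reads each referenced license file once, keys it by the
+// md5 of its contents, and maps every rule to that hash so that identical
+// license texts are emitted only once in the generated notice.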
+func (g *generator) buildLicenseTextIndex() {
+ g.LicenseTextHash = make(map[string]LicenseTextHash, len(g.Licenses))
+ g.LicenseTextIndex = make(map[LicenseTextHash]string)
+ for _, l := range g.Licenses {
+ if l.LicenseFile == "" {
+ continue
+ }
+ data, err := os.ReadFile(l.LicenseFile)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "%s: bad license file %s: %s\n", l.Rule, l.LicenseFile, err)
+ os.Exit(1)
+ }
+ h := LicenseTextHash(fmt.Sprintf("%x", md5.Sum(data)))
+ g.LicenseTextHash[l.Rule] = h
+ if _, found := g.LicenseTextIndex[h]; !found {
+ g.LicenseTextIndex[h] = string(data)
+ }
+ }
+}
+
+func (g *generator) generate(sink io.Writer, listTargets bool) {
+ const tpl = `<!DOCTYPE html>
+<html>
+ <head>
+ <style type="text/css">
+ body { padding: 2px; margin: 0; }
+ .license { background-color: seashell; margin: 1em;}
+ pre { padding: 1em; }</style></head>
+ <body>
+ The following software has been included in this product and contains the license and notice as shown below.<p>
+ {{- $x := . }}
+ {{- range .Licenses }}
+ {{ if .PackageName }}<strong>{{.PackageName}}</strong>{{- else }}Rule: {{.Rule}}{{ end }}
+ {{- if .CopyrightNotice }}<br>Copyright Notice: {{.CopyrightNotice}}{{ end }}
+ {{- $v := index $x.LicenseTextHash .Rule }}{{- if $v }}<br><a href=#{{$v}}>License</a>{{- end }}<br>
+ {{- if list_targets }}
+ Used by: {{- range .Licensees }} {{.}} {{- end }}<hr>
+ {{- end }}
+ {{- end }}
+ {{ range $k, $v := .LicenseTextIndex }}<div id="{{$k}}" class="license"><pre>{{$v}}
+ </pre></div> {{- end }}
+ </body>
+</html>
+`
+ funcMap := template.FuncMap{
+ "list_targets": func() bool { return listTargets },
+ }
+ t, err := template.New("NoticesPage").Funcs(funcMap).Parse(tpl)
+ maybeQuit(err)
+ if g.LicenseTextHash == nil {
+ g.buildLicenseTextIndex()
+ }
+ maybeQuit(t.Execute(sink, g))
+}
+
+func maybeQuit(err error) {
+ if err == nil {
+ return
+ }
+
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+}
+
+func processArgs() {
+ flag.Usage = func() {
+		fmt.Fprintln(os.Stderr, `usage: bazel_notice_gen -o <output> <input>`)
+ flag.PrintDefaults()
+ os.Exit(2)
+ }
+ flag.Parse()
+ if len(flag.Args()) != 1 {
+ flag.Usage()
+ }
+ inputFile = flag.Arg(0)
+}
+
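+// setupWriting returns the writer to generate into and, when applicable, the
+// gzip closer and underlying output file so main can close them in order.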
+func setupWriting() (io.Writer, io.Closer, *os.File) {
+ if *outputFile == "" {
+ return os.Stdout, nil, nil
+ }
+ ofile, err := os.Create(*outputFile)
+ maybeQuit(err)
+ if !strings.HasSuffix(*outputFile, ".gz") {
+ return ofile, nil, ofile
+ }
+ gz, err := gzip.NewWriterLevel(ofile, gzip.BestCompression)
+ maybeQuit(err)
+ return gz, gz, ofile
+}
+
+func main() {
+ processArgs()
+ data, err := os.ReadFile(inputFile)
+ maybeQuit(err)
+ sink, closer, ofile := setupWriting()
+ newGenerator(string(data)).generate(sink, *listTargets)
+ if closer != nil {
+ maybeQuit(closer.Close())
+ }
+ if ofile != nil {
+ maybeQuit(ofile.Close())
+ }
+}
diff --git a/compliance/cmd/bazel_notice_gen/bazel_notice_gen_test.go b/compliance/cmd/bazel_notice_gen/bazel_notice_gen_test.go
new file mode 100644
index 00000000..31953871
--- /dev/null
+++ b/compliance/cmd/bazel_notice_gen/bazel_notice_gen_test.go
@@ -0,0 +1,128 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "testing"
+)
+
+func Test_doit(t *testing.T) {
+ input := `
+[
+ {
+ "rule": "@//build/soong/licenses:Android-Apache-2.0",
+ "license_kinds": [
+ {
+ "target": "@//build/soong/licenses:SPDX-license-identifier-Apache-2.0",
+ "name": "SPDX-license-identifier-Apache-2.0",
+ "conditions": ["notice"]
+ }
+ ],
+ "copyright_notice": "Copyright (C) The Android Open Source Project",
+ "package_name": "Discombobulator",
+ "package_url": null,
+ "package_version": null,
+ "license_text": "../../testdata/NOTICE_LICENSE",
+ "licensees": [
+ "@//bionic/libc:libc_bionic_ndk",
+ "@//system/logging/liblog:liblog"
+ ]
+ },
+ {
+ "rule": "@//external/scudo:external_scudo_license",
+ "license_kinds": [
+ {
+ "target": "@//build/soong/licenses:SPDX-license-identifier-Apache-2.0",
+ "name": "SPDX-license-identifier-Apache-2.0",
+ "conditions": ["notice"]
+ }
+ ],
+ "copyright_notice": "",
+ "package_name": "Scudo Standalone",
+ "package_url": null,
+ "package_version": null,
+ "licensees": [
+ "@//external/scudo:foo"
+ ]
+ }
+]
+`
+ tests := []struct {
+ name string
+ in string
+ listTargets bool
+ want string
+ }{
+ {
+ name: "ListTargets",
+ in: input,
+ listTargets: true,
+ want: `<!DOCTYPE html>
+<html>
+ <head>
+ <style type="text/css">
+ body { padding: 2px; margin: 0; }
+ .license { background-color: seashell; margin: 1em;}
+ pre { padding: 1em; }</style></head>
+ <body>
+ The following software has been included in this product and contains the license and notice as shown below.<p>
+ <strong>Discombobulator</strong><br>Copyright Notice: Copyright (C) The Android Open Source Project<br><a href=#b9835e4a000fb18a4c8970690daa3b95>License</a><br>
+ Used by: @//bionic/libc:libc_bionic_ndk @//system/logging/liblog:liblog<hr>
+ <strong>Scudo Standalone</strong><br>
+ Used by: @//external/scudo:foo<hr>
+ <div id="b9835e4a000fb18a4c8970690daa3b95" class="license"><pre>neque porro quisquam est qui do-
+lorem ipsum
+
+ </pre></div>
+ </body>
+</html>
+`,
+ },
+ {
+ name: "NoTargets",
+ in: input,
+ listTargets: false,
+ want: `<!DOCTYPE html>
+<html>
+ <head>
+ <style type="text/css">
+ body { padding: 2px; margin: 0; }
+ .license { background-color: seashell; margin: 1em;}
+ pre { padding: 1em; }</style></head>
+ <body>
+ The following software has been included in this product and contains the license and notice as shown below.<p>
+ <strong>Discombobulator</strong><br>Copyright Notice: Copyright (C) The Android Open Source Project<br><a href=#b9835e4a000fb18a4c8970690daa3b95>License</a><br>
+ <strong>Scudo Standalone</strong><br>
+ <div id="b9835e4a000fb18a4c8970690daa3b95" class="license"><pre>neque porro quisquam est qui do-
+lorem ipsum
+
+ </pre></div>
+ </body>
+</html>
+`,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ buf := bytes.Buffer{}
+ newGenerator(tt.in).generate(&buf, tt.listTargets)
+ got := buf.String()
+ if got != tt.want {
+ t.Errorf("doit() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/compliance/go.mod b/compliance/go.mod
new file mode 100644
index 00000000..336254d2
--- /dev/null
+++ b/compliance/go.mod
@@ -0,0 +1,5 @@
+module android/bazel/compliance
+
+
+
+go 1.19
diff --git a/compliance/testdata/NOTICE_LICENSE b/compliance/testdata/NOTICE_LICENSE
new file mode 100644
index 00000000..da74c170
--- /dev/null
+++ b/compliance/testdata/NOTICE_LICENSE
@@ -0,0 +1,2 @@
+neque porro quisquam est qui do-
+lorem ipsum
diff --git a/darwin.bazelrc b/darwin.bazelrc
index 0f86da10..f2216962 100644
--- a/darwin.bazelrc
+++ b/darwin.bazelrc
@@ -1,3 +1 @@
import %workspace%/build/bazel/common.bazelrc
-
-build --host_platform //build/bazel/platforms:darwin_x86_64
diff --git a/examples/android_app/java/com/app/BUILD b/examples/android_app/java/com/app/BUILD
index 786bfbdb..8522e0cc 100644
--- a/examples/android_app/java/com/app/BUILD
+++ b/examples/android_app/java/com/app/BUILD
@@ -1,11 +1,28 @@
-load("//build/bazel/rules/android:android_binary.bzl", "android_binary")
-load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/android:rules.bzl", "aar_import", "android_binary", "android_library")
load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
-load("@rules_android//rules:rules.bzl", "android_library")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+
+package(default_applicable_licenses = ["//build/soong/licenses:Android-Apache-2.0"])
android_binary(
name = "app",
manifest = "AndroidManifest.xml",
+ sdk_version = "current",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
deps = [
":applib",
],
@@ -15,6 +32,8 @@ android_binary(
name = "app-cert-string",
certificate_name = "platform",
manifest = "AndroidManifest.xml",
+ sdk_version = "current",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
deps = [
":applib",
],
@@ -24,6 +43,8 @@ android_binary(
name = "app-cert-module",
certificate = "//build/make/target/product/security:aosp-testkey",
manifest = "AndroidManifest.xml",
+ sdk_version = "current",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
deps = [
":applib",
],
@@ -34,11 +55,15 @@ android_library(
srcs = [
"Jni.java",
"MainActivity.java",
+ "some_kotlin.kt",
],
manifest = "AndroidManifest.xml",
resource_files = glob(["res/**"]),
+ sdk_version = "current",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
deps = [
- ":jni",
+ # TODO(b/240555494): re-enable JNI when it is supported
+ # ":jni",
":lib",
],
)
@@ -46,6 +71,8 @@ android_library(
android_library(
name = "lib",
srcs = ["Lib.java"],
+ sdk_version = "current",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
)
cc_library_shared(
@@ -60,3 +87,10 @@ cc_library_static(
hdrs = ["jni_dep.h"],
deps = ["//libnativehelper:jni_headers"],
)
+
+aar_import(
+ name = "import",
+ aar = "example_lib.aar",
+ sdk_version = "32",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+)
diff --git a/examples/android_app/java/com/app/example_lib.aar b/examples/android_app/java/com/app/example_lib.aar
new file mode 100644
index 00000000..05000e19
--- /dev/null
+++ b/examples/android_app/java/com/app/example_lib.aar
Binary files differ
diff --git a/examples/android_app/java/com/app/some_kotlin.kt b/examples/android_app/java/com/app/some_kotlin.kt
new file mode 100644
index 00000000..537758ca
--- /dev/null
+++ b/examples/android_app/java/com/app/some_kotlin.kt
@@ -0,0 +1,3 @@
+package com.app
+
+class MyKotlinClass
diff --git a/examples/apex/certificate_name/Android.bp b/examples/apex/certificate_name/Android.bp
new file mode 100644
index 00000000..9beecb25
--- /dev/null
+++ b/examples/apex/certificate_name/Android.bp
@@ -0,0 +1,11 @@
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+apex {
+ name: "build.bazel.examples.apex.certificate_name",
+ defaults: [
+ "build.bazel.examples.apex.minimal_defaults",
+ ],
+ certificate: "platform",
+}
diff --git a/examples/apex/certificate_name/file_contexts b/examples/apex/certificate_name/file_contexts
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/examples/apex/certificate_name/file_contexts
diff --git a/examples/apex/certificate_name/manifest.json b/examples/apex/certificate_name/manifest.json
new file mode 100644
index 00000000..196fce0d
--- /dev/null
+++ b/examples/apex/certificate_name/manifest.json
@@ -0,0 +1,4 @@
+{
+ "name": "build.bazel.examples.apex.certificate_name",
+ "version": 1
+}
diff --git a/examples/apex/minimal/Android.bp b/examples/apex/minimal/Android.bp
index e18120fc..16f7149c 100644
--- a/examples/apex/minimal/Android.bp
+++ b/examples/apex/minimal/Android.bp
@@ -34,10 +34,10 @@ android_app_certificate {
}
filegroup {
- name: "build.bazel.examples.apex.minimal-file_contexts",
- srcs: [
- "file_contexts",
- ],
+ name: "build.bazel.examples.apex.minimal-file_contexts",
+ srcs: [
+ "file_contexts",
+ ],
}
cc_library {
@@ -46,6 +46,43 @@ cc_library {
srcs: ["dummy_cc_lib.cc"],
apex_available: [
+ "build.bazel.examples.apex.certificate_name",
+ "build.bazel.examples.apex.minimal",
+ "build.bazel.examples.apex.minimal_compressed",
+ ],
+
+ // Because the APEX sets this
+ product_specific: true,
+
+ // Because the APEX sets this
+ min_sdk_version: "30",
+}
+
+cc_library {
+ name: "build.bazel.examples.apex.minimal_dummy_cc_lib_2",
+
+ srcs: ["dummy_cc_lib_2.cc"],
+
+ apex_available: [
+ "build.bazel.examples.apex.certificate_name",
+ "build.bazel.examples.apex.minimal",
+ "build.bazel.examples.apex.minimal_compressed",
+ ],
+
+ // Because the APEX sets this
+ product_specific: true,
+
+ // Because the APEX sets this
+ min_sdk_version: "30",
+}
+
+cc_library {
+ name: "build.bazel.examples.apex.minimal_dummy_cc_lib_3",
+
+ srcs: ["dummy_cc_lib_3.cc"],
+
+ apex_available: [
+ "build.bazel.examples.apex.certificate_name",
"build.bazel.examples.apex.minimal",
"build.bazel.examples.apex.minimal_compressed",
],
@@ -80,10 +117,14 @@ cc_binary {
srcs: ["main.cc"],
apex_available: [
+ "build.bazel.examples.apex.certificate_name",
"build.bazel.examples.apex.minimal",
- "build.bazel.examples.apex.minimal_compressed"
+ "build.bazel.examples.apex.minimal_compressed",
],
+ shared_libs: ["build.bazel.examples.apex.minimal_dummy_cc_lib_2"],
+ runtime_libs: ["build.bazel.examples.apex.minimal_dummy_cc_lib_3"],
+
// Because the APEX sets these
product_specific: true,
min_sdk_version: "30",
@@ -101,13 +142,13 @@ apex_defaults {
min_sdk_version: "30",
native_shared_libs: [
- "build.bazel.examples.apex.minimal_dummy_cc_lib",
+ "build.bazel.examples.apex.minimal_dummy_cc_lib",
],
prebuilts: [
- "build.bazel.examples.apex.minimal_dummy_named_prebuilt_etc",
- "build.bazel.examples.apex.minimal_dummy_unnamed_prebuilt_etc",
- "build.bazel.examples.apex.minimal_dummy_prebuilt_etc_without_subdir",
+ "build.bazel.examples.apex.minimal_dummy_named_prebuilt_etc",
+ "build.bazel.examples.apex.minimal_dummy_unnamed_prebuilt_etc",
+ "build.bazel.examples.apex.minimal_dummy_prebuilt_etc_without_subdir",
],
binaries: [
@@ -121,7 +162,14 @@ apex {
name: "build.bazel.examples.apex.minimal",
defaults: [
"build.bazel.examples.apex.minimal_defaults",
- ]
+ ],
+}
+
+override_apex {
+ name: "build.bazel.examples.apex.override.minimal",
+ base: "build.bazel.examples.apex.minimal",
+ package_name: "build.bazel.examples.apex.override.minimal",
+ logging_parent: "build.bazel.logging.parent",
}
apex {
@@ -129,5 +177,5 @@ apex {
compressible: true,
defaults: [
"build.bazel.examples.apex.minimal_defaults",
- ]
+ ],
}
diff --git a/examples/apex/minimal/README.md b/examples/apex/minimal/README.md
new file mode 100644
index 00000000..b04d0046
--- /dev/null
+++ b/examples/apex/minimal/README.md
@@ -0,0 +1,41 @@
+Instructions for building and running the minimal apex
+
+You need an Android device or emulator to run it on; an easy option is:
+
+```
+lunch sdk_phone_x86_64-userdebug
+m
+emulator
+```
+
+To build and install with Soong:
+```
+m build.bazel.examples.apex.minimal && adb install out/target/product/emulator_x86_64/product/apex/build.bazel.examples.apex.minimal.apex && adb reboot
+```
+
+To build and install with Bazel:
+```
+b build --config=android_x86_64 //build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal && adb install bazel-bin/build/bazel/examples/apex/minimal/build.bazel.examples.apex.minimal.apex && adb reboot
+```
+
+The first time you try to install the apex, you will probably get this error:
+
+```
+adb: failed to install out/target/product/emulator_x86_64/product/apex/build.bazel.examples.apex.minimal.apex: Error [1] [apexd verification failed : No preinstalled apex found for package build.bazel.examples.apex.minimal]
+```
+
+There are probably better ways to resolve this, but one easy way is to take advantage of a bug (b/205632228) in Soong and force the apex to be preinstalled by running:
+
+```
+m installclean
+m build.bazel.examples.apex.minimal
+m
+```
+
+and then restarting the emulator. After you've done this once, you can use the regular install commands above from then on.
+
+To run the binary that the apex installs:
+
+```
+adb shell /apex/build.bazel.examples.apex.minimal/bin/build.bazel.examples.apex.cc_binary
+```
diff --git a/examples/apex/minimal/dummy_cc_lib.cc b/examples/apex/minimal/dummy_cc_lib.cc
index 36a07a0a..d4376804 100644
--- a/examples/apex/minimal/dummy_cc_lib.cc
+++ b/examples/apex/minimal/dummy_cc_lib.cc
@@ -1,9 +1,18 @@
-int main(int argc, char** argv) {
- // Unused
- (void)argc;
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include <iostream>
- // Unused
- (void)argv;
-
- return 0;
+void shared_lib_func(const char* name) {
+ std::cout << "Hello from shared_lib_func, " << name << std::endl;
}
diff --git a/examples/apex/minimal/dummy_cc_lib_2.cc b/examples/apex/minimal/dummy_cc_lib_2.cc
new file mode 100644
index 00000000..83e8de48
--- /dev/null
+++ b/examples/apex/minimal/dummy_cc_lib_2.cc
@@ -0,0 +1,18 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include <iostream>
+
+void shared_lib_2_func(const char* name) {
+ std::cout << "Hello from shared_lib_2_func, " << name << std::endl;
+}
diff --git a/examples/apex/minimal/dummy_cc_lib_3.cc b/examples/apex/minimal/dummy_cc_lib_3.cc
new file mode 100644
index 00000000..774c6e72
--- /dev/null
+++ b/examples/apex/minimal/dummy_cc_lib_3.cc
@@ -0,0 +1,18 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include <iostream>
+
+void shared_lib_3_func(const char* name) {
+ std::cout << "Hello from shared_lib_3_func, " << name << std::endl;
+}
diff --git a/examples/apex/minimal/main.cc b/examples/apex/minimal/main.cc
index 76e81970..947d7ee8 100644
--- a/examples/apex/minimal/main.cc
+++ b/examples/apex/minimal/main.cc
@@ -1 +1,11 @@
-int main() { return 0; }
+#include <iostream>
+
+// Forward declaration because we don't have a proper header file
+// for the dummy shared lib.
+void shared_lib_2_func(const char* name);
+
+int main() {
+ std::cout << "Hello, world!" << std::endl;
+ shared_lib_2_func("world");
+ return 0;
+}
diff --git a/examples/cc/BUILD b/examples/cc/BUILD
new file mode 100644
index 00000000..fb44c9db
--- /dev/null
+++ b/examples/cc/BUILD
@@ -0,0 +1,22 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+
+package(default_visibility = ["//build/bazel:__subpackages__"])
+
+cc_library_static(
+ name = "foo_static",
+ srcs = ["foo.cpp"],
+)
diff --git a/examples/cc/aidl/BUILD b/examples/cc/aidl/BUILD
new file mode 100644
index 00000000..fcab30eb
--- /dev/null
+++ b/examples/cc/aidl/BUILD
@@ -0,0 +1,27 @@
+load("//build/bazel/rules/aidl:aidl_library.bzl", "aidl_library")
+load("//build/bazel/rules/cc:cc_aidl_library.bzl", "cc_aidl_library")
+
+aidl_library(
+ name = "abcd",
+ srcs = ["a/b/c/d/ABCD.aidl"],
+ strip_import_prefix = "a/b",
+)
+
+# Similar to cc_proto_library, the output path for the generated code is
+# bazel-bin/<abcd_cc_aidl_library package dir>/abcd_cc_aidl_library_aidl_code_gen/<abcd package dir>/_virtual_imports/abcd/c/d
+#
+# AIDL sources are symlinked to bazel-bin/<abcd package dir>/_virtual_imports/abcd/c/d
+cc_aidl_library(
+ name = "abcd_cc_aidl_library",
+ implementation_dynamic_deps = [
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils:libutils",
+ ],
+ deps = [":abcd"],
+)
+
+aidl_library(
+ name = "foo",
+ srcs = ["foo/IFoo.aidl"],
+ visibility = ["//build/bazel/examples/cc/aidl_library:__subpackages__"],
+)
diff --git a/examples/cc/aidl/a/b/c/d/ABCD.aidl b/examples/cc/aidl/a/b/c/d/ABCD.aidl
new file mode 100644
index 00000000..8d91baee
--- /dev/null
+++ b/examples/cc/aidl/a/b/c/d/ABCD.aidl
@@ -0,0 +1,3 @@
+package c.d;
+
+interface ABCD {}
diff --git a/examples/cc/aidl/foo/IFoo.aidl b/examples/cc/aidl/foo/IFoo.aidl
new file mode 100644
index 00000000..8ae46f1c
--- /dev/null
+++ b/examples/cc/aidl/foo/IFoo.aidl
@@ -0,0 +1,5 @@
+package foo;
+
+interface IFoo {
+ void doFoo();
+}
diff --git a/examples/cc/aidl_library/BUILD b/examples/cc/aidl_library/BUILD
new file mode 100644
index 00000000..9e08dea3
--- /dev/null
+++ b/examples/cc/aidl_library/BUILD
@@ -0,0 +1,51 @@
+# This BUILD file mimics what bp2build generates for cc modules with aidl srcs
+load("//build/bazel/rules/cc:cc_aidl_library.bzl", "cc_aidl_library")
+load("//build/bazel/rules/cc:cc_binary.bzl", "cc_binary")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+
+# Use aidl sources from another package
+cc_aidl_library(
+ name = "foo_cc_aidl_library",
+ implementation_dynamic_deps = [
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils:libutils",
+ ],
+ deps = ["//build/bazel/examples/cc/aidl:foo"],
+)
+
+cc_library_shared(
+ name = "foo",
+ srcs = ["foo.cpp"],
+ implementation_dynamic_deps = [
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils:libutils",
+ ],
+ whole_archive_deps = [
+ "foo_cc_aidl_library",
+ ],
+)
+
+cc_library_static(
+ name = "foo_bp2build_cc_library_static",
+ srcs = ["foo.cpp"],
+ implementation_dynamic_deps = [
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils",
+ ],
+ whole_archive_deps = [
+ "foo_cc_aidl_library",
+ ],
+)
+
+cc_binary(
+ name = "program_cc_binary",
+ srcs = ["program.cpp"],
+ dynamic_deps = [
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils:libutils",
+ ],
+ whole_archive_deps = [
+ "foo_cc_aidl_library",
+ ],
+)
diff --git a/examples/cc/aidl_library/foo.cpp b/examples/cc/aidl_library/foo.cpp
new file mode 100644
index 00000000..ab97e17a
--- /dev/null
+++ b/examples/cc/aidl_library/foo.cpp
@@ -0,0 +1,9 @@
+#include <foo/BpFoo.h>
+
+namespace android {
+ void main() {
+ // Call boilerplate implementation of proxy (e.g. BpFoo)
+ foo::BpFoo* bf = new foo::BpFoo(nullptr);
+ bf->doFoo();
+ };
+}
diff --git a/examples/cc/aidl_library/program.cpp b/examples/cc/aidl_library/program.cpp
new file mode 100644
index 00000000..c4b78552
--- /dev/null
+++ b/examples/cc/aidl_library/program.cpp
@@ -0,0 +1,8 @@
+#include <foo/BpFoo.h>
+
+int main() {
+ // Call boilerplate implementation of proxy (e.g. BpFoo)
+ foo::BpFoo* bf = new foo::BpFoo(nullptr);
+ bf->doFoo();
+ return 0;
+}
diff --git a/examples/cc/foo.cpp b/examples/cc/foo.cpp
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/examples/cc/foo.cpp
diff --git a/examples/cc/proto/deps/BUILD b/examples/cc/proto/deps/BUILD
new file mode 100644
index 00000000..bdcb56bf
--- /dev/null
+++ b/examples/cc/proto/deps/BUILD
@@ -0,0 +1,6 @@
+package(default_visibility = ["//visibility:public"])
+
+proto_library(
+ name = "deps_proto",
+ srcs = glob(["src/**/*.proto"]),
+)
diff --git a/examples/cc/proto/deps/src/enums/proto_deps.proto b/examples/cc/proto/deps/src/enums/proto_deps.proto
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/examples/cc/proto/deps/src/enums/proto_deps.proto
diff --git a/examples/cc/proto/external/BUILD b/examples/cc/proto/external/BUILD
new file mode 100644
index 00000000..ad1f4f4a
--- /dev/null
+++ b/examples/cc/proto/external/BUILD
@@ -0,0 +1,6 @@
+package(default_visibility = ["//visibility:public"])
+
+proto_library(
+ name = "external_proto",
+ srcs = glob(["src/**/*.proto"]),
+)
diff --git a/examples/cc/proto/external/src/enums/proto_external.proto b/examples/cc/proto/external/src/enums/proto_external.proto
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/examples/cc/proto/external/src/enums/proto_external.proto
diff --git a/examples/gensrcs/Android.bp b/examples/gensrcs/Android.bp
new file mode 100644
index 00000000..d3262ec6
--- /dev/null
+++ b/examples/gensrcs/Android.bp
@@ -0,0 +1,28 @@
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// Soong expands $(genDir) to __SBOX_SANDBOX_DIR__/out
+// and $(out) to __SBOX_SANDBOX_DIR__/out/build/bazel/examples/gensrcs/data1.out.
+// In mixed builds, bp2build converts $(genDir) to $(GENDIR),
+// which is expanded to ctx.var['GENDIR'].
+gensrcs {
+ name: "examples.gensrcs.make_data1",
+ srcs: ["data1.txt"],
+ output_extension: "out",
+ cmd: "mkdir -p $(genDir)/out_dir " +
+ "&& touch $(genDir)/out_dir/data.text " +
+ "&& cat $(genDir)/out_dir/data.text > $(out)",
+}
+
+// Verify that, regardless of whether the action is generated by Soong or Bazel:
+// * $(genDir)/build/bazel/examples/gensrcs/data/data2.out is equivalent to $(out)
+// * the output path (data/data2.out) keeps the nested directory structure of the
+//   input path (data/data2.txt)
+gensrcs {
+ name: "examples.gensrcs.make_data2",
+ srcs: ["data/data2.txt"],
+ output_extension: "out",
+ cmd: "cat $(in) > $(out) " +
+ "&& cat $(genDir)/build/bazel/examples/gensrcs/data/data2.out",
+}
diff --git a/examples/gensrcs/data/data2.txt b/examples/gensrcs/data/data2.txt
new file mode 100644
index 00000000..29a0d9bd
--- /dev/null
+++ b/examples/gensrcs/data/data2.txt
@@ -0,0 +1 @@
+data2 text
diff --git a/examples/gensrcs/data1.txt b/examples/gensrcs/data1.txt
new file mode 100644
index 00000000..17b87af5
--- /dev/null
+++ b/examples/gensrcs/data1.txt
@@ -0,0 +1 @@
+data1 text
diff --git a/examples/gensrcs/generated_headers/bar/Android.bp b/examples/gensrcs/generated_headers/bar/Android.bp
new file mode 100644
index 00000000..6ed5e02c
--- /dev/null
+++ b/examples/gensrcs/generated_headers/bar/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+gensrcs {
+ name: "bar-protoc-gen-headers",
+
+ tools: [
+ "aprotoc",
+ "protoc-gen-cppstream",
+ ],
+
+ tool_files: [
+ ":libprotobuf-internal-protos",
+ ],
+
+ cmd: "mkdir -p $(genDir) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " --cppstream_out=$(genDir) " +
+ " -Iexternal/protobuf/src " +
+ " -I . " +
+ " $(in)",
+
+ srcs: [
+ "bar.proto",
+ ],
+
+ output_extension: "proto.h",
+}
diff --git a/examples/gensrcs/generated_headers/bar/bar.proto b/examples/gensrcs/generated_headers/bar/bar.proto
new file mode 100644
index 00000000..4c9a03af
--- /dev/null
+++ b/examples/gensrcs/generated_headers/bar/bar.proto
@@ -0,0 +1,2 @@
+syntax = "proto2";
+package bar;
diff --git a/examples/gensrcs/generated_headers/foo/Android.bp b/examples/gensrcs/generated_headers/foo/Android.bp
new file mode 100644
index 00000000..45c6ccb4
--- /dev/null
+++ b/examples/gensrcs/generated_headers/foo/Android.bp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+gensrcs {
+ name: "foo-protoc-gen-headers",
+
+ tools: [
+ "aprotoc",
+ "protoc-gen-cppstream",
+ ],
+
+ tool_files: [
+ ":libprotobuf-internal-protos",
+ ],
+
+ cmd: "mkdir -p $(genDir) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " --cppstream_out=$(genDir) " +
+ " -Iexternal/protobuf/src " +
+ " -I . " +
+ " $(in)",
+
+ srcs: [
+ "foo.proto",
+ ],
+
+ output_extension: "proto.h",
+}
+
+cc_library_shared {
+ name: "foo-cc_library_shared",
+ srcs: [
+ "foo.cpp",
+ ],
+ generated_headers: [
+ "foo-protoc-gen-headers",
+ "bar-protoc-gen-headers",
+ ],
+}
diff --git a/examples/gensrcs/generated_headers/foo/foo.cpp b/examples/gensrcs/generated_headers/foo/foo.cpp
new file mode 100644
index 00000000..39de6d83
--- /dev/null
+++ b/examples/gensrcs/generated_headers/foo/foo.cpp
@@ -0,0 +1,2 @@
+#include <build/bazel/examples/gensrcs/generated_headers/foo/foo.proto.h>
+#include <build/bazel/examples/gensrcs/generated_headers/bar/bar.proto.h>
diff --git a/examples/gensrcs/generated_headers/foo/foo.proto b/examples/gensrcs/generated_headers/foo/foo.proto
new file mode 100644
index 00000000..4c813144
--- /dev/null
+++ b/examples/gensrcs/generated_headers/foo/foo.proto
@@ -0,0 +1,2 @@
+syntax = "proto2";
+package foo;
diff --git a/examples/java/com/bazel/BUILD.bazel b/examples/java/com/bazel/BUILD.bazel
index 6f96f9d3..5b3e2f69 100644
--- a/examples/java/com/bazel/BUILD.bazel
+++ b/examples/java/com/bazel/BUILD.bazel
@@ -1,11 +1,49 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/java:rules.bzl", "java_binary", "java_import", "java_library")
+load("//build/bazel/rules/kotlin:rules.bzl", "kt_jvm_library")
+
+package(default_applicable_licenses = ["//build/soong/licenses:Android-Apache-2.0"])
+
java_binary(
- name = "hello_java",
- srcs = ["example/HelloWorld.java"],
- main_class = "com.bazel.example.HelloWorld",
- deps = [":hello_java_lib"],
+ name = "hello_java",
+ srcs = ["example/HelloWorld.java"],
+ main_class = "com.bazel.example.HelloWorld",
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:android": ["@platforms//:incompatible"],
+ "//conditions:default": [],
+ }),
+ deps = [
+ ":hello_java_lib",
+ ":some_kotlin_lib",
+ ],
)
java_library(
- name = "hello_java_lib",
- srcs = ["example_lib/HelloLib.java"],
+ name = "hello_java_lib",
+ srcs = ["example_lib/HelloLib.java"],
+ sdk_version = "current",
+)
+
+java_import(
+ name = "hello_java_import",
+ jars = ["hello_java_import.jar"],
+)
+
+kt_jvm_library(
+ name = "some_kotlin_lib",
+ srcs = ["example_lib/SomeKotlin.kt"],
+ sdk_version = "current",
)
diff --git a/examples/java/com/bazel/example_lib/SomeKotlin.kt b/examples/java/com/bazel/example_lib/SomeKotlin.kt
new file mode 100644
index 00000000..e7577e4b
--- /dev/null
+++ b/examples/java/com/bazel/example_lib/SomeKotlin.kt
@@ -0,0 +1,18 @@
+/**
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.bazel.example_lib
+
+class SomeKotlin
diff --git a/examples/java/com/bazel/hello_java_import.jar b/examples/java/com/bazel/hello_java_import.jar
new file mode 100755
index 00000000..5c460075
--- /dev/null
+++ b/examples/java/com/bazel/hello_java_import.jar
Binary files differ
diff --git a/examples/partitions/BUILD b/examples/partitions/BUILD
new file mode 100644
index 00000000..eb3efa12
--- /dev/null
+++ b/examples/partitions/BUILD
@@ -0,0 +1,12 @@
+load("//build/bazel/rules/partitions:partition.bzl", "partition")
+
+package(default_visibility = ["//visibility:public"])
+
+partition(
+ name = "system_image",
+ tags = ["manual"], # b/234509030
+ type = "system",
+ deps = [
+ "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal",
+ ],
+)
diff --git a/examples/python/library/Android.bp b/examples/python/library/Android.bp
new file mode 100644
index 00000000..4d28fd90
--- /dev/null
+++ b/examples/python/library/Android.bp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+ name: "build_bazel_examples_python_library_main",
+ srcs: [
+ "main.py",
+ "main2.py",
+ ],
+ libs: ["build_bazel_examples_python_library_lib"],
+ main: "main.py",
+}
+
+python_library_host {
+ name: "build_bazel_examples_python_library_lib",
+ srcs: ["lib.py"],
+}
diff --git a/examples/python/library/lib.py b/examples/python/library/lib.py
new file mode 100644
index 00000000..7b07b3da
--- /dev/null
+++ b/examples/python/library/lib.py
@@ -0,0 +1,16 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def func_in_lib():
+ print("func_in_lib called!")
diff --git a/examples/python/library/main.py b/examples/python/library/main.py
new file mode 100644
index 00000000..f568f212
--- /dev/null
+++ b/examples/python/library/main.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import main2
+main2.func_in_main2()
+
+import lib
+lib.func_in_lib()
diff --git a/examples/python/library/main2.py b/examples/python/library/main2.py
new file mode 100644
index 00000000..f00e54e8
--- /dev/null
+++ b/examples/python/library/main2.py
@@ -0,0 +1,16 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def func_in_main2():
+ print("func_in_main2 called!")
diff --git a/examples/python/protobuf/Android.bp b/examples/python/protobuf/Android.bp
new file mode 100644
index 00000000..2f2b5389
--- /dev/null
+++ b/examples/python/protobuf/Android.bp
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+ name: "build_bazel_examples_python_protobuf_main",
+ srcs: ["main.py"],
+ libs: ["build_bazel_examples_python_protobuf_protos"],
+ main: "main.py",
+}
+
+python_library_host {
+ name: "build_bazel_examples_python_protobuf_protos",
+ srcs: ["test.proto"],
+ proto: {
+ canonical_path_from_root: false,
+ },
+}
diff --git a/examples/python/protobuf/main.py b/examples/python/protobuf/main.py
new file mode 100644
index 00000000..f2ea3915
--- /dev/null
+++ b/examples/python/protobuf/main.py
@@ -0,0 +1,22 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import test_pb2
+import google.protobuf.text_format as text_format
+
+message = test_pb2.ExampleMessage()
+message.name = "Test name!"
+message.other_field = "asdf"
+
+print(text_format.MessageToString(message))
diff --git a/examples/python/protobuf/test.proto b/examples/python/protobuf/test.proto
new file mode 100644
index 00000000..3b76f858
--- /dev/null
+++ b/examples/python/protobuf/test.proto
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package bazel.example.proto;
+
+message ExampleMessage {
+ string name = 1;
+ string other_field = 2;
+}
+
+
diff --git a/examples/soong_config_variables/Android.bp b/examples/soong_config_variables/Android.bp
index e863e820..d8db8401 100644
--- a/examples/soong_config_variables/Android.bp
+++ b/examples/soong_config_variables/Android.bp
@@ -59,7 +59,7 @@ fake_library_linking_strategy_cc_defaults {
"bp2build_bar",
],
- soong_config_variables:{
+ soong_config_variables: {
fake_library_linking_strategy: {
prefer_static: {
static_libs: [
@@ -93,7 +93,11 @@ fake_library_linking_strategy_cc_defaults {
// Experimental "stub" adbd for bp2build development
cc_binary {
name: "bp2build_adbd",
- defaults: ["adbd_defaults", "host_adbd_supported", "fake_libadbd_binary_dependencies"],
+ defaults: [
+ "adbd_defaults",
+ "host_adbd_supported",
+ "fake_libadbd_binary_dependencies",
+ ],
srcs: [
"main.cpp",
],
diff --git a/flags/BUILD.bazel b/flags/BUILD.bazel
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/flags/BUILD.bazel
diff --git a/flags/cc/abi/BUILD.bazel b/flags/cc/abi/BUILD.bazel
new file mode 100644
index 00000000..b1ac3e28
--- /dev/null
+++ b/flags/cc/abi/BUILD.bazel
@@ -0,0 +1,27 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
+load("@env//:env.bzl", "env")
+load("//build/bazel/flags:common.bzl", "is_env_true")
+
+package(default_visibility = ["//visibility:public"])
+
+bool_flag(
+ name = "skip_abi_checks",
+ build_setting_default = is_env_true(env.get("SKIP_ABI_CHECKS")),
+)
diff --git a/flags/cc/tidy/BUILD.bazel b/flags/cc/tidy/BUILD.bazel
new file mode 100644
index 00000000..4649be26
--- /dev/null
+++ b/flags/cc/tidy/BUILD.bazel
@@ -0,0 +1,54 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag", "string_flag", "string_list_flag")
+load("@env//:env.bzl", "env")
+load("//build/bazel/flags:common.bzl", "is_env_true")
+
+package(default_visibility = ["//visibility:public"])
+
+bool_flag(
+ name = "with_tidy",
+ build_setting_default = is_env_true(env.get("WITH_TIDY")),
+)
+
+bool_flag(
+ name = "allow_local_tidy_true",
+ build_setting_default = is_env_true(env.get("ALLOW_LOCAL_TIDY_TRUE")),
+)
+
+_with_tidy_flags = env.get("WITH_TIDY_FLAGS", None)
+
+string_list_flag(
+ name = "with_tidy_flags",
+ build_setting_default = _with_tidy_flags.split(" ") if _with_tidy_flags != None else [],
+)
+
+string_flag(
+ name = "default_tidy_header_dirs",
+ build_setting_default = env.get("DEFAULT_TIDY_HEADER_DIRS", ""),
+)
+
+string_flag(
+ name = "tidy_timeout",
+ build_setting_default = env.get("TIDY_TIMEOUT", ""),
+)
+
+bool_flag(
+ name = "tidy_external_vendor",
+ build_setting_default = is_env_true(env.get("TIDY_EXTERNAL_VENDOR")),
+)
diff --git a/flags/common.bzl b/flags/common.bzl
new file mode 100644
index 00000000..1e84d303
--- /dev/null
+++ b/flags/common.bzl
@@ -0,0 +1,23 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def is_env_true(value):
+ """return the truthiness of the value of an environment variable
+
+ Args:
+ value (str): the environment variable value to test
+ Returns:
+ if the value is truthy
+ """
+ return value != None and value.lower() in ["1", "y", "yes", "on", "true"]
diff --git a/json_module_graph/README.md b/json_module_graph/README.md
index f6d0f0a8..18c1f914 100644
--- a/json_module_graph/README.md
+++ b/json_module_graph/README.md
@@ -1,39 +1,18 @@
-# JSON module graph queries
+JSON module graph queries
+=========================
This directory contains `jq` scripts that query Soong's module graph.
`jq` may be installed through your distribution's repository.
-Usage:
+It's best to filter the full module graph to the part you are interested in
+because `jq` isn't too fast on the full graph.
+
+Usage
+-----
```
m json-module-graph
query.sh [-C] <command> <base-of-your-tree>/out/soong/module-graph.json [argument]
```
-The following commands are available:
-* `directDeps` prints the names of the direct dependencies of the given module
-* `distanceFromLeaves` prints the longest distance each module has from a leaf
- in the module graph within the transitive closure of given module
-* `filterSubtree` dumps only those modules that are in the given subtree of the
- source tree
-* `fullTransitiveDeps` returns the full transitive dependencies of the given
- module
-* `moduleTypeStats`: returns of a summary of the module types present on the
- input
-* `modulesOfType`: returns the names of modules of the input type
-* `printModule` prints all variations of a given module
-* `printModule`: returns a slightly more consise view of the input module
-* `properties`: returns the properties set in the input module, includes
- properties set via defaults
-* `transitiveDeps` prints the names of the transitive dependencies of the given
- module
-* `usedVariations` returns a map that shows which variations are used in the
- input and what values they take
-* `variantTransitions` summarizes the variant transitions in the transitive
- closure of the given module
-* `fullTransitiveDepsProperties` returns the properties set (including via
- defaults) grouped by module type of the modules in the transitive closure of
- the given module
-
-It's best to filter the full module graph to the part you are interested in
-because `jq` isn't too fast on the full graph.
+Run `./query.sh` with no arguments for additional usage information.
diff --git a/json_module_graph/distanceFromLeaves.jq b/json_module_graph/distanceFromLeaves.jq
index d48fa674..51b25bb2 100644
--- a/json_module_graph/distanceFromLeaves.jq
+++ b/json_module_graph/distanceFromLeaves.jq
@@ -1,4 +1,4 @@
-# CMD: Returns the maximum distance from a leaf for each module
+# CMD: Prints the longest distance each module has from a leaf in the module graph within the transitive closure of the given module
include "library";
@@ -53,4 +53,4 @@ def variantlessDistancesFromLeaves($root):
maxDepths($m)
;
-variantlessDistancesFromLeaves($arg) \ No newline at end of file
+variantlessDistancesFromLeaves($arg)
diff --git a/json_module_graph/findModulesWithTestSuitesValue.jq b/json_module_graph/findModulesWithTestSuitesValue.jq
new file mode 100644
index 00000000..7167bab6
--- /dev/null
+++ b/json_module_graph/findModulesWithTestSuitesValue.jq
@@ -0,0 +1,11 @@
+# CMD: Returns the modules that have a test_suites property with $arg as one of its values. Use $arg2 as the top-level field key to be collected, e.g. Name, Blueprint.
+
+def hasTestSuitesWithValue($a):
+ .[] | select(.Name == "Test_suites") | .Values | .[] | . == $a
+;
+
+[.[] |
+select(.Module.Android.SetProperties |
+ if . == null then [] else . end |
+ hasTestSuitesWithValue($arg)) |
+.[$arg2] ] | unique | sort | .[]
diff --git a/json_module_graph/fullTransitiveDeps.jq b/json_module_graph/fullTransitiveDeps.jq
index 39e12b7d..760fdcf7 100644
--- a/json_module_graph/fullTransitiveDeps.jq
+++ b/json_module_graph/fullTransitiveDeps.jq
@@ -1,7 +1,7 @@
-# CMD: Returns the modules in the transitive closure of module $arg
+# CMD: Returns the modules in the transitive closure of module(s) $arg ($arg is split on ",")
include "library";
-fullTransitiveDeps([$arg])
+fullTransitiveDeps($arg | split(","))
diff --git a/json_module_graph/fullTransitiveDepsProperties.jq b/json_module_graph/fullTransitiveDepsProperties.jq
index ca28d359..e24ad3e5 100644
--- a/json_module_graph/fullTransitiveDepsProperties.jq
+++ b/json_module_graph/fullTransitiveDepsProperties.jq
@@ -1,9 +1,9 @@
-# CMD: Returns the properties of module types in the transitive closure of module $arg
+# CMD: Returns the properties (including defaults), grouped by module type, of the modules in the transitive closure of module(s) $arg ($arg is split on ",")
include "library";
[((moduleGraphNoVariants | removeSelfEdges) as $m |
- [$arg] |
+ $arg | split(",") |
transitiveDeps($m)) as $names |
.[] |
select (IN(.Name; $names | .[]))] |
diff --git a/json_module_graph/library.jq b/json_module_graph/library.jq
index 6550e1af..c97a7802 100644
--- a/json_module_graph/library.jq
+++ b/json_module_graph/library.jq
@@ -105,6 +105,10 @@ def directDeps($m):
map($m[.] // []) + [.] | flatten | unique
;
+def reverseDeps($m):
+ .[] | select(.Deps[].Name == $m)
+;
+
def transitiveDeps($m):
{Prev: [], Next: .} |
until (.Prev == .Next; {Prev: .Next, Next: .Next | directDeps($m)}) |
diff --git a/json_module_graph/reverseDeps.jq b/json_module_graph/reverseDeps.jq
new file mode 100644
index 00000000..b8201248
--- /dev/null
+++ b/json_module_graph/reverseDeps.jq
@@ -0,0 +1,5 @@
+# CMD: Returns the modules containing $arg as a dependency
+
+include "library";
+
+reverseDeps($arg)
diff --git a/json_module_graph/transitiveDeps.jq b/json_module_graph/transitiveDeps.jq
index d0a55e55..811e1046 100644
--- a/json_module_graph/transitiveDeps.jq
+++ b/json_module_graph/transitiveDeps.jq
@@ -1,7 +1,7 @@
-# CMD: Returns the names of the transitive dependencies of the module named $arg
+# CMD: Returns the names of the transitive dependencies of the module(s) $arg ($arg is split on ",")
include "library";
(moduleGraphNoVariants | removeSelfEdges) as $m |
- [$arg] |
+ ($arg | split(",")) |
transitiveDeps($m)
diff --git a/linux.bazelrc b/linux.bazelrc
index 4fb75ab9..6fb2d9a9 100644
--- a/linux.bazelrc
+++ b/linux.bazelrc
@@ -1,10 +1,6 @@
import %workspace%/build/bazel/common.bazelrc
-build --host_platform //build/bazel/platforms:linux_x86_64
-
-# Workaround JVM segfault issue as suggested at
-# https://github.com/bazelbuild/bazel/issues/3236#issuecomment-310656266
build --sandbox_tmpfs_path=/tmp/
# Create a build number that will be injected later.
-build --workspace_status_command=build/bazel/scripts/gen_build_number.sh \ No newline at end of file
+build --workspace_status_command=build/bazel/scripts/gen_build_number.sh
diff --git a/mkcompare/README.md b/mkcompare/README.md
new file mode 100644
index 00000000..8f9d62bf
--- /dev/null
+++ b/mkcompare/README.md
@@ -0,0 +1,145 @@
+# mkcompare: Compare generated Android-TARGET.mk makefiles
+
+## Summary
+
+This tool shows the differences between two `Android-`_target_`.mk` makefiles.
+Such a makefile contains information about the Soong build graph that is exposed
+to Make (Android.mk) and to packaging rules.
+
+## Usage
+
+```shell
+# run product config
+$ lunch ${target}
+
+# run soong for reference build
+$ m nothing && cp out/soong/Android-${target}.mk Android-${target}.mk.ref
+
+# apply your local changes, then run soong again
+$ m nothing && cp out/soong/Android-${target}.mk Android-${target}.mk.new
+
+# compare!
+$ GOWORK=$PWD/build/bazel/mkcompare/go.work go run android/bazel/mkcompare/cmd \
+ -json \
+ Android-${target}.mk.ref \
+ Android-${target}.mk.new > ${target}.mk.json
+```
+
+## Options
+
+The comparator optionally:
+
+* Generates a JSON file with all the differences (`-json`). This option turns off all other output.
+* Stops after finding the first _N_ differing modules (`-max N`)
+* Ignores variables with given names (`--ignore_variables=VAR,...`)
+* Shows per-module differences, including per-variable value differences (`--show_module_diffs`)
+* For each module type, shows the names of the modules with this difference (`--show_type_modules`)
+
+## How it works
+
+We assume that both makefiles were generated for the same configuration (i.e.,
+the same _target_ value), and our goal is thus to find the difference that
+a change contributes to the Makefile interface between Soong and Make.
+
+Currently, the comparator inspects only the module sections of a file.
+
+A _module section_ looks something like this:
+```makefile
+include $(CLEAR_VARS) # <module type>
+LOCAL_MODULE := mymod
+LOCAL_MODULE_CLASS := ETC
+include $(BUILD_PREBUILT)
+```
+
+That is, it always starts with an `include $(CLEAR_VARS)` line (the 'module header')
+and spans until a blank line. Before the blank line there is an
+`include <mkfile>` line (the 'module footer'), which may be followed by a few extra
+variable assignments. Between those two `include` lines are the assignment lines.
+
+The name of the module is synthesized from the value of the `LOCAL_MODULE` variable
+and the target configuration, e.g., `apex_tzdata.com.android.tzdata|cls:ETC|target_arch:arm64`
+or `aac_dec_fuzzer|cls:EXECUTABLES|host_arch:x86_64`.
+
+The module header includes the module type as a comment (the plan was to use the
+_mkfile_ on the footer line, but it proved to be common to most of the modules,
+so Soong was modified to provide a detailed module type as a comment
+on the header line).
+
+A module section in the reference file is compared with the
+identically named module section of our file. The following items are compared:
+
+* module types
+* the number of extra lines following the section footer
+* the variables and their values
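+
+For intuition, here is a minimal Go sketch of the first step of that comparison:
+splitting the module names of the two files into missing, common, and extra sets.
+It mirrors the `Classify` helper used by the command-line tool, but the map value
+type and the sample keys below are stand-ins for illustration only.
+
+```go
+package main
+
+import (
+	"fmt"
+	"sort"
+)
+
+// classify splits module names into those present only in the reference file
+// (missing), those present in both (common), and those only in our file (extra).
+func classify(ref, ours map[string]struct{}) (missing, common, extra []string) {
+	for name := range ref {
+		if _, ok := ours[name]; ok {
+			common = append(common, name)
+		} else {
+			missing = append(missing, name)
+		}
+	}
+	for name := range ours {
+		if _, ok := ref[name]; !ok {
+			extra = append(extra, name)
+		}
+	}
+	sort.Strings(missing)
+	sort.Strings(common)
+	sort.Strings(extra)
+	return missing, common, extra
+}
+
+func main() {
+	ref := map[string]struct{}{"libfoo|cls:SHARED_LIBRARIES|target_arch:arm64": {}, "old|cls:ETC": {}}
+	ours := map[string]struct{}{"libfoo|cls:SHARED_LIBRARIES|target_arch:arm64": {}, "new|cls:ETC": {}}
+	missing, common, extra := classify(ref, ours)
+	fmt.Println(missing) // [old|cls:ETC]
+	fmt.Println(common)  // [libfoo|cls:SHARED_LIBRARIES|target_arch:arm64]
+	fmt.Println(extra)   // [new|cls:ETC]
+}
+```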
+
+## Summary Output
+
+The default output looks as follows:
+```
+159 missing modules, by type:
+ apex.apexBundle.files (159 modules)
+
+Missing variables (14):
+ ...
+ LOCAL_REQUIRED_MODULES, by type:
+ art_cc_library (2 modules)
+ art_cc_library_static (4 modules)
+ cc_library (28 modules)
+ cc_library_shared (2 modules)
+ LOCAL_SHARED_LIBRARIES, by type:
+ art_cc_library (60 modules)
+ ....
+Extra variables (7):
+ LOCAL_EXPORT_CFLAGS, by type:
+ cc_library (4 modules)
+ LOCAL_EXPORT_C_INCLUDE_DEPS, by type:
+ art_cc_library (28 modules)
+ ...
+Diff variables: (18)
+ LOCAL_EXPORT_C_INCLUDE_DEPS, by type:
+ aidl_interface.go_android/soong/aidl.wrapLibraryFactory.func1__topDownMutatorModule (1721 modules)
+ art_cc_library (12 modules)
+ LOCAL_PREBUILT_MODULE_FILE, by type:
+ apex.apexBundle (7 modules)
+ apex.apexBundle.files (625 modules)
+ ...
+```
+
+## JSON Output
+
+It looks like this:
+```JSON
+{
+ "RefPath": "<...>/out/soong/Android-aosp_arm64.mk",
+ "OurPath": "<...>/out.mixed/soong/Android-aosp_arm64.mk",
+ "MissingModules": [
+ "adbd.com.android.adbd|cls:EXECUTABLES|target_arch:arm64",
+ "android.hardware.common-V2-ndk.com.android.media.swcodec|cls:SHARED_LIBRARIES|target_arch:arm64",
+ "android.hardware.graphics.allocator-V1-ndk.com.android.media.swcodec|cls:SHARED_LIBRARIES|target_arch:arm64",
+ "android.hardware.graphics.allocator@2.0.com.android.media.swcodec|cls:SHARED_LIBRARIES|target_arch:arm64",
+ ...
+ ],
+ "DiffModules": [
+ {
+ "Name": "_makenames|cls:EXECUTABLES|target_arch:arm64",
+ "RefLocation": 137674,
+ "OurLocation": 137673,
+ "MissingVars": [ "LOCAL_SHARED_LIBRARIES", "LOCAL_STATIC_LIBRARIES" ],
+ "DiffVars": [
+ {
+ "Name": "LOCAL_PREBUILT_MODULE_FILE",
+ "MissingItems": [ "out/soong/.intermediates/external/libcap/_makenames/android_arm64_armv8-a/_makenames" ],
+ "ExtraItems": [ "out/bazel-bin/external/libcap/_makenames" ]
+ },
+ {
+ "Name": "LOCAL_SOONG_UNSTRIPPED_BINARY",
+ "MissingItems": [ "out/soong/.intermediates/external/libcap/_makenames/android_arm64_armv8-a/unstripped/_makenames" ],
+ "ExtraItems": [ "out/bazel-bin/external/libcap/_makenames_unstripped" ]
+ }
+ ]
+ },
+ ...
+ ]
+}
+```
+Use a JSON query tool like [`jq`](https://github.com/stedolan/jq) to slice and dice it.
diff --git a/mkcompare/cmd/mkcompare.go b/mkcompare/cmd/mkcompare.go
new file mode 100644
index 00000000..550d1ece
--- /dev/null
+++ b/mkcompare/cmd/mkcompare.go
@@ -0,0 +1,253 @@
+package main
+
+import (
+ "android/bazel/mkcompare"
+ "bufio"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "math"
+ "os"
+ "runtime"
+ "runtime/pprof"
+ "sort"
+ "strings"
+)
+
+var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to `file`")
+var memprofile = flag.String("memprofile", "", "write memory profile to `file`")
+var ignoredVariables = flag.String("ignore_variables", "", "comma-separated list of variables to ignore")
+var maxDiff = flag.Int("max", math.MaxInt, "stop after finding N different modules")
+var showPerModuleDiffs = flag.Bool("show_module_diffs", false, "show per-module differences")
+var showModulesPerType = flag.Bool("show_type_modules", false, "show modules for each differing type")
+var jsonOut = flag.Bool("json", false, "generate JSON output")
+var showSummary = flag.Bool("show_summary", true, "show summary")
+var ignoredVarSet map[string]bool
+
+func maybeQuit(err error) {
+ if err == nil {
+ return
+ }
+
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+}
+
+func parse(path string) *mkcompare.MkFile {
+ f, err := os.Open(path)
+ maybeQuit(err)
+ mkFile, err := mkcompare.ParseMkFile(bufio.NewReader(f))
+ maybeQuit(err)
+ f.Close()
+ return mkFile
+}
+
+func processArgs() {
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, `usage: mkcompare <options> refMkFile mkFile`)
+ flag.PrintDefaults()
+ os.Exit(2)
+ }
+ flag.Parse()
+ if len(flag.Args()) != 2 {
+ flag.Usage()
+ }
+ if *jsonOut {
+ *showPerModuleDiffs = false
+ *showModulesPerType = false
+ *showSummary = false
+ }
+}
+
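+// goParse parses the makefile at path in a separate goroutine and returns a
+// channel that delivers the parsed result.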
+func goParse(path string) chan *mkcompare.MkFile {
+ ch := make(chan *mkcompare.MkFile, 1)
+ go func() { ch <- parse(path) }()
+ return ch
+}
+
+func printVars(title string, modulesByVar map[string][]string, mkFile *mkcompare.MkFile) {
+ if len(modulesByVar) > 0 {
+ fmt.Println(title)
+ for varName, mods := range modulesByVar {
+ printModulesByType(fmt.Sprintf(" %s, by type:", varName), mods, mkFile)
+ }
+ }
+}
+
+func printModulesByType(title string, moduleNames []string, mkFile *mkcompare.MkFile) {
+ // Indent all lines by the title's indent
+ prefix := title
+ for i, c := range title {
+ if string(c) != " " {
+ prefix = title[0:i]
+ break
+ }
+ }
+ fmt.Println(title)
+ sortedTypes, byType := mkFile.ModulesByType(moduleNames)
+ for _, typ := range sortedTypes {
+ fmt.Printf("%s %s (%d modules)\n", prefix, typ, len(byType[typ]))
+ if !*showPerModuleDiffs {
+ continue
+ }
+ for _, m := range byType[typ] {
+ fmt.Println(prefix, " ", m)
+ }
+ }
+}
+
+type diffMod struct {
+ Name string
+ mkcompare.MkModuleDiff
+ RefLocation int
+ OurLocation int
+ Type string
+ ReferenceType string `json:",omitempty"`
+}
+
+type missingOrExtraMod struct {
+ Name string
+ Location int
+ Type string
+}
+
+type Diff struct {
+ RefPath string
+ OurPath string
+ ExtraModules []missingOrExtraMod `json:",omitempty"`
+ MissingModules []missingOrExtraMod `json:",omitempty"`
+ DiffModules []diffMod `json:",omitempty"`
+}
+
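+// process compares the two parsed makefiles, prints the requested summary
+// and/or JSON report, and reports whether the files are equivalent.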
+func process(refMkFile, ourMkFile *mkcompare.MkFile) bool {
+ diff := Diff{RefPath: refMkFile.Path, OurPath: ourMkFile.Path}
+ missing, common, extra :=
+ mkcompare.Classify(refMkFile.Modules, ourMkFile.Modules, func(_ string) bool { return true })
+
+ sort.Strings(missing)
+ if len(missing) > 0 {
+ if *showSummary {
+ printModulesByType(fmt.Sprintf("%d missing modules, by type:", len(missing)),
+ missing, refMkFile)
+ }
+ if *jsonOut {
+ for _, name := range missing {
+ mod := refMkFile.Modules[name]
+ diff.MissingModules = append(diff.MissingModules,
+ missingOrExtraMod{name, mod.Location, mod.Type})
+ }
+ }
+ }
+
+ sort.Strings(extra)
+ if len(extra) > 0 {
+ if *showSummary {
+ printModulesByType(fmt.Sprintf("%d extra modules, by type:", len(extra)), extra, ourMkFile)
+ }
+ if *jsonOut {
+ for _, name := range extra {
+ mod := ourMkFile.Modules[name]
+ diff.ExtraModules = append(diff.ExtraModules,
+ missingOrExtraMod{name, mod.Location, mod.Type})
+ }
+ }
+ }
+	filesAreEqual := len(missing)+len(extra) == 0
+
+ nDiff := 0
+ sort.Strings(common)
+ filterVars := func(name string) bool {
+ _, ok := ignoredVarSet[name]
+ return !ok
+ }
+ var missingVariables = make(map[string][]string)
+ var extraVariables = make(map[string][]string)
+ var diffVariables = make(map[string][]string)
+ for _, name := range common {
+ d := mkcompare.Compare(refMkFile.Modules[name], ourMkFile.Modules[name], filterVars)
+ if d.Empty() {
+ continue
+ }
+ filesAreEqual = false
+ var refType string
+ if d.Ref.Type != d.Our.Type {
+ refType = d.Ref.Type
+ }
+ if *jsonOut {
+ diff.DiffModules = append(diff.DiffModules, diffMod{
+ MkModuleDiff: d,
+ Name: name,
+ RefLocation: d.Ref.Location,
+ OurLocation: d.Our.Location,
+ Type: d.Our.Type,
+ ReferenceType: refType,
+ })
+ }
+ nDiff = nDiff + 1
+ if nDiff >= *maxDiff {
+ fmt.Printf("Only the first %d module diffs are processed\n", *maxDiff)
+ break
+ }
+ addToDiffList := func(d map[string][]string, items []string) {
+ if len(items) == 0 {
+ return
+ }
+ for _, v := range items {
+ d[v] = append(d[v], name)
+ }
+ }
+ addToDiffList(missingVariables, d.MissingVars)
+ addToDiffList(extraVariables, d.ExtraVars)
+ for _, dv := range d.DiffVars {
+ diffVariables[dv.Name] = append(diffVariables[dv.Name], name)
+ }
+ if *showPerModuleDiffs {
+ fmt.Println()
+ d.Print(os.Stdout, name)
+ }
+ }
+ if *showSummary {
+ printVars(fmt.Sprintf("\nMissing variables (%d):", len(missingVariables)), missingVariables, refMkFile)
+ printVars(fmt.Sprintf("\nExtra variables (%d):", len(extraVariables)), extraVariables, ourMkFile)
+ printVars(fmt.Sprintf("\nDiff variables: (%d)", len(diffVariables)), diffVariables, refMkFile)
+ }
+ if *jsonOut {
+ enc := json.NewEncoder(os.Stdout)
+ enc.SetIndent("", " ")
+ enc.Encode(diff)
+ }
+ return filesAreEqual
+}
+
+func main() {
+ processArgs()
+ if *cpuprofile != "" {
+ f, err := os.Create(*cpuprofile)
+ maybeQuit(err)
+ defer f.Close() // error handling omitted for example
+ maybeQuit(pprof.StartCPUProfile(f))
+ defer pprof.StopCPUProfile()
+ }
+ chRef := goParse(flag.Arg(0))
+ chNew := goParse(flag.Arg(1))
+ if *ignoredVariables != "" {
+ ignoredVarSet = make(map[string]bool)
+ for _, v := range strings.Split(*ignoredVariables, ",") {
+ ignoredVarSet[v] = true
+ }
+ }
+ refMkFile, newMkFile := <-chRef, <-chNew
+ refMkFile.Path = flag.Arg(0)
+ newMkFile.Path = flag.Arg(1)
+ equal := process(refMkFile, newMkFile)
+ if *memprofile != "" {
+ f, err := os.Create(*memprofile)
+ maybeQuit(err)
+ defer f.Close() // error handling omitted for example
+ runtime.GC() // get up-to-date statistics
+ maybeQuit(pprof.WriteHeapProfile(f))
+ }
+ if !equal {
+ os.Exit(2)
+ }
+}
diff --git a/mkcompare/elfdiff/elfdiff.go b/mkcompare/elfdiff/elfdiff.go
new file mode 100644
index 00000000..4c44eb1f
--- /dev/null
+++ b/mkcompare/elfdiff/elfdiff.go
@@ -0,0 +1,237 @@
+package main
+
+// elfdiff compares two ELF files. Each one can be a standalone file or an archive (.a file)
+// member.
+import (
+ "android/bazel/mkcompare"
+ "bytes"
+ "debug/elf"
+ "flag"
+ "fmt"
+ "io"
+ "os"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+type myElf struct {
+ *elf.File
+ path string
+ sectionsByName map[string]*elf.Section
+}
+
+func always(_ string) bool {
+ return true
+}
+
+func processArgs() {
+ flag.Parse()
+ if len(flag.Args()) != 2 {
+		fmt.Fprintf(os.Stderr, "usage: %s REF-ELF OUR-ELF\n", os.Args[0])
+		os.Exit(1)
+ }
+}
+
+func maybeQuit(err error) {
+ if err == nil {
+ return
+ }
+
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+}
+
+func main() {
+ processArgs()
+ elfRef := elfRead(flag.Arg(0))
+ elfOur := elfRead(flag.Arg(1))
+ missing, common, extra := mkcompare.Classify(elfRef.sectionsByName, elfOur.sectionsByName, always)
+ var hasDiff bool
+ newDifference := func() {
+ if !hasDiff {
+ hasDiff = true
+ }
+ }
+
+ if len(missing)+len(extra) > 0 {
+ newDifference()
+ }
+ if len(missing) > 0 {
+ sort.Strings(missing)
+ fmt.Print("Missing sections:\n ", strings.Join(missing, "\n "), "\n")
+ }
+ if len(extra) > 0 {
+ sort.Strings(extra)
+ fmt.Print("Extra sections:\n ", strings.Join(extra, "\n "), "\n")
+ }
+ commonDiff := false
+ newCommonDifference := func(format string, args ...interface{}) {
+ if !commonDiff {
+ fmt.Print("Sections that differ:\n")
+ commonDiff = true
+ }
+ newDifference()
+ fmt.Printf(format, args...)
+ }
+ sort.Strings(common)
+ for _, sname := range common {
+ sectionRef := elfRef.sectionsByName[sname]
+ sectionOur := elfOur.sectionsByName[sname]
+ refSize := int64(sectionRef.Size)
+ ourSize := int64(sectionOur.Size)
+ if refSize != ourSize {
+ newCommonDifference(" %s:%d%+d\n", sname, refSize, ourSize-refSize)
+ continue
+ }
+ dataOur, err := sectionOur.Data()
+ maybeQuit(err)
+ dataRef, err := sectionRef.Data()
+ maybeQuit(err)
+ if bytes.Compare(dataRef, dataOur) != 0 {
+ newCommonDifference(" %s:%d(data)\n", sname, refSize)
+ }
+ }
+
+ if hasDiff {
+ os.Exit(1)
+ }
+}
+
+const arMagic = "!<arch>\n"
+const arExtendedEntry = "//"
+
+// elfRead returns an ELF file reader for the given path. If the path has the
+// <archive>(<member>) format, <archive> is an ar archive (usually a .a file)
+// and <member> is an ELF object inside it.
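+// For example (illustrative paths), "out/ref/libfoo.a(foo.o)" refers to the ELF
+// object foo.o inside the archive out/ref/libfoo.a, while "out/ref/libbar.so" is
+// read as a standalone ELF file.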
+func elfRead(path string) *myElf {
+ var reader io.ReaderAt
+ var err error
+ n := strings.LastIndex(path, "(")
+ if n > 0 && strings.HasSuffix(path, ")") {
+ reader = newArchiveReader(path[0:n], path[n+1:len(path)-1])
+ } else {
+ reader, err = os.Open(path)
+ maybeQuit(err)
+ }
+ res := &myElf{path: path}
+ res.File, err = elf.NewFile(reader)
+ maybeQuit(err)
+
+ // Build ELF sections map. Only allocatable sections are considered.
+ res.sectionsByName = make(map[string]*elf.Section)
+ for _, s := range res.File.Sections {
+ if _, ok := res.sectionsByName[s.Name]; ok {
+ fmt.Fprintf(os.Stderr, "%s: duplicate section %s, ignoring\n", res.path, s.Name)
+ continue
+ }
+ if s.Flags&elf.SHF_ALLOC != 0 && s.Type != elf.SHT_NOBITS {
+ res.sectionsByName[s.Name] = s
+ }
+ }
+ return res
+}
+
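+// memberHeader is the raw 60-byte header that precedes each member in an ar archive.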
+type memberHeader []byte
+
+const headerSize = 60
+
+// memberReader reads the contents of a single archive member. It implements the
+// io.ReaderAt interface so it can be passed to elf.NewFile.
+type memberReader struct {
+ file *os.File
+ start int64
+ size int64
+}
+
+func (m memberReader) ReadAt(p []byte, off int64) (n int, err error) {
+ nToRead := int64(len(p))
+ nHas := m.size - off
+ if nHas <= 0 {
+ return 0, io.EOF
+ }
+ if nToRead > nHas {
+ nToRead = nHas
+ }
+ return m.file.ReadAt(p[0:nToRead], m.start+off)
+}
+
+func (h memberHeader) memberSize() int64 {
+ n, err := strconv.ParseInt(strings.TrimSpace(string(h[48:58])), 10, 64)
+ maybeQuit(err)
+	return (n + 1) & -2 // member data is 2-byte aligned, so round the size up to an even number
+}
+
+// newArchiveReader returns a reader for an archive member.
+// The format of the ar archive is sort of documented in Wikipedia:
+// https://en.wikipedia.org/wiki/Ar_(Unix)
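+// For reference, in the common ar format each member is preceded by a 60-byte
+// header laid out as name[0:16], mtime[16:28], uid[28:34], gid[34:40],
+// mode[40:48], size[48:58], terminator[58:60]; this code relies only on the
+// name and size fields.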
+func newArchiveReader(path string, member string) io.ReaderAt {
+ f, err := os.Open(path)
+ maybeQuit(err)
+ fStat, err := f.Stat()
+ maybeQuit(err)
+ fileSize := fStat.Size()
+
+ var nextHeaderPos int64 = 8
+ var contentPos int64
+ var header memberHeader = make([]byte, headerSize)
+
+ // fill the buffer, reading from given position.
+ readFully := func(buf []byte, at int64) {
+ n, err := f.ReadAt(buf, at)
+ maybeQuit(err)
+ if n < len(buf) {
+ maybeQuit(fmt.Errorf("%s is corrupt, read %d bytes instead of %d\n", path, n, len(buf)))
+ }
+ }
+ // Read the header, update contents and next header pointers
+ readHeader := func() {
+ readFully(header, nextHeaderPos)
+ contentPos = nextHeaderPos + headerSize
+ nextHeaderPos = contentPos + header.memberSize()
+ }
+
+ // Read the file header
+ buf := make([]byte, len(arMagic))
+ readFully(buf, 0)
+ if bytes.Compare([]byte(arMagic), buf) != 0 {
+ maybeQuit(fmt.Errorf("%s is not an ar archive\n", path))
+ }
+
+ entry := []byte(member + "/") // `/` is member name sentinel
+ if len(entry) <= 16 {
+		// The name fits into the member header's name field, so just scan the members.
+ for nextHeaderPos < fileSize {
+ readHeader()
+ if bytes.Compare(entry, header[0:len(entry)]) == 0 {
+ return &memberReader{f, contentPos, header.memberSize()}
+ }
+ }
+ } else {
+		// If a member's name is `/` followed by digits, those digits are an offset to
+		// its real name in the 'extended names' member.
+		// The extended names member is itself named `//`, and it should precede the
+		// members with longer names.
+ var extendedNames []byte
+ for nextHeaderPos < fileSize {
+ readHeader()
+ if bytes.Compare(header[0:2], []byte(arExtendedEntry)) == 0 {
+ extendedNames = make([]byte, header.memberSize())
+ readFully(extendedNames, contentPos)
+ } else if bytes.Compare(header[0:1], []byte("/")) != 0 {
+ continue
+ }
+ if off, err := strconv.ParseInt(strings.TrimSpace(string(header[1:16])), 10, 64); err == nil {
+				// A member with an extended name.
+ if extendedNames == nil {
+ maybeQuit(fmt.Errorf("%s: extended names entry is missing in archive\n", path))
+ }
+ if off+int64(len(entry)) <= int64(len(extendedNames)) &&
+ bytes.Compare(entry, extendedNames[off:off+int64(len(entry))]) == 0 {
+ return &memberReader{f, contentPos, header.memberSize()}
+ }
+ }
+ }
+ }
+ maybeQuit(fmt.Errorf("%s: no such member %s", path, member))
+ return nil
+}
diff --git a/mkcompare/go.mod b/mkcompare/go.mod
new file mode 100644
index 00000000..0bb084cb
--- /dev/null
+++ b/mkcompare/go.mod
@@ -0,0 +1,4 @@
+module android/bazel/mkcompare
+
+require github.com/google/go-cmp v0.0.0
+go 1.19
diff --git a/mkcompare/go.work b/mkcompare/go.work
new file mode 100644
index 00000000..298f26cd
--- /dev/null
+++ b/mkcompare/go.work
@@ -0,0 +1,8 @@
+go 1.19
+
+use (
+ .
+ ../../../external/go-cmp
+)
+
+replace github.com/google/go-cmp v0.0.0 => ../../../external/go-cmp
diff --git a/mkcompare/mkdiff.go b/mkcompare/mkdiff.go
new file mode 100644
index 00000000..9fdab579
--- /dev/null
+++ b/mkcompare/mkdiff.go
@@ -0,0 +1,200 @@
+package mkcompare
+
+import (
+ "fmt"
+ "io"
+ "regexp"
+ "sort"
+ "strings"
+)
+
+// Classify takes two maps with string keys and returns the lists of left-only, common,
+// and right-only keys. Keys rejected by varFilter are skipped.
+func Classify[V interface{}](mLeft map[string]V, mRight map[string]V, varFilter func(_ string) bool) (left []string, common []string, right []string) {
+ for k := range mLeft {
+ if !varFilter(k) {
+			continue
+ }
+ if _, ok := mRight[k]; ok {
+ common = append(common, k)
+ } else {
+ left = append(left, k)
+ }
+ }
+ for k := range mRight {
+ if !varFilter(k) {
+			continue
+ }
+ if _, ok := mLeft[k]; !ok {
+ right = append(right, k)
+ }
+ }
+
+ return left, common, right
+}
+
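+// normalizer maps a make variable name to a function that canonicalizes the
+// reference and our values before they are compared, so that known benign
+// differences (such as install source prefixes or /bazelCombined/ vs /combined/
+// paths) are not reported as diffs.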
+var normalizer = map[string]func(ref, our string) (string, string){
+ "LOCAL_SOONG_INSTALL_PAIRS": normalizeInstallPairs,
+ "LOCAL_COMPATIBILITY_SUPPORT_FILES": normalizeInstallPairs,
+ "LOCAL_PREBUILT_MODULE_FILE": normalizePrebuiltModuleFile,
+ "LOCAL_SOONG_CLASSES_JAR": normalizePrebuiltModuleFile,
+ "LOCAL_SOONG_HEADER_JAR": normalizePrebuiltModuleFile,
+}
+
+func normalizePrebuiltModuleFile(ref string, our string) (string, string) {
+ return strings.ReplaceAll(ref, "/bazelCombined/", "/combined/"), strings.ReplaceAll(our, "/bazelCombined/", "/combined/")
+}
+
+var rexRemoveInstallSource = regexp.MustCompile("([^ ]+:)")
+
+func normalizeInstallPairs(ref string, our string) (string, string) {
+ return rexRemoveInstallSource.ReplaceAllString(ref, ""), rexRemoveInstallSource.ReplaceAllString(our, "")
+}
+
+type MkVarDiff struct {
+ Name string
+ MissingItems []string `json:",omitempty"`
+ ExtraItems []string `json:",omitempty"`
+}
+
+// MkModuleDiff holds the differences between a module in the reference mkfile and the corresponding module in ours.
+type MkModuleDiff struct {
+ Ref *MkModule `json:"-"`
+ Our *MkModule `json:"-"`
+ MissingVars []string `json:",omitempty"`
+ ExtraVars []string `json:",omitempty"`
+ DiffVars []MkVarDiff `json:",omitempty"`
+ TypeDiffers bool `json:",omitempty"`
+ ExtrasDiffer bool `json:",omitempty"`
+}
+
+// Empty returns true if there is no difference
+func (d *MkModuleDiff) Empty() bool {
+ return !d.TypeDiffers && !d.ExtrasDiffer && len(d.MissingVars) == 0 && len(d.ExtraVars) == 0 && len(d.DiffVars) == 0
+}
+
+// Print prints the difference
+func (d *MkModuleDiff) Print(sink io.Writer, name string) {
+ if d.Empty() {
+ return
+ }
+ fmt.Fprintf(sink, "%s (ref line %d, our line %d):\n", name, d.Ref.Location, d.Our.Location)
+ if d.TypeDiffers {
+ fmt.Fprintf(sink, " type %s <-> %s\n", d.Ref.Type, d.Our.Type)
+ }
+
+	if d.ExtrasDiffer {
+		fmt.Fprintf(sink, "  extras %d <-> %d\n", d.Ref.Extras, d.Our.Extras)
+	}
+
+	if len(d.MissingVars)+len(d.ExtraVars) > 0 {
+		fmt.Fprintf(sink, "  variables:\n")
+		if len(d.MissingVars) > 0 {
+			fmt.Fprintf(sink, "    -%v\n", d.MissingVars)
+		}
+		if len(d.ExtraVars) > 0 {
+			fmt.Fprintf(sink, "    +%v\n", d.ExtraVars)
+		}
+	}
+	for _, vdiff := range d.DiffVars {
+		fmt.Fprintf(sink, "    %s value:\n", vdiff.Name)
+		if len(vdiff.MissingItems) > 0 {
+			fmt.Fprintf(sink, "      -%v\n", vdiff.MissingItems)
+		}
+		if len(vdiff.ExtraItems) > 0 {
+			fmt.Fprintf(sink, "      +%v\n", vdiff.ExtraItems)
+		}
+	}
+}
+
+// Compare returns the difference for a module. Only the variables filtered by the given
+// function are considered.
+func Compare(refMod *MkModule, ourMod *MkModule, varFilter func(string) bool) MkModuleDiff {
+ d := MkModuleDiff{
+ Ref: refMod,
+ Our: ourMod,
+ TypeDiffers: refMod.Type != ourMod.Type,
+ ExtrasDiffer: refMod.Extras != ourMod.Extras,
+ }
+ var common []string
+ d.MissingVars, common, d.ExtraVars = Classify(d.Ref.Variables, d.Our.Variables, varFilter)
+
+ if len(common) > 0 {
+ for _, v := range common {
+			doSort := true // TODO(asmundak): find out whether some variables' values should not be sorted
+ refValue := d.Ref.Variables[v]
+ ourValue := d.Our.Variables[v]
+ if f, ok := normalizer[v]; ok {
+ refValue, ourValue = f(refValue, ourValue)
+ }
+ missingItems, extraItems := compareVariableValues(refValue, ourValue, doSort)
+ if len(missingItems)+len(extraItems) > 0 {
+ d.DiffVars = append(d.DiffVars, MkVarDiff{
+ Name: v,
+ MissingItems: missingItems,
+ ExtraItems: extraItems,
+ })
+ }
+ }
+ }
+ return d
+}
+
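+// compareVariableValues treats ref and our as space-separated token lists
+// (sorted first when sortItems is true) and walks them in a merge-style pass,
+// returning the tokens present only in ref (missing) and only in our (extra).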
+func compareVariableValues(ref string, our string, sortItems bool) ([]string, []string) {
+ refTokens := strings.Split(ref, " ")
+ ourTokens := strings.Split(our, " ")
+ if sortItems {
+ sort.Strings(refTokens)
+ sort.Strings(ourTokens)
+ }
+ var missing []string
+ var extra []string
+ refStream := &tokenStream{refTokens, 0}
+ ourStream := &tokenStream{ourTokens, 0}
+ refToken := refStream.next()
+ ourToken := ourStream.next()
+ compare := 0
+ for refToken != tsEOF || ourToken != tsEOF {
+ if refToken == tsEOF {
+ compare = 1
+ } else if ourToken == tsEOF {
+ compare = -1
+ } else {
+ compare = 0
+ if refToken <= ourToken {
+ compare = -1
+ }
+ if refToken >= ourToken {
+ compare = compare + 1
+ }
+ }
+ switch compare {
+ case -1:
+ missing = append(missing, refToken)
+ refToken = refStream.next()
+ case 0:
+ refToken = refStream.next()
+ ourToken = ourStream.next()
+ case 1:
+ extra = append(extra, ourToken)
+ ourToken = ourStream.next()
+ }
+ }
+ return missing, extra
+}
+
+// tsEOF is the sentinel value returned by tokenStream.next once the stream is exhausted.
+const tsEOF = " "
+
+type tokenStream struct {
+ tokens []string
+ current int
+}
+
+func (ts *tokenStream) next() string {
+ if ts.current >= len(ts.tokens) {
+ return tsEOF
+ }
+ ret := ts.tokens[ts.current]
+ ts.current = ts.current + 1
+ return ret
+}
diff --git a/mkcompare/mkdiff_test.go b/mkcompare/mkdiff_test.go
new file mode 100644
index 00000000..b4b4f402
--- /dev/null
+++ b/mkcompare/mkdiff_test.go
@@ -0,0 +1,122 @@
+package mkcompare
+
+import (
+ "github.com/google/go-cmp/cmp"
+ "reflect"
+ "testing"
+)
+
+func TestClassify(t *testing.T) {
+ tests := []struct {
+ name string
+ mLeft map[string]int
+ mRight map[string]int
+ wantLeft []string
+ wantCommon []string
+ wantRight []string
+ }{
+ {
+ name: "one",
+ mLeft: map[string]int{"a": 1, "b": 2},
+ mRight: map[string]int{"b": 3, "c": 4},
+ wantLeft: []string{"a"},
+ wantCommon: []string{"b"},
+ wantRight: []string{"c"},
+ },
+ {
+ name: "two",
+ mLeft: map[string]int{"a": 1, "b": 2},
+ mRight: map[string]int{"a": 3},
+ wantLeft: []string{"b"},
+ wantCommon: []string{"a"},
+ wantRight: nil,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotLeft, gotCommon, gotRight := Classify(tt.mLeft, tt.mRight, func(_ string) bool { return true })
+ if !reflect.DeepEqual(gotLeft, tt.wantLeft) {
+ t.Errorf("classify() gotLeft = %v, want %v", gotLeft, tt.wantLeft)
+ }
+ if !reflect.DeepEqual(gotCommon, tt.wantCommon) {
+ t.Errorf("classify() gotCommon = %v, want %v", gotCommon, tt.wantCommon)
+ }
+ if !reflect.DeepEqual(gotRight, tt.wantRight) {
+ t.Errorf("classify() gotRight = %v, want %v", gotRight, tt.wantRight)
+ }
+ })
+ }
+}
+
+func Test_compareVariableValues(t *testing.T) {
+ tests := []struct {
+ name string
+ ref string
+ our string
+ sort bool
+ want_missing []string
+ want_extra []string
+ }{
+ {name: "Same", ref: "x a b", our: "a b x", sort: true},
+ {name: "diff1", ref: "a b c", our: "d a", sort: true, want_missing: []string{"b", "c"}, want_extra: []string{"d"}},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got_missing, got_extra := compareVariableValues(tt.ref, tt.our, tt.sort)
+ if diff := cmp.Diff(got_missing, tt.want_missing); diff != "" {
+ t.Errorf("missing items differ: %s", diff)
+ }
+ if diff := cmp.Diff(got_extra, tt.want_extra); diff != "" {
+ t.Errorf("extra items differ: %s", diff)
+ }
+ })
+ }
+}
+
+func TestCompare(t *testing.T) {
+ refMod1 := MkModule{Type: "foo", Location: 1, Variables: map[string]string{"var1": "a", "var2": "b"}}
+ ourMod1 := MkModule{Type: "foo", Location: 3, Variables: map[string]string{"var1": "a", "var2": "c"}}
+ tests := []struct {
+ name string
+ refMod *MkModule
+ ourMod *MkModule
+ isGoodVar func(string) bool
+ want MkModuleDiff
+ }{
+ {
+ name: "Ignored vars",
+ refMod: &refMod1,
+ ourMod: &ourMod1,
+ isGoodVar: func(v string) bool { return v == "var1" },
+ want: MkModuleDiff{},
+ },
+ {
+ name: "Different values",
+ refMod: &refMod1,
+ ourMod: &ourMod1,
+ isGoodVar: func(_ string) bool { return true },
+ want: MkModuleDiff{
+ DiffVars: []MkVarDiff{{"var2", []string{"b"}, []string{"c"}}},
+ },
+ },
+ {
+ name: "DifferentVars",
+ refMod: &refMod1,
+ ourMod: &MkModule{Type: "foo", Variables: map[string]string{"var2": "b", "var3": "c"}},
+ isGoodVar: func(_ string) bool { return true },
+ want: MkModuleDiff{
+ MissingVars: []string{"var1"},
+ ExtraVars: []string{"var3"},
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ tt.want.Ref = tt.refMod
+ tt.want.Our = tt.ourMod
+ if got := Compare(tt.refMod, tt.ourMod, tt.isGoodVar); !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("Compare() = %v, want %v (diff = %s)", got, tt.want, cmp.Diff(got, tt.want))
+ }
+ })
+ }
+}
diff --git a/mkcompare/mkfile.go b/mkcompare/mkfile.go
new file mode 100644
index 00000000..f2e98857
--- /dev/null
+++ b/mkcompare/mkfile.go
@@ -0,0 +1,175 @@
+package mkcompare
+
+import (
+ "bufio"
+ "fmt"
+ "github.com/google/go-cmp/cmp"
+ "io"
+ "regexp"
+ "sort"
+ "strings"
+)
+
+type MkVariable struct {
+ Name string
+ Value string
+}
+
+type MkModule struct {
+ Type string
+ Location int
+ Extras int
+ Variables map[string]string
+}
+
+type MkFile struct {
+ Path string
+ Modules map[string]*MkModule
+}
+
+type myScanner struct {
+ *bufio.Scanner
+ lineNo int
+}
+
+func (s *myScanner) Scan() bool {
+ if s.Scanner.Scan() {
+ s.lineNo = s.lineNo + 1
+ return true
+ }
+ return false
+}
+
+var (
+ rexEmpty = regexp.MustCompile("^ *$")
+ rexHeader = regexp.MustCompile("^include +\\Q$(CLEAR_VARS)\\E *(# *(.*))?")
+ rexAssign = regexp.MustCompile("^ *(.*) ([:+])= *(.*)$")
+ rexFooter = regexp.MustCompile("^-?include *(.*)$")
+ rexIgnore1 = regexp.MustCompile("\\$\\(call dist-for-goals")
+ rexIgnore2 = regexp.MustCompile("\\$\\(LOCAL_INSTALLED_MODULE\\)")
+)
+
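+// Expected result lengths of FindStringSubmatchIndex for the regexps above:
+// 2 * (number of capture groups + 1).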
+const (
+ rexPairsHeader = 6
+ rexPairsAssign = 8
+ rexPairsFooter = 4
+)
+
+func (mk *MkFile) handleModule(scanner *myScanner, moduleType string) (*MkModule, error) {
+ mod := MkModule{Location: scanner.lineNo, Type: moduleType, Variables: make(map[string]string)}
+ includePath := ""
+ for scanner.Scan() {
+ line := scanner.Text()
+ if rexEmpty.MatchString(line) {
+ break
+ }
+ if m := rexAssign.FindStringSubmatchIndex(line); len(m) == rexPairsAssign {
+ v := line[m[2]:m[3]]
+ if line[m[4]:m[5]] == "+" {
+ mod.Variables[v] = mod.Variables[v] + line[m[6]:m[7]]
+ } else {
+ mod.Variables[v] = line[m[6]:m[7]]
+ }
+ } else if m := rexFooter.FindStringSubmatchIndex(line); len(m) == rexPairsFooter {
+ if includePath != "" {
+ return nil, fmt.Errorf("%d: second include for module", scanner.lineNo)
+ }
+ includePath = strings.TrimSpace(line[m[2]:m[3]])
+ if mod.Type == "" {
+ mod.Type = includePath
+ }
+ } else if mod.Type != "" {
+ mod.Extras = mod.Extras + 1
+ continue
+ } else if rexIgnore1.MatchString(line) {
+ continue
+ } else if rexIgnore2.MatchString(line) {
+ continue
+ } else {
+ return nil, fmt.Errorf("%d: unexpected line:\n%s", scanner.lineNo, line)
+ }
+ }
+ return &mod, scanner.Err()
+}
+
+func (mk *MkFile) ModulesByType(names []string) (sortedKeys []string, byType map[string][]string) {
+ byType = make(map[string][]string)
+ for _, name := range names {
+ mod, ok := mk.Modules[name]
+			continue
+ break
+ }
+ mt := mod.Type
+ v, ok := byType[mt]
+ if !ok {
+ sortedKeys = append(sortedKeys, mt)
+ }
+ byType[mt] = append(v, name)
+ }
+ sort.Strings(sortedKeys)
+ return
+}
+
+func (mk *MkFile) moduleKey(mod *MkModule) (string, error) {
+ // Synthesize unique module name.
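+	// The key is "<LOCAL_MODULE>|class:<LOCAL_MODULE_CLASS>" followed by arch qualifiers:
+	// host modules may append "|host_arch:<arch>" and "|cross_arch:<arch>", while target
+	// modules append "|target_arch:<arch>" ("|target_arch:*" when unset),
+	// e.g. "mymod|class:ETC|target_arch:*".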
+ name := mod.Variables["LOCAL_MODULE"]
+ if name == "" {
+ return "", fmt.Errorf("%d: the module above lacks LOCAL_MODULE assignment", mod.Location)
+ }
+ var buf strings.Builder
+ writebuf := func(chunks ...string) {
+ for _, s := range chunks {
+ buf.WriteString(s)
+ }
+ }
+
+ writebuf(name, "|class:", mod.Variables["LOCAL_MODULE_CLASS"])
+ if mod.Variables["LOCAL_IS_HOST_MODULE"] == "true" {
+ if v, ok := mod.Variables["LOCAL_MODULE_HOST_ARCH"]; ok {
+ writebuf("|host_arch:", v)
+ }
+ if v, ok := mod.Variables["LOCAL_MODULE_HOST_CROSS_ARCH"]; ok {
+ writebuf("|cross_arch:", v)
+ }
+ } else {
+ if v, ok := mod.Variables["LOCAL_MODULE_TARGET_ARCH"]; ok {
+ writebuf("|target_arch:", v)
+ } else {
+ writebuf("|target_arch:*")
+ }
+ }
+ return buf.String(), nil
+}
+
+// ParseMkFile parses an Android-<TARGET>.mk file generated by the Android build.
+func ParseMkFile(source io.Reader) (*MkFile, error) {
+ scanner := &myScanner{bufio.NewScanner(source), 0}
+ buffer := make([]byte, 1000000000)
+ scanner.Scanner.Buffer(buffer, len(buffer))
+ mkFile := &MkFile{Modules: make(map[string]*MkModule)}
+
+ for scanner.Scan() {
+ line := scanner.Text()
+ m := rexHeader.FindStringSubmatchIndex(line)
+ if len(m) != rexPairsHeader {
+ continue
+ }
+ moduleType := ""
+ if m[4] >= 0 {
+ moduleType = line[m[4]:m[5]]
+ }
+ mod, err := mkFile.handleModule(scanner, moduleType)
+ if err != nil {
+ return mkFile, err
+ }
+ name, err := mkFile.moduleKey(mod)
+ if err != nil {
+ return mkFile, err
+ }
+ if old, found := mkFile.Modules[name]; found {
+ return mkFile, fmt.Errorf(":%d: module %s already found, diff: %s", old.Location, name, cmp.Diff(old, mod))
+ }
+ mkFile.Modules[name] = mod
+ }
+ return mkFile, scanner.Err()
+}
diff --git a/mkcompare/mkfile_test.go b/mkcompare/mkfile_test.go
new file mode 100644
index 00000000..fd3248db
--- /dev/null
+++ b/mkcompare/mkfile_test.go
@@ -0,0 +1,62 @@
+package mkcompare
+
+import (
+ "github.com/google/go-cmp/cmp"
+ "strings"
+ "testing"
+)
+
+func TestParseMkFile(t *testing.T) {
+ tests := []struct {
+ name string
+ source string
+ want MkFile
+ wantErr bool
+ }{
+ {
+ name: "Good1",
+ source: `
+include $(CLEAR_VARS) # modType
+LOCAL_MODULE := mymod
+LOCAL_MODULE_CLASS := ETC
+include $(BUILD_PREBUILT)
+
+ignored
+ignored2
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := mymod2
+LOCAL_MODULE_CLASS := BIN
+MY_PATH := foo
+include $(BUILD_PREBUILT)
+`,
+ want: MkFile{
+ Modules: map[string]*MkModule{
+ "mymod|class:ETC|target_arch:*": {
+ Type: "modType",
+ Location: 2,
+ Variables: map[string]string{"LOCAL_MODULE": "mymod", "LOCAL_MODULE_CLASS": "ETC"},
+ },
+ "mymod2|class:BIN|target_arch:*": {
+ Type: "$(BUILD_PREBUILT)",
+ Location: 10,
+ Variables: map[string]string{"LOCAL_MODULE": "mymod2", "LOCAL_MODULE_CLASS": "BIN", "MY_PATH": "foo"},
+ },
+ },
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := ParseMkFile(strings.NewReader(tt.source))
+ if (err != nil) != tt.wantErr {
+ t.Errorf("ParseMkFile() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if !cmp.Equal(got.Modules, tt.want.Modules) {
+ t.Errorf("ParseMkFile() got = %v, want %v, \ndiff: %s", got.Modules, tt.want.Modules,
+ cmp.Diff(got, tt.want))
+ }
+ })
+ }
+}
diff --git a/platforms/BUILD.bazel b/platforms/BUILD.bazel
index 24829e23..aac1e61d 100644
--- a/platforms/BUILD.bazel
+++ b/platforms/BUILD.bazel
@@ -11,125 +11,8 @@
#
# These model after the arch and OS definitions in build/soong/android/arch.go.
-load("@soong_injection//product_config:product_variables.bzl", "product_vars")
-load("//build/bazel/platforms:product_variables/product_platform.bzl", "android_platform", "product_variable_config")
-load("//build/bazel/platforms/arch/variants:constants.bzl", "constants")
-load(
- "//prebuilts/clang/host/linux-x86:cc_toolchain_constants.bzl",
- "arch_to_variants",
- "variant_constraints",
- "variant_name",
-)
+load(":platform_utils.bzl", "platform_utils")
package(default_visibility = ["//visibility:public"])
-product_variable_config(
- name = "android_target",
- product_config_vars = product_vars,
-)
-
-# Linux is the OS
-# for the Linux kernel plus the glibc runtime.
-android_platform(
- name = "linux_x86",
- constraint_values = [
- "//build/bazel/platforms/arch:x86",
- "//build/bazel/platforms/os:linux",
- ],
- product = ":android_target",
-)
-
-android_platform(
- name = "linux_x86_64",
- constraint_values = [
- "//build/bazel/platforms/arch:x86_64",
- "//build/bazel/platforms/os:linux",
- ],
- product = ":android_target",
-)
-
-# linux_bionic is the OS for the Linux kernel plus the Bionic libc runtime, but
-# without the rest of Android.
-android_platform(
- name = "linux_bionic_arm64",
- constraint_values = [
- "//build/bazel/platforms/arch:arm64",
- "//build/bazel/platforms/os:linux_bionic",
- ],
- product = ":android_target",
-)
-
-android_platform(
- name = "linux_bionic_x86_64",
- constraint_values = [
- "//build/bazel/platforms/arch:x86_64",
- "//build/bazel/platforms/os:linux_bionic",
- ],
- product = ":android_target",
-)
-
-# Darwin is the OS for MacOS host machines.
-android_platform(
- name = "darwin_arm64",
- constraint_values = [
- "//build/bazel/platforms/arch:arm64",
- "//build/bazel/platforms/os:darwin",
- ],
- product = ":android_target",
-)
-
-android_platform(
- name = "darwin_x86_64",
- constraint_values = [
- "//build/bazel/platforms/arch:x86_64",
- "//build/bazel/platforms/os:darwin",
- ],
- product = ":android_target",
-)
-
-# Windows is the OS for Windows host machines.
-android_platform(
- name = "windows_x86",
- constraint_values = [
- "//build/bazel/platforms/arch:x86",
- "//build/bazel/platforms/os:windows",
- ],
- product = ":android_target",
-)
-
-android_platform(
- name = "windows_x86_64",
- constraint_values = [
- "//build/bazel/platforms/arch:x86_64",
- "//build/bazel/platforms/os:windows",
- ],
- product = ":android_target",
-)
-
-alias(
- name = "android_arm",
- actual = ":android_arm_armv7-a-neon", # default to armv7-a-neon
-)
-
-alias(
- name = "android_arm64",
- actual = ":android_arm64_armv8-a", # default to armv8-a
-)
-
-[
- [
- android_platform(
- name = "android_" + arch + variant_name(variant),
- constraint_values = [
- "//build/bazel/platforms/arch:" + arch,
- "//build/bazel/platforms/os:android",
- ] + variant_constraints(
- variant,
- constants.AndroidArchToVariantToFeatures[arch],
- ),
- product = ":android_target",
- )
- for variant in variants
- ]
- for arch, variants in arch_to_variants.items()
-]
+platform_utils(name = "platform_utils")
diff --git a/platforms/arch/BUILD b/platforms/arch/BUILD
index 35df294f..cc458d0e 100644
--- a/platforms/arch/BUILD
+++ b/platforms/arch/BUILD
@@ -17,6 +17,11 @@ constraint_value(
)
constraint_value(
+ name = "riscv64",
+ constraint_setting = "@platforms//cpu:cpu",
+)
+
+constraint_value(
name = "x86",
constraint_setting = "@platforms//cpu:cpu",
)
@@ -25,5 +30,34 @@ constraint_value(
# correctly with --tool_java_runtime_version=local_jdk and the checked-in JDK.
alias(
name = "x86_64",
- actual = "@platforms//cpu:x86_64"
+ actual = "@platforms//cpu:x86_64",
+)
+
+constraint_setting(
+ name = "secondary_arch_constraint",
+)
+
+constraint_value(
+ name = "secondary_arm",
+ constraint_setting = ":secondary_arch_constraint",
+)
+
+constraint_value(
+ name = "secondary_arm64",
+ constraint_setting = ":secondary_arch_constraint",
+)
+
+constraint_value(
+ name = "secondary_riscv64",
+ constraint_setting = ":secondary_arch_constraint",
+)
+
+constraint_value(
+ name = "secondary_x86",
+ constraint_setting = ":secondary_arch_constraint",
+)
+
+constraint_value(
+ name = "secondary_x86_64",
+ constraint_setting = ":secondary_arch_constraint",
)
diff --git a/platforms/arch/variants/BUILD b/platforms/arch/variants/BUILD
index 0b722b6b..16dcd6a4 100644
--- a/platforms/arch/variants/BUILD
+++ b/platforms/arch/variants/BUILD
@@ -1,7 +1,8 @@
# Cpu/Arch Variants and features
load("//build/bazel/product_variables:constants.bzl", _product_variable_constants = "constants")
-load(":constants.bzl", "constants")
+load(":constants.bzl", "constants", "power_set")
+load(":constants_test.bzl", "power_set_test_suite")
constraint_setting(
name = "arch_variant_constraint",
@@ -77,13 +78,17 @@ constraint_setting(
[
[
config_setting(
- name = feature + "-" + arch,
+ name = arch + "-" + "-".join(sorted(features)),
constraint_values = [
_product_variable_constants.ArchVariantToConstraints[arch],
- ":" + feature,
- ],
+ ] + [":" + feature for feature in features],
+ )
+ for features in power_set(
+ all_features,
+ include_empty = False,
)
- for feature in features
]
- for arch, features in constants.ArchToFeatures.items()
+ for arch, all_features in constants.ArchToFeatures.items()
]
+
+power_set_test_suite(name = "power_set_tests")
diff --git a/platforms/arch/variants/constants.bzl b/platforms/arch/variants/constants.bzl
index 1e5feb94..6b096323 100644
--- a/platforms/arch/variants/constants.bzl
+++ b/platforms/arch/variants/constants.bzl
@@ -16,10 +16,12 @@
load(
"@soong_injection//product_config:arch_configuration.bzl",
+ _aml_arches = "aml_arches",
_android_arch_feature_for_arch_variant = "android_arch_feature_for_arch_variants",
_arch_to_cpu_variants = "arch_to_cpu_variants",
_arch_to_features = "arch_to_features",
_arch_to_variants = "arch_to_variants",
+ _ndk_arches = "ndk_arches",
)
def _flatten_string_list_dict_to_set(string_list_dict):
@@ -41,4 +43,25 @@ constants = struct(
CpuToVariants = _arch_to_cpu_variants,
ArchToFeatures = _arch_to_features,
AndroidArchToVariantToFeatures = _android_arch_feature_for_arch_variant,
+ aml_arches = _aml_arches,
+ ndk_arches = _ndk_arches,
)
+
+def power_set(items, *, include_empty = True):
+ """Calculates the power set of the given items."""
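+    # Each integer i in [0, 2^n) encodes one subset: bit j of i selects items[j],
+    # so e.g. power_set(["a", "b"]) returns [[], ["a"], ["b"], ["a", "b"]].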
+
+ def _exp(x, y):
+ result = 1
+ for _ in range(y):
+ result *= x
+ return result
+
+ power_set = []
+ n = len(items)
+ for i in range(0 if include_empty else 1, _exp(2, n)):
+ combination = []
+ for j in range(n):
+ if (i >> j) % 2 == 1:
+ combination.append(items[j])
+ power_set.append(combination)
+ return power_set
diff --git a/platforms/arch/variants/constants_test.bzl b/platforms/arch/variants/constants_test.bzl
new file mode 100644
index 00000000..120b10b1
--- /dev/null
+++ b/platforms/arch/variants/constants_test.bzl
@@ -0,0 +1,85 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
+load(":constants.bzl", "power_set")
+
+def _power_set_test(ctx):
+ env = unittest.begin(ctx)
+
+ actual = power_set(ctx.attr.items, include_empty = ctx.attr.include_empty)
+ expected = json.decode(ctx.attr.expected_value_json)
+
+ asserts.equals(env, expected, actual, "expected power_set({items}) to be {expected}, got {actual}".format(
+ items = ctx.attr.items,
+ expected = expected,
+ actual = actual,
+ ))
+
+ return unittest.end(env)
+
+power_set_test = unittest.make(
+ _power_set_test,
+ attrs = {
+ "items": attr.string_list(doc = "Input to the power set function"),
+ "include_empty": attr.bool(doc = "The include_empty argument to the power set function", default = True),
+ "expected_value_json": attr.string(doc = "Expected output as a json-encoded string because attributes can't be a list of lists of strings"),
+ },
+)
+
+def _power_set_tests():
+ power_set_test(
+ name = "power_set_test_0",
+ items = ["a", "b", "c"],
+ include_empty = True,
+ expected_value_json = json.encode([[], ["a"], ["b"], ["a", "b"], ["c"], ["a", "c"], ["b", "c"], ["a", "b", "c"]]),
+ )
+ power_set_test(
+ name = "power_set_test_1",
+ items = ["a", "b", "c"],
+ include_empty = False,
+ expected_value_json = json.encode([["a"], ["b"], ["a", "b"], ["c"], ["a", "c"], ["b", "c"], ["a", "b", "c"]]),
+ )
+ power_set_test(
+ name = "power_set_test_2",
+ items = [],
+ include_empty = True,
+ expected_value_json = json.encode([[]]),
+ )
+ power_set_test(
+ name = "power_set_test_3",
+ items = [],
+ include_empty = False,
+ expected_value_json = json.encode([]),
+ )
+ power_set_test(
+ name = "power_set_test_4",
+ items = ["a"],
+ include_empty = True,
+ expected_value_json = json.encode([[], ["a"]]),
+ )
+ power_set_test(
+ name = "power_set_test_5",
+ items = ["a"],
+ include_empty = False,
+ expected_value_json = json.encode([["a"]]),
+ )
+
+ return ["power_set_test_" + str(i) for i in range(6)]
+
+def power_set_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = _power_set_tests(),
+ )
diff --git a/platforms/os_arch/BUILD.bazel b/platforms/os_arch/BUILD.bazel
index d9c0ebe7..39e24cce 100644
--- a/platforms/os_arch/BUILD.bazel
+++ b/platforms/os_arch/BUILD.bazel
@@ -15,6 +15,14 @@ config_setting(
)
config_setting(
+ name = "android_riscv64",
+ constraint_values = [
+ "//build/bazel/platforms/arch:riscv64",
+ "//build/bazel/platforms/os:android",
+ ],
+)
+
+config_setting(
name = "android_x86",
constraint_values = [
"//build/bazel/platforms/arch:x86",
@@ -79,6 +87,22 @@ config_setting(
)
config_setting(
+ name = "linux_musl_arm",
+ constraint_values = [
+ "//build/bazel/platforms/arch:arm",
+ "//build/bazel/platforms/os:linux_musl",
+ ],
+)
+
+config_setting(
+ name = "linux_musl_arm64",
+ constraint_values = [
+ "//build/bazel/platforms/arch:arm64",
+ "//build/bazel/platforms/os:linux_musl",
+ ],
+)
+
+config_setting(
name = "linux_musl_x86",
constraint_values = [
"//build/bazel/platforms/arch:x86",
diff --git a/platforms/platform_utils.bzl b/platforms/platform_utils.bzl
new file mode 100644
index 00000000..3ac93c15
--- /dev/null
+++ b/platforms/platform_utils.bzl
@@ -0,0 +1,166 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+platform_utils.bzl defines a platform_utils rule, and several
+utility functions that accept an instance of that rule and return
+information about the target platform. One instance of the platform_utils
+rule is defined in //build/bazel/platforms:platform_utils. All rules
+that need it can depend on that target, and then call the util
+functions by doing something like `is_target_linux(ctx.attr._platform_utils)`.
+This works because child targets inherit their parent's configuration.
+"""
+
+_name_to_constraint = {
+ "_x86_constraint": "//build/bazel/platforms/arch:x86",
+ "_x86_64_constraint": "//build/bazel/platforms/arch:x86_64",
+ "_arm_constraint": "//build/bazel/platforms/arch:arm",
+ "_arm64_constraint": "//build/bazel/platforms/arch:arm64",
+ "_secondary_x86_constraint": "//build/bazel/platforms/arch:secondary_x86",
+ "_secondary_x86_64_constraint": "//build/bazel/platforms/arch:secondary_x86_64",
+ "_secondary_arm_constraint": "//build/bazel/platforms/arch:secondary_arm",
+ "_secondary_arm64_constraint": "//build/bazel/platforms/arch:secondary_arm64",
+ "_android_constraint": "//build/bazel/platforms/os:android",
+ "_linux_constraint": "//build/bazel/platforms/os:linux",
+ "_linux_musl_constraint": "//build/bazel/platforms/os:linux_musl",
+ "_linux_bionic_constraint": "//build/bazel/platforms/os:linux_bionic",
+ "_darwin_constraint": "//build/bazel/platforms/os:darwin",
+}
+
+_AndroidPlatformUtilsInfo = provider(
+ "_AndroidPlatformUtilsInfo exports metadata about what platform the code is being run on.",
+ fields = {
+ "target" + name: "Whether the target platform has the constraint %s" % constraint
+ for name, constraint in _name_to_constraint.items()
+ },
+)
+
+def _platform_utils_impl(ctx):
+ return [
+ _AndroidPlatformUtilsInfo(**{
+ "target" + name: ctx.target_platform_has_constraint(getattr(ctx.attr, name)[platform_common.ConstraintValueInfo])
+ for name in _name_to_constraint
+ }),
+ ]
+
+platform_utils = rule(
+ implementation = _platform_utils_impl,
+ attrs = {
+ name: attr.label(
+ default = Label(constraint),
+ doc = "An internal reference to the constraint so it can be used in the rule implementation.",
+ )
+ for name, constraint in _name_to_constraint.items()
+ },
+)
+
+def _get_platform_info(utils):
+ if _AndroidPlatformUtilsInfo not in utils:
+ fail("Provided object was not an instance of platform_utils. " +
+ "You should depend on //build/bazel/platforms:platform_utils and then pass " +
+ "ctx.attr._platform_utils to this function.")
+ return utils[_AndroidPlatformUtilsInfo]
+
+def _is_target_linux(utils):
+ """Returns if the target platform is linux with any variation of libc."""
+ platforminfo = _get_platform_info(utils)
+ return (platforminfo.target_linux_constraint or
+ platforminfo.target_linux_musl_constraint or
+ platforminfo.target_linux_bionic_constraint)
+
+def _is_target_android(utils):
+ """Returns if the target platform is android."""
+ return _get_platform_info(utils).target_android_constraint
+
+def _is_target_darwin(utils):
+ """Returns if the target platform is darwin."""
+ return _get_platform_info(utils).target_darwin_constraint
+
+def _is_target_linux_or_android(utils):
+ """Returns if the target platform is linux with any variation of libc, or android."""
+ return _is_target_linux(utils) or _is_target_android(utils)
+
+def _is_target_bionic(utils):
+ """Returns if the target platform uses the Bionic libc"""
+ return _is_target_linux_bionic(utils) or _is_target_android(utils)
+
+def _is_target_linux_bionic(utils):
+ """Returns if the target platform runs (non-Android) Linux and uses the Bionic libc"""
+ return _get_platform_info(utils).target_linux_bionic_constraint
+
+def _get_target_bitness(utils):
+ """Returns 32 or 64 depending on the bitness of the target platform."""
+ platforminfo = _get_platform_info(utils)
+
+ if platforminfo.target_x86_constraint or platforminfo.target_arm_constraint:
+ return 32
+ elif platforminfo.target_x86_64_constraint or platforminfo.target_arm64_constraint:
+ return 64
+ fail("Unable to determine target bitness")
+
+def _get_target_arch(utils):
+ """Returns 'x86', 'x86_64', 'arm', or 'arm64' depending on the target platform."""
+ platforminfo = _get_platform_info(utils)
+
+ if platforminfo.target_x86_constraint:
+ return "x86"
+ elif platforminfo.target_x86_64_constraint:
+ return "x86_64"
+ elif platforminfo.target_arm_constraint:
+ return "arm"
+ elif platforminfo.target_arm64_constraint:
+ return "arm64"
+
+ fail("Unable to determine target arch")
+
+def _get_target_secondary_arch(utils):
+ """
+ Returns 'x86', 'x86_64', 'arm', 'arm64', or '' depending on the target platform.
+
+    If the secondary arch is the same as the primary arch, an empty string is returned,
+    indicating that no secondary arch exists. The main motivation for this behavior is
+    that in soong.variables, DeviceSecondaryArch and related variables are empty strings
+    when they are unset, and a lot of existing code relies on that. In Bazel, however, a
+    constraint setting must always have a value, and a "none" value would likely introduce
+    more problems, so the secondary arch constraint simply copies the primary arch when no
+    secondary arch is configured.
+ """
+ platforminfo = _get_platform_info(utils)
+
+ result = ""
+ if platforminfo.target_secondary_x86_constraint:
+ result = "x86"
+ elif platforminfo.target_secondary_x86_64_constraint:
+ result = "x86_64"
+ elif platforminfo.target_secondary_arm_constraint:
+ result = "arm"
+ elif platforminfo.target_secondary_arm64_constraint:
+ result = "arm64"
+ else:
+ fail("Unable to determine target secondary arch")
+
+ if _get_target_arch(utils) == result:
+ return ""
+ return result
+
+platforms = struct(
+ is_target_linux = _is_target_linux,
+ is_target_android = _is_target_android,
+ is_target_bionic = _is_target_bionic,
+ is_target_darwin = _is_target_darwin,
+ is_target_linux_or_android = _is_target_linux_or_android,
+ get_target_bitness = _get_target_bitness,
+ get_target_arch = _get_target_arch,
+ get_target_secondary_arch = _get_target_secondary_arch,
+)
diff --git a/platforms/product_variables/product_platform.bzl b/platforms/product_variables/product_platform.bzl
deleted file mode 100644
index f2c6f91f..00000000
--- a/platforms/product_variables/product_platform.bzl
+++ /dev/null
@@ -1,148 +0,0 @@
-"""Parallels variable.go to provide variables and create a platform based on converted config."""
-
-load("//build/bazel/product_variables:constants.bzl", "constants")
-load("//prebuilts/clang/host/linux-x86:cc_toolchain_constants.bzl", "variant_name")
-
-def _product_variables_providing_rule_impl(ctx):
- return [
- platform_common.TemplateVariableInfo(ctx.attr.product_vars),
- ]
-
-# Provides product variables for templated string replacement.
-product_variables_providing_rule = rule(
- implementation = _product_variables_providing_rule_impl,
- attrs = {
- "product_vars": attr.string_dict(),
- },
-)
-
-_arch_os_only_suffix = "_arch_os"
-_product_only_suffix = "_product"
-
-def add_providing_var(providing_vars, typ, var, value):
- if typ == "bool":
- providing_vars[var] = "1" if value else "0"
- elif typ == "list":
- providing_vars[var] = ",".join(value)
- elif typ == "int":
- providing_vars[var] = str(value)
- elif typ == "string":
- providing_vars[var] = value
-
-def product_variable_config(name, product_config_vars):
- constraints = []
-
- local_vars = dict(product_config_vars)
-
- # Native_coverage is not set within soong.variables, but is hardcoded
- # within config.go NewConfig
- local_vars["Native_coverage"] = (
- local_vars.get("ClangCoverage", False) or
- local_vars.get("GcovCoverage", False)
- )
-
- providing_vars = {}
-
- # Generate constraints for Soong config variables (bool, value, string typed).
- vendor_vars = local_vars.pop("VendorVars", default = {})
- for (namespace, variables) in vendor_vars.items():
- for (var, value) in variables.items():
- # All vendor vars are Starlark string-typed, even though they may be
- # boxed bools/strings/arbitrary printf'd values, like numbers, so
- # we'll need to do some translation work here by referring to
- # soong_injection's generated data.
-
- if value == "":
- # Variable is not set so skip adding this as a constraint.
- continue
-
- # Create the identifier for the constraint var (or select key)
- config_var = namespace + "__" + var
-
- # List of all soong_config_module_type variables.
- if not config_var in constants.SoongConfigVariables:
- continue
-
- # Normalize all constraint vars (i.e. select keys) to be lowercased.
- constraint_var = config_var.lower()
-
- if config_var in constants.SoongConfigBoolVariables:
- constraints.append("//build/bazel/product_variables:" + constraint_var)
- elif config_var in constants.SoongConfigStringVariables:
- # The string value is part of the the select key.
- constraints.append("//build/bazel/product_variables:" + constraint_var + "__" + value.lower())
- elif config_var in constants.SoongConfigValueVariables:
- # For value variables, providing_vars add support for substituting
- # the value using TemplateVariableInfo.
- constraints.append("//build/bazel/product_variables:" + constraint_var)
- add_providing_var(providing_vars, "string", constraint_var, value)
-
- for (var, value) in local_vars.items():
- # TODO(b/187323817): determine how to handle remaining product
- # variables not used in product_variables
- constraint_var = var.lower()
- if not constants.ProductVariables.get(constraint_var):
- continue
-
- # variable.go excludes nil values
- add_constraint = (value != None)
- add_providing_var(providing_vars, type(value), var, value)
- if type(value) == "bool":
- # variable.go special cases bools
- add_constraint = value
-
- if add_constraint:
- constraints.append("//build/bazel/product_variables:" + constraint_var)
-
- native.platform(
- name = name + _product_only_suffix,
- constraint_values = constraints,
- )
-
- arch = local_vars.get("DeviceArch")
- arch_variant = local_vars.get("DeviceArchVariant")
- cpu_variant = local_vars.get("DeviceCpuVariant")
-
- os = "android"
-
- native.alias(
- name = name,
- actual = "{os}_{arch}{variant}".format(os = os, arch = arch, variant = _variant_name(arch, arch_variant, cpu_variant)),
- )
-
- arch = local_vars.get("DeviceSecondaryArch")
- arch_variant = local_vars.get("DeviceSecondaryArchVariant")
- cpu_variant = local_vars.get("DeviceSecondaryCpuVariant")
-
- if arch:
- native.alias(
- name = name + "_secondary",
- actual = "{os}_{arch}{variant}".format(os = os, arch = arch, variant = _variant_name(arch, arch_variant, cpu_variant)),
- )
-
- product_variables_providing_rule(
- name = name + "_product_vars",
- product_vars = providing_vars,
- )
-
-def _is_variant_default(arch, variant):
- return variant == None or variant in (arch, "generic")
-
-def _variant_name(arch, arch_variant, cpu_variant):
- if _is_variant_default(arch, arch_variant):
- arch_variant = ""
- if _is_variant_default(arch, cpu_variant):
- cpu_variant = ""
- variant = struct(
- arch_variant = arch_variant,
- cpu_variant = cpu_variant,
- )
- return variant_name(variant)
-
-def android_platform(name = None, constraint_values = [], product = None):
- """ android_platform creates a platform with the specified constraint_values and product constraints."""
- native.platform(
- name = name,
- constraint_values = constraint_values,
- parents = [product + _product_only_suffix],
- )
diff --git a/platforms/rule_utilities.bzl b/platforms/rule_utilities.bzl
deleted file mode 100644
index 97481b37..00000000
--- a/platforms/rule_utilities.bzl
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for rule implementations to interact with platform definitions."""
-
-# Merge ARCH_CONSTRAINT_ATTRS with the rule attrs to use get_arch(ctx).
-ARCH_CONSTRAINT_ATTRS = {
- "_x86_constraint": attr.label(default = Label("//build/bazel/platforms/arch:x86")),
- "_x86_64_constraint": attr.label(default = Label("//build/bazel/platforms/arch:x86_64")),
- "_arm_constraint": attr.label(default = Label("//build/bazel/platforms/arch:arm")),
- "_arm64_constraint": attr.label(default = Label("//build/bazel/platforms/arch:arm64")),
-}
-
-# get_arch takes a rule context with ARCH_CONSTRAINT_ATTRS and returns the string representation
-# of the target platform by executing the target_platform_has_constraint boilerplate.
-def get_arch(ctx):
- if not hasattr(ctx.attr, "_x86_constraint") or \
- not hasattr(ctx.attr, "_x86_64_constraint") or \
- not hasattr(ctx.attr, "_arm_constraint") or \
- not hasattr(ctx.attr, "_arm64_constraint"):
- fail("Could not get the target architecture of this rule due to missing constraint attrs.",
- "Have you merged ARCH_CONSTRAINT_ATTRS into this rule's attributes?")
-
- x86_constraint = ctx.attr._x86_constraint[platform_common.ConstraintValueInfo]
- x86_64_constraint = ctx.attr._x86_64_constraint[platform_common.ConstraintValueInfo]
- arm_constraint = ctx.attr._arm_constraint[platform_common.ConstraintValueInfo]
- arm64_constraint = ctx.attr._arm64_constraint[platform_common.ConstraintValueInfo]
-
- if ctx.target_platform_has_constraint(x86_constraint):
- return "x86"
- elif ctx.target_platform_has_constraint(x86_64_constraint):
- return "x86_64"
- elif ctx.target_platform_has_constraint(arm_constraint):
- return "arm"
- elif ctx.target_platform_has_constraint(arm64_constraint):
- return "arm64"
diff --git a/product_config/BUILD b/product_config/BUILD
new file mode 100644
index 00000000..598c8b12
--- /dev/null
+++ b/product_config/BUILD
@@ -0,0 +1,43 @@
+load(":android_product.bzl", "all_android_product_labels")
+
+package(default_visibility = ["//visibility:public"])
+
+# The current product we're building for. We could've chosen this product via a --platforms flag,
+# or transitioned into it. This is used to select() on the current product in the product_vars
+# target below. Note that this represents the current android _product_ as defined in pre-bazel
+# terms. There are several bazel platforms that can resolve to the same product, for example the
+# host platform and the device platform.
+constraint_setting(name = "current_product")
+
+# This target can be used to read product variables that aren't represented by other constraint
+# values (like the arch is).
+#
+# You may ask why every product variable isn't represented as a constraint setting. There are a
+# few reasons for this:
+# - Some variables represent files to use in rule implementations, like the default app
+# certificate. Constraint settings would not allow adding a dependency on another file.
+# - Some variables have more complicated data than a simple enum, like TidyChecks which is a list
+# of strings. While this may be possible to extract from the name of a constraint value, it
+# would be awkward.
+# - Not all configuration variables need to be used in toolchain resolution.
+#
+# This target has 2 providers:
+# - TemplateVariableInfo: this is a native platform_common.TemplateVariableInfo provider that
+# contains a subset of the product variables to be available for expansion using make-like syntax
+# on certain rule attributes. Soong had this feature, so it was ported to bazel.
+# - ProductVariablesInfo: A custom provider that is supposed to replace direct accesses of
+# soong.variables. You'd use this rule instead of reading from soong.variables directly so that
+# when you transition into another product, you get the updated product config variables.
+#
+# This method of accessing product variables can't be used from macros / the bazel loading phase.
+# This is intentional, because if you could read them during the loading phase, you wouldn't be
+# able to transition into new values. Instead, please defer accesses to the analysis phase.
+#
+# All android_product()s must register their label here.
+alias(
+ name = "product_vars",
+ actual = select({
+ label + "_constraint_value": label + "_product_vars"
+ for label in all_android_product_labels
+ }),
+)
diff --git a/product_config/android_product.bzl b/product_config/android_product.bzl
new file mode 100644
index 00000000..52f5cd9a
--- /dev/null
+++ b/product_config/android_product.bzl
@@ -0,0 +1,433 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@//build/bazel/tests/products:product_labels.bzl", _test_product_labels = "product_labels")
+load("@soong_injection//product_config_platforms:product_labels.bzl", _product_labels = "product_labels")
+load("//build/bazel/platforms/arch/variants:constants.bzl", _arch_constants = "constants")
+load("//build/bazel/product_variables:constants.bzl", "constants")
+load(
+ "//prebuilts/clang/host/linux-x86:cc_toolchain_constants.bzl",
+ "arch_to_variants",
+ "variant_constraints",
+ "variant_name",
+)
+load(":product_variables_providing_rule.bzl", "product_variables_providing_rule")
+
+all_android_product_labels = _product_labels + _test_product_labels
+
+def _is_variant_default(arch, variant):
+ return variant == None or variant in (arch, "generic")
+
+def _soong_arch_config_to_struct(soong_arch_config):
+ return struct(
+ arch = soong_arch_config["arch"],
+ arch_variant = soong_arch_config["arch_variant"],
+ cpu_variant = soong_arch_config["cpu_variant"],
+ )
+
+def _determine_target_arches_from_config(config):
+ arches = []
+
+ # ndk_abis and aml_abis explicitly get handled first as they override any setting
+ # for DeviceArch, DeviceSecondaryArch in Soong:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/android/config.go;l=455-468;drc=b45a2ea782074944f79fc388df20b06e01f265f7
+ if config.get("Ndk_abis"):
+ for arch_config in _arch_constants.ndk_arches:
+ arches.append(_soong_arch_config_to_struct(arch_config))
+ return arches
+ elif config.get("Aml_abis"):
+ for arch_config in _arch_constants.aml_arches:
+ arches.append(_soong_arch_config_to_struct(arch_config))
+ return arches
+
+ arch = config.get("DeviceArch")
+ arch_variant = config.get("DeviceArchVariant")
+ cpu_variant = config.get("DeviceCpuVariant")
+
+ if _is_variant_default(arch, arch_variant):
+ arch_variant = ""
+ if _is_variant_default(arch, cpu_variant):
+ cpu_variant = ""
+
+ if not arch:
+        # TODO(b/258839711): determine a better way to identify whether a config is actually host-only or whether the target config is simply missing
+ if "DeviceArch" in config:
+ fail("No architecture was specified in the product config, expected one of Ndk_abis, Aml_abis, or DeviceArch to be set:\n%s" % config)
+ else:
+ return arches
+
+ arches.append(struct(
+ arch = arch,
+ arch_variant = arch_variant,
+ cpu_variant = cpu_variant,
+ ))
+
+ arch = config.get("DeviceSecondaryArch")
+ arch_variant = config.get("DeviceSecondaryArchVariant")
+ cpu_variant = config.get("DeviceSecondaryCpuVariant")
+
+ if _is_variant_default(arch, arch_variant):
+ arch_variant = ""
+ if _is_variant_default(arch, cpu_variant):
+ cpu_variant = ""
+
+ if arch:
+ arches.append(struct(
+ arch = arch,
+ arch_variant = arch_variant,
+ cpu_variant = cpu_variant,
+ ))
+ return arches
+
+def _product_variable_constraint_settings(variables):
+ constraints = []
+
+ local_vars = dict(variables)
+
+ # Native_coverage is not set within soong.variables, but is hardcoded
+ # within config.go NewConfig
+ local_vars["Native_coverage"] = (
+ local_vars.get("ClangCoverage", False) or
+ local_vars.get("GcovCoverage", False)
+ )
+
+ # Some attributes on rules are able to access the values of product
+ # variables via make-style expansion (like $(foo)). We collect the values
+ # of the relevant product variables here so that it can be passed to
+ # product_variables_providing_rule, which exports a
+ # platform_common.TemplateVariableInfo provider to allow the substitution.
+ attribute_vars = {}
+
+ def add_attribute_var(typ, var, value):
+ if typ == "bool":
+ attribute_vars[var] = "1" if value else "0"
+ elif typ == "list":
+ attribute_vars[var] = ",".join(value)
+ elif typ == "int":
+ attribute_vars[var] = str(value)
+ elif typ == "string":
+ attribute_vars[var] = value
+
+ # Generate constraints for Soong config variables (bool, value, string typed).
+ vendor_vars = local_vars.pop("VendorVars", default = {})
+ for (namespace, variables) in vendor_vars.items():
+ for (var, value) in variables.items():
+ # All vendor vars are Starlark string-typed, even though they may be
+ # boxed bools/strings/arbitrary printf'd values, like numbers, so
+ # we'll need to do some translation work here by referring to
+ # soong_injection's generated data.
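+            # Illustrative example (hypothetical namespace/variable, not part of
+            # this change): a VendorVars entry {"acme": {"feature": "true"}} yields
+            # the identifier "acme__feature" below, which is then matched against
+            # the generated constants from soong_injection.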
+
+ if value == "":
+ # Variable is not set so skip adding this as a constraint.
+ continue
+
+ # Create the identifier for the constraint var (or select key)
+ config_var = namespace + "__" + var
+
+            # Skip variables that are not known soong_config_module_type variables.
+            if config_var not in constants.SoongConfigVariables:
+                continue
+
+ # Normalize all constraint vars (i.e. select keys) to be lowercased.
+ constraint_var = config_var.lower()
+
+ if config_var in constants.SoongConfigBoolVariables:
+ constraints.append("@//build/bazel/product_variables:" + constraint_var)
+ elif config_var in constants.SoongConfigStringVariables:
+                # The string value is part of the select key.
+ constraints.append("@//build/bazel/product_variables:" + constraint_var + "__" + value.lower())
+ elif config_var in constants.SoongConfigValueVariables:
+                # For value variables, the providing rule adds support for substituting
+                # the value using TemplateVariableInfo.
+ constraints.append("@//build/bazel/product_variables:" + constraint_var)
+ add_attribute_var("string", constraint_var, value)
+
+ for (var, value) in local_vars.items():
+ # TODO(b/187323817): determine how to handle remaining product
+ # variables not used in product_variables
+ constraint_var = var.lower()
+ if not constants.ProductVariables.get(constraint_var):
+ continue
+
+ # variable.go excludes nil values
+ add_constraint = (value != None)
+ add_attribute_var(type(value), var, value)
+ if type(value) == "bool":
+ # variable.go special cases bools
+ add_constraint = value
+
+ if add_constraint:
+ constraints.append("@//build/bazel/product_variables:" + constraint_var)
+
+ return constraints, attribute_vars
+
+def _define_platform_for_arch(name, common_constraints, arch, secondary_arch = None):
+ if secondary_arch == None:
+ # When there is no secondary arch, we'll pretend it exists but is the same as the primary arch
+ secondary_arch = arch
+ native.platform(
+ name = name,
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:" + arch.arch,
+ "@//build/bazel/platforms/arch:secondary_" + secondary_arch.arch,
+ "@//build/bazel/platforms/os:android",
+ ] + ["@" + v for v in variant_constraints(
+ arch,
+ _arch_constants.AndroidArchToVariantToFeatures[arch.arch],
+ )],
+ )
+
+def _define_platform_for_arch_with_secondary(name, common_constraints, arch, secondary_arch = None):
+ if secondary_arch != None:
+ _define_platform_for_arch(name, common_constraints, arch, secondary_arch)
+ _define_platform_for_arch(name + "_secondary", common_constraints, secondary_arch)
+ else:
+ _define_platform_for_arch(name, common_constraints, arch)
+ native.alias(
+ name = name + "_secondary",
+ actual = ":" + name,
+ )
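+# For example (hypothetical product name), calling the helper above with name "foo",
+# a primary arm64 arch and a secondary arm arch defines the platforms ":foo" and
+# ":foo_secondary"; with no secondary arch, ":foo_secondary" is an alias of ":foo".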
+
+def _verify_product_is_registered(name):
+ """
+ Verifies that this android_product() is listed in all_android_product_labels.
+
+ all_android_product_labels is used to build a select statement from each product to its
+    _product_vars rule. This is because we store most product configuration in a rule rather than
+    in constraint settings or build settings, due to limitations in Bazel (constraint settings
+    can't be unbounded, typed, or have dependencies; build settings can't be set with --platforms).
+ """
+ my_label = native.repository_name() + "//" + native.package_name() + ":" + name
+ for label in all_android_product_labels:
+ if my_label == label:
+ return
+ fail("All android_product() instances must be listed in all_android_product_labels in " +
+ "//build/bazel/product_config/android_product.bzl. By default the products generated " +
+ "from legacy android product configurations are included, additional platforms (like " +
+ "testing-specific platforms) must be manually listed in " +
+ "//build/bazel/tests/products/product_labels.bzl.")
+
+def android_product(name, soong_variables):
+ """
+ android_product integrates product variables into Bazel platforms.
+
+ This uses soong.variables to create constraints and platforms used by the
+ bazel build. The soong.variables file itself contains a post-processed list of
+    variables derived from Make variables via soong_config.mk, generated during
+    the product config step.
+
+ Some constraints used here are handcrafted in
+ //build/bazel/platforms/{arch,os}. The rest are dynamically generated.
+
+ If you're looking for what --config=android, --config=linux_x86_64 or most
+ select statements in the BUILD files (ultimately) refer to, they're all
+ created here.
+ """
+ _verify_product_is_registered(name)
+
+ product_var_constraints, attribute_vars = _product_variable_constraint_settings(soong_variables)
+ arch_configs = _determine_target_arches_from_config(soong_variables)
+
+ product_variables_providing_rule(
+ name = name + "_product_vars",
+ attribute_vars = attribute_vars,
+ product_vars = soong_variables,
+ )
+
+ native.constraint_value(
+ name = name + "_constraint_value",
+ constraint_setting = "@//build/bazel/product_config:current_product",
+ )
+
+ common_constraints = product_var_constraints + [name + "_constraint_value"]
+
+ # TODO(b/258802089): figure out how to deal with multiple arches for target
+ if len(arch_configs) > 0:
+ arch = arch_configs[0]
+ secondary_arch = None
+ if len(arch_configs) > 1:
+ secondary_arch = arch_configs[1]
+
+ _define_platform_for_arch_with_secondary(name, common_constraints, arch, secondary_arch)
+
+ # These variants are mostly for mixed builds, which may request a
+ # module with a certain arch
+ for arch, variants in arch_to_variants.items():
+ for variant in variants:
+ native.platform(
+ name = name + "_android_" + arch + variant_name(variant),
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:" + arch,
+ "@//build/bazel/platforms/arch:secondary_" + arch,
+ "@//build/bazel/platforms/os:android",
+ ] + ["@" + v for v in variant_constraints(
+ variant,
+ _arch_constants.AndroidArchToVariantToFeatures[arch],
+ )],
+ )
+
+ arch_transitions = [
+ struct(
+ name = "arm",
+ arch = struct(
+ arch = "arm",
+ arch_variant = "armv7-a-neon",
+ cpu_variant = "",
+ ),
+ secondary_arch = None,
+ ),
+ struct(
+ name = "arm64",
+ arch = struct(
+ arch = "arm64",
+ arch_variant = "armv8-a",
+ cpu_variant = "",
+ ),
+ secondary_arch = struct(
+ arch = "arm",
+ arch_variant = "armv7-a-neon",
+ cpu_variant = "",
+ ),
+ ),
+ struct(
+ name = "arm64only",
+ arch = struct(
+ arch = "arm64",
+ arch_variant = "armv8-a",
+ cpu_variant = "",
+ ),
+ secondary_arch = None,
+ ),
+ struct(
+ name = "x86",
+ arch = struct(
+ arch = "x86",
+ arch_variant = "",
+ cpu_variant = "",
+ ),
+ secondary_arch = None,
+ ),
+ struct(
+ name = "x86_64",
+ arch = struct(
+ arch = "x86_64",
+ arch_variant = "",
+ cpu_variant = "",
+ ),
+ secondary_arch = struct(
+ arch = "x86",
+ arch_variant = "",
+ cpu_variant = "",
+ ),
+ ),
+ struct(
+ name = "x86_64only",
+ arch = struct(
+ arch = "x86_64",
+ arch_variant = "",
+ cpu_variant = "",
+ ),
+ secondary_arch = None,
+ ),
+ ]
+
+    # TODO(b/249685973): Remove this; it currently exists just so aabs
+    # can build each architecture.
+ for arch in arch_transitions:
+ _define_platform_for_arch_with_secondary(name + "__internal_" + arch.name, common_constraints, arch.arch, arch.secondary_arch)
+
+ # Now define the host platforms. We need a host platform per product because
+ # the host platforms still use the product variables.
+    # TODO(b/262753134): Investigate making the host platforms product-independent
+ native.platform(
+ name = name + "_linux_x86",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86",
+ "@//build/bazel/platforms/os:linux",
+ ],
+ )
+
+ native.platform(
+ name = name + "_linux_x86_64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86_64",
+ "@//build/bazel/platforms/os:linux",
+ ],
+ )
+
+ native.platform(
+ name = name + "_linux_musl_x86",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86",
+ "@//build/bazel/platforms/os:linux_musl",
+ ],
+ )
+
+ native.platform(
+ name = name + "_linux_musl_x86_64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86_64",
+ "@//build/bazel/platforms/os:linux_musl",
+ ],
+ )
+
+ # linux_bionic is the OS for the Linux kernel plus the Bionic libc runtime, but
+ # without the rest of Android.
+ native.platform(
+ name = name + "_linux_bionic_arm64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:arm64",
+ "@//build/bazel/platforms/os:linux_bionic",
+ ],
+ )
+
+ native.platform(
+ name = name + "_linux_bionic_x86_64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86_64",
+ "@//build/bazel/platforms/os:linux_bionic",
+ ],
+ )
+
+ native.platform(
+ name = name + "_darwin_arm64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:arm64",
+ "@//build/bazel/platforms/os:darwin",
+ ],
+ )
+
+ native.platform(
+ name = name + "_darwin_x86_64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86_64",
+ "@//build/bazel/platforms/os:darwin",
+ ],
+ )
+
+ native.platform(
+ name = name + "_windows_x86",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86",
+ "@//build/bazel/platforms/os:windows",
+ ],
+ )
+
+ native.platform(
+ name = name + "_windows_x86_64",
+ constraint_values = common_constraints + [
+ "@//build/bazel/platforms/arch:x86_64",
+ "@//build/bazel/platforms/os:windows",
+ ],
+ )
diff --git a/product_config/product_variables_providing_rule.bzl b/product_config/product_variables_providing_rule.bzl
new file mode 100644
index 00000000..c6ce1ab1
--- /dev/null
+++ b/product_config/product_variables_providing_rule.bzl
@@ -0,0 +1,141 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@soong_injection//metrics:converted_modules_path_map.bzl", "modules")
+
+ProductVariablesInfo = provider(
+ "ProductVariablesInfo provides the android product config variables.",
+ fields = {
+ "Always_use_prebuilt_sdks": "Boolean to always use a prebuilt sdk instead of source-built.",
+ "CompressedApex": "Boolean indicating if apexes are compressed or not.",
+ "DefaultAppCertificate": "The default certificate to sign APKs and APEXes with. The $(dirname) of this certificate will also be used to find additional certificates when modules only give their names.",
+ "TidyChecks": "List of clang tidy checks to enable.",
+ "Unbundled_apps": "List of apps to build as unbundled.",
+ "Unbundled_build": "True if this is an unbundled build",
+ "ManifestPackageNameOverrides": "A list of string:string mapping from APEX/APK name to package name to override the AndroidManifest.xml package of the module.",
+ "CertificateOverrides": "A list of string:string mapping from APEX/APK name to the certificate name to override the certificate used to sign the APEX/APK container.",
+ "DeviceMaxPageSizeSupported": "String indicating the max-page-size supported by the device.",
+ "DeviceProduct": "Device product",
+ "DeviceName": "Device name",
+ "Platform_version_name": "Platform version name",
+ "BuildId": "Build ID",
+ "ProductManufacturer": "Product manufacturer",
+ "ProductBrand": "Product brand",
+ "TargetBuildVariant": "Target build variant",
+ "BuildVersionTags": "Build version tags",
+ },
+)
+
+ProductVariablesDepsInfo = provider(
+ "ProductVariablesDepsInfo provides fields that are not regular product config variables, but rather the concrete files that other product config vars reference.",
+ fields = {
+ "DefaultAppCertificateFiles": "All the .pk8, .pem, and .avbpubkey files in the DefaultAppCertificate directory.",
+ "OverridingCertificateFiles": "All the android_certificate_directory filegroups referenced by certificates in the CertificateOverrides mapping. Superset of DefaultAppCertificateFiles.",
+ },
+)
+
+def _product_variables_providing_rule_impl(ctx):
+ vars = json.decode(ctx.attr.product_vars)
+
+ tidy_checks = vars.get("TidyChecks", "")
+ tidy_checks = tidy_checks.split(",") if tidy_checks else []
+ target_build_variant = "user"
+ if vars.get("Eng"):
+ target_build_variant = "eng"
+ elif vars.get("Debuggable"):
+ target_build_variant = "userdebug"
+
+ return [
+ platform_common.TemplateVariableInfo(ctx.attr.attribute_vars),
+ ProductVariablesInfo(
+ Always_use_prebuilt_sdks = vars.get("Always_use_prebuilt_sdks", False),
+ CompressedApex = vars.get("CompressedApex", False),
+ DefaultAppCertificate = vars.get("DefaultAppCertificate", None),
+ TidyChecks = tidy_checks,
+ Unbundled_apps = vars.get("Unbundled_apps", []),
+ Unbundled_build = vars.get("Unbundled_build", False),
+ ManifestPackageNameOverrides = vars.get("ManifestPackageNameOverrides", []),
+ CertificateOverrides = vars.get("CertificateOverrides", []),
+ DeviceMaxPageSizeSupported = vars.get("DeviceMaxPageSizeSupported", ""),
+ DeviceProduct = vars.get("DeviceProduct", ""),
+ DeviceName = vars.get("DeviceName", ""),
+ Platform_version_name = vars.get("Platform_version_name", ""),
+ BuildId = vars.get("BuildId", ""),
+ ProductManufacturer = vars.get("ProductManufacturer", ""),
+ ProductBrand = vars.get("ProductBrand", ""),
+ TargetBuildVariant = target_build_variant,
+ BuildVersionTags = vars.get("BuildVersionTags", []),
+ ),
+ ProductVariablesDepsInfo(
+ DefaultAppCertificateFiles = ctx.files.default_app_certificate_filegroup,
+ OverridingCertificateFiles = ctx.files.overriding_cert_filegroups,
+ ),
+ ]
+
+# Provides product variables for templated string replacement.
+_product_variables_providing_rule = rule(
+ implementation = _product_variables_providing_rule_impl,
+ attrs = {
+ "attribute_vars": attr.string_dict(doc = "Variables that can be expanded using make-style syntax in attributes"),
+ "product_vars": attr.string(doc = "Regular android product variables, a copy of the soong.variables file. Unfortunately this needs to be a json-encoded string because bazel attributes can only be simple types."),
+ "default_app_certificate_filegroup": attr.label(doc = "The filegroup that contains all the .pem, .pk8, and .avbpubkey files in $(dirname product_vars.DefaultAppCertificate)"),
+ "overriding_cert_filegroups": attr.label_list(doc = "All certificates that are used to override an android_app_certificate using the CertificatesOverride product variable."),
+ },
+)
+
+def product_variables_providing_rule(
+ name,
+ attribute_vars,
+ product_vars):
+ default_app_certificate_filegroup = None
+ default_app_certificate = product_vars.get("DefaultAppCertificate", None)
+ if default_app_certificate:
+ default_app_certificate_filegroup = "@//" + paths.dirname(default_app_certificate) + ":android_certificate_directory"
+
+ # Overriding certificates can be from anywhere, and may not always be in the
+ # same directory as DefaultAppCertificate / PRODUCT_DEFAULT_DEV_CERTIFICATE.
+ # Collect their additional 'android_certificate_directory' filegroups here.
+ #
+ # e.g. if CertificateOverrides is [m1:c1, m2:c2, ..., mn:cn], then collect
+ # //pkg(c1):android_certificate_directory,
+ # //pkg(c2):android_certificate_directory, and so on.
+ #
+ # We cannot add directory dependencies on c1, c2, etc because that would
+ # form a cyclic dependency graph from product_vars to
+ # android_app_certificate (where the override happens) and back to
+ # product_vars again. So reference the filegroups instead.
+ #
+ # Note that this relies on a global bzl mapping of android_app_certificate
+ # module names to the packages they belong to. This is currently generated
+    # by bp2build, but may need to be maintained with a different approach in the
+    # future when the android_app_certificate modules are no longer auto-converted.
+ cert_overrides = product_vars.get("CertificateOverrides", [])
+ cert_filegroups = {}
+ if default_app_certificate_filegroup:
+ cert_filegroups[default_app_certificate_filegroup] = True
+ if cert_overrides:
+ for c in cert_overrides:
+ module_name = c.split(":")[1]
+ pkg = modules.get(module_name) # use the global mapping of module names to their enclosing package.
+ if pkg:
+ # not everything is converted.
+ cert_filegroups["@" + pkg + ":android_certificate_directory"] = True
+
+ _product_variables_providing_rule(
+ name = name,
+ attribute_vars = attribute_vars,
+ product_vars = json.encode(product_vars),
+ default_app_certificate_filegroup = default_app_certificate_filegroup,
+ overriding_cert_filegroups = cert_filegroups.keys(),
+ )
diff --git a/product_variables/constants.bzl b/product_variables/constants.bzl
index 09fd3142..3278a6f0 100644
--- a/product_variables/constants.bzl
+++ b/product_variables/constants.bzl
@@ -1,16 +1,16 @@
"""Constants for product variables based on information in variable.go"""
load(
+ "@soong_injection//product_config:product_variable_constants.bzl",
+ _arch_variant_product_var_constraints = "arch_variant_product_var_constraints",
+ _product_var_constraints = "product_var_constraints",
+)
+load(
"@soong_injection//product_config:soong_config_variables.bzl",
_soong_config_bool_variables = "soong_config_bool_variables",
_soong_config_string_variables = "soong_config_string_variables",
_soong_config_value_variables = "soong_config_value_variables",
)
-load(
- "@soong_injection//product_config:product_variables.bzl",
- _arch_variant_product_var_constraints = "arch_variant_product_var_constraints",
- _product_var_constraints = "product_var_constraints",
-)
_soong_config_variables = _soong_config_bool_variables.keys() + \
_soong_config_string_variables.keys() + \
@@ -31,6 +31,7 @@ _arch_variant_to_constraints = {
"arm64": "//build/bazel/platforms/arch:arm64",
"x86": "//build/bazel/platforms/arch:x86",
"x86_64": "//build/bazel/platforms/arch:x86_64",
+ "riscv64": "//build/bazel/platforms/arch:riscv64",
"android": "//build/bazel/platforms/os:android",
"darwin": "//build/bazel/platforms/os:darwin",
"linux": "//build/bazel/platforms/os:linux",
diff --git a/rules/BUILD.bazel b/rules/BUILD.bazel
index f8c3eeca..f5c99b77 100644
--- a/rules/BUILD.bazel
+++ b/rules/BUILD.bazel
@@ -1 +1,50 @@
+load(":gensrcs_test.bzl", "gensrcs_tests_suite")
+load(":linker_config_test.bzl", "linker_config_test_suite")
+load(":prebuilt_file_test.bzl", "prebuilt_file_test_suite")
+load(":prebuilt_xml_test.bzl", "prebuilt_xml_test_suite")
+
exports_files(["lunch.bzl"])
+
+gensrcs_tests_suite(name = "gensrcs_tests")
+
+py_binary(
+ name = "staging_dir_builder",
+ srcs = ["staging_dir_builder.py"],
+ visibility = ["//visibility:public"],
+)
+
+sh_test(
+ name = "staging_dir_builder_test",
+ srcs = ["staging_dir_builder_test.sh"],
+ data = [
+ "test.pem",
+ ":staging_dir_builder",
+ "//external/avb:avbtool",
+ "//external/e2fsprogs/contrib/android:e2fsdroid",
+ "//external/e2fsprogs/debugfs",
+ "//external/e2fsprogs/misc:blkid",
+ "//external/e2fsprogs/misc:mke2fs",
+ "//external/e2fsprogs/resize:resize2fs",
+ "//external/erofs-utils:fsck.erofs",
+ "//external/selinux/libselinux:sefcontext_compile",
+ "//frameworks/base/tools/aapt2",
+ "//prebuilts/build-tools:linux-x86/bin/soong_zip",
+ "//prebuilts/sdk/current:public/android.jar",
+ "//system/apex/apexer",
+ "//system/apex/apexer:conv_apex_manifest",
+ "//system/apex/tools:apex_compression_tool",
+ "//system/apex/tools:deapexer",
+ ],
+ # This is a host test.
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:linux": [],
+ "//conditions:default": ["@platforms//:incompatible"],
+ }),
+ deps = ["@bazel_tools//tools/bash/runfiles"],
+)
+
+linker_config_test_suite(name = "linker_config_tests")
+
+prebuilt_xml_test_suite(name = "prebuilt_xml_tests")
+
+prebuilt_file_test_suite(name = "prebuilt_file_tests")
diff --git a/rules/README.md b/rules/README.md
index d6a1b8a7..75c76bbd 100644
--- a/rules/README.md
+++ b/rules/README.md
@@ -4,14 +4,4 @@ This directory contains Starlark extensions for building the Android Platform wi
## APEX
-Run the following command to build a miminal APEX example.
-
-```
-$ b build //build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal
-```
-
-Verify the contents of the APEX with `zipinfo`:
-
-```
-$ zipinfo bazel-bin/build/bazel/examples/apex/minimal/build.bazel.examples.apex.minimal.apex
-```
+See [apex/README.md](apex/README.md).
diff --git a/rules/abi/BUILD.bazel b/rules/abi/BUILD.bazel
new file mode 100644
index 00000000..55f9bcb3
--- /dev/null
+++ b/rules/abi/BUILD.bazel
@@ -0,0 +1,18 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":abi_dump_test.bzl", "abi_dump_test_suite")
+
+abi_dump_test_suite(name = "abi_dump_tests")
diff --git a/rules/abi/abi-dumps/platform/32/64/x86_64/source-based/libabi_diff_action.so.lsdump b/rules/abi/abi-dumps/platform/32/64/x86_64/source-based/libabi_diff_action.so.lsdump
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/abi/abi-dumps/platform/32/64/x86_64/source-based/libabi_diff_action.so.lsdump
diff --git a/rules/abi/abi-dumps/platform/33/64/x86_64/source-based/libabi_diff_action.so.lsdump b/rules/abi/abi-dumps/platform/33/64/x86_64/source-based/libabi_diff_action.so.lsdump
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/abi/abi-dumps/platform/33/64/x86_64/source-based/libabi_diff_action.so.lsdump
diff --git a/rules/abi/abi-dumps/platform/34/64/x86_64/source-based/libabi_diff_action.so.lsdump b/rules/abi/abi-dumps/platform/34/64/x86_64/source-based/libabi_diff_action.so.lsdump
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/abi/abi-dumps/platform/34/64/x86_64/source-based/libabi_diff_action.so.lsdump
diff --git a/rules/abi/abi-dumps/platform/BUILD.bazel b/rules/abi/abi-dumps/platform/BUILD.bazel
new file mode 100644
index 00000000..10f952a4
--- /dev/null
+++ b/rules/abi/abi-dumps/platform/BUILD.bazel
@@ -0,0 +1,10 @@
+# READ THIS FIRST:
+# This file was automatically generated by bp2build for the Bazel migration project.
+# Feel free to edit or test it, but do *not* check it into your version control system.
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+ name = "bp2build_all_srcs",
+ srcs = glob(["**/*"]),
+)
diff --git a/rules/abi/abi-dumps/platform/current/64/x86_64/source-based/libabi_diff_action.so.lsdump b/rules/abi/abi-dumps/platform/current/64/x86_64/source-based/libabi_diff_action.so.lsdump
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/abi/abi-dumps/platform/current/64/x86_64/source-based/libabi_diff_action.so.lsdump
diff --git a/rules/abi/abi_dump.bzl b/rules/abi/abi_dump.bzl
new file mode 100644
index 00000000..f1e47618
--- /dev/null
+++ b/rules/abi/abi_dump.bzl
@@ -0,0 +1,412 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load(
+ "@bazel_tools//tools/build_defs/cc:action_names.bzl",
+ "CPP_COMPILE_ACTION_NAME",
+ "C_COMPILE_ACTION_NAME",
+)
+load("@soong_injection//api_levels:platform_versions.bzl", "platform_versions")
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+load(
+ "//build/bazel/rules/cc:cc_library_common.bzl",
+ "build_compilation_flags",
+ "get_non_header_srcs",
+ "is_bionic_lib",
+ "is_bootstrap_lib",
+ "parse_apex_sdk_version",
+)
+load("//build/bazel/rules/cc:cc_library_static.bzl", "CcStaticLibraryInfo")
+
+AbiDumpInfo = provider(fields = ["dump_files"])
+AbiDiffInfo = provider(fields = ["diff_files"])
+
+_ABI_CLASS_PLATFORM = "platform"
+
+def _abi_dump_aspect_impl(target, ctx):
+ if not _abi_diff_enabled(ctx, ctx.label.name, True):
+ return [
+ AbiDumpInfo(
+ dump_files = depset(),
+ ),
+ ]
+
+ transitive_dumps = []
+ direct_dumps = []
+
+ if CcStaticLibraryInfo in target:
+ direct_dumps.extend(_create_abi_dumps(
+ ctx,
+ target,
+ ctx.rule.files.srcs_cpp,
+ ctx.rule.attr.copts_cpp,
+ CPP_COMPILE_ACTION_NAME,
+ ))
+ direct_dumps.extend(_create_abi_dumps(
+ ctx,
+ target,
+ ctx.rule.files.srcs_c,
+ ctx.rule.attr.copts_c,
+ C_COMPILE_ACTION_NAME,
+ ))
+
+ for dep in ctx.rule.attr.static_deps:
+ if AbiDumpInfo in dep:
+ transitive_dumps.append(dep[AbiDumpInfo].dump_files)
+
+ return [
+ AbiDumpInfo(
+ dump_files = depset(
+ direct_dumps,
+ transitive = transitive_dumps,
+ ),
+ ),
+ ]
+
+abi_dump_aspect = aspect(
+ implementation = _abi_dump_aspect_impl,
+ attr_aspects = ["static_deps", "whole_archive_deps"],
+ attrs = {
+ "_skip_abi_checks": attr.label(
+ default = "//build/bazel/flags/cc/abi:skip_abi_checks",
+ ),
+        # Needed in order to call _abi_diff_enabled in the aspect implementation.
+ "_within_apex": attr.label(
+ default = "//build/bazel/rules/apex:within_apex",
+ ),
+ "_abi_dumper": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/clang-tools:linux-x86/bin/header-abi-dumper"),
+ ),
+ "_platform_utils": attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ },
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+ fragments = ["cpp"],
+ provides = [AbiDumpInfo],
+)
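+# Note: abi_dump_aspect propagates along static_deps and whole_archive_deps, so the
+# abi_dump rule below collects .sdump files for the root library and its entire
+# static dependency tree via the AbiDumpInfo provider.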
+
+def _create_abi_dumps(ctx, target, srcs, user_flags, action_name):
+ dumps = []
+
+ if len(srcs) == 0:
+ return dumps
+
+ compilation_context, compilation_flags = build_compilation_flags(
+ ctx,
+ ctx.rule.attr.roots + ctx.rule.attr.deps + ctx.rule.attr.includes,
+ user_flags,
+ action_name,
+ )
+ sources, headers = get_non_header_srcs(srcs)
+
+ header_inputs = (
+ headers +
+ compilation_context.headers.to_list() +
+ compilation_context.direct_headers +
+ compilation_context.direct_private_headers +
+ compilation_context.direct_public_headers +
+ compilation_context.direct_textual_headers
+ )
+ objects = []
+ linker_inputs = target[CcInfo].linking_context.linker_inputs.to_list()
+
+    # These are created in cc_library_static, and there should be exactly one
+    # linker_input with exactly one library.
+ if CcInfo in target and len(linker_inputs) == 1 and len(linker_inputs[0].libraries) == 1:
+ objects = linker_inputs[0].libraries[0].objects
+ for file in sources:
+ output = _create_abi_dump(ctx, target, file, objects, header_inputs, compilation_flags)
+ dumps.append(output)
+
+ return dumps
+
+def _include_flag(flag):
+ return ["-I", flag]
+
+def _create_abi_dump(ctx, target, src, objects, header_inputs, compilation_flags):
+ """ Utility function to generate abi dump file."""
+
+ file = paths.join(src.dirname, target.label.name + "." + src.basename + ".sdump")
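+    # e.g. (illustrative) for a target named "libfoo" and a source file "foo.cpp",
+    # the dump is declared as "<src dirname>/libfoo.foo.cpp.sdump".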
+ output = ctx.actions.declare_file(file)
+ args = ctx.actions.args()
+
+ args.add("--root-dir", ".")
+ args.add("-o", output)
+ args.add(src)
+
+ args.add_all(ctx.rule.attr.exports[0][CcInfo].compilation_context.includes.to_list(), map_each = _include_flag)
+
+ args.add("--")
+ args.add_all(compilation_flags)
+
+ # The following two args come from here:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/builder.go;l=247;drc=ba17c7243d0e297efbc6fb5385d6d5aa81db9152
+ args.add("-w")
+
+ # TODO(b/254625084): support darwin as well.
+ args.add("-isystem", "prebuilts/clang-tools/linux-x86/clang-headers")
+
+ ctx.actions.run(
+ inputs = [src] + header_inputs + objects,
+ executable = ctx.executable._abi_dumper,
+ outputs = [output],
+ arguments = [args],
+ # TODO(b/186116353): enable sandbox once the bug is fixed.
+ execution_requirements = {
+ "no-sandbox": "1",
+ },
+ mnemonic = "AbiDump",
+ )
+
+ return output
+
+def create_linked_abi_dump(ctx, dump_files):
+ """ Utility function to generate abi dump files."""
+ shared_files = ctx.attr.shared[DefaultInfo].files.to_list()
+ if len(shared_files) != 1:
+ fail("Expected only one shared library file")
+
+ file = ctx.attr.soname + ".lsdump"
+ output = ctx.actions.declare_file(file)
+ args = ctx.actions.args()
+
+ args.add("--root-dir", ".")
+ args.add("-o", output)
+ args.add("-so", shared_files[0])
+ inputs = dump_files + [shared_files[0]]
+
+ if ctx.file.symbol_file:
+ args.add("-v", ctx.file.symbol_file.path)
+ inputs.append(ctx.file.symbol_file)
+ for v in ctx.attr.exclude_symbol_versions:
+ args.add("--exclude-symbol-version", v)
+ for t in ctx.attr.exclude_symbol_tags:
+ args.add("--exclude-symbol-tag", t)
+
+ args.add("-arch", platforms.get_target_arch(ctx.attr._platform_utils))
+
+ args.add_all(ctx.attr.root[CcInfo].compilation_context.includes.to_list(), map_each = _include_flag)
+
+ args.add_all([d.path for d in dump_files])
+
+ ctx.actions.run(
+ inputs = inputs,
+ executable = ctx.executable._abi_linker,
+ outputs = [output],
+ arguments = [args],
+ # TODO(b/186116353): enable sandbox once the bug is fixed.
+ execution_requirements = {
+ "no-sandbox": "1",
+ },
+ mnemonic = "AbiLink",
+ )
+
+ return output
+
+def find_abi_config(_ctx):
+ sdk_version = str(platform_versions.platform_sdk_version)
+ prev_version = int(parse_apex_sdk_version(sdk_version))
+ version = "current"
+ if platform_versions.platform_sdk_final:
+ prev_version -= 1
+ version = sdk_version
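+    # Illustrative example (hypothetical SDK level): with platform_sdk_version 34,
+    # this returns (33, "34") when platform_sdk_final is set and (34, "current")
+    # otherwise.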
+
+ return prev_version, version
+
+def create_abi_diff(ctx, dump_file):
+ prev_version, version = find_abi_config(ctx)
+
+ arch = platforms.get_target_arch(ctx.attr._platform_utils)
+ bitness = platforms.get_target_bitness(ctx.attr._platform_utils)
+ abi_class = _ABI_CLASS_PLATFORM
+
+ # The logic below comes from:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/library.go;l=1891;drc=c645853ab73ac8c5889b42f4ce7dc9353ee8fd35
+ abi_reference_file = None
+ if not platform_versions.platform_sdk_final:
+ abi_reference_file = _find_abi_ref_file(ctx, prev_version, arch, bitness, abi_class, dump_file.basename)
+ if not abi_reference_file:
+ prev_version -= 1
+
+ diff_files = []
+
+    # We need to run the ABI check against both the previous version and the current version
+    # when their reference ABI dump files are available. If the previous version doesn't have
+    # a reference ABI dump file, we check against one version earlier instead.
+ if not abi_reference_file:
+ abi_reference_file = _find_abi_ref_file(ctx, prev_version, arch, bitness, abi_class, dump_file.basename)
+ if abi_reference_file:
+ diff_files.append(_run_abi_diff(ctx, arch, prev_version, dump_file, abi_reference_file, True))
+
+ abi_reference_file = _find_abi_ref_file(ctx, version, arch, bitness, abi_class, dump_file.basename)
+ if abi_reference_file:
+ diff_files.append(_run_abi_diff(ctx, arch, version, dump_file, abi_reference_file, False))
+
+ return diff_files
+
+def _run_abi_diff(ctx, arch, version, dump_file, abi_reference_file, prev_version_diff):
+ lib_name = ctx.attr.soname.removesuffix(".so")
+
+ args = ctx.actions.args()
+
+ if ctx.attr.check_all_apis:
+ args.add("-check-all-apis")
+ else:
+ args.add_all(["-allow-unreferenced-changes", "-allow-unreferenced-elf-symbol-changes"])
+
+ if prev_version_diff:
+ args.add("-target-version", version + 1)
+ diff_file_name = ctx.attr.soname + "." + str(version) + ".abidiff"
+ else:
+ args.add("-target-version", "current")
+ diff_file_name = ctx.attr.soname + ".abidiff"
+
+ args.add("-allow-extensions")
+
+ if len(ctx.attr.diff_flags) > 0:
+ args.add_all(ctx.attr.diff_flags)
+
+ args.add("-lib", lib_name)
+ args.add("-arch", arch)
+
+ diff_file = ctx.actions.declare_file(diff_file_name)
+ args.add("-o", diff_file)
+ args.add("-new", dump_file)
+ args.add("-old", abi_reference_file)
+
+ ctx.actions.run(
+ inputs = [dump_file, abi_reference_file],
+ executable = ctx.executable._abi_diff,
+ outputs = [diff_file],
+ arguments = [args],
+ execution_requirements = {
+ "no-sandbox": "1",
+ },
+ mnemonic = "AbiDiff",
+ )
+
+ return diff_file
+
+def _find_abi_ref_file(ctx, version, arch, bitness, abi_class, lsdump_name):
+ # Currently we only support platform.
+ if abi_class == _ABI_CLASS_PLATFORM:
+ abi_ref_dumps = ctx.attr.abi_ref_dumps_platform
+ else:
+ fail("Unsupported ABI class: %s" % abi_class)
+
+ # The expected reference abi dump file
+ ref_dump_file = paths.join(
+ ctx.attr.ref_dumps_home,
+ abi_class,
+ str(version),
+ str(bitness),
+ arch,
+ "source-based",
+ lsdump_name,
+ )
+
+ ref_file = None
+
+ for file in abi_ref_dumps.files.to_list():
+ if ref_dump_file == file.path:
+ ref_file = file
+ break
+
+ return ref_file
+
+def _abi_diff_enabled(ctx, lib_name, is_aspect):
+ # The logic here is based on:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/sabi.go;l=103;drc=cb0ac95bde896fa2aa59193a37ceb580758c322c
+
+ if ctx.attr._skip_abi_checks[BuildSettingInfo].value:
+ return False
+ if not platforms.is_target_android(ctx.attr._platform_utils):
+ return False
+ if ctx.coverage_instrumented():
+ return False
+ if ctx.attr._within_apex[BuildSettingInfo].value:
+ if not is_aspect and not ctx.attr.has_stubs:
+ return False
+
+ # Logic comes from here:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/sabi.go;l=158;drc=cb0ac95bde896fa2aa59193a37ceb580758c322c
+
+ elif is_bionic_lib(lib_name) or is_bootstrap_lib(lib_name):
+ return False
+
+ # TODO(b/260611960): handle all the other checks in sabi.go
+ return True
+
+def _abi_dump_impl(ctx):
+ diff_files = depset()
+ if _abi_diff_enabled(ctx, ctx.attr.soname.removesuffix(".so"), False) and ctx.attr.root != None:
+ dump_files = ctx.attr.root[AbiDumpInfo].dump_files.to_list()
+ linked_dump_file = create_linked_abi_dump(ctx, dump_files)
+ diff_files = depset(create_abi_diff(ctx, linked_dump_file))
+
+ return ([
+ DefaultInfo(files = diff_files),
+ AbiDiffInfo(diff_files = diff_files),
+ ])
+
+abi_dump = rule(
+ implementation = _abi_dump_impl,
+ attrs = {
+ "shared": attr.label(mandatory = True, providers = [CcSharedLibraryInfo]),
+ "root": attr.label(providers = [CcInfo], aspects = [abi_dump_aspect]),
+ "soname": attr.string(mandatory = True),
+ "has_stubs": attr.bool(default = False),
+ "enabled": attr.bool(default = False),
+ "explicitly_disabled": attr.bool(default = False),
+ "symbol_file": attr.label(allow_single_file = True),
+ "exclude_symbol_versions": attr.string_list(default = []),
+ "exclude_symbol_tags": attr.string_list(default = []),
+ "check_all_apis": attr.bool(default = False),
+ "diff_flags": attr.string_list(default = []),
+ "abi_ref_dumps_platform": attr.label(default = "//prebuilts/abi-dumps/platform:bp2build_all_srcs"),
+ "ref_dumps_home": attr.string(default = "prebuilts/abi-dumps"),
+ "_skip_abi_checks": attr.label(
+ default = "//build/bazel/flags/cc/abi:skip_abi_checks",
+ ),
+ "_within_apex": attr.label(
+ default = "//build/bazel/rules/apex:within_apex",
+ ),
+ # TODO(b/254625084): For the following tools we need to support darwin as well.
+ "_abi_dumper": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/clang-tools:linux-x86/bin/header-abi-dumper"),
+ ),
+ "_abi_linker": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/clang-tools:linux-x86/bin/header-abi-linker"),
+ ),
+ "_abi_diff": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/clang-tools:linux-x86/bin/header-abi-diff"),
+ ),
+ "_platform_utils": attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ },
+ fragments = ["cpp"],
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+)
diff --git a/rules/abi/abi_dump_test.bzl b/rules/abi/abi_dump_test.bzl
new file mode 100644
index 00000000..87874331
--- /dev/null
+++ b/rules/abi/abi_dump_test.bzl
@@ -0,0 +1,677 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:sets.bzl", "sets")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/test_common:args.bzl", "get_arg_value", "get_arg_values")
+load(":abi_dump.bzl", "abi_dump", "find_abi_config")
+
+ABI_LINKER = "prebuilts/clang-tools/linux-x86/bin/header-abi-linker"
+ABI_DIFF = "prebuilts/clang-tools/linux-x86/bin/header-abi-diff"
+
+# cxa_demangle.cpp is added as part of the stl in cc_library_shared, so its dump
+# file is always created.
+CXA_DEMANGLE = "external/libcxxabi/external/libcxxabi/src/libc++demangle.cxa_demangle.cpp.sdump"
+REF_DUMPS_HOME = "build/bazel/rules/abi/abi-dumps"
+ARCH = "x86_64"
+BITNESS = 64
+CONFIG_SETTING_COVERAGE = {
+ "//command_line_option:collect_code_coverage": True,
+}
+CONFIG_SETTING_SKIP_ABI_CHECK = {
+ "@//build/bazel/flags/cc/abi:skip_abi_checks": True,
+}
+CONFIG_SETTING_IN_APEX = {
+ "@//build/bazel/rules/apex:within_apex": True,
+}
+
+def _abi_linker_action_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ bin_home = analysistest.target_bin_dir_path(env)
+ bazel_out_base = paths.join(bin_home, ctx.label.package)
+
+ actions = analysistest.target_actions(env)
+ link_actions = [a for a in actions if a.mnemonic == "AbiLink"]
+
+ asserts.true(
+ env,
+ len(link_actions) == 1,
+ "Abi link action not found: %s" % link_actions,
+ )
+
+ action = link_actions[0]
+
+ stripped_so = paths.join(bazel_out_base, "lib" + ctx.attr.lib_name + "_stripped.so")
+ symbol_file = paths.join(ctx.label.package, ctx.attr.symbol_file)
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [paths.join(bazel_out_base, ctx.label.package, file + ".sdump") for file in ctx.attr.dumps] + [
+ ABI_LINKER,
+ paths.join(bin_home, CXA_DEMANGLE),
+ stripped_so,
+ symbol_file,
+ ],
+ ),
+ actual = sets.make([
+ file.path
+ for file in action.inputs.to_list()
+ ]),
+ )
+
+ lsdump_file = paths.join(bazel_out_base, ctx.attr.lib_name + ".so.lsdump")
+ asserts.set_equals(
+ env,
+ expected = sets.make([lsdump_file]),
+ actual = sets.make([
+ file.path
+ for file in action.outputs.to_list()
+ ]),
+ )
+
+ argv = action.argv
+ _test_arg_set_correctly(env, argv, "--root-dir", ".")
+ _test_arg_set_correctly(env, argv, "-o", lsdump_file)
+ _test_arg_set_correctly(env, argv, "-so", stripped_so)
+ _test_arg_set_correctly(env, argv, "-arch", ARCH)
+ _test_arg_set_correctly(env, argv, "-v", symbol_file)
+ _test_arg_set_multi_values_correctly(env, argv, "--exclude-symbol-version", ctx.attr.exclude_symbol_versions)
+ _test_arg_set_multi_values_correctly(env, argv, "--exclude-symbol-tag", ctx.attr.exclude_symbol_tags)
+ _test_arg_set_multi_values_correctly(
+ env,
+ argv,
+ "-I",
+ [paths.join(bazel_out_base, file) for file in ctx.attr.export_includes] +
+ [paths.join(ctx.label.package, file) for file in ctx.attr.export_includes] +
+ ctx.attr.export_absolute_includes +
+ [paths.join(bin_home, file) for file in ctx.attr.export_absolute_includes],
+ )
+
+ sdump_files = []
+ args = " ".join(argv).split(" ")
+ args_len = len(args)
+
+    # The .sdump files are at the end of the args; the abi linker binary is always at index 0.
+ for i in reversed(range(args_len)):
+ if ".sdump" in args[i]:
+ sdump_files.append(args[i])
+ else:
+ break
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [paths.join(bazel_out_base, ctx.label.package, file + ".sdump") for file in ctx.attr.dumps] + [
+ paths.join(bin_home, CXA_DEMANGLE),
+ ],
+ ),
+ actual = sets.make(sdump_files),
+ )
+
+ return analysistest.end(env)
+
+__abi_linker_action_test = analysistest.make(
+ impl = _abi_linker_action_test_impl,
+ attrs = {
+ "dumps": attr.string_list(),
+ "lib_name": attr.string(),
+ "symbol_file": attr.string(),
+ "exclude_symbol_versions": attr.string_list(),
+ "exclude_symbol_tags": attr.string_list(),
+ "export_includes": attr.string_list(),
+ "export_absolute_includes": attr.string_list(),
+ "_platform_utils": attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ },
+)
+
+def _abi_linker_action_test(**kwargs):
+ __abi_linker_action_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+def _test_abi_linker_action():
+ name = "abi_linker_action"
+ static_dep_a = name + "_static_dep_a"
+ static_dep_b = name + "_static_dep_b"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = static_dep_a,
+ srcs = ["static_a.cpp"],
+ srcs_c = ["static_a.c"],
+ export_includes = ["export_includes_static_a"],
+ export_absolute_includes = ["export_absolute_includes_static_a"],
+ export_system_includes = ["export_system_includes_static_a"],
+ local_includes = ["local_includes_static_a"],
+ absolute_includes = ["absolute_includes_static_a"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = static_dep_b,
+ srcs = ["static_b.cpp"],
+ srcs_c = ["static_b.c"],
+ deps = [":" + static_dep_a],
+ export_includes = ["export_includes_static_b"],
+ export_absolute_includes = ["export_absolute_includes_static_b"],
+ export_system_includes = ["export_system_includes_static_b"],
+ local_includes = ["local_includes_static_b"],
+ absolute_includes = ["absolute_includes_static_b"],
+ tags = ["manual"],
+ )
+
+ symbol_file = "shared_a.map.txt"
+ exclude_symbol_versions = ["30", "31"]
+ exclude_symbol_tags = ["func_1", "func_2"]
+
+ cc_library_shared(
+ name = name,
+ srcs = ["shared.cpp"],
+ srcs_c = ["shared.c"],
+ deps = [":" + static_dep_b],
+ export_includes = ["export_includes_shared"],
+ export_absolute_includes = ["export_absolute_includes_shared"],
+ export_system_includes = ["export_system_includes_shared"],
+ local_includes = ["local_includes_shared"],
+ absolute_includes = ["absolute_includes_shared"],
+ stubs_symbol_file = name + ".map.txt",
+ abi_checker_symbol_file = symbol_file,
+ abi_checker_exclude_symbol_versions = exclude_symbol_versions,
+ abi_checker_exclude_symbol_tags = exclude_symbol_tags,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ dumps = [
+ static_dep_a + ".static_a.cpp",
+ static_dep_b + ".static_b.cpp",
+ name + "__internal_root.shared.cpp",
+ static_dep_a + ".static_a.c",
+ static_dep_b + ".static_b.c",
+ name + "__internal_root.shared.c",
+ ],
+ lib_name = name,
+ symbol_file = symbol_file,
+ exclude_symbol_versions = exclude_symbol_versions,
+ exclude_symbol_tags = exclude_symbol_tags,
+ export_includes = [
+ "export_includes_shared",
+ "export_includes_static_a",
+ "export_includes_static_b",
+ ],
+ export_absolute_includes = [
+ "export_absolute_includes_shared",
+ "export_absolute_includes_static_a",
+ "export_absolute_includes_static_b",
+ ],
+ )
+
+ return test_name
+
+def _abi_linker_action_run_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ link_actions = [a for a in actions if a.mnemonic == "AbiLink"]
+
+ asserts.true(
+ env,
+ len(link_actions) == 1,
+ "Abi link action not found: %s" % link_actions,
+ )
+
+ return analysistest.end(env)
+
+__abi_linker_action_run_test = analysistest.make(
+ impl = _abi_linker_action_run_test_impl,
+)
+
+def _abi_linker_action_run_test(**kwargs):
+ __abi_linker_action_run_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+def _test_abi_linker_action_run_for_enabled():
+ name = "abi_linker_action_run_for_enabled"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ abi_checker_enabled = True,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_run_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _abi_linker_action_not_run_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ link_actions = [a for a in actions if a.mnemonic == "AbiLink"]
+
+ asserts.true(
+ env,
+ len(link_actions) == 0,
+ "Abi link action found: %s" % link_actions,
+ )
+
+ return analysistest.end(env)
+
+__abi_linker_action_not_run_test = analysistest.make(
+ impl = _abi_linker_action_not_run_test_impl,
+)
+
+def _abi_linker_action_not_run_test(**kwargs):
+ __abi_linker_action_not_run_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+__abi_linker_action_not_run_for_no_device_test = analysistest.make(
+ impl = _abi_linker_action_not_run_test_impl,
+)
+
+def _abi_linker_action_not_run_for_no_device_test(**kwargs):
+ __abi_linker_action_not_run_for_no_device_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:linux",
+ ],
+ **kwargs
+ )
+
+__abi_linker_action_not_run_for_coverage_test = analysistest.make(
+ impl = _abi_linker_action_not_run_test_impl,
+ config_settings = CONFIG_SETTING_COVERAGE,
+)
+
+def _abi_linker_action_not_run_for_coverage_test(**kwargs):
+ __abi_linker_action_not_run_for_coverage_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+__abi_linker_action_not_run_if_skipped_test = analysistest.make(
+ impl = _abi_linker_action_not_run_test_impl,
+ config_settings = CONFIG_SETTING_SKIP_ABI_CHECK,
+)
+
+def _abi_linker_action_not_run_if_skipped_test(**kwargs):
+ __abi_linker_action_not_run_if_skipped_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+__abi_linker_action_not_run_apex_no_stubs_test = analysistest.make(
+ impl = _abi_linker_action_not_run_test_impl,
+ config_settings = CONFIG_SETTING_IN_APEX,
+)
+
+def _abi_linker_action_not_run_apex_no_stubs_test(**kwargs):
+ __abi_linker_action_not_run_apex_no_stubs_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+def _test_abi_linker_action_not_run_for_default():
+ name = "abi_linker_action_not_run_for_default"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _test_abi_linker_action_not_run_for_disabled():
+ name = "abi_linker_action_not_run_for_disabled"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ stubs_symbol_file = name + ".map.txt",
+ abi_checker_enabled = False,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _test_abi_linker_action_not_run_for_no_device():
+ name = "abi_linker_action_not_run_for_no_device"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ abi_checker_enabled = True,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_for_no_device_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _test_abi_linker_action_not_run_if_skipped():
+ name = "abi_linker_action_not_run_if_skipped"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ abi_checker_enabled = True,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_if_skipped_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _test_abi_linker_action_not_run_for_coverage_enabled():
+ name = "abi_linker_action_not_run_for_coverage_enabled"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ abi_checker_enabled = True,
+ features = ["coverage"],
+        # Coverage will add an extra lib to all the shared libs; we try to avoid
+        # that by clearing system_dynamic_deps and stl.
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_for_coverage_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _test_abi_linker_action_not_run_for_apex_no_stubs():
+ name = "abi_linker_action_not_run_for_apex_no_stubs"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ abi_checker_enabled = True,
+ tags = ["manual"],
+ )
+
+ _abi_linker_action_not_run_apex_no_stubs_test(
+ name = test_name,
+ target_under_test = name + "_abi_dump",
+ )
+
+ return test_name
+
+def _abi_diff_action_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ diff_actions = [a for a in actions if a.mnemonic == "AbiDiff"]
+
+ asserts.true(
+ env,
+ len(diff_actions) == 2,
+ "There should be two abi diff actions: %s" % diff_actions,
+ )
+
+ prev_version, version = find_abi_config(ctx)
+ _verify_abi_diff_action(ctx, env, diff_actions[0], prev_version, True)
+ _verify_abi_diff_action(ctx, env, diff_actions[1], version, False)
+
+ return analysistest.end(env)
+
+def _verify_abi_diff_action(ctx, env, action, version, is_prev_version):
+ bin_home = analysistest.target_bin_dir_path(env)
+ bazel_out_base = paths.join(bin_home, ctx.label.package)
+ lsdump_file = paths.join(bazel_out_base, ctx.attr.lib_name + ".so.lsdump")
+
+ ref_dump = paths.join(
+ REF_DUMPS_HOME,
+ "platform",
+ str(version),
+ str(BITNESS),
+ ARCH,
+ "source-based",
+ ctx.attr.lib_name + ".so.lsdump",
+ )
+ asserts.set_equals(
+ env,
+ expected = sets.make([
+ lsdump_file,
+ ABI_DIFF,
+ ref_dump,
+ ]),
+ actual = sets.make([
+ file.path
+ for file in action.inputs.to_list()
+ ]),
+ )
+
+ if is_prev_version:
+ diff_file = paths.join(bazel_out_base, ".".join([ctx.attr.lib_name, "so", str(version), "abidiff"]))
+ else:
+ diff_file = paths.join(bazel_out_base, ".".join([ctx.attr.lib_name, "so", "abidiff"]))
+
+ asserts.set_equals(
+ env,
+ expected = sets.make([diff_file]),
+ actual = sets.make([
+ file.path
+ for file in action.outputs.to_list()
+ ]),
+ )
+
+ argv = action.argv
+ _test_arg_set_correctly(env, argv, "-o", diff_file)
+ _test_arg_set_correctly(env, argv, "-old", ref_dump)
+ _test_arg_set_correctly(env, argv, "-new", lsdump_file)
+ _test_arg_set_correctly(env, argv, "-lib", ctx.attr.lib_name)
+ _test_arg_set_correctly(env, argv, "-arch", ARCH)
+ _test_arg_exists(env, argv, "-allow-unreferenced-changes")
+ _test_arg_exists(env, argv, "-allow-unreferenced-elf-symbol-changes")
+ _test_arg_exists(env, argv, "-allow-extensions")
+ if is_prev_version:
+ _test_arg_set_correctly(env, argv, "-target-version", str(version + 1))
+ else:
+ _test_arg_set_correctly(env, argv, "-target-version", "current")
+
+__abi_diff_action_test = analysistest.make(
+ impl = _abi_diff_action_test_impl,
+ attrs = {
+ "lib_name": attr.string(),
+ "_platform_utils": attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ },
+)
+
+def _abi_diff_action_test(**kwargs):
+ __abi_diff_action_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+def _test_abi_diff_action():
+ name = "abi_diff_action"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ srcs = ["shared.cpp"],
+ tags = ["manual"],
+ )
+
+ lib_name = "lib" + name
+ abi_dump_name = name + "_abi_dump_new"
+ abi_dump(
+ name = abi_dump_name,
+ shared = name + "_stripped",
+ root = name + "__internal_root",
+ soname = lib_name + ".so",
+ enabled = True,
+ abi_ref_dumps_platform = "//build/bazel/rules/abi/abi-dumps/platform:bp2build_all_srcs",
+ ref_dumps_home = "build/bazel/rules/abi/abi-dumps",
+ tags = ["manual"],
+ )
+
+ _abi_diff_action_test(
+ name = test_name,
+ target_under_test = abi_dump_name,
+ lib_name = lib_name,
+ )
+
+ return test_name
+
+def _abi_diff_action_not_run_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ diff_actions = [a for a in actions if a.mnemonic == "AbiDiff"]
+
+ asserts.true(
+ env,
+ len(diff_actions) == 0,
+ "Abi diff action found: %s" % diff_actions,
+ )
+
+ return analysistest.end(env)
+
+__abi_diff_action_not_run_test = analysistest.make(
+ impl = _abi_diff_action_not_run_test_impl,
+)
+
+def _abi_diff_action_not_run_test(**kwargs):
+ __abi_diff_action_not_run_test(
+ target_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:android",
+ ],
+ **kwargs
+ )
+
+def _test_abi_diff_action_not_run_if_no_ref_dump_found():
+ name = "abi_diff_action_not_run_if_no_ref_dump_found"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name,
+ srcs = ["shared.cpp"],
+ tags = ["manual"],
+ )
+
+ lib_name = "lib" + name
+ abi_dump_name = name + "_abi_dump_new"
+ abi_dump(
+ name = abi_dump_name,
+ shared = name + "_stripped",
+ root = name + "__internal_root",
+ soname = lib_name + ".so",
+ enabled = True,
+ ref_dumps_home = "build/bazel/rules/abi/abi-dumps",
+ tags = ["manual"],
+ )
+
+ _abi_diff_action_not_run_test(
+ name = test_name,
+ target_under_test = abi_dump_name,
+ )
+
+ return test_name
+
+def _test_arg_set_correctly(env, argv, arg_name, expected):
+ arg = get_arg_value(argv, arg_name)
+ asserts.true(
+ env,
+ arg == expected,
+ "%s is not set correctly: expected %s, actual %s" % (arg_name, expected, arg),
+ )
+
+def _test_arg_set_multi_values_correctly(env, argv, arg_name, expected):
+ args = get_arg_values(argv, arg_name)
+ asserts.set_equals(
+ env,
+ expected = sets.make(expected),
+ actual = sets.make(args),
+ )
+
+def _test_arg_exists(env, argv, arg_name):
+ asserts.true(
+ env,
+ arg_name in argv,
+ "arg %s is not found" % arg_name,
+ )
+
+def abi_dump_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_abi_linker_action(),
+ _test_abi_linker_action_not_run_for_default(),
+ _test_abi_linker_action_not_run_for_disabled(),
+ _test_abi_linker_action_run_for_enabled(),
+ _test_abi_linker_action_not_run_for_no_device(),
+ _test_abi_linker_action_not_run_for_coverage_enabled(),
+ _test_abi_linker_action_not_run_if_skipped(),
+ _test_abi_linker_action_not_run_for_apex_no_stubs(),
+ _test_abi_diff_action(),
+ _test_abi_diff_action_not_run_if_no_ref_dump_found(),
+ ],
+ )
diff --git a/rules/aidl/BUILD b/rules/aidl/BUILD
new file mode 100644
index 00000000..92c76b81
--- /dev/null
+++ b/rules/aidl/BUILD
@@ -0,0 +1,22 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":aidl_interface_test.bzl", "aidl_interface_test_suite")
+load(":aidl_library_test.bzl", "aidl_library_test_suite")
+
+aidl_library_test_suite(name = "aidl_library_tests")
+
+aidl_interface_test_suite(name = "aidl_interface_tests")
diff --git a/rules/aidl/aidl_interface.bzl b/rules/aidl/aidl_interface.bzl
new file mode 100644
index 00000000..da2cd669
--- /dev/null
+++ b/rules/aidl/aidl_interface.bzl
@@ -0,0 +1,331 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/aidl:aidl_library.bzl", "aidl_library")
+load("//build/bazel/rules/cc:cc_aidl_library.bzl", "cc_aidl_library")
+load("//build/bazel/rules/java:java_aidl_library.bzl", "java_aidl_library")
+
+JAVA = "java"
+CPP = "cpp"
+NDK = "ndk"
+#TODO(b/246803961) Add support for rust backend
+
+def _hash_file(name, version):
+ return "aidl_api/{}/{}/.hash".format(name, version)
+
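+# _check_versions_with_info validates entries such as the following
+# (hypothetical) value: versions must be positive, strictly increasing
+# integers, and every dep must name an explicit frozen version:
+#
+#   versions_with_info = [
+#       {"version": "1", "deps": ["common-V2"]},
+#       {"version": "2"},
+#   ]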
+def _check_versions_with_info(versions_with_info):
+ for version_with_info in versions_with_info:
+ for dep in version_with_info.get("deps", []):
+ parts = dep.split("-V")
+ if len(parts) < 2 or not parts[-1].isdigit():
+                fail("deps in versions_with_info must specify a version, but got:", dep)
+
+ versions = []
+
+ # ensure that all versions are ints
+ for info in versions_with_info:
+ version = info["version"]
+        if not version.isdigit():
+            fail("version {} is not an integer".format(version))
+
+ versions.append(int(version))
+
+ if versions != sorted(versions):
+ fail("versions should be sorted")
+
+ for i, v in enumerate(versions):
+ if i > 0:
+ if v == versions[i - 1]:
+ fail("duplicate version found:", v)
+ if v <= 0:
+ fail("all versions should be > 0, but found version:", v)
+
+def _create_latest_version_aliases(name, last_version_name, backend_configs, **kwargs):
+ latest_name = name + "-latest"
+ native.alias(
+ name = latest_name,
+ actual = ":" + last_version_name,
+ **kwargs
+ )
+ for lang in backend_configs.keys():
+ language_binding_name = last_version_name + "-" + lang
+ native.alias(
+ name = latest_name + "-" + lang,
+ actual = ":" + language_binding_name,
+ **kwargs
+ )
+
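+# For example (illustrative), _versioned_name("foo", "2") returns "foo-V2" and
+# _versioned_name("foo", "") returns "foo".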
+def _versioned_name(name, version):
+ if version == "":
+ return name
+
+ return name + "-V" + version
+
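+# For example (illustrative), _next_version([{"version": "1"}, {"version": "3"}], False)
+# returns "4", _next_version([], False) returns "1", and any call with
+# unstable = True returns "".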
+# https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface.go;l=782-799;drc=5390d9a42f5e4f99ccb3a84068f554d948cb62b9
+def _next_version(versions_with_info, unstable):
+ if unstable:
+ return ""
+
+ if versions_with_info == None or len(versions_with_info) == 0:
+ return "1"
+
+ return str(int(versions_with_info[-1]["version"]) + 1)
+
+def _is_config_enabled(config):
+ if config == None:
+ return False
+
+ for key in config:
+ if key not in ["enabled", "min_sdk_version", "tags"]:
+ fail("unknown property in aidl configuration: " + str(key))
+
+ return config.get("enabled", False) == True
+
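+# Illustrative usage (target and file names are hypothetical); in a BUILD file
+# that loads this macro, one frozen version plus development srcs would yield
+# foo-V1 (from aidl_api/foo/1/), foo-V2 (from srcs), a foo-latest alias, and
+# per-backend targets such as foo-V1-java:
+#
+#   aidl_interface(
+#       name = "foo",
+#       srcs = ["android/foo/IFoo.aidl"],
+#       java_config = {"enabled": True},
+#       versions_with_info = [{"version": "1"}],
+#   )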
+def aidl_interface(
+ name,
+ deps = [],
+ strip_import_prefix = "",
+ srcs = None,
+ flags = None,
+ java_config = None,
+ cpp_config = None,
+ ndk_config = None,
+ stability = None,
+ versions_with_info = [],
+ unstable = False,
+ tags = [],
+ # TODO(b/261208761): Support frozen attr
+ frozen = False,
+ **kwargs):
+    """aidl_interface creates versioned aidl_library targets and language-specific *_aidl_library targets.
+
+ This macro loops over the list of required versions and searches for all
+    *.aidl source files located under the path `aidl_api/<version label>/`.
+ For each version, an `aidl_library` is created with the corresponding sources.
+    For each `aidl_library`, a language-binding *_aidl_library target is created
+    for every backend enabled via java_config, cpp_config, or ndk_config.
+
+ Arguments:
+ name: string, base name of generated targets: <module-name>-V<version number>-<language-type>
+ deps: List[AidlGenInfo], a list of other aidl_libraries that all versions of this interface depend on
+ strip_import_prefix: str, a local directory to pass to the AIDL compiler to satisfy imports
+ srcs: List[file], a list of files to include in the development (unversioned) version of the aidl_interface
+ flags: List[string], a list of flags to pass to the AIDL compiler
+ java_config: Dict{"enabled": bool}, config for java backend
+ cpp_config: Dict{"enabled": bool, "min_sdk_version": string}, config for cpp backend
+ ndk_config: Dict{"enabled": bool, "min_sdk_version": string}, config for ndk backend
+ stability: string, stability promise of the interface. Currently, only supports "vintf"
+        versions_with_info: List[Dict], frozen versions of the interface; each entry has a
+            "version" key and an optional "deps" key listing versioned dependencies
+        unstable: bool, whether the interface is unversioned and built only from srcs
+        frozen: bool, whether the interface is frozen (full support tracked in b/261208761)
+        tags: List[string], tags applied to all generated targets
+ """
+
+ # When versions_with_info is set, versions is no-op.
+ # TODO(b/244349745): Modify bp2build to skip convert versions if versions_with_info is set
+ if (len(versions_with_info) == 0 and srcs == None):
+ fail("must specify at least versions_with_info or srcs")
+
+ if len(versions_with_info) == 0:
+ if frozen == True:
+ fail("frozen cannot be set without versions_with_info attr being set")
+ elif unstable == True:
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface.go;l=872;drc=5390d9a42f5e4f99ccb3a84068f554d948cb62b9
+ fail("cannot have versions for unstable interface")
+
+ aidl_flags = ["--structured"]
+ if flags != None:
+ aidl_flags.extend(flags)
+
+ enabled_backend_configs = {}
+ if _is_config_enabled(java_config):
+ enabled_backend_configs[JAVA] = java_config
+ if _is_config_enabled(cpp_config):
+ enabled_backend_configs[CPP] = cpp_config
+ if _is_config_enabled(ndk_config):
+ enabled_backend_configs[NDK] = ndk_config
+
+ if stability != None:
+ if unstable == True:
+ fail("stability must be unset when unstable is true")
+ if stability == "vintf":
+ aidl_flags.append("--stability=" + stability)
+
+ # TODO(b/245738285): Add support for vintf stability in java backend
+ if JAVA in enabled_backend_configs:
+ enabled_backend_configs.pop(JAVA)
+ else:
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface.go;l=329;drc=e88d9a9b14eafb064a234d555a5cd96de97ca9e2
+ # only vintf is allowed currently
+ fail("stability must be unset or \"vintf\"")
+
+ # next_version will be the last specified version + 1.
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface.go;l=791?q=system%2Ftools%2Faidl%2Fbuild%2Faidl_interface.go
+ next_version = None
+
+ if len(versions_with_info) > 0:
+ _check_versions_with_info(versions_with_info)
+ next_version = _next_version(versions_with_info, False)
+
+ for version_with_info in versions_with_info:
+ deps_for_version = version_with_info.get("deps", [])
+
+ create_aidl_binding_for_backends(
+ name = name,
+ version = version_with_info["version"],
+ deps = deps_for_version,
+ aidl_flags = aidl_flags,
+ backend_configs = enabled_backend_configs,
+ tags = tags,
+ **kwargs
+ )
+
+ _create_latest_version_aliases(
+ name,
+ _versioned_name(name, versions_with_info[-1]["version"]),
+ enabled_backend_configs,
+ tags = tags,
+ **kwargs
+ )
+ else:
+ next_version = _next_version(versions_with_info, unstable)
+
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface.go;l=941;drc=5390d9a42f5e4f99ccb3a84068f554d948cb62b9
+ # Create aidl binding for next_version with srcs
+ if srcs and len(srcs) > 0:
+ create_aidl_binding_for_backends(
+ name = name,
+ version = next_version,
+ srcs = srcs,
+ strip_import_prefix = strip_import_prefix,
+ deps = deps,
+ aidl_flags = aidl_flags,
+ backend_configs = enabled_backend_configs,
+ tags = tags,
+ **kwargs
+ )
+
+def create_aidl_binding_for_backends(
+ name,
+ version = None,
+ srcs = None,
+ strip_import_prefix = "",
+ deps = None,
+ aidl_flags = [],
+ backend_configs = {},
+ tags = [],
+ **kwargs):
+ """
+    Create an aidl_library target and corresponding <backend>_aidl_library targets for a given version.
+
+ Arguments:
+ name: string, base name of the aidl interface
+ version: string, version of the aidl interface
+        srcs: List[Label], list of unversioned AIDL srcs
+        strip_import_prefix: string, the prefix to strip from the paths of the .aidl files in srcs
+        deps: List[AidlGenInfo], a list of other aidl_libraries that the version depends on;
+            the labels of these targets have the format <aidl-interface>-V<version_number>
+        aidl_flags: List[string], a list of flags to pass to the AIDL compiler
+        backend_configs: Dict[string, Dict], configs of the enabled backends to generate bindings for
+ """
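+    # For example (hypothetical values), name = "foo" and version = "2" with an
+    # enabled cpp backend produce :foo-V2 (aidl_library) and :foo-V2-cpp
+    # (cc_aidl_library).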
+ aidl_library_name = _versioned_name(name, version)
+
+    # srcs is None when create_aidl_binding_for_backends is called for a
+    # frozen version specified via versions_with_info; in that case the
+    # sources are globbed from the aidl_api directory below.
+    # An empty version ("") marks an unstable interface: srcs are used as-is
+    # and no --version flag is passed to the AIDL compiler.
+ if version != "":
+ aidl_flags = aidl_flags + ["--version=" + version]
+
+ hash_file = None
+
+ if srcs == None:
+ if version == "":
+ fail("need srcs for unversioned interface")
+ strip_import_prefix = "aidl_api/{}/{}".format(name, version)
+ srcs = native.glob([strip_import_prefix + "/**/*.aidl"])
+ hash_file = _hash_file(name, version)
+
+ aidl_library(
+ name = aidl_library_name,
+ deps = deps,
+ hash_file = hash_file,
+ version = version,
+ strip_import_prefix = strip_import_prefix,
+ srcs = srcs,
+ flags = aidl_flags,
+ # The language-specific backends will set more appropriate apex_available values.
+ tags = tags + ["apex_available=//apex_available:anyapex"],
+ **kwargs
+ )
+
+ for lang, config in backend_configs.items():
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_gen_rule.go;l=207;drc=a858ae7039b876a30002a1130f24196915a859a4
+ min_sdk_version = "current"
+ if "min_sdk_version" in config:
+ min_sdk_version = config["min_sdk_version"]
+
+ if lang == JAVA:
+ java_aidl_library(
+ name = aidl_library_name + "-java",
+ deps = [":" + aidl_library_name],
+ tags = tags + config.get("tags", []),
+ # TODO(b/249276008): Pass min_sdk_version to java_aidl_library
+ **(kwargs | {"target_compatible_with": ["//build/bazel/platforms/os:android"]})
+ )
+ elif lang == CPP or lang == NDK:
+ dynamic_deps = []
+ cppflags = []
+
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface_backends.go;l=564;drc=0517d97079d4b08f909e7f35edfa33b88fcc0d0e
+ if deps != None:
+ # For each aidl_library target label versioned_name, there's an
+ # associated cc_library_shared target with label versioned_name-<cpp|ndk>
+ dynamic_deps.extend(["{}-{}".format(dep, lang) for dep in deps])
+
+ # https://cs.android.com/android/platform/superproject/+/master:system/tools/aidl/build/aidl_interface_backends.go;l=111;drc=ef9f1352a1a8fec7bb134b1c713e13fc3ccee651
+ if lang == CPP:
+ dynamic_deps.extend([
+ "//frameworks/native/libs/binder:libbinder",
+ "//system/core/libutils:libutils",
+ ])
+ elif lang == NDK:
+ dynamic_deps = dynamic_deps + select({
+ "//build/bazel/rules/apex:android-in_apex": ["//frameworks/native/libs/binder/ndk:libbinder_ndk_stub_libs_current"],
+ "//conditions:default": ["//frameworks/native/libs/binder/ndk:libbinder_ndk"],
+ })
+
+ # https://source.corp.google.com/android/system/tools/aidl/build/aidl_interface_backends.go;l=120;rcl=18dd931bde35b502545b7a52987e2363042c151c
+ cppflags = ["-DBINDER_STABILITY_SUPPORT"]
+
+            if "tidy_checks_as_errors" in kwargs:
+                fail("tidy_checks_as_errors cannot be overridden for aidl_interface cc_libraries")
+ tidy_checks_as_errors = [
+ "*",
+ "-clang-analyzer-deadcode.DeadStores", # b/253079031
+ "-clang-analyzer-cplusplus.NewDeleteLeaks", # b/253079031
+ "-clang-analyzer-optin.performance.Padding", # b/253079031
+ ]
+
+ cc_aidl_library(
+ name = "{}-{}".format(aidl_library_name, lang),
+ make_shared = True,
+ cppflags = cppflags,
+ deps = [":" + aidl_library_name],
+ dynamic_deps = dynamic_deps,
+ lang = lang,
+ min_sdk_version = min_sdk_version,
+ tidy = "local",
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_gen_header_filter = True,
+ tags = tags + config.get("tags", []),
+ **kwargs
+ )
diff --git a/rules/aidl/aidl_interface_test.bzl b/rules/aidl/aidl_interface_test.bzl
new file mode 100644
index 00000000..538ad79b
--- /dev/null
+++ b/rules/aidl/aidl_interface_test.bzl
@@ -0,0 +1,471 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/aidl:aidl_interface.bzl", "aidl_interface")
+load("//build/bazel/rules/aidl:aidl_library.bzl", "AidlGenInfo")
+load("//build/bazel/rules/test_common:rules.bzl", "target_under_test_exist_test")
+load("//build/bazel/rules/test_common:flags.bzl", "action_flags_present_only_for_mnemonic_test_with_config_settings")
+
+def _ndk_backend_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "expected to have one action per aidl_library target",
+ )
+ cc_aidl_code_gen_target = analysistest.target_under_test(env)
+
+ # output_path: <bazel-bin>/<package-dir>/<cc_aidl_library-labelname>_aidl_code_gen
+ # Since cc_aidl_library-label is unique among cpp and ndk backends,
+ # the output_path is guaranteed to be unique
+ output_path = paths.join(
+ ctx.genfiles_dir.path,
+ ctx.label.package,
+ cc_aidl_code_gen_target.label.name,
+ )
+ expected_outputs = [
+ # headers for ndk backend are nested in aidl directory to prevent
+ # collision in c++ namespaces with cpp backend
+ paths.join(output_path, "aidl/b/BpFoo.h"),
+ paths.join(output_path, "aidl/b/BnFoo.h"),
+ paths.join(output_path, "aidl/b/Foo.h"),
+ paths.join(output_path, "b/Foo.cpp"),
+ ]
+
+ # Check output files in DefaultInfo provider
+ asserts.set_equals(
+ env,
+ sets.make(expected_outputs),
+ sets.make([
+ output.path
+ for output in cc_aidl_code_gen_target[DefaultInfo].files.to_list()
+ ]),
+ )
+
+ # Check the output path is correctly added to includes in CcInfo.compilation_context
+ asserts.true(
+ env,
+ output_path in cc_aidl_code_gen_target[CcInfo].compilation_context.includes.to_list(),
+ "output path is added to CcInfo.compilation_context.includes",
+ )
+
+ return analysistest.end(env)
+
+ndk_backend_test = analysistest.make(
+ _ndk_backend_test_impl,
+)
+
+def _ndk_backend_test():
+ name = "foo"
+ aidl_library_target = name + "-ndk"
+ aidl_code_gen_target = aidl_library_target + "_aidl_code_gen"
+ test_name = aidl_code_gen_target + "_test"
+
+ aidl_interface(
+ name = "foo",
+ ndk_config = {
+ "enabled": True,
+ },
+ unstable = True,
+ srcs = ["a/b/Foo.aidl"],
+ strip_import_prefix = "a",
+ tags = ["manual"],
+ )
+
+ ndk_backend_test(
+ name = test_name,
+ target_under_test = aidl_code_gen_target,
+ )
+
+ return test_name
+
+def _ndk_config_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "expected to have one action per aidl_library target",
+ )
+    asserts.true(
+        env,
+        "--min_sdk_version=30" in actions[0].argv,
+        "expected to have min_sdk_version flag",
+    )
+ return analysistest.end(env)
+
+ndk_config_test = analysistest.make(
+ _ndk_config_test_impl,
+)
+
+def _ndk_config_test():
+ name = "ndk-config"
+ aidl_library_target = name + "-ndk"
+ aidl_code_gen_target = aidl_library_target + "_aidl_code_gen"
+ test_name = aidl_code_gen_target + "_test"
+
+ aidl_interface(
+ name = name,
+ unstable = True,
+ ndk_config = {
+ "enabled": True,
+ "min_sdk_version": "30",
+ },
+ srcs = ["Foo.aidl"],
+ tags = ["manual"],
+ )
+
+ ndk_config_test(
+ name = test_name,
+ target_under_test = aidl_code_gen_target,
+ )
+
+ return test_name
+
+def _aidl_library_has_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ asserts.true(
+ env,
+ AidlGenInfo in target_under_test,
+ "",
+ )
+ asserts.equals(
+ env,
+ ctx.attr.expected_flags,
+ target_under_test[AidlGenInfo].flags,
+ "",
+ )
+
+ return analysistest.end(env)
+
+aidl_library_has_flags_test = analysistest.make(
+ _aidl_library_has_flags_test_impl,
+ attrs = {
+ "expected_flags": attr.string_list(),
+ },
+)
+
+def _test_aidl_interface_passes_flags_to_aidl_libraries():
+ name = "aidl_interface_passes_version_flags_to_aidl_libraries"
+ aidl_interface(
+ name = name,
+ srcs = ["Foo.aidl"],
+ tags = ["manual"],
+ versions_with_info = [
+ {
+ "version": "1",
+ },
+ {
+ "version": "2",
+ },
+ ],
+ )
+
+ target_v1_test_name = name + "_test-V1"
+ aidl_library_has_flags_test(
+ name = target_v1_test_name,
+ target_under_test = name + "-V1",
+ expected_flags = [
+ "--structured",
+ "--version=1",
+ ],
+ )
+ target_v2_test_name = name + "_test-V2"
+ aidl_library_has_flags_test(
+ name = target_v2_test_name,
+ target_under_test = name + "-V2",
+ expected_flags = [
+ "--structured",
+ "--version=2",
+ ],
+ )
+ target_v_next_test_name = name + "_test-V_next"
+ aidl_library_has_flags_test(
+ name = target_v_next_test_name,
+ target_under_test = name + "-V3",
+ expected_flags = [
+ "--structured",
+ "--version=3",
+ ],
+ )
+
+ return [
+ target_v1_test_name,
+ target_v2_test_name,
+ target_v_next_test_name,
+ ]
+
+def _next_version_for_unversioned_stable_interface_test():
+ name = "unversioned_stable_interface_next_version"
+ test_name = name + "_test"
+ next_version_aidl_library_target = name + "-V1"
+
+ aidl_interface(
+ name = name,
+ srcs = ["Foo.aidl"],
+ tags = ["manual"],
+ )
+
+ target_under_test_exist_test(
+ name = test_name,
+ target_under_test = next_version_aidl_library_target,
+ )
+
+ return test_name
+
+def _next_version_for_versioned_stable_interface_test():
+ name = "versioned_stable_interface_next_version"
+ test_name = name + "_test"
+ next_version_aidl_library_target = name + "-V3"
+
+ aidl_interface(
+ name = name,
+ versions_with_info = [
+ {
+ "version": "1",
+ },
+ {
+ "version": "2",
+ },
+ ],
+ srcs = ["Foo.aidl"],
+ tags = ["manual"],
+ )
+
+ target_under_test_exist_test(
+ name = test_name,
+ target_under_test = next_version_aidl_library_target,
+ )
+
+ return test_name
+
+def _tidy_flags_has_generated_directory_header_filter_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ clang_tidy_actions = [a for a in actions if a.mnemonic == "ClangTidy"]
+
+ action_header_filter = None
+ for action in clang_tidy_actions:
+ for arg in action.argv:
+ if arg.startswith("-header-filter="):
+ action_header_filter = arg
+
+    if action_header_filter == None:
+        asserts.true(
+            env,
+            False,
+            "did not find header-filter in ClangTidy actions: `%s`" % (
+                clang_tidy_actions
+            ),
+        )
+        return analysistest.end(env)
+
+ # The genfiles path for tests and cc_libraries is different (the latter contains a
+ # configuration prefix ST-<hash>). So instead (lacking regexes) we can just check
+ # that the beginning and end of the header-filter is correct.
+ expected_header_filter_prefix = "-header-filter=" + paths.join(ctx.genfiles_dir.path)
+ expected_header_filter_prefix = expected_header_filter_prefix.removesuffix("/bin")
+ expected_header_filter_suffix = ctx.label.package + ".*"
+ asserts.true(
+ env,
+ action_header_filter.startswith(expected_header_filter_prefix),
+ "expected header-filter to start with `%s`; but was `%s`" % (
+ expected_header_filter_prefix,
+ action_header_filter,
+ ),
+ )
+ asserts.true(
+ env,
+ action_header_filter.endswith(expected_header_filter_suffix),
+ "expected header-filter to end with `%s`; but was `%s`" % (
+ expected_header_filter_suffix,
+ action_header_filter,
+ ),
+ )
+
+ return analysistest.end(env)
+
+_tidy_flags_has_generated_directory_header_filter_test = analysistest.make(
+ _tidy_flags_has_generated_directory_header_filter_test_impl,
+ config_settings = {
+ "@//build/bazel/flags/cc/tidy:allow_local_tidy_true": True,
+ },
+)
+
+def _test_aidl_interface_generated_header_filter():
+ name = "aidl_interface_generated_header_filter"
+ test_name = name + "_test"
+ aidl_library_target = name + "-cpp"
+ shared_target_under_test = aidl_library_target + "__internal_root"
+ shared_test_name = test_name + "_shared"
+ static_target_under_test = aidl_library_target + "_bp2build_cc_library_static"
+ static_test_name = test_name + "_static"
+
+ aidl_interface(
+ name = name,
+ cpp_config = {
+ "enabled": True,
+ },
+ unstable = True,
+ srcs = ["a/b/Foo.aidl"],
+ tags = ["manual"],
+ )
+
+ _tidy_flags_has_generated_directory_header_filter_test(
+ name = shared_test_name,
+ target_under_test = shared_target_under_test,
+ )
+ _tidy_flags_has_generated_directory_header_filter_test(
+ name = static_test_name,
+ target_under_test = static_target_under_test,
+ )
+ return [
+ shared_test_name,
+ static_test_name,
+ ]
+
+_action_flags_present_with_tidy_test = action_flags_present_only_for_mnemonic_test_with_config_settings({
+ "@//build/bazel/flags/cc/tidy:allow_local_tidy_true": True,
+})
+
+def _test_aidl_interface_generated_cc_library_has_correct_tidy_checks_as_errors():
+ name = "aidl_interface_generated_cc_library_has_correct_tidy_checks_as_errors"
+ test_name = name + "_test"
+ aidl_library_target = name + "-cpp"
+ shared_target_under_test = aidl_library_target + "__internal_root"
+ shared_test_name = test_name + "_shared"
+ static_target_under_test = aidl_library_target + "_bp2build_cc_library_static"
+ static_test_name = test_name + "_static"
+
+ aidl_interface(
+ name = name,
+ cpp_config = {
+ "enabled": True,
+ },
+ unstable = True,
+ srcs = ["a/b/Foo.aidl"],
+ tags = ["manual"],
+ )
+
+ _action_flags_present_with_tidy_test(
+ name = shared_test_name,
+ target_under_test = shared_target_under_test,
+ mnemonics = ["ClangTidy"],
+ expected_flags = [
+ "-warnings-as-errors=*,-clang-analyzer-deadcode.DeadStores,-clang-analyzer-cplusplus.NewDeleteLeaks,-clang-analyzer-optin.performance.Padding,-bugprone-assignment-in-if-condition,-bugprone-branch-clone,-bugprone-signed-char-misuse,-misc-const-correctness",
+ ],
+ )
+ _action_flags_present_with_tidy_test(
+ name = static_test_name,
+ target_under_test = static_target_under_test,
+ mnemonics = ["ClangTidy"],
+ expected_flags = [
+ "-warnings-as-errors=*,-clang-analyzer-deadcode.DeadStores,-clang-analyzer-cplusplus.NewDeleteLeaks,-clang-analyzer-optin.performance.Padding,-bugprone-assignment-in-if-condition,-bugprone-branch-clone,-bugprone-signed-char-misuse,-misc-const-correctness",
+ ],
+ )
+ return [
+ shared_test_name,
+ static_test_name,
+ ]
+
+def _cc_library_has_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ actions = [a for a in target.actions if a.mnemonic == "CppCompile"]
+
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "There should be one cc compile action: %s" % actions,
+ )
+
+ action = actions[0]
+ for flag in ctx.attr.expected_flags:
+ if flag not in action.argv:
+            fail("{} is not in the list of flags for compiling: {}".format(flag, action.argv))
+
+ return analysistest.end(env)
+
+cc_library_has_flags_test = analysistest.make(
+ _cc_library_has_flags_test_impl,
+ attrs = {
+ "expected_flags": attr.string_list(),
+ },
+)
+
+def _test_aidl_interface_sets_flags_to_cc_libraries():
+ name = "aidl_interface_sets_flags_to_cc_libraries"
+ test_name = name + "_test"
+ aidl_library_target = name + "-ndk"
+ shared_target_under_test = aidl_library_target + "__internal_root_cpp"
+ shared_test_name = test_name + "_shared"
+ static_target_under_test = aidl_library_target + "_bp2build_cc_library_static_cpp"
+ static_test_name = test_name + "_static"
+
+ aidl_interface(
+ name = name,
+ ndk_config = {
+ "enabled": True,
+ },
+ srcs = ["Foo.aidl"],
+ unstable = True,
+ tags = ["manual"],
+ )
+
+ cc_library_has_flags_test(
+ name = shared_test_name,
+ target_under_test = shared_target_under_test,
+ expected_flags = [
+ "-DBINDER_STABILITY_SUPPORT",
+ ],
+ )
+
+ cc_library_has_flags_test(
+ name = static_test_name,
+ target_under_test = static_target_under_test,
+ expected_flags = [
+ "-DBINDER_STABILITY_SUPPORT",
+ ],
+ )
+
+ return [
+ shared_test_name,
+ static_test_name,
+ ]
+
+def aidl_interface_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = (
+ [
+ "//build/bazel/rules/aidl/testing:generated_targets_have_correct_srcs_test",
+ "//build/bazel/rules/aidl/testing:interface_macro_produces_all_targets_test",
+ _ndk_backend_test(),
+ _ndk_config_test(),
+ _next_version_for_unversioned_stable_interface_test(),
+ _next_version_for_versioned_stable_interface_test(),
+ ] +
+ _test_aidl_interface_generated_header_filter() +
+ _test_aidl_interface_passes_flags_to_aidl_libraries() +
+ _test_aidl_interface_sets_flags_to_cc_libraries() +
+ _test_aidl_interface_generated_cc_library_has_correct_tidy_checks_as_errors()
+ ),
+ )
diff --git a/rules/aidl/aidl_library.bzl b/rules/aidl/aidl_library.bzl
new file mode 100644
index 00000000..5e0ee45b
--- /dev/null
+++ b/rules/aidl/aidl_library.bzl
@@ -0,0 +1,241 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+AidlGenInfo = provider(
+ fields = [
+ "srcs",
+ "hdrs",
+ "hash_file",
+ "transitive_srcs",
+ "transitive_include_dirs",
+ "flags",
+ ],
+)
+
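+# For example (hypothetical paths), a src at
+# "build/bazel/rules/aidl/binder/android/IFoo.aidl" in package
+# "build/bazel/rules/aidl" with strip_import_prefix = "binder" is symlinked to
+# "<genfiles>/build/bazel/rules/aidl/_virtual_imports/<target-name>/android/IFoo.aidl",
+# and the returned include path is that _virtual_imports/<target-name> directory.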
+def _symlink_aidl_srcs(ctx, srcs, strip_import_prefix):
+ virtual_imports = paths.join("_virtual_imports", ctx.label.name)
+ include_path = paths.join(ctx.genfiles_dir.path, ctx.label.package, virtual_imports)
+ workspace_root_strip_import_prefix = paths.join(ctx.label.package, strip_import_prefix)
+
+ direct_srcs = []
+ for src in srcs:
+ src_path = src.short_path
+
+ if not paths.normalize(src_path).startswith(paths.normalize(workspace_root_strip_import_prefix)):
+ fail(".aidl file '%s' is not under the specified strip prefix '%s'" %
+ (src_path, workspace_root_strip_import_prefix))
+
+ import_path = paths.relativize(src_path, workspace_root_strip_import_prefix)
+ virtual_src = ctx.actions.declare_file(paths.join(virtual_imports, import_path))
+ ctx.actions.symlink(
+ output = virtual_src,
+ target_file = src,
+ progress_message = "Symlinking virtual .aidl sources for %{label}",
+ )
+ direct_srcs.append(virtual_src)
+ return include_path, direct_srcs
+
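+# For example (illustrative), _version_for_hash_gen("3") returns 2, while
+# _version_for_hash_gen("1") returns "latest-version".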
+# https://cs.android.com/android/platform/system/tools/aidl/+/master:build/aidl_api.go;l=718-724;drc=87bcb923b4ed9cf6e6837f4cc02d954f211c0b12
+def _version_for_hash_gen(version):
+ if int(version) > 1:
+ return int(version) - 1
+ return "latest-version"
+
+def _get_aidl_interface_name(versioned_name):
+ parts = versioned_name.split("-V")
+ if len(parts) == 1 or not parts[-1].isdigit():
+        fail("{}'s version is not parsable".format(versioned_name))
+
+ return parts[0]
+
+def _verify_hash_file(ctx):
+ timestamp = ctx.actions.declare_file(ctx.label.name + "_checkhash_" + ctx.attr.version + ".timestamp")
+
+ api_dir = "{package_dir}/aidl_api/{aidl_interface_name}/{version}".format(
+ package_dir = paths.dirname(ctx.build_file_path),
+ aidl_interface_name = _get_aidl_interface_name(ctx.label.name),
+ version = ctx.attr.version,
+ )
+
+ shell_command = """
+ cd {api_dir}
+ aidl_files_checksums=$(find ./ -name "*.aidl" -print0 | LC_ALL=C sort -z | xargs -0 sha1sum && echo {version})
+ cd -
+
+ if [[ $(echo "$aidl_files_checksums" | sha1sum | cut -d " " -f 1) = $(tail -1 {hash_file}) ]]; then
+ touch {timestamp};
+ else
+ cat "{message_check_equality}"
+ exit 1;
+ fi;
+ """.format(
+ api_dir = api_dir,
+ aidl_files = " ".join([src.path for src in ctx.files.srcs]),
+ version = _version_for_hash_gen(ctx.attr.version),
+ hash_file = ctx.file.hash_file.path,
+ timestamp = timestamp.path,
+ message_check_equality = ctx.file._message_check_equality.path,
+ )
+
+ ctx.actions.run_shell(
+ inputs = ctx.files.srcs + [ctx.file.hash_file, ctx.file._message_check_equality],
+ outputs = [timestamp],
+ command = shell_command,
+ mnemonic = "AidlHashValidation",
+ progress_message = "Validating AIDL .hash file",
+ )
+
+ return timestamp
+
+def _aidl_library_rule_impl(ctx):
+ transitive_srcs = []
+ transitive_include_dirs = []
+
+ validation_output = []
+
+ if ctx.attr.hash_file and ctx.attr.version:
+ # if the aidl_library represents an aidl_interface frozen version,
+ # hash_file and version attributes are set
+ validation_output.append(_verify_hash_file(ctx))
+
+ aidl_import_infos = [d[AidlGenInfo] for d in ctx.attr.deps]
+ for info in aidl_import_infos:
+ transitive_srcs.append(info.transitive_srcs)
+ transitive_include_dirs.append(info.transitive_include_dirs)
+
+ include_path, srcs = _symlink_aidl_srcs(ctx, ctx.files.srcs, ctx.attr.strip_import_prefix)
+ _, hdrs = _symlink_aidl_srcs(ctx, ctx.files.hdrs, ctx.attr.strip_import_prefix)
+
+ return [
+ DefaultInfo(files = depset(ctx.files.srcs)),
+ OutputGroupInfo(
+ _validation = depset(direct = validation_output),
+ ),
+ AidlGenInfo(
+ srcs = depset(srcs),
+ hdrs = depset(hdrs),
+ hash_file = ctx.file.hash_file,
+ transitive_srcs = depset(
+ direct = srcs + hdrs,
+ transitive = transitive_srcs,
+ ),
+ transitive_include_dirs = depset(
+ direct = [include_path],
+ transitive = transitive_include_dirs,
+ # build with preorder so that transitive_include_dirs.to_list()
+ # return direct include path in the first element
+ order = "preorder",
+ ),
+ flags = ctx.attr.flags,
+ ),
+ ]
+
+aidl_library = rule(
+ implementation = _aidl_library_rule_impl,
+ attrs = {
+ "srcs": attr.label_list(
+ allow_files = [".aidl"],
+            doc = "AIDL source files that contain StructuredParcelable" +
+                  " AIDL definitions. These files can be compiled to language" +
+                  " bindings.",
+ ),
+ "hdrs": attr.label_list(
+ allow_files = [".aidl"],
+            doc = "AIDL source files that contain UnstructuredParcelable" +
+                  " AIDL definitions. These files cannot be compiled to language" +
+                  " bindings, but can be referenced by other AIDL sources.",
+ ),
+ "version": attr.string(
+ doc = "The version of the upstream aidl_interface that" +
+ " the aidl_library is created for",
+ ),
+ "hash_file": attr.label(
+ doc = "The .hash file in the api directory of an aidl_interface frozen version",
+ allow_single_file = [".hash"],
+ ),
+ "_message_check_equality": attr.label(
+ allow_single_file = [".txt"],
+ default = "//system/tools/aidl/build:message_check_equality.txt",
+ ),
+ "deps": attr.label_list(
+ providers = [AidlGenInfo],
+            doc = "Targets listed here provide AIDL sources referenced" +
+                  " by this library.",
+ ),
+ "strip_import_prefix": attr.string(
+ doc = "The prefix to strip from the paths of the .aidl files in " +
+ "this rule. When set, aidl source files in the srcs " +
+ "attribute of this rule are accessible at their path with " +
+ "this prefix cut off.",
+ ),
+ "flags": attr.string_list(
+ doc = "Flags to pass to AIDL tool",
+ ),
+ },
+ provides = [AidlGenInfo],
+)
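+
+# Illustrative usage (file names are hypothetical), loaded from
+# //build/bazel/rules/aidl:aidl_library.bzl; sources live under a "binder"
+# directory that should not appear in the AIDL import path:
+#
+#   aidl_library(
+#       name = "my_aidl",
+#       srcs = ["binder/android/my/IMy.aidl"],
+#       hdrs = ["binder/android/my/MyUnstructuredParcelable.aidl"],
+#       strip_import_prefix = "binder",
+#   )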
+
+def _generate_aidl_bindings(ctx, lang, aidl_info):
+ """ Utility function for creating AIDL bindings from aidl_libraries.
+
+ Args:
+ ctx: context, used for declaring actions and new files and providing _aidl_tool
+ lang: string, defines the language of the generated binding code
+        aidl_info: AidlGenInfo, provider carrying the sources, flags, and include dirs to pass to the AIDL compiler
+
+ Returns:
+ list of output files
+ """
+
+ #TODO(b/235113507) support C++ AIDL binding
+ ext = ""
+ if lang == "java":
+ ext = ".java"
+ else:
+ fail("Cannot generate AIDL language bindings for `{}`.".format(lang))
+
+ out_files = []
+ for aidl_file in aidl_info.srcs.to_list():
+ out_filename = paths.replace_extension(aidl_file.basename, ext)
+ out_file = ctx.actions.declare_file(out_filename, sibling = aidl_file)
+ out_files.append(out_file)
+
+ args = ctx.actions.args()
+ args.add_all(aidl_info.flags)
+
+ #TODO(b/241139797) allow this flag to be controlled by an attribute
+ args.add("--structured")
+
+ args.add_all([
+ "-I {}".format(i)
+ for i in aidl_info.transitive_include_dirs.to_list()
+ ])
+ args.add(aidl_file.path)
+ args.add(out_file)
+
+ ctx.actions.run(
+ inputs = aidl_info.transitive_srcs,
+ outputs = [out_file],
+ arguments = [args],
+ progress_message = "Generating {} AIDL binding from {}".format(lang, aidl_file.short_path),
+ executable = ctx.executable._aidl_tool,
+ )
+
+ return out_files
+
+aidl_file_utils = struct(
+ generate_aidl_bindings = _generate_aidl_bindings,
+)
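+
+# Illustrative use inside a rule implementation (rule and attribute names are
+# hypothetical); the calling rule is assumed to declare the _aidl_tool
+# executable attribute that generate_aidl_bindings runs:
+#
+#   def _my_rule_impl(ctx):
+#       aidl_info = ctx.attr.dep[AidlGenInfo]
+#       java_srcs = aidl_file_utils.generate_aidl_bindings(ctx, "java", aidl_info)
+#       return [DefaultInfo(files = depset(java_srcs))]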
diff --git a/rules/aidl/aidl_library_test.bzl b/rules/aidl/aidl_library_test.bzl
new file mode 100644
index 00000000..606ca5b3
--- /dev/null
+++ b/rules/aidl/aidl_library_test.bzl
@@ -0,0 +1,260 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":aidl_library.bzl", "AidlGenInfo", "aidl_library")
+
+PACKAGE_ROOT = "build/bazel/rules/aidl"
+
+def _test_include_dirs_are_transitive_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ asserts.equals(
+ env,
+ expected = [
+ # direct include dir is the first in the list returned from
+ # transitive_include_dirs.to_list() because transitive_include_dir
+ # is created with preorder
+ # TODO(b/243825300): Move direct include_dir out of transitive_include_dir
+ # so that we don't have to rely on preorder traversal
+ paths.join(ctx.genfiles_dir.path, PACKAGE_ROOT, "_virtual_imports", "include_dirs_transitivity"),
+ paths.join(ctx.genfiles_dir.path, PACKAGE_ROOT, "_virtual_imports", "include_dirs_transitivity_dependency"),
+ ],
+ actual = target_under_test[AidlGenInfo].transitive_include_dirs.to_list(),
+ )
+
+ return analysistest.end(env)
+
+include_dirs_are_transitive_test = analysistest.make(_test_include_dirs_are_transitive_impl)
+
+def _test_include_dirs_transitivity():
+ test_base_name = "include_dirs_transitivity"
+ test_name = test_base_name + "_test"
+ aidl_dep = test_base_name + "_dependency"
+ aidl_library(
+ name = test_base_name,
+ strip_import_prefix = "testing",
+ deps = [":" + aidl_dep],
+ tags = ["manual"],
+ )
+ aidl_library(
+ name = aidl_dep,
+ strip_import_prefix = "testing2",
+ tags = ["manual"],
+ )
+ include_dirs_are_transitive_test(
+ name = test_name,
+ target_under_test = test_base_name,
+ )
+ return [
+ test_name,
+ ]
+
+def _test_empty_srcs_aggregates_deps_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ asserts.equals(
+ env,
+ expected = [],
+ actual = target_under_test[AidlGenInfo].srcs.to_list(),
+ )
+
+ import_path = paths.join(
+ PACKAGE_ROOT,
+ "_virtual_imports",
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(import_path, target_under_test.label.name + "_dependency2", "b.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency2", "header_b.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency3", "c.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency3", "header_c.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency1", "a.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency1", "header_a.aidl"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[AidlGenInfo].transitive_srcs.to_list()
+ ],
+ )
+
+ return analysistest.end(env)
+
+empty_srcs_aggregates_deps_test = analysistest.make(_test_empty_srcs_aggregates_deps_impl)
+
+def _test_hdrs_are_only_in_transitive_srcs_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ import_path = paths.join(
+ PACKAGE_ROOT,
+ "_virtual_imports",
+ target_under_test.label.name,
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(import_path, "direct.aidl"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[AidlGenInfo].srcs.to_list()
+ ],
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(import_path, "header_direct.aidl"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[AidlGenInfo].hdrs.to_list()
+ ],
+ )
+
+ return analysistest.end(env)
+
+hdrs_are_only_in_transitive_srcs_test = analysistest.make(_test_hdrs_are_only_in_transitive_srcs_impl)
+
+def _test_transitive_srcs_contains_direct_and_transitive_srcs_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ import_path = paths.join(
+ PACKAGE_ROOT,
+ "_virtual_imports",
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(import_path, target_under_test.label.name, "direct.aidl"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[AidlGenInfo].srcs.to_list()
+ ],
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(import_path, target_under_test.label.name + "_dependency2", "b.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency2", "header_b.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency3", "c.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency3", "header_c.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency1", "a.aidl"),
+ paths.join(import_path, target_under_test.label.name + "_dependency1", "header_a.aidl"),
+ paths.join(import_path, target_under_test.label.name, "direct.aidl"),
+ paths.join(import_path, target_under_test.label.name, "header_direct.aidl"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[AidlGenInfo].transitive_srcs.to_list()
+ ],
+ )
+
+ return analysistest.end(env)
+
+transitive_srcs_contains_direct_and_transitive_srcs_test = analysistest.make(
+ _test_transitive_srcs_contains_direct_and_transitive_srcs_impl,
+)
+
+def _generate_test_targets(name):
+ aidl_dep1 = name + "_dependency1"
+ aidl_dep2 = name + "_dependency2"
+ aidl_dep3 = name + "_dependency3"
+ aidl_library(
+ name = aidl_dep1,
+ srcs = ["a.aidl"],
+ hdrs = ["header_a.aidl"],
+ deps = [
+ ":" + aidl_dep2,
+ ":" + aidl_dep3,
+ ],
+ tags = ["manual"],
+ )
+ aidl_library(
+ name = aidl_dep2,
+ srcs = ["b.aidl"],
+ hdrs = ["header_b.aidl"],
+ tags = ["manual"],
+ )
+ aidl_library(
+ name = aidl_dep3,
+ srcs = ["c.aidl"],
+ hdrs = ["header_c.aidl"],
+ tags = ["manual"],
+ )
+ return aidl_dep1
+
+def _test_empty_srcs_aggregates_deps():
+ test_base_name = "empty_srcs_aggregates_deps"
+ test_name = test_base_name + "_test"
+
+ aidl_dep1 = _generate_test_targets(test_base_name)
+ aidl_library(
+ name = test_base_name,
+ deps = [":" + aidl_dep1],
+ tags = ["manual"],
+ )
+ empty_srcs_aggregates_deps_test(
+ name = test_name,
+ target_under_test = test_base_name,
+ )
+ return [
+ test_name,
+ ]
+
+def _test_transitive_srcs_contains_direct_and_transitive_srcs():
+ test_base_name = "transitive_srcs_contains_direct_and_transitive_srcs"
+ srcs_test_name = test_base_name + "_srcs"
+ hdrs_test_name = test_base_name + "_hdrs"
+
+ aidl_dep1 = _generate_test_targets(test_base_name)
+ aidl_library(
+ name = test_base_name,
+ srcs = ["direct.aidl"],
+ hdrs = ["header_direct.aidl"],
+ deps = [":" + aidl_dep1],
+ tags = ["manual"],
+ )
+ transitive_srcs_contains_direct_and_transitive_srcs_test(
+ name = srcs_test_name,
+ target_under_test = test_base_name,
+ )
+ hdrs_are_only_in_transitive_srcs_test(
+ name = hdrs_test_name,
+ target_under_test = test_base_name,
+ )
+ return [
+ srcs_test_name,
+ hdrs_test_name,
+ ]
+
+def aidl_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = _test_include_dirs_transitivity() +
+ _test_transitive_srcs_contains_direct_and_transitive_srcs() +
+ _test_empty_srcs_aggregates_deps(),
+ )
diff --git a/rules/aidl/testing/BUILD b/rules/aidl/testing/BUILD
new file mode 100644
index 00000000..cf65e84d
--- /dev/null
+++ b/rules/aidl/testing/BUILD
@@ -0,0 +1,141 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("//build/bazel/rules/aidl:aidl_interface.bzl", "aidl_interface")
+
+test_scope = [
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-java",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-V1",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-V1-java",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-V2",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-V2-java",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-latest",
+ "//build/bazel/rules/aidl/testing:aidl_interface_test-latest-java",
+]
+
+aidl_interface(
+ name = "foo",
+ srcs = ["Test.aidl"],
+ tags = ["manual"],
+)
+
+aidl_interface(
+ name = "aidl_interface_test",
+ java_config = {
+ "enabled": True,
+ },
+ tags = ["manual"],
+ versions_with_info = [
+ {
+ "version": "1",
+ "deps": [":foo-V1"],
+ },
+ {
+ "version": "2",
+ "deps": [":foo-V1"],
+ },
+ ],
+)
+
+genquery(
+ name = "generated_target_V1_has_correct_srcs_query",
+ expression = 'kind("source file", deps(//build/bazel/rules/aidl/testing:aidl_interface_test-V1))',
+ scope = test_scope,
+)
+
+genquery(
+ name = "generated_target_V2_has_correct_srcs_query",
+ expression = 'kind("source file", deps(//build/bazel/rules/aidl/testing:aidl_interface_test-V2))',
+ scope = test_scope,
+)
+
+sh_test(
+ name = "generated_targets_have_correct_srcs_test",
+ size = "small",
+ srcs = ["generated_targets_have_correct_srcs.sh"],
+ data = [
+ ":generated_target_V1_has_correct_srcs_query",
+ ":generated_target_V2_has_correct_srcs_query",
+ ],
+ tags = [
+ "manual",
+ "no_windows",
+ ],
+ visibility = ["//build/bazel/rules/aidl:__subpackages__"],
+ deps = ["@bazel_tools//tools/bash/runfiles"],
+)
+
+genquery(
+ name = "aidl_library_V1_produced_by_default_query",
+ expression = "kind(aidl_library, //build/bazel/rules/aidl/testing:aidl_interface_test-V1)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "aidl_library_V2_produced_by_default_query",
+ expression = "kind(aidl_library, //build/bazel/rules/aidl/testing:aidl_interface_test-V2)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "aidl_library_latest_produced_by_default_query",
+ expression = "kind(alias, //build/bazel/rules/aidl/testing:aidl_interface_test-latest)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "java_backend_V1_produced_by_default_query",
+ expression = "kind(java_aidl_library, //build/bazel/rules/aidl/testing:aidl_interface_test-V1-java)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "java_backend_V2_produced_by_default_query",
+ expression = "kind(java_aidl_library, //build/bazel/rules/aidl/testing:aidl_interface_test-V2-java)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "java_backend_latest_produced_by_default_query",
+ expression = "kind(alias, //build/bazel/rules/aidl/testing:aidl_interface_test-latest-java)",
+ scope = test_scope,
+)
+
+genquery(
+ name = "java_backend_unversioned_produced_by_default_query",
+ expression = "kind(alias, //build/bazel/rules/aidl/testing:aidl_interface_test-java)",
+ scope = test_scope,
+)
+
+sh_test(
+ name = "interface_macro_produces_all_targets_test",
+ size = "small",
+ srcs = ["interface_macro_produces_all_targets.sh"],
+ data = [
+ ":aidl_library_V1_produced_by_default_query",
+ ":aidl_library_V2_produced_by_default_query",
+ ":aidl_library_latest_produced_by_default_query",
+ ":java_backend_V1_produced_by_default_query",
+ ":java_backend_V2_produced_by_default_query",
+ ":java_backend_latest_produced_by_default_query",
+ ],
+ tags = [
+ "manual",
+ "no_windows",
+ ],
+ visibility = ["//build/bazel/rules/aidl:__subpackages__"],
+ deps = ["@bazel_tools//tools/bash/runfiles"],
+)
diff --git a/rules/aidl/testing/Test.aidl b/rules/aidl/testing/Test.aidl
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/aidl/testing/Test.aidl
diff --git a/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test.aidl b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test.aidl
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test.aidl
diff --git a/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test2.aidl b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test2.aidl
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test2.aidl
diff --git a/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test3.aidl b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test3.aidl
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/aidl/testing/aidl_api/aidl_interface_test/1/android/net/Test3.aidl
diff --git a/rules/aidl/testing/aidl_api/aidl_interface_test/2/Test2Only.aidl b/rules/aidl/testing/aidl_api/aidl_interface_test/2/Test2Only.aidl
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/aidl/testing/aidl_api/aidl_interface_test/2/Test2Only.aidl
diff --git a/rules/aidl/testing/generated_targets_have_correct_srcs.sh b/rules/aidl/testing/generated_targets_have_correct_srcs.sh
new file mode 100755
index 00000000..d6b93917
--- /dev/null
+++ b/rules/aidl/testing/generated_targets_have_correct_srcs.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Tests that generated targets have correct srcs attribute.
+
+. "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
+
+readonly expected_query_v1="\
+//build/bazel/rules/aidl/testing:Test.aidl
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/1/.hash
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/1/android/net/Test.aidl
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/1/android/net/Test2.aidl
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/1/android/net/Test3.aidl
+//system/tools/aidl/build:message_check_equality.txt"
+readonly expected_query_v2="\
+//build/bazel/rules/aidl/testing:Test.aidl
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/2/.hash
+//build/bazel/rules/aidl/testing:aidl_api/aidl_interface_test/2/Test2Only.aidl
+//system/tools/aidl/build:message_check_equality.txt"
+
+readonly query_path_v1="__main__/build/bazel/rules/aidl/testing/generated_target_V1_has_correct_srcs_query"
+readonly query_path_v2="__main__/build/bazel/rules/aidl/testing/generated_target_V2_has_correct_srcs_query"
+readonly actual_query_v1=$(cat "$(rlocation $query_path_v1)")
+readonly actual_query_v2=$(cat "$(rlocation $query_path_v2)")
+
+if [ "$expected_query_v1" != "$actual_query_v1" ]; then
+ echo "aidl_interface generated target V1 has incorrect srcs." &&
+ echo "expected:" &&
+ echo "$expected_query_v1" &&
+ echo "actual:" &&
+ echo "$actual_query_v1" &&
+ exit 1
+fi
+
+if [ "$expected_query_v2" != "$actual_query_v2" ]; then
+ echo "aidl_interface generated target V2 has incorrect srcs." &&
+ echo "expected:" &&
+ echo "$expected_query_v2" &&
+ echo "actual:" &&
+ echo "$actual_query_v2" &&
+ exit 1
+fi
diff --git a/rules/aidl/testing/interface_macro_produces_all_targets.sh b/rules/aidl/testing/interface_macro_produces_all_targets.sh
new file mode 100755
index 00000000..d926f011
--- /dev/null
+++ b/rules/aidl/testing/interface_macro_produces_all_targets.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Tests that the aidl_interface macro produces all of the expected targets.
+
+. "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
+
+readonly expected_query="\
+//build/bazel/rules/aidl/testing:aidl_interface_test-V1
+//build/bazel/rules/aidl/testing:aidl_interface_test-V2
+//build/bazel/rules/aidl/testing:aidl_interface_test-latest
+//build/bazel/rules/aidl/testing:aidl_interface_test-V1-java
+//build/bazel/rules/aidl/testing:aidl_interface_test-V2-java
+//build/bazel/rules/aidl/testing:aidl_interface_test-latest-java"
+
+readonly query_paths=(
+ "__main__/build/bazel/rules/aidl/testing/aidl_library_V1_produced_by_default_query"
+ "__main__/build/bazel/rules/aidl/testing/aidl_library_V2_produced_by_default_query"
+ "__main__/build/bazel/rules/aidl/testing/aidl_library_latest_produced_by_default_query"
+ "__main__/build/bazel/rules/aidl/testing/java_backend_V1_produced_by_default_query"
+ "__main__/build/bazel/rules/aidl/testing/java_backend_V2_produced_by_default_query"
+ "__main__/build/bazel/rules/aidl/testing/java_backend_latest_produced_by_default_query"
+)
+actual_query=""
+for runfile in ${query_paths[@]}; do
+ this_query="$(cat $(rlocation $runfile))"
+ if [ "$actual_query" = "" ]; then
+ actual_query=$this_query
+ else
+ actual_query="\
+${actual_query}
+${this_query}"
+ fi
+done
+
+if [ "$expected_query" != "$actual_query" ]; then
+ echo "not all interface macro targets were created" &&
+ echo "expected query result:" &&
+ echo "$expected_query" &&
+ echo "actual query result:" &&
+ echo "$actual_query" &&
+ exit 1
+fi
diff --git a/rules/android/BUILD.bazel b/rules/android/BUILD.bazel
index 8b137891..e69de29b 100644
--- a/rules/android/BUILD.bazel
+++ b/rules/android/BUILD.bazel
@@ -1 +0,0 @@
-
diff --git a/rules/android/aar_import.bzl b/rules/android/aar_import.bzl
new file mode 100644
index 00000000..76abef03
--- /dev/null
+++ b/rules/android/aar_import.bzl
@@ -0,0 +1,75 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Macro wrapping the aar_import for bp2build. """
+
+load("//build/bazel/rules/android/aar_import_aosp_internal:rule.bzl", _aar_import = "aar_import")
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+
+# TODO(b/277801336): document these attributes.
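+# Illustrative usage from a BUILD file (values are hypothetical), loaded from
+# //build/bazel/rules/android:aar_import.bzl:
+#
+#   aar_import(
+#       name = "mylib",
+#       aar = "mylib.aar",
+#       sdk_version = "current",
+#   )
+#
+# This creates a private :mylib_private aar_import and a :mylib wrapper that
+# applies the sdk transition and re-exports the wrapped providers.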
+def aar_import(
+ name = "",
+ aar = [],
+ sdk_version = None,
+ deps = [],
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
+ **kwargs):
+ lib_name = name + "_private"
+ _aar_import(
+ name = lib_name,
+ aar = aar,
+ deps = deps,
+ tags = tags + ["manual"],
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:private"],
+ **kwargs
+ )
+
+ aar_import_sdk_transition(
+ name = name,
+ sdk_version = sdk_version,
+ java_version = None,
+ exports = lib_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = visibility,
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _aar_import_sdk_transition_impl(ctx):
+ return [
+ ctx.attr.exports[0][AndroidLibraryResourceClassJarProvider],
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][AndroidNativeLibsInfo],
+ ctx.attr.exports[0][ProguardSpecProvider],
+ ctx.attr.exports[0][AndroidIdeInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ]
+
+aar_import_sdk_transition = rule(
+ implementation = _aar_import_sdk_transition_impl,
+ attrs = sdk_transition_attrs,
+ provides = [
+ AndroidIdeInfo,
+ AndroidLibraryResourceClassJarProvider,
+ AndroidNativeLibsInfo,
+ JavaInfo,
+ ],
+)
diff --git a/rules/android/aar_import_aosp_internal/BUILD.bazel b/rules/android/aar_import_aosp_internal/BUILD.bazel
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/android/aar_import_aosp_internal/BUILD.bazel
diff --git a/rules/android/aar_import_aosp_internal/attrs.bzl b/rules/android/aar_import_aosp_internal/attrs.bzl
new file mode 100644
index 00000000..1b685b3f
--- /dev/null
+++ b/rules/android/aar_import_aosp_internal/attrs.bzl
@@ -0,0 +1,40 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ "@rules_android//rules:attrs.bzl",
+ _attrs = "attrs",
+)
+load(
+ "@rules_android//rules/aar_import:attrs.bzl",
+ _BASE_ATTRS = "ATTRS",
+)
+
+ATTRS = _attrs.replace(
+ _BASE_ATTRS,
+ exports = attr.label_list(
+ allow_files = False,
+ allow_rules = [
+ "aar_import",
+ "java_import",
+ "kt_jvm_import",
+ "aar_import_sdk_transition",
+ "java_import_sdk_transition",
+ "kt_jvm_import_sdk_transition",
+ ],
+ doc = "The closure of all rules reached via `exports` attributes are considered " +
+ "direct dependencies of any rule that directly depends on the target with " +
+ "`exports`. The `exports` are not direct deps of the rule they belong to.",
+ ),
+)
diff --git a/rules/android/aar_import_aosp_internal/rule.bzl b/rules/android/aar_import_aosp_internal/rule.bzl
new file mode 100644
index 00000000..79580f2a
--- /dev/null
+++ b/rules/android/aar_import_aosp_internal/rule.bzl
@@ -0,0 +1,33 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""aar_import rule."""
+
+load(":attrs.bzl", _ATTRS = "ATTRS")
+load("@rules_android//rules/aar_import:impl.bzl", _impl = "impl")
+load("@rules_android//rules/aar_import:rule.bzl", "RULE_DOC")
+
+aar_import = rule(
+ attrs = _ATTRS,
+ fragments = ["android"],
+ implementation = _impl,
+ doc = RULE_DOC,
+ provides = [
+ AndroidIdeInfo,
+ AndroidLibraryResourceClassJarProvider,
+ AndroidNativeLibsInfo,
+ JavaInfo,
+ ],
+ toolchains = ["@rules_android//toolchains/android:toolchain_type"],
+)
diff --git a/rules/android/android_app_certificate.bzl b/rules/android/android_app_certificate.bzl
index d3f3f54e..84ab819f 100644
--- a/rules/android/android_app_certificate.bzl
+++ b/rules/android/android_app_certificate.bzl
@@ -1,30 +1,83 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//build/bazel/product_config:product_variables_providing_rule.bzl", "ProductVariablesDepsInfo", "ProductVariablesInfo")
AndroidAppCertificateInfo = provider(
"Info needed for Android app certificates",
fields = {
"pem": "Certificate .pem file",
"pk8": "Certificate .pk8 file",
+ "key_name": "Key name",
},
)
+def _search_cert_files(cert_name, cert_files_to_search):
+ pk8 = None
+ pem = None
+ for file in cert_files_to_search:
+ if file.basename == cert_name + ".pk8":
+ pk8 = file
+ elif file.basename == cert_name + ".x509.pem":
+ pem = file
+ if not pk8 or not pem:
+ fail("Could not find .x509.pem and/or .pk8 file with name '%s' in the following files: %s" % (cert_name, cert_files_to_search))
+ return pk8, pem
+
+def _maybe_override(ctx, cert_name):
+ if not cert_name:
+ fail("cert_name cannot be None")
+
+ cert_overrides = ctx.attr._product_variables[ProductVariablesInfo].CertificateOverrides
+ if not cert_overrides:
+ return cert_name, False
+
+ apex_name = ctx.attr._apex_name[BuildSettingInfo].value
+ if not apex_name:
+ # Only override in the apex configuration, because the apex module name is used as the key for overriding
+ return cert_name, False
+
+ matches = [o for o in cert_overrides if o.split(":")[0] == apex_name]
+
+ if not matches:
+ # no matches, no override.
+ return cert_name, False
+
+ if len(matches) > 1:
+ fail("unexpected multiple certificate overrides for %s in: %s" % (apex_name, matches))
+
+ # e.g. test1_com.android.tzdata:com.google.android.tzdata5.certificate
+ new_cert_name = matches[0].split(":")[1]
+ return new_cert_name.removesuffix(".certificate"), True
+
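To make the override lookup above concrete, here is how a single CertificateOverrides entry is taken apart; the values mirror the example in the comment and are illustrative only.

# Entry format: "<apex module name>:<certificate module name>"
entry = "test1_com.android.tzdata:com.google.android.tzdata5.certificate"
apex_name = entry.split(":")[0]  # "test1_com.android.tzdata", compared against ctx.attr._apex_name
new_cert = entry.split(":")[1]  # "com.google.android.tzdata5.certificate"
cert_name = new_cert.removesuffix(".certificate")  # "com.google.android.tzdata5"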
def _android_app_certificate_rule_impl(ctx):
+ cert_name = ctx.attr.certificate
+ pk8 = ctx.file.pk8
+ pem = ctx.file.pem
+
+ # Only override if the override mapping exists, otherwise we wouldn't be
+ # able to find the new certs.
+ overridden_cert_name, overridden = _maybe_override(ctx, cert_name)
+ if overridden:
+ cert_name = overridden_cert_name
+ cert_files_to_search = ctx.attr._product_variables[ProductVariablesDepsInfo].OverridingCertificateFiles
+ pk8, pem = _search_cert_files(cert_name, cert_files_to_search)
+
return [
- AndroidAppCertificateInfo(pem = ctx.file.pem, pk8 = ctx.file.pk8),
+ AndroidAppCertificateInfo(pem = pem, pk8 = pk8, key_name = cert_name),
]
_android_app_certificate = rule(
@@ -32,6 +85,14 @@ _android_app_certificate = rule(
attrs = {
"pem": attr.label(mandatory = True, allow_single_file = [".pem"]),
"pk8": attr.label(mandatory = True, allow_single_file = [".pk8"]),
+ "certificate": attr.string(mandatory = True),
+ "_apex_name": attr.label(default = "//build/bazel/rules/apex:apex_name"),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
+ ),
+ "_hardcoded_certs": attr.label(
+ default = "//build/make/target/product/security:android_certificate_directory",
+ ),
},
)
@@ -45,5 +106,76 @@ def android_app_certificate(
name = name,
pem = certificate + ".x509.pem",
pk8 = certificate + ".pk8",
+ certificate = certificate,
**kwargs
)
+
+default_cert_directory = "build/make/target/product/security"
+
+def _android_app_certificate_with_default_cert_impl(ctx):
+ product_var_cert = ctx.attr._product_variables[ProductVariablesInfo].DefaultAppCertificate
+
+ cert_name = ctx.attr.cert_name
+
+ if cert_name and product_var_cert:
+ cert_dir = paths.dirname(product_var_cert)
+ elif cert_name:
+ cert_dir = default_cert_directory
+ elif product_var_cert:
+ cert_name = paths.basename(product_var_cert)
+ cert_dir = paths.dirname(product_var_cert)
+ else:
+ cert_name = "testkey"
+ cert_dir = default_cert_directory
+
+ if cert_dir != default_cert_directory:
+ cert_files_to_search = ctx.attr._product_variables[ProductVariablesDepsInfo].DefaultAppCertificateFiles
+ else:
+ cert_files_to_search = ctx.files._hardcoded_certs
+
+ cert_name, overridden = _maybe_override(ctx, cert_name)
+ if overridden:
+ cert_files_to_search = ctx.attr._product_variables[ProductVariablesDepsInfo].OverridingCertificateFiles
+ pk8, pem = _search_cert_files(cert_name, cert_files_to_search)
+
+ return [
+ AndroidAppCertificateInfo(
+ pk8 = pk8,
+ pem = pem,
+ key_name = "//" + cert_dir + ":" + cert_name,
+ ),
+ ]
+
+android_app_certificate_with_default_cert = rule(
+ doc = """
+ This rule is the equivalent of an android_app_certificate, but uses the
+ certificate with the given name from a certain folder, or the default
+ certificate.
+
+ Modules can give a simple name of a certificate instead of a full label to
+    an android_app_certificate. This certificate will be looked for either in
+    the package determined by the DefaultAppCertificate product config variable,
+    or in the hardcoded default directory (build/make/target/product/security).
+
+    If a name is not given, it will fall back to using the certificate determined
+    by DefaultAppCertificate (DefaultAppCertificate can function as both the
+    default certificate to use if none is specified, and the folder to look for
+    certificates in).
+
+ If neither the name nor DefaultAppCertificate is given,
+ build/make/target/product/security/testkey.{pem,pk8} will be used.
+
+ Since this rule is intended to be used from other macros, it's common to have
+ multiple android_app_certificate targets pointing to the same pem/pk8 files.
+ """,
+ implementation = _android_app_certificate_with_default_cert_impl,
+ attrs = {
+ "cert_name": attr.string(),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
+ ),
+ "_hardcoded_certs": attr.label(
+ default = "//build/make/target/product/security:android_certificate_directory",
+ ),
+ },
+)
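A usage sketch for the rule defined above; the target and certificate names are hypothetical, and the .pem/.pk8 pair is resolved from DefaultAppCertificate or the hardcoded security directory as described in the doc string.

load(
    "//build/bazel/rules/android:android_app_certificate.bzl",
    "android_app_certificate_with_default_cert",
)

android_app_certificate_with_default_cert(
    name = "example_app_certificate",
    # Illustrative; omit cert_name to fall back to DefaultAppCertificate or testkey.
    cert_name = "platform",
)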
diff --git a/rules/android/android_app_keystore.bzl b/rules/android/android_app_keystore.bzl
index 32088cec..b438c4c6 100644
--- a/rules/android/android_app_keystore.bzl
+++ b/rules/android/android_app_keystore.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load("@bazel_skylib//lib:paths.bzl", "paths")
load("android_app_certificate.bzl", "AndroidAppCertificateInfo")
@@ -32,7 +30,7 @@ def _pk8_to_private_pem(ctx, openssl, pk8_file, private_pem_file):
args.add_all(["-inform", "DER"])
args.add_all(["-outform", "PEM"])
args.add_all(["-out", private_pem_file])
- args.add("-nocrypt") # don't bother encrypting this private key since it is just an intermediate file
+ args.add("-nocrypt") # don't bother encrypting this private key since it is just an intermediate file
ctx.actions.run(
inputs = [pk8_file],
@@ -51,6 +49,7 @@ def _pem_to_pk12(ctx, openssl, certificate_pem, private_key_pem, pk12_file):
args.add_all(["-inkey", private_key_pem])
args.add_all(["-out", pk12_file])
args.add_all(["-name", "android"])
+
# openssl requires a password and will request a
# password from STDIN if we don't supply one here
args.add_all(["-passout", "pass:android"])
@@ -66,14 +65,17 @@ def _pem_to_pk12(ctx, openssl, certificate_pem, private_key_pem, pk12_file):
mnemonic = "CreatePK12",
)
-def _pk12_to_keystore(ctx, keytool, pk12_file, keystore_file):
+def _pk12_to_keystore(ctx, pk12_file, keystore_file):
"""Converts a PKCS12 keystore file to a JKS keystore file."""
+ java_runtime = ctx.attr._java_runtime[java_common.JavaRuntimeInfo]
+ keytool = paths.join(java_runtime.java_home, "bin", "keytool")
args = ctx.actions.args()
args.add("-importkeystore")
args.add_all(["-destkeystore", keystore_file])
args.add_all(["-srckeystore", pk12_file])
args.add_all(["-srcstoretype", "PKCS12"])
args.add_all(["-srcstorepass", "android"])
+
# apksigner expects keystores provided by the debug_signing_keys attribute
# to be secured with the password "android"
args.add_all(["-deststorepass", "android"])
@@ -81,6 +83,7 @@ def _pk12_to_keystore(ctx, keytool, pk12_file, keystore_file):
ctx.actions.run(
inputs = [pk12_file],
executable = keytool,
+ tools = [java_runtime.files],
outputs = [keystore_file],
arguments = [args],
mnemonic = "CreateKeystore",
@@ -88,7 +91,6 @@ def _pk12_to_keystore(ctx, keytool, pk12_file, keystore_file):
def _android_app_keystore_rule_impl(ctx):
openssl = ctx.executable._openssl
- keytool = ctx.executable._keytool
private_pem = ctx.actions.declare_file(ctx.attr.name + ".priv.pem")
pk12 = ctx.actions.declare_file(ctx.attr.name + ".pk12")
@@ -98,13 +100,13 @@ def _android_app_keystore_rule_impl(ctx):
pem_file = ctx.attr.certificate[AndroidAppCertificateInfo].pem
_pk8_to_private_pem(ctx, openssl, pk8_file, private_pem)
_pem_to_pk12(ctx, openssl, pem_file, private_pem, pk12)
- _pk12_to_keystore(ctx, keytool, pk12, keystore)
+ _pk12_to_keystore(ctx, pk12, keystore)
return [
AndroidAppKeystoreInfo(
keystore = keystore,
),
- DefaultInfo(files = depset(direct = [keystore]))
+ DefaultInfo(files = depset(direct = [keystore])),
]
"""Converts an android_app_certificate (i.e. pem/pk8 pair) into a JKS keystore"""
@@ -117,14 +119,12 @@ android_app_keystore = rule(
allow_single_file = True,
executable = True,
cfg = "exec",
- doc = "An OpenSSL compatible tool."
+ doc = "An OpenSSL compatible tool.",
),
- "_keytool": attr.label(
- default = Label("//prebuilts/jdk/jdk11:linux-x86/bin/keytool"),
- allow_single_file = True,
- executable = True,
+ "_java_runtime": attr.label(
+ default = Label("@bazel_tools//tools/jdk:current_java_runtime"),
cfg = "exec",
- doc = "The keytool binary."
+ providers = [java_common.JavaRuntimeInfo],
),
},
provides = [AndroidAppKeystoreInfo],
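A sketch of how the keystore rule is typically chained onto a certificate target (names are hypothetical); the resulting JKS keystore is what the android_binary wrapper feeds into debug_signing_keys.

load("//build/bazel/rules/android:android_app_certificate.bzl", "android_app_certificate")
load("//build/bazel/rules/android:android_app_keystore.bzl", "android_app_keystore")

android_app_certificate(
    name = "example_certificate",
    certificate = "example_cert",  # expects example_cert.x509.pem and example_cert.pk8 in this package
)

android_app_keystore(
    name = "example_keystore",
    certificate = ":example_certificate",
)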
diff --git a/rules/android/android_binary.bzl b/rules/android/android_binary.bzl
index 3504e845..64465a15 100644
--- a/rules/android/android_binary.bzl
+++ b/rules/android/android_binary.bzl
@@ -1,63 +1,66 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-load("@bazel_skylib//lib:paths.bzl", "paths")
-load("@rules_android//rules:rules.bzl", _android_binary = "android_binary")
-load("@soong_injection//product_config:product_variables.bzl", "product_vars")
-
-load("android_app_certificate.bzl", "android_app_certificate")
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_android//rules:common.bzl", "common")
+load("@rules_android//rules:migration_tag_DONOTUSE.bzl", "add_migration_tag")
+load(
+ "//build/bazel/rules/android/android_binary_aosp_internal:rule.bzl",
+ "android_binary_aosp_internal_macro",
+)
+load("android_app_certificate.bzl", "android_app_certificate_with_default_cert")
load("android_app_keystore.bzl", "android_app_keystore")
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+# TODO(b/277801336): document these attributes.
+def _android_binary_helper(**attrs):
+ """ Duplicates the logic in top-level android_binary macro in
+ rules_android/rules/android_binary.bzl but uses
+ android_binary_aosp_internal_macro instead of android_binary_internal_macro.
-def _default_cert_prod_var():
- return product_vars["DefaultAppCertificate"]
+ https://docs.bazel.build/versions/master/be/android.html#android_binary
-def _default_app_certificate_package():
- default_cert = _default_cert_prod_var()
- if default_cert:
- return "//" + paths.dirname(default_cert)
- # if product variable is not set, default to Soong default:
- return "//build/make/target/product/security"
-
-def _default_app_certificate():
- default_cert = _default_cert_prod_var()
- if default_cert:
- return default_cert
- return _default_app_certificate_package() + ":testkey"
-
-def _android_app_certificate_with_default_cert(name, cert_name):
+ Args:
+ **attrs: Rule attributes
+ """
+ android_binary_aosp_internal_name = ":" + attrs["name"] + common.PACKAGED_RESOURCES_SUFFIX
+ android_binary_aosp_internal_macro(
+ **dict(
+ attrs,
+ name = android_binary_aosp_internal_name[1:],
+ visibility = ["//visibility:private"],
+ )
+ )
- if cert_name:
- # if a specific certificate name is given, check the default directory
- # for that certificate
- certificate = _default_app_certificate_package() + ":" + cert_name
- else:
- certificate = _default_app_certificate()
+ attrs.pop("$enable_manifest_merging", None)
- android_app_certificate(
- name = name,
- certificate = certificate,
+ native.android_binary(
+ application_resources = android_binary_aosp_internal_name,
+ **add_migration_tag(attrs)
)
def android_binary(
name,
certificate = None,
certificate_name = None,
+ sdk_version = None,
+ java_version = None,
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
**kwargs):
- """Bazel macro to find and create a keystore to use for debug_signing_keys
+ """ android_binary macro wrapper that handles custom attrs needed in AOSP
+ Bazel macro to find and create a keystore to use for debug_signing_keys
with @rules_android android_binary.
This module emulates the Soong behavior which allows a developer to specify
@@ -81,19 +84,64 @@ def android_binary(
if certificate or certificate_name:
if certificate_name:
app_cert_name = name + "_app_certificate"
- _android_app_certificate_with_default_cert(app_cert_name, certificate_name)
+ android_app_certificate_with_default_cert(
+ name = app_cert_name,
+ cert_name = certificate_name,
+ )
certificate = ":" + app_cert_name
app_keystore_name = name + "_keystore"
android_app_keystore(
name = app_keystore_name,
- certificate = certificate
+ certificate = certificate,
)
debug_signing_keys.append(app_keystore_name)
- _android_binary(
- name = name,
+ bin_name = name + "_private"
+ _android_binary_helper(
+ name = bin_name,
debug_signing_keys = debug_signing_keys,
+ target_compatible_with = target_compatible_with,
+ tags = tags + ["manual"],
+ visibility = ["//visibility:private"],
**kwargs
)
+
+ android_binary_sdk_transition(
+ name = name,
+ sdk_version = sdk_version,
+ java_version = java_version,
+ exports = bin_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = visibility,
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _android_binary_sdk_transition_impl(ctx):
+ return struct(
+ android = ctx.attr.exports[0].android,
+ JavaGenJarsProvider = ctx.attr.exports[0][JavaInfo].annotation_processing,
+ providers = [
+ ctx.attr.exports[0][AndroidIdlInfo],
+ ctx.attr.exports[0][InstrumentedFilesInfo],
+ ctx.attr.exports[0][DataBindingV2Info],
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][AndroidIdeInfo],
+ ctx.attr.exports[0][ApkInfo],
+ ctx.attr.exports[0][AndroidPreDexJarInfo],
+ ctx.attr.exports[0][AndroidFeatureFlagSet],
+ ctx.attr.exports[0][OutputGroupInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ],
+ )
+
+android_binary_sdk_transition = rule(
+ implementation = _android_binary_sdk_transition_impl,
+ attrs = sdk_transition_attrs,
+)
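A usage sketch for the wrapper macro above; names and attribute values are hypothetical. Setting certificate_name triggers the android_app_certificate_with_default_cert plus android_app_keystore chain before the underlying binary is built.

load("//build/bazel/rules/android:android_binary.bzl", "android_binary")

android_binary(
    name = "ExampleApp",  # the macro also creates :ExampleApp_private
    srcs = ["src/com/example/Main.java"],
    manifest = "AndroidManifest.xml",
    certificate_name = "platform",  # illustrative; looked up in the default certificate directory
    sdk_version = "current",
    visibility = ["//visibility:public"],
)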
diff --git a/rules/android/android_binary_aosp_internal/BUILD.bazel b/rules/android/android_binary_aosp_internal/BUILD.bazel
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/android/android_binary_aosp_internal/BUILD.bazel
diff --git a/rules/android/android_binary_aosp_internal/impl.bzl b/rules/android/android_binary_aosp_internal/impl.bzl
new file mode 100644
index 00000000..91228dca
--- /dev/null
+++ b/rules/android/android_binary_aosp_internal/impl.bzl
@@ -0,0 +1,62 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@rules_android//rules:java.bzl", "java")
+load(
+ "@rules_android//rules:processing_pipeline.bzl",
+ "ProviderInfo",
+ "processing_pipeline",
+)
+load("@rules_android//rules:resources.bzl", _resources = "resources")
+load("@rules_android//rules:utils.bzl", "get_android_toolchain")
+load("@rules_android//rules/android_binary_internal:impl.bzl", "finalize", _BASE_PROCESSORS = "PROCESSORS")
+load("//build/bazel/rules/common:api.bzl", "api")
+
+def _process_manifest_aosp(ctx, **unused_ctxs):
+ manifest_ctx = _resources.set_default_min_sdk(
+ ctx,
+ manifest = ctx.file.manifest,
+ default = api.default_app_target_sdk(),
+ enforce_min_sdk_floor_tool = get_android_toolchain(ctx).enforce_min_sdk_floor_tool.files_to_run,
+ )
+
+ return ProviderInfo(
+ name = "manifest_ctx",
+ value = manifest_ctx,
+ )
+
+# (b/274150785) validation processor does not allow min_sdk values that are strings
+PROCESSORS = processing_pipeline.replace(
+ _BASE_PROCESSORS,
+ ManifestProcessor = _process_manifest_aosp,
+)
+
+_PROCESSING_PIPELINE = processing_pipeline.make_processing_pipeline(
+ processors = PROCESSORS,
+ finalize = finalize,
+)
+
+def impl(ctx):
+ """The rule implementation.
+
+ Args:
+ ctx: The context.
+
+ Returns:
+ A list of providers.
+ """
+ java_package = java.resolve_package_from_label(ctx.label, ctx.attr.custom_package)
+ return processing_pipeline.run(ctx, java_package, _PROCESSING_PIPELINE)
diff --git a/rules/android/android_binary_aosp_internal/rule.bzl b/rules/android/android_binary_aosp_internal/rule.bzl
new file mode 100644
index 00000000..7168d730
--- /dev/null
+++ b/rules/android/android_binary_aosp_internal/rule.bzl
@@ -0,0 +1,28 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@rules_android//rules/android_binary_internal:rule.bzl", "make_rule", "sanitize_attrs")
+load(":impl.bzl", _impl = "impl")
+
+android_binary_aosp_internal = make_rule(implementation = _impl)
+
+def android_binary_aosp_internal_macro(**attrs):
+ """android_binary_internal rule.
+
+ Args:
+ **attrs: Rule attributes
+ """
+ android_binary_aosp_internal(**sanitize_attrs(attrs))
diff --git a/rules/android/android_library.bzl b/rules/android/android_library.bzl
new file mode 100644
index 00000000..514cc4df
--- /dev/null
+++ b/rules/android/android_library.bzl
@@ -0,0 +1,110 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""android_library rule."""
+
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+load(
+ "//build/bazel/rules/android/android_library_aosp_internal:rule.bzl",
+ "android_library_aosp_internal_macro",
+)
+load("@rules_android//rules:providers.bzl", "StarlarkAndroidResourcesInfo")
+
+# TODO(b/277801336): document these attributes.
+def android_library(
+ name,
+ sdk_version = None,
+ java_version = None,
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
+ **attrs):
+ """ android_library macro wrapper that handles custom attrs needed in AOSP
+
+ Args:
+ name: the wrapper rule name.
+ sdk_version: string representing which sdk_version to build against. See
+ //build/bazel/rules/common/sdk_version.bzl for formatting and semantics.
+ java_version: string representing which version of java the java code in this rule should be
+ built with.
+ tags, target_compatible_with and visibility have Bazel's traditional semantics.
+ **attrs: Rule attributes
+ """
+ lib_name = name + "_private"
+ android_library_aosp_internal_macro(
+ name = lib_name,
+ tags = tags + ["manual"],
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:private"],
+ **attrs
+ )
+
+ android_library_sdk_transition(
+ aar = name + ".aar",
+ name = name,
+ sdk_version = sdk_version,
+ java_version = java_version,
+ exports = lib_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = visibility,
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _android_library_sdk_transition_impl(ctx):
+ ctx.actions.symlink(
+ output = ctx.outputs.aar,
+ target_file = ctx.attr.exports[0][AndroidIdeInfo].aar,
+ )
+
+ providers = []
+ if AndroidLibraryAarInfo in ctx.attr.exports[0]:
+ providers.append(ctx.attr.exports[0][AndroidLibraryAarInfo])
+ return struct(
+ android = ctx.attr.exports[0].android,
+ java = ctx.attr.exports[0].java,
+ providers = providers + [
+ ctx.attr.exports[0][StarlarkAndroidResourcesInfo],
+ ctx.attr.exports[0][AndroidLibraryResourceClassJarProvider],
+ ctx.attr.exports[0][AndroidIdlInfo],
+ ctx.attr.exports[0][DataBindingV2Info],
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][ProguardSpecProvider],
+ ctx.attr.exports[0][AndroidProguardInfo],
+ ctx.attr.exports[0][AndroidNativeLibsInfo],
+ ctx.attr.exports[0][AndroidCcLinkParamsInfo],
+ ctx.attr.exports[0][AndroidIdeInfo],
+ ctx.attr.exports[0][InstrumentedFilesInfo],
+ ctx.attr.exports[0][Actions],
+ ctx.attr.exports[0][OutputGroupInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ],
+ )
+
+android_library_sdk_transition = rule(
+ implementation = _android_library_sdk_transition_impl,
+ attrs = sdk_transition_attrs | {"aar": attr.output()},
+ provides = [
+ AndroidCcLinkParamsInfo,
+ AndroidIdeInfo,
+ AndroidIdlInfo,
+ AndroidLibraryResourceClassJarProvider,
+ AndroidNativeLibsInfo,
+ JavaInfo,
+ ],
+)
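A usage sketch for the android_library wrapper above; names and values are hypothetical. Note that the wrapper predeclares <name>.aar by symlinking the wrapped target's AndroidIdeInfo.aar.

load("//build/bazel/rules/android:android_library.bzl", "android_library")

android_library(
    name = "example_lib",  # :example_lib.aar is also predeclared by the wrapper
    srcs = [
        "java/com/example/Example.java",
        "java/com/example/ExampleKt.kt",  # Kotlin sources are accepted by the AOSP attrs
    ],
    manifest = "AndroidManifest.xml",
    sdk_version = "current",
    java_version = "11",  # illustrative
    deps = [":example_dep"],
)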
diff --git a/rules/android/android_library_aosp_internal/BUILD.bazel b/rules/android/android_library_aosp_internal/BUILD.bazel
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/android/android_library_aosp_internal/BUILD.bazel
diff --git a/rules/android/android_library_aosp_internal/attrs.bzl b/rules/android/android_library_aosp_internal/attrs.bzl
new file mode 100644
index 00000000..8e17c717
--- /dev/null
+++ b/rules/android/android_library_aosp_internal/attrs.bzl
@@ -0,0 +1,128 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(
+ "@rules_android//rules:attrs.bzl",
+ _attrs = "attrs",
+)
+load(
+ "@rules_android//rules/android_library:attrs.bzl",
+ _BASE_ATTRS = "ATTRS",
+)
+load("@rules_kotlin//kotlin:compiler_opt.bzl", "kotlincopts_attrs")
+load("@rules_kotlin//kotlin:traverse_exports.bzl", _kt_traverse_exports = "kt_traverse_exports")
+
+_KT_COMPILER_ATTRS = _attrs.add(
+ kotlincopts_attrs(),
+ dict(
+ common_srcs = attr.label_list(
+ allow_files = [".kt"],
+ doc = """The list of common multi-platform source files that are processed to create
+ the target.""",
+ ),
+ coverage_srcs = attr.label_list(allow_files = True),
+ # Magic attribute name for DexArchiveAspect
+ _toolchain = attr.label(
+ default = Label(
+ "@rules_kotlin//toolchains/kotlin_jvm:kt_jvm_toolchain_impl",
+ ),
+ ),
+ ),
+)
+
+ATTRS = _attrs.add(
+ _attrs.replace(
+ _BASE_ATTRS,
+ deps = attr.label_list(
+ allow_rules = [
+ "aar_import",
+ "android_library",
+ "cc_library",
+ "java_import",
+ "java_library",
+ "java_lite_proto_library",
+ ],
+ aspects = [
+ _kt_traverse_exports.aspect,
+ ],
+ providers = [
+ [CcInfo],
+ [JavaInfo],
+ ],
+ doc = (
+ "The list of other libraries to link against. Permitted library types " +
+ "are: `android_library`, `java_library` with `android` constraint and " +
+ "`cc_library` wrapping or producing `.so` native libraries for the " +
+ "Android target platform."
+ ),
+ ),
+ exported_plugins = attr.label_list(
+ allow_rules = [
+ "java_plugin",
+ ],
+ cfg = "exec",
+ ),
+ exports = attr.label_list(
+ allow_rules = [
+ "aar_import",
+ "android_library",
+ "cc_library",
+ "java_import",
+ "java_library",
+ "java_lite_proto_library",
+ ],
+ aspects = [
+ _kt_traverse_exports.aspect,
+ ],
+ providers = [
+ [CcInfo],
+ [JavaInfo],
+ ],
+ doc = (
+ "The closure of all rules reached via `exports` attributes are considered " +
+ "direct dependencies of any rule that directly depends on the target with " +
+ "`exports`. The `exports` are not direct deps of the rule they belong to."
+ ),
+ ),
+ exports_manifest = _attrs.tristate.create(
+ default = _attrs.tristate.no,
+ doc = (
+ "Whether to export manifest entries to `android_binary` targets that " +
+ "depend on this target. `uses-permissions` attributes are never exported."
+ ),
+ ),
+ plugins = attr.label_list(
+ providers = [
+ [JavaPluginInfo],
+ ],
+ cfg = "exec",
+ doc = (
+ "Java compiler plugins to run at compile-time. Every `java_plugin` " +
+ "specified in the plugins attribute will be run whenever this target " +
+ "is built. Resources generated by the plugin will be included in " +
+ "the result jar of the target."
+ ),
+ ),
+ srcs = attr.label_list(
+ allow_files = [
+ ".kt",
+ ".java",
+ ".srcjar",
+ ],
+ ),
+ ),
+ _KT_COMPILER_ATTRS,
+)
diff --git a/rules/android/android_library_aosp_internal/impl.bzl b/rules/android/android_library_aosp_internal/impl.bzl
new file mode 100644
index 00000000..8ea06885
--- /dev/null
+++ b/rules/android/android_library_aosp_internal/impl.bzl
@@ -0,0 +1,131 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@rules_android//rules:common.bzl", _common = "common")
+load("@rules_android//rules:java.bzl", _java = "java")
+load(
+ "@rules_android//rules:processing_pipeline.bzl",
+ "ProviderInfo",
+ "processing_pipeline",
+)
+load("@rules_android//rules:utils.bzl", "utils")
+load(
+ "@rules_android//rules/android_library:impl.bzl",
+ "finalize",
+ _BASE_PROCESSORS = "PROCESSORS",
+)
+load("@rules_kotlin//kotlin:common.bzl", _kt_common = "common")
+load("@rules_kotlin//kotlin:compiler_opt.bzl", "merge_kotlincopts")
+load("@rules_kotlin//kotlin:jvm_compile.bzl", "kt_jvm_compile")
+load("@rules_kotlin//toolchains/kotlin_jvm:kt_jvm_toolchains.bzl", _kt_jvm_toolchains = "kt_jvm_toolchains")
+
+def _validations_processor(ctx, **_unused_sub_ctxs):
+ utils.check_for_failures(ctx.label, ctx.attr.deps, ctx.attr.exports)
+
+def _process_jvm(
+ ctx,
+ java_package, # @unused
+ exceptions_ctx, # @unused
+ resources_ctx,
+ idl_ctx,
+ db_ctx,
+ **_unused_sub_ctxs):
+ # Filter out disallowed sources.
+ srcs = ctx.files.srcs + idl_ctx.idl_java_srcs + db_ctx.java_srcs
+
+ # kt_jvm_compile expects deps that only carry CcInfo in runtime_deps
+ deps = [dep for dep in ctx.attr.deps if JavaInfo in dep] + idl_ctx.idl_deps
+ runtime_deps = [dep for dep in ctx.attr.deps if JavaInfo not in dep]
+
+ jvm_ctx = kt_jvm_compile(
+ ctx,
+ ctx.outputs.lib_jar,
+ # ctx.outputs.lib_src_jar, # Implicitly determines file.
+ srcs = srcs,
+ common_srcs = ctx.files.common_srcs,
+ coverage_srcs = ctx.files.coverage_srcs,
+ deps = deps,
+ plugins = ctx.attr.plugins + db_ctx.java_plugins,
+ exports = ctx.attr.exports,
+ # As the JavaInfo constructor does not support attaching
+ # exported_plugins, for the purposes of propagation, the plugin is
+ # wrapped in a java_library.exported_plugins target and attached with
+ # export to this rule.
+ exported_plugins = ctx.attr.exported_plugins,
+ runtime_deps = runtime_deps,
+ r_java = resources_ctx.r_java,
+ javacopts = ctx.attr.javacopts + db_ctx.javac_opts,
+ kotlincopts = merge_kotlincopts(ctx),
+ neverlink = ctx.attr.neverlink,
+ testonly = ctx.attr.testonly,
+ android_lint_plugins = [],
+ android_lint_rules_jars = depset(),
+ manifest = getattr(ctx.file, "manifest", None),
+ merged_manifest = resources_ctx.merged_manifest,
+ resource_files = ctx.files.resource_files,
+ kt_toolchain = _kt_jvm_toolchains.get(ctx),
+ java_toolchain = _common.get_java_toolchain(ctx),
+ disable_lint_checks = [],
+ rule_family = _kt_common.RULE_FAMILY.ANDROID_LIBRARY,
+ annotation_processor_additional_outputs = (
+ db_ctx.java_annotation_processor_additional_outputs
+ ),
+ annotation_processor_additional_inputs = (
+ db_ctx.java_annotation_processor_additional_inputs
+ ),
+ )
+
+ java_info = jvm_ctx.java_info
+
+ return ProviderInfo(
+ name = "jvm_ctx",
+ value = struct(
+ java_info = java_info,
+ providers = [java_info],
+ ),
+ )
+
+def _process_coverage(ctx, **_unused_ctx):
+ return ProviderInfo(
+ name = "coverage_ctx",
+ value = struct(
+ providers = [
+ coverage_common.instrumented_files_info(
+ ctx,
+ source_attributes = ["srcs", "coverage_srcs"],
+ dependency_attributes = ["assets", "deps", "exports"],
+ ),
+ ],
+ ),
+ )
+
+PROCESSORS = processing_pipeline.prepend(
+ processing_pipeline.replace(
+ _BASE_PROCESSORS,
+ JvmProcessor = _process_jvm,
+ CoverageProcessor = _process_coverage,
+ ),
+ ValidationsProcessor = _validations_processor,
+)
+
+_PROCESSING_PIPELINE = processing_pipeline.make_processing_pipeline(
+ processors = PROCESSORS,
+ finalize = finalize,
+)
+
+def impl(ctx):
+ java_package = _java.resolve_package_from_label(ctx.label, ctx.attr.custom_package)
+ return processing_pipeline.run(ctx, java_package, _PROCESSING_PIPELINE)
diff --git a/rules/android/android_library_aosp_internal/rule.bzl b/rules/android/android_library_aosp_internal/rule.bzl
new file mode 100644
index 00000000..3bfdd707
--- /dev/null
+++ b/rules/android/android_library_aosp_internal/rule.bzl
@@ -0,0 +1,41 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(
+ "@rules_android//rules/android_library:rule.bzl",
+ _attrs_metadata = "attrs_metadata",
+ _make_rule = "make_rule",
+)
+load("@rules_kotlin//toolchains/kotlin_jvm:kt_jvm_toolchains.bzl", _kt_jvm_toolchains = "kt_jvm_toolchains")
+load(":attrs.bzl", "ATTRS")
+load(
+ ":impl.bzl",
+ _impl = "impl",
+)
+
+android_library = _make_rule(
+ attrs = ATTRS,
+ implementation = _impl,
+ additional_toolchains = [_kt_jvm_toolchains.type],
+)
+
+def android_library_aosp_internal_macro(**attrs):
+ """AOSP android_library rule.
+
+ Args:
+ **attrs: Rule attributes
+ """
+ android_library(**_attrs_metadata(attrs))
diff --git a/rules/android/rules.bzl b/rules/android/rules.bzl
new file mode 100644
index 00000000..43b794d5
--- /dev/null
+++ b/rules/android/rules.bzl
@@ -0,0 +1,62 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Starlark rules for building Android apps."""
+
+load(
+ ":aar_import.bzl",
+ _aar_import = "aar_import",
+)
+load(
+ "@rules_android//rules:rules.bzl",
+ _android_application = "android_application",
+)
+load(
+ "@rules_android//rules:rules.bzl",
+ _android_ndk_repository = "android_ndk_repository",
+)
+load(
+ "@rules_android//rules:rules.bzl",
+ _android_sdk = "android_sdk",
+)
+load(
+ "@rules_android//rules:rules.bzl",
+ _android_sdk_repository = "android_sdk_repository",
+)
+load(
+ "@rules_android//rules:rules.bzl",
+ _android_tools_defaults_jar = "android_tools_defaults_jar",
+)
+load(
+ ":android_app_certificate.bzl",
+ _android_app_certificate = "android_app_certificate",
+)
+load(
+ ":android_binary.bzl",
+ _android_binary = "android_binary",
+)
+load(
+ ":android_library.bzl",
+ _android_library = "android_library",
+)
+
+aar_import = _aar_import
+android_application = _android_application
+android_app_certificate = _android_app_certificate
+android_binary = _android_binary
+android_library = _android_library
+android_ndk_repository = _android_ndk_repository
+android_sdk = _android_sdk
+android_sdk_repository = _android_sdk_repository
+android_tools_defaults_jar = _android_tools_defaults_jar
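A short, illustrative sketch of how a BUILD file would load the wrappers through this re-export file rather than from @rules_android directly (symbol selection is hypothetical):

load(
    "//build/bazel/rules/android:rules.bzl",
    "aar_import",
    "android_binary",
    "android_library",
)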
diff --git a/rules/apex.bzl b/rules/apex.bzl
deleted file mode 100644
index e5fc11ef..00000000
--- a/rules/apex.bzl
+++ /dev/null
@@ -1,432 +0,0 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-load(":apex_key.bzl", "ApexKeyInfo")
-load(":prebuilt_file.bzl", "PrebuiltFileInfo")
-load(":sh_binary.bzl", "ShBinaryInfo")
-load("//build/bazel/rules/cc:stripped_cc_common.bzl", "StrippedCcBinaryInfo")
-load("//build/bazel/rules/android:android_app_certificate.bzl", "AndroidAppCertificateInfo")
-load("//build/bazel/rules/apex:transition.bzl", "apex_transition", "shared_lib_transition_32", "shared_lib_transition_64")
-load("//build/bazel/rules/apex:cc.bzl", "ApexCcInfo", "apex_cc_aspect")
-
-DIR_LIB = "lib"
-DIR_LIB64 = "lib64"
-
-ApexInfo = provider(
- "ApexInfo has no field currently and is used by apex rule dependents to ensure an attribute is a target of apex rule.",
- fields = {},
-)
-
-# Prepare the input files info for bazel_apexer_wrapper to generate APEX filesystem image.
-def _prepare_apexer_wrapper_inputs(ctx):
- # dictionary to return in the format:
- # apex_manifest[(image_file_dirname, image_file_basename)] = bazel_output_file
- apex_manifest = {}
-
- x86_constraint = ctx.attr._x86_constraint[platform_common.ConstraintValueInfo]
- x86_64_constraint = ctx.attr._x86_64_constraint[platform_common.ConstraintValueInfo]
- arm_constraint = ctx.attr._arm_constraint[platform_common.ConstraintValueInfo]
- arm64_constraint = ctx.attr._arm64_constraint[platform_common.ConstraintValueInfo]
-
- if ctx.target_platform_has_constraint(x86_constraint):
- _add_libs_32_target(ctx, "x86", apex_manifest)
- elif ctx.target_platform_has_constraint(x86_64_constraint):
- _add_libs_64_target(ctx, "x86", "x86_64", apex_manifest)
- elif ctx.target_platform_has_constraint(arm_constraint):
- _add_libs_32_target(ctx, "arm", apex_manifest)
- elif ctx.target_platform_has_constraint(arm64_constraint):
- _add_libs_64_target(ctx, "arm", "arm64", apex_manifest)
-
- # Handle prebuilts
- for dep in ctx.attr.prebuilts:
- prebuilt_file_info = dep[PrebuiltFileInfo]
- if prebuilt_file_info.filename:
- filename = prebuilt_file_info.filename
- else:
- filename = dep.label.name
- apex_manifest[(prebuilt_file_info.dir, filename)] = prebuilt_file_info.src
-
- # Handle binaries
- for dep in ctx.attr.binaries:
- if ShBinaryInfo in dep:
- # sh_binary requires special handling on directory/filename construction.
- sh_binary_info = dep[ShBinaryInfo]
- default_info = dep[DefaultInfo]
- if sh_binary_info != None:
- directory = "bin"
- if sh_binary_info.sub_dir != None and sh_binary_info.sub_dir != "":
- directory = "/".join([directory, sh_binary_info.sub_dir])
-
- if sh_binary_info.filename != None and sh_binary_info.filename != "":
- filename = sh_binary_info.filename
- else:
- filename = dep.label.name
-
- apex_manifest[(directory, filename)] = default_info.files_to_run.executable
- elif CcInfo in dep:
- # cc_binary just takes the final executable from the runfiles.
- apex_manifest[("bin", dep.label.name)] = dep[DefaultInfo].files_to_run.executable
-
- apex_content_inputs = []
-
- bazel_apexer_wrapper_manifest = ctx.actions.declare_file("%s_bazel_apexer_wrapper_manifest" % ctx.attr.name)
- file_lines = []
-
- # Store the apex file target directory, file name and the path in the source tree in a file.
- # This file will be read by the bazel_apexer_wrapper to create the apex input directory.
- # Here is an example:
- # {etc/tz,tz_version,system/timezone/output_data/version/tz_version}
- for (apex_dirname, apex_basename), bazel_input_file in apex_manifest.items():
- apex_content_inputs.append(bazel_input_file)
- file_lines += [",".join([apex_dirname, apex_basename, bazel_input_file.path])]
-
- ctx.actions.write(bazel_apexer_wrapper_manifest, "\n".join(file_lines))
-
- return apex_content_inputs, bazel_apexer_wrapper_manifest
-
-def _add_libs_32_target(ctx, key, apex_manifest):
- if len(ctx.split_attr.native_shared_libs_32.keys()) > 0:
- _add_lib_file(DIR_LIB, ctx.split_attr.native_shared_libs_32[key], apex_manifest)
-
-def _add_libs_64_target(ctx, key_32, key_64, apex_manifest):
- _add_libs_32_target(ctx, key_32, apex_manifest)
- if len(ctx.split_attr.native_shared_libs_64.keys()) > 0:
- _add_lib_file(DIR_LIB64, ctx.split_attr.native_shared_libs_64[key_64], apex_manifest)
-
-def _add_lib_file(dir, libs, apex_manifest):
- for dep in libs:
- apex_cc_info = dep[ApexCcInfo]
- for lib_file in apex_cc_info.transitive_shared_libs.to_list():
- apex_manifest[(dir, lib_file.basename)] = lib_file
-
-# conv_apex_manifest - Convert the JSON APEX manifest to protobuf, which is needed by apexer.
-def _convert_apex_manifest_json_to_pb(ctx, apex_toolchain):
- apex_manifest_json = ctx.file.manifest
- apex_manifest_pb = ctx.actions.declare_file("apex_manifest.pb")
-
- ctx.actions.run(
- outputs = [apex_manifest_pb],
- inputs = [ctx.file.manifest],
- executable = apex_toolchain.conv_apex_manifest,
- arguments = [
- "proto",
- apex_manifest_json.path,
- "-o",
- apex_manifest_pb.path,
- ],
- mnemonic = "ConvApexManifest",
- )
-
- return apex_manifest_pb
-
-# apexer - generate the APEX file.
-def _run_apexer(ctx, apex_toolchain, apex_content_inputs, bazel_apexer_wrapper_manifest, apex_manifest_pb):
- # Inputs
- file_contexts = ctx.file.file_contexts
- apex_key_info = ctx.attr.key[ApexKeyInfo]
- privkey = apex_key_info.private_key
- pubkey = apex_key_info.public_key
- android_jar = apex_toolchain.android_jar
- android_manifest = ctx.file.android_manifest
-
- # Outputs
- apex_output_file = ctx.actions.declare_file(ctx.attr.name + ".apex.unsigned")
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["--manifest", apex_manifest_pb.path])
- args.add_all(["--file_contexts", file_contexts.path])
- args.add_all(["--key", privkey.path])
- args.add_all(["--pubkey", pubkey.path])
- min_sdk_version = ctx.attr.min_sdk_version
-
- # TODO(b/215339575): This is a super rudimentary way to convert "current" to a numerical number.
- # Generalize this to API level handling logic in a separate Starlark utility, preferably using
- # API level maps dumped from api_levels.go
- if min_sdk_version == "current":
- min_sdk_version = "10000"
- args.add_all(["--min_sdk_version", min_sdk_version])
- args.add_all(["--bazel_apexer_wrapper_manifest", bazel_apexer_wrapper_manifest])
- args.add_all(["--apexer_path", apex_toolchain.apexer])
-
- # apexer needs the list of directories containing all auxilliary tools invoked during
- # the creation of an apex
- avbtool_files = apex_toolchain.avbtool[DefaultInfo].files_to_run
- e2fsdroid_files = apex_toolchain.e2fsdroid[DefaultInfo].files_to_run
- mke2fs_files = apex_toolchain.mke2fs[DefaultInfo].files_to_run
- resize2fs_files = apex_toolchain.resize2fs[DefaultInfo].files_to_run
- apexer_tool_paths = [
- # These are built by make_injection
- apex_toolchain.apexer.dirname,
-
- # These are real Bazel targets
- apex_toolchain.aapt2.dirname,
- avbtool_files.executable.dirname,
- e2fsdroid_files.executable.dirname,
- mke2fs_files.executable.dirname,
- resize2fs_files.executable.dirname,
- ]
-
- args.add_all(["--apexer_tool_path", ":".join(apexer_tool_paths)])
- args.add_all(["--apex_output_file", apex_output_file])
-
- if android_manifest != None:
- args.add_all(["--android_manifest", android_manifest.path])
-
- inputs = apex_content_inputs + [
- bazel_apexer_wrapper_manifest,
- apex_manifest_pb,
- file_contexts,
- privkey,
- pubkey,
- android_jar,
- ]
-
- tools = [
- avbtool_files,
- e2fsdroid_files,
- mke2fs_files,
- resize2fs_files,
- apex_toolchain.aapt2,
-
- apex_toolchain.apexer,
- apex_toolchain.sefcontext_compile,
- ]
-
- if android_manifest != None:
- inputs.append(android_manifest)
-
- ctx.actions.run(
- inputs = inputs,
- tools = tools,
- outputs = [apex_output_file],
- executable = ctx.executable._bazel_apexer_wrapper,
- arguments = [args],
- mnemonic = "BazelApexerWrapper",
- )
-
- return apex_output_file
-
-# Sign a file with signapk.
-def _run_signapk(ctx, unsigned_file, signed_file, private_key, public_key, mnemonic):
- # Inputs
- inputs = [
- unsigned_file,
- private_key,
- public_key,
- ctx.executable._signapk,
- ]
-
- # Outputs
- outputs = [signed_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["-a", 4096])
- args.add_all(["--align-file-size"])
- args.add_all([public_key, private_key])
- args.add_all([unsigned_file, signed_file])
-
- ctx.actions.run(
- inputs = inputs,
- outputs = outputs,
- executable = ctx.executable._signapk,
- arguments = [args],
- mnemonic = mnemonic,
- )
-
- return signed_file
-
-# Compress a file with apex_compression_tool.
-def _run_apex_compression_tool(ctx, apex_toolchain, input_file, output_file_name):
- # Inputs
- inputs = [
- input_file,
- ]
-
- avbtool_files = apex_toolchain.avbtool[DefaultInfo].files_to_run
- tools = [
- avbtool_files,
- apex_toolchain.apex_compression_tool,
- apex_toolchain.soong_zip,
- ]
-
- # Outputs
- compressed_file = ctx.actions.declare_file(output_file_name)
- outputs = [compressed_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["compress"])
- tool_dirs = [apex_toolchain.soong_zip.dirname, avbtool_files.executable.dirname]
- args.add_all(["--apex_compression_tool", ":".join(tool_dirs)])
- args.add_all(["--input", input_file])
- args.add_all(["--output", compressed_file])
-
- ctx.actions.run(
- inputs = inputs,
- tools = tools,
- outputs = outputs,
- executable = apex_toolchain.apex_compression_tool,
- arguments = [args],
- mnemonic = "BazelApexCompressing",
- )
- return compressed_file
-
-# See the APEX section in the README on how to use this rule.
-def _apex_rule_impl(ctx):
- apex_toolchain = ctx.toolchains["//build/bazel/rules/apex:apex_toolchain_type"].toolchain_info
-
- apex_content_inputs, bazel_apexer_wrapper_manifest = _prepare_apexer_wrapper_inputs(ctx)
- apex_manifest_pb = _convert_apex_manifest_json_to_pb(ctx, apex_toolchain)
-
- unsigned_apex_output_file = _run_apexer(ctx, apex_toolchain, apex_content_inputs, bazel_apexer_wrapper_manifest, apex_manifest_pb)
-
- apex_cert_info = ctx.attr.certificate[AndroidAppCertificateInfo]
- private_key = apex_cert_info.pk8
- public_key = apex_cert_info.pem
-
- signed_apex = ctx.outputs.apex_output
- _run_signapk(ctx, unsigned_apex_output_file, signed_apex, private_key, public_key, "BazelApexSigning")
- output_file = signed_apex
-
- if ctx.attr.compressible:
- compressed_apex_output_file = _run_apex_compression_tool(ctx, apex_toolchain, signed_apex, ctx.attr.name + ".capex.unsigned")
- signed_capex = ctx.outputs.capex_output
- _run_signapk(ctx, compressed_apex_output_file, signed_capex, private_key, public_key, "BazelCompressedApexSigning")
-
- files_to_build = depset([output_file])
- return [DefaultInfo(files = files_to_build), ApexInfo()]
-
-_apex = rule(
- implementation = _apex_rule_impl,
- attrs = {
- "manifest": attr.label(allow_single_file = [".json"]),
- "android_manifest": attr.label(allow_single_file = [".xml"]),
- "file_contexts": attr.label(allow_single_file = True, mandatory = True),
- "key": attr.label(providers = [ApexKeyInfo]),
- "certificate": attr.label(providers = [AndroidAppCertificateInfo]),
- "min_sdk_version": attr.string(default = "current"),
- "updatable": attr.bool(default = True),
- "installable": attr.bool(default = True),
- "compressible": attr.bool(default = False),
- "native_shared_libs_32": attr.label_list(
- providers = [ApexCcInfo],
- aspects = [apex_cc_aspect],
- cfg = shared_lib_transition_32,
- doc = "The libs compiled for 32-bit",
- ),
- "native_shared_libs_64": attr.label_list(
- providers = [ApexCcInfo],
- aspects = [apex_cc_aspect],
- cfg = shared_lib_transition_64,
- doc = "The libs compiled for 64-bit",
- ),
- "binaries": attr.label_list(
- providers = [
- # The dependency must produce _all_ of the providers in _one_ of these lists.
- [ShBinaryInfo], # sh_binary
- [StrippedCcBinaryInfo, CcInfo], # cc_binary (stripped)
- ],
- cfg = apex_transition,
- ),
- "prebuilts": attr.label_list(providers = [PrebuiltFileInfo], cfg = apex_transition),
- "apex_output": attr.output(doc = "signed .apex output"),
- "capex_output": attr.output(doc = "signed .capex output"),
-
- # Required to use apex_transition. This is an acknowledgement to the risks of memory bloat when using transitions.
- "_allowlist_function_transition": attr.label(default = "@bazel_tools//tools/allowlists/function_transition_allowlist"),
- "_bazel_apexer_wrapper": attr.label(
- cfg = "host",
- doc = "The apexer wrapper to avoid the problem where symlinks are created inside apex image.",
- executable = True,
- default = "//build/bazel/rules/apex:bazel_apexer_wrapper",
- ),
- "_signapk": attr.label(
- cfg = "host",
- doc = "The signapk tool.",
- executable = True,
- default = "//build/make/tools/signapk",
- ),
- "_x86_constraint": attr.label(
- default = Label("//build/bazel/platforms/arch:x86"),
- ),
- "_x86_64_constraint": attr.label(
- default = Label("//build/bazel/platforms/arch:x86_64"),
- ),
- "_arm_constraint": attr.label(
- default = Label("//build/bazel/platforms/arch:arm"),
- ),
- "_arm64_constraint": attr.label(
- default = Label("//build/bazel/platforms/arch:arm64"),
- ),
- },
- toolchains = ["//build/bazel/rules/apex:apex_toolchain_type"],
- fragments = ["platform"],
-)
-
-def apex(
- name,
- manifest = "apex_manifest.json",
- android_manifest = None,
- file_contexts = None,
- key = None,
- certificate = None,
- min_sdk_version = None,
- updatable = True,
- installable = True,
- compressible = False,
- native_shared_libs_32 = [],
- native_shared_libs_64 = [],
- binaries = [],
- prebuilts = [],
- **kwargs):
- "Bazel macro to correspond with the APEX bundle Soong module."
-
- # If file_contexts is not specified, then use the default from //system/sepolicy/apex.
- # https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/builder.go;l=259-263;drc=b02043b84d86fe1007afef1ff012a2155172215c
- if file_contexts == None:
- file_contexts = "//system/sepolicy/apex:" + name + "-file_contexts"
-
- apex_output = name + ".apex"
- capex_output = None
- if compressible:
- capex_output = name + ".capex"
-
- _apex(
- name = name,
- manifest = manifest,
- android_manifest = android_manifest,
- file_contexts = file_contexts,
- key = key,
- certificate = certificate,
- min_sdk_version = min_sdk_version,
- updatable = updatable,
- installable = installable,
- compressible = compressible,
- native_shared_libs_32 = native_shared_libs_32,
- native_shared_libs_64 = native_shared_libs_64,
- binaries = binaries,
- prebuilts = prebuilts,
-
- # Enables predeclared output builds from command line directly, e.g.
- #
- # $ bazel build //path/to/module:com.android.module.apex
- # $ bazel build //path/to/module:com.android.module.capex
- apex_output = apex_output,
- capex_output = capex_output,
- **kwargs
- )
diff --git a/rules/apex/BUILD b/rules/apex/BUILD
index ef3998cd..04bb667a 100644
--- a/rules/apex/BUILD
+++ b/rules/apex/BUILD
@@ -1,5 +1,21 @@
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag", "bool_setting", "string_list_setting", "string_setting")
+load("@env//:env.bzl", "env")
+load("@soong_injection//api_levels:platform_versions.bzl", "platform_versions")
+load("@soong_injection//product_config:product_variables.bzl", "product_vars")
+load("//build/bazel/flags:common.bzl", "is_env_true")
+load("//build/bazel/product_variables:constants.bzl", "constants")
+load("//build/bazel/rules:metadata.bzl", "metadata")
load("//build/bazel/rules/apex:toolchain.bzl", "apex_toolchain")
-load("@bazel_skylib//rules:common_settings.bzl", "string_setting", "string_list_setting")
+load("//build/bazel/rules/common:api.bzl", "api")
+load(":apex_aab_test.bzl", "apex_aab_test_suite")
+load(":apex_key_test.bzl", "apex_key_test_suite")
+load(":apex_mk_test.bzl", "apex_mk_test_suite")
+load(":apex_test.bzl", "apex_test_suite")
+
+# Setup package and default_metadata_file for _test_apex_sbom in apex_test.bzl
+package(default_package_metadata = [":default_metadata_file"])
+
+metadata(name = "default_metadata_file")
string_setting(
name = "apex_name",
@@ -8,36 +24,156 @@ string_setting(
)
string_setting(
- name = "min_sdk_version",
+ name = "base_apex_name",
build_setting_default = "",
visibility = ["//visibility:public"],
)
+string_setting(
+ name = "override_apex_manifest_default_version",
+ build_setting_default = env.get("OVERRIDE_APEX_MANIFEST_DEFAULT_VERSION", ""),
+ visibility = ["//visibility:public"],
+)
+
+string_setting(
+ name = "apex_global_min_sdk_version_override",
+ # TODO(b/269577299): Make this read from //build/bazel/product_config:product_vars instead.
+ build_setting_default = product_vars.get("ApexGlobalMinSdkVersionOverride", ""),
+ visibility = ["//visibility:public"],
+)
+
+bool_setting(
+ name = "within_apex",
+ build_setting_default = False,
+ visibility = ["//visibility:public"],
+)
+
+config_setting(
+ name = "in_apex",
+ flag_values = {
+ ":within_apex": "True",
+ },
+)
+
+config_setting(
+ name = "non_apex",
+ flag_values = {
+ ":apex_name": "",
+ },
+)
+
+config_setting(
+ name = "android-in_apex",
+ constraint_values = [
+ constants.ArchVariantToConstraints["android"],
+ ],
+ flag_values = {
+ ":within_apex": "True",
+ },
+)
+
+config_setting(
+ name = "android-non_apex",
+ constraint_values = [
+ constants.ArchVariantToConstraints["android"],
+ ],
+ flag_values = {
+ ":within_apex": "False",
+ },
+)
+
+config_setting(
+ name = "linux_bionic-in_apex",
+ constraint_values = [
+ constants.ArchVariantToConstraints["linux_bionic"],
+ ],
+ flag_values = {
+ ":within_apex": "True",
+ },
+)
+
+config_setting(
+ name = "linux_bionic-non_apex",
+ constraint_values = [
+ constants.ArchVariantToConstraints["linux_bionic"],
+ ],
+ flag_values = {
+ ":within_apex": "False",
+ },
+)
+
string_list_setting(
name = "apex_direct_deps",
build_setting_default = [],
visibility = ["//visibility:public"],
)
+bool_flag(
+ name = "apexer_verbose",
+ build_setting_default = False,
+ visibility = ["//visibility:public"],
+)
+
+string_setting(
+ name = "min_sdk_version",
+ build_setting_default = "",
+ visibility = ["//visibility:public"],
+)
+
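+# One config_setting per known API level (e.g. ":min_sdk_version_30"),
+# so that targets can select() on the apex's min_sdk_version setting.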
+[
+ config_setting(
+ name = "min_sdk_version_" + str(level),
+ flag_values = {
+ ":min_sdk_version": str(level),
+ },
+ )
+ for level in api.api_levels.values()
+]
+
+bool_flag(
+ name = "unsafe_disable_apex_allowed_deps_check",
+ build_setting_default = is_env_true(env.get("UNSAFE_DISABLE_APEX_ALLOWED_DEPS_CHECK")),
+ visibility = ["//visibility:public"],
+)
+
+bool_flag(
+ name = "unbundled_build_target_sdk_with_api_fingerprint",
+ build_setting_default = is_env_true(env.get("UNBUNDLED_BUILD_TARGET_SDK_WITH_API_FINGERPRINT")),
+ visibility = ["//visibility:public"],
+)
+
+string_setting(
+ name = "platform_sdk_codename",
+ build_setting_default = platform_versions.platform_sdk_codename,
+ visibility = ["//visibility:public"],
+)
+
toolchain_type(name = "apex_toolchain_type")
apex_toolchain(
- name = "prebuilt_apex_toolchain",
- aapt2 = "//prebuilts/sdk/tools:linux/bin/aapt2",
+ name = "apex_toolchain",
+ aapt2 = "//frameworks/base/tools/aapt2",
+ android_jar = "//prebuilts/sdk/current:public/android.jar",
+ apex_compression_tool = "//system/apex/tools:apex_compression_tool",
+ apexer = "//system/apex/apexer",
avbtool = "//external/avb:avbtool",
- apexer = "@make_injection//:host/linux-x86/bin/apexer",
+ conv_apex_manifest = "//system/apex/apexer:conv_apex_manifest",
+ dexdeps = "//dalvik/tools/dexdeps", # tool input for gen_java_usedby_apex
+ e2fsdroid = "//external/e2fsprogs/contrib/android:e2fsdroid",
+ gen_java_usedby_apex = "//build/soong/scripts:gen_java_usedby_apex.sh",
+ gen_ndk_usedby_apex = "//build/soong/scripts:gen_ndk_usedby_apex.sh",
+ jsonmodify = "//build/soong/scripts:jsonmodify",
+ manifest_fixer = "//build/soong/scripts:manifest_fixer",
mke2fs = "//external/e2fsprogs/misc:mke2fs",
+ notice_generator = "//build/bazel/compliance/cmd:bazel_notice_gen",
+ readelf = "//prebuilts/clang/host/linux-x86:llvm-readelf",
resize2fs = "//external/e2fsprogs/resize:resize2fs",
- e2fsdroid = "//external/e2fsprogs/contrib/android:e2fsdroid",
- sefcontext_compile = "@make_injection//:host/linux-x86/bin/sefcontext_compile",
- conv_apex_manifest = "@make_injection//:host/linux-x86/bin/conv_apex_manifest",
- android_jar = "//prebuilts/sdk/current:public/android.jar",
- apex_compression_tool = "@make_injection//:host/linux-x86/bin/apex_compression_tool",
- soong_zip = "//prebuilts/build-tools:linux-x86/bin/soong_zip",
+ sefcontext_compile = "//external/selinux/libselinux:sefcontext_compile",
+ soong_zip = "//build/soong/zip/cmd:soong_zip",
)
toolchain(
- name = "prebuilt_apex_toolchain_def",
+ name = "apex_toolchain_def",
exec_compatible_with = [
"//build/bazel/platforms/arch:x86_64",
"//build/bazel/platforms/os:linux",
@@ -45,35 +181,28 @@ toolchain(
target_compatible_with = [
"//build/bazel/platforms/os:android",
],
- toolchain = ":prebuilt_apex_toolchain",
+ toolchain = ":apex_toolchain",
toolchain_type = "//build/bazel/rules/apex:apex_toolchain_type",
)
-py_binary(
- name = "bazel_apexer_wrapper",
- srcs = ["bazel_apexer_wrapper.py"],
- visibility = ["//visibility:public"],
+apex_test_suite(
+ name = "apex_tests",
)
-sh_test(
- name = "bazel_apexer_wrapper_test",
- srcs = ["bazel_apexer_wrapper_test.sh"],
- deps = ["@bazel_tools//tools/bash/runfiles"],
- data = [
- ":bazel_apexer_wrapper",
- "test.pem",
- "//external/avb:avbtool",
- "//external/e2fsprogs/contrib/android:e2fsdroid",
- "//external/e2fsprogs/misc:mke2fs",
- "//external/e2fsprogs/resize:resize2fs",
- "//external/e2fsprogs/debugfs:debugfs",
- "//prebuilts/build-tools:linux-x86/bin/soong_zip",
- "//prebuilts/sdk/tools:linux/bin/aapt2",
- "@make_injection//:host/linux-x86/bin/apex_compression_tool",
- "@make_injection//:host/linux-x86/bin/apexer",
- "@make_injection//:host/linux-x86/bin/conv_apex_manifest",
- "@make_injection//:host/linux-x86/bin/deapexer",
- "@make_injection//:host/linux-x86/bin/sefcontext_compile",
- "//prebuilts/sdk/current:public/android.jar",
- ]
+apex_aab_test_suite(
+ name = "apex_aab_tests",
+)
+
+apex_key_test_suite(
+ name = "apex_key_tests",
+)
+
+apex_mk_test_suite(
+ name = "apex_mk_tests",
+)
+
+filegroup(
+ name = "signapk_deploy_jar",
+ srcs = ["//build/make/tools/signapk:signapk_deploy.jar"],
+ visibility = ["//visibility:public"],
)
diff --git a/rules/apex/METADATA b/rules/apex/METADATA
new file mode 100644
index 00000000..503057ea
--- /dev/null
+++ b/rules/apex/METADATA
@@ -0,0 +1 @@
+# Created for _test_apex_sbom in apex_test.bzl
\ No newline at end of file
diff --git a/rules/apex/README.md b/rules/apex/README.md
new file mode 100644
index 00000000..4a3b0a35
--- /dev/null
+++ b/rules/apex/README.md
@@ -0,0 +1,73 @@
+# The Bazel APEX ruleset
+
+**Example**
+
+```
+$ b build //path/to/module:com.android.module.apex --config=android
+```
+
+**Code locations**
+
+The main entry point is the `apex` macro in [//build/bazel/rules/apex/apex.bzl](https://cs.android.com/android/platform/superproject/+/master:build/bazel/rules/apex/apex.bzl?q=f:apex.bzl), which expands to the `_apex` rule implementation.
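+
+A minimal `BUILD` usage of the macro might look like the sketch below (labels and attribute values are illustrative, not taken from a real module):
+
+```
+load("//build/bazel/rules/apex:apex.bzl", "apex")
+
+apex(
+    name = "com.android.example",
+    manifest = "apex_manifest.json",
+    key = ":com.android.example.key",
+    certificate = ":com.android.example.certificate",
+    min_sdk_version = "30",
+    native_shared_libs_64 = ["//path/to:libexample"],
+    binaries = ["//path/to:example_bin"],
+    prebuilts = ["//path/to:example_prebuilt"],
+)
+```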
+
+Related files in this directory include:
+
+* `cc.bzl` for the C/C++ specific aspect that traverses into native dependencies
+* `toolchain.bzl` for the host toolchain
+* `transition.bzl` for the configuration transition to add APEX-specific configuration to dependencies, like the APEX name and min sdk version.
+* `apex_aab.bzl` to repackage APEXes into multi-architecture Android app bundles and APK sets.
+* `apex_info.bzl` contains ApexInfo and ApexMkInfo providers. These form the main interface of an APEX target.
+* `apex_key.bzl` for the `apex_key()` rule
+* `apex_test.bzl` for Starlark analysis tests
+
+The bp2build converter (`ConvertWithBp2build`) is located [here](https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/apex.go;l=3469;drc=4d247e6f21004d3998bf32d46c22111a380b81af).
+
+The mixed build handler (`ProcessBazelQueryResponse`) is located [here](https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/apex.go;l=1888;drc=4d247e6f21004d3998bf32d46c22111a380b81af).
+
+**Major features**
+
+* Build, compress, and sign APEX `ext4` images and containers for all architectures/bitness
+* Supports outputs: `.apex`, `.capex` (compressed apex), `.aab` (Android app bundle), `.apks` (APK set)
+* Supports packaging prebuilts (e.g. tzdata), native shared libs, native binaries, `sh_binary`
+* Works with `apex`, `override_apex` and `apex_test` Soong module types
+* Supports AOSP and Google/Go APEX variants
+* Supports standalone/unbundled APEX builds (fast for development) with `b` and full platform builds with `m` (preloaded on system)
+* Supports generating metadata files for Mainline quality signals: required/provided libs, `installed_files.txt`
+* Internal mainline build scripts are capable of building multi-arch AABs/APKs in a single Bazel invocation
+
+**Detailed features**
+
+* Bazel build settings/flags in `//build/bazel/rules/apex/BUILD`, like `apexer_verbose` and `unsafe_disable_apex_allowed_deps_check` (see the example command after this list)
+* ABI stability for native deps
+ * ABI stable stubs are marked as required, and not included within the APEX
+ * non-ABI stable transitive deps are copied into the APEX
+* Supports testonly APEXes (converted from `apex_test`)
+* Supports default certificates with product config
+* Supports default `file_contexts`
+* Supports `allowed_deps.txt` validation
+* Supports `apex_available` validation
+* Supports `min_sdk_version` validation
+* Supports `logging_parent`, `package_name`, `android_manifest`, `key`.
+* Supports `apex_manifest.pb` conversion
+* Supports `canned_fs_config` generation
+* Supports `file_contexts` generation
+* Licensing: `NOTICE.html.gz` embedded in the APEX
+* All host tools are built from source by Bazel or are vendored prebuilts
+* All actions are fully sandboxed
+* Ability to build metadata files on the command line with `--output_groups=<coverage_files,backing_libs,...>`
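+
+For example (module label is illustrative), the settings above can be combined with output groups in a single `b` invocation:
+
+```
+$ b build //path/to/module:com.android.module.apex --config=android \
+    --//build/bazel/rules/apex:apexer_verbose=true \
+    --output_groups=coverage_files,backing_libs
+```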
+
+**Guardrails / others**
+
+* `--config=android` is needed to build for the device. All APEX targets set `target_compatible_with` to android only - no host APEXes.
+* Comprehensive set of rule analysis tests in `apex_test.bzl`
+* Example APEX in `//build/bazel/examples/apex/...`
+* Unit and integration tests in `//build/bazel/{examples,tests,rules}/apex/...` and `//build/soong/tests/...`
+
+**Known issues / gap analysis (non-exhaustive)**
+
+Upcoming features are based on Roboleaf module conversion priorities, like Java, Rust, DCLA and API fingerprinting support.
+
+* `override_apex` modules are converted to a regular apex with duplicated attributes. These are hidden by bp2build currently and will be cleaned up with macros in the future.
+* Correct product platform transitions for `apex_aab` to `mainline_modules_*` products
+* Java support
+* Rust support
diff --git a/rules/apex/apex.bzl b/rules/apex/apex.bzl
new file mode 100644
index 00000000..be1cf588
--- /dev/null
+++ b/rules/apex/apex.bzl
@@ -0,0 +1,1171 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@soong_injection//apex_toolchain:constants.bzl", "default_manifest_version")
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+load("//build/bazel/product_config:product_variables_providing_rule.bzl", "ProductVariablesInfo")
+load("//build/bazel/rules:common.bzl", "get_dep_targets")
+load("//build/bazel/rules:metadata.bzl", "MetadataFileInfo")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo")
+load("//build/bazel/rules:sh_binary.bzl", "ShBinaryInfo")
+load("//build/bazel/rules:toolchain_utils.bzl", "verify_toolchain_exists")
+load("//build/bazel/rules/android:android_app_certificate.bzl", "AndroidAppCertificateInfo", "android_app_certificate_with_default_cert")
+load("//build/bazel/rules/apex:cc.bzl", "ApexCcInfo", "ApexCcMkInfo", "apex_cc_aspect")
+load("//build/bazel/rules/apex:sdk_versions.bzl", "maybe_override_min_sdk_version")
+load("//build/bazel/rules/apex:transition.bzl", "apex_transition", "shared_lib_transition_32", "shared_lib_transition_64")
+load("//build/bazel/rules/cc:clang_tidy.bzl", "collect_deps_clang_tidy_info")
+load("//build/bazel/rules/cc:stripped_cc_common.bzl", "CcUnstrippedInfo", "StrippedCcBinaryInfo")
+load("//build/bazel/rules/common:api.bzl", "api")
+load(
+ "//build/bazel/rules/license:license_aspect.bzl",
+ "RuleLicensedDependenciesInfo",
+ "license_aspect",
+ "license_map",
+ "license_map_notice_files",
+ "license_map_to_json",
+)
+load(":apex_available.bzl", "ApexAvailableInfo", "apex_available_aspect")
+load(":apex_deps_validation.bzl", "ApexDepsInfo", "apex_deps_validation_aspect", "validate_apex_deps")
+load(":apex_info.bzl", "ApexInfo", "ApexMkInfo")
+load(":apex_key.bzl", "ApexKeyInfo")
+load(":bundle.bzl", "apex_zip_files")
+
+def _create_file_mapping(ctx):
+ """Create a file mapping for the APEX filesystem image.
+
+ This returns a Dict[File, str] where the dictionary keys are paths in the
+ apex staging dir / filesystem image, and the values are the files and other
+ metadata that should be installed there.
+
+ It also returns other data structures, such as:
+    - requires: libs that this apex depends on from other apexes or the platform
+    - provides: libs that this apex provides to other apexes or the platform
+    - make_modules_to_install: make module names of libs that need to be installed onto the platform in a bundled build (LOCAL_REQUIRED_MODULES)
+ - make_files_info: metadata about this apex's payload to be used for other packaging steps.
+ """
+
+ # Dictionary mapping from paths in the apex to the files and associated metadata to be put there
+ file_mapping = {}
+ requires = {}
+ provides = {}
+ make_modules_to_install = {}
+ metadata_file_mapping = {}
+
+ # Generate a str -> str dictionary to define Make modules and variables for the
+ # packaging step in a mixed build. This is necessary as long as there are
+ # Make-derived actions that operate on bazel's outputs. If we move all Make
+ # packaging actions to Bazel, there's no need for this data flow.
+ make_files_info = {}
+
+ arch = platforms.get_target_arch(ctx.attr._platform_utils)
+ is_target_64_bit = platforms.get_target_bitness(ctx.attr._platform_utils) == 64
+
+ def add_file_mapping(install_dir, basename, bazel_file, klass, owner, arch = None, unstripped = None, metadata_file = None):
+ installed_path = paths.join(install_dir, basename)
+ if installed_path in file_mapping and file_mapping[installed_path] != bazel_file:
+ # TODO: we should figure this out and make it a failure
+ print("Warning: %s in this apex is already installed to %s, overwriting it with %s" %
+ (file_mapping[installed_path].path, installed_path, bazel_file.path))
+ file_mapping[installed_path] = bazel_file
+ metadata_file_mapping[installed_path] = metadata_file
+
+ files_info = {
+ "built_file": bazel_file.path,
+ "class": klass,
+ "install_dir": install_dir,
+ "basename": basename,
+ "package": owner.package,
+ "make_module_name": owner.name,
+ "arch": arch,
+ }
+ if unstripped:
+ files_info["unstripped_built_file"] = unstripped.path
+ make_files_info[installed_path] = files_info
+
+ def _add_lib_files(directory, libs, arch):
+ for dep in libs:
+ apex_cc_info = dep[ApexCcInfo]
+ for lib in apex_cc_info.requires_native_libs.to_list():
+ requires[lib] = True
+ for lib in apex_cc_info.provides_native_libs.to_list():
+ provides[lib] = True
+ for lib_file in apex_cc_info.transitive_shared_libs.to_list():
+ stripped = lib_file.stripped
+ unstripped = lib_file.unstripped
+ add_file_mapping(
+ directory,
+ stripped.basename,
+ stripped,
+ "nativeSharedLib",
+ stripped.owner,
+ arch = arch,
+ unstripped = unstripped,
+ metadata_file = lib_file.metadata_file,
+ )
+
+ # For bundled builds.
+ apex_cc_mk_info = dep[ApexCcMkInfo]
+ for mk_module in apex_cc_mk_info.make_modules_to_install.to_list():
+ make_modules_to_install[mk_module] = True
+
+ if is_target_64_bit:
+ _add_lib_files("lib64", ctx.attr.native_shared_libs_64, arch)
+
+ secondary_arch = platforms.get_target_secondary_arch(ctx.attr._platform_utils)
+ if secondary_arch:
+ _add_lib_files("lib", ctx.attr.native_shared_libs_32, secondary_arch)
+ else:
+ _add_lib_files("lib", ctx.attr.native_shared_libs_32, arch)
+
+ backing_libs = []
+ for lib in file_mapping.values():
+ if lib.basename not in backing_libs:
+ backing_libs.append(lib.basename)
+ backing_libs = sorted(backing_libs)
+
+ # Handle prebuilts
+ for dep in ctx.attr.prebuilts:
+ prebuilt_file_info = dep[PrebuiltFileInfo]
+ if prebuilt_file_info.filename:
+ filename = prebuilt_file_info.filename
+ else:
+ filename = dep.label.name
+ add_file_mapping(
+ prebuilt_file_info.dir,
+ filename,
+ prebuilt_file_info.src,
+ "etc",
+ dep.label,
+ arch = arch,
+ metadata_file = dep[MetadataFileInfo].metadata_file,
+ )
+
+ # Handle binaries
+ for dep in ctx.attr.binaries:
+ if ShBinaryInfo in dep:
+ # sh_binary requires special handling on directory/filename construction.
+ sh_binary_info = dep[ShBinaryInfo]
+ if sh_binary_info:
+ directory = "bin"
+ if sh_binary_info.sub_dir:
+ directory = paths.join("bin", sh_binary_info.sub_dir)
+
+ filename = dep.label.name
+ if sh_binary_info.filename:
+ filename = sh_binary_info.filename
+
+ add_file_mapping(
+ directory,
+ filename,
+ dep[DefaultInfo].files_to_run.executable,
+ "shBinary",
+ dep.label,
+ arch = arch,
+ metadata_file = dep[MetadataFileInfo].metadata_file,
+ )
+ elif ApexCcInfo in dep:
+ # cc_binary just takes the final executable from the runfiles.
+ add_file_mapping(
+ "bin",
+ dep.label.name,
+ dep[DefaultInfo].files_to_run.executable,
+ "nativeExecutable",
+ dep.label,
+ arch,
+ unstripped = dep[CcUnstrippedInfo].unstripped[0].files.to_list()[0],
+ metadata_file = dep[MetadataFileInfo].metadata_file,
+ )
+
+ # Add transitive shared lib deps of apex binaries to the apex.
+ if is_target_64_bit:
+ _add_lib_files("lib64", [dep], arch)
+ else:
+ _add_lib_files("lib", [dep], arch)
+
+ return (
+ file_mapping,
+ sorted(requires.keys(), key = lambda x: x.name), # sort on just the name of the target, not package
+ sorted(provides.keys(), key = lambda x: x.name),
+ backing_libs,
+ sorted(make_modules_to_install),
+ sorted(make_files_info.values(), key = lambda x: ":".join([x["package"], x["make_module_name"], x["arch"]])),
+ metadata_file_mapping,
+ )
+
+def _add_so(label):
+ return label.name + ".so"
+
+def _add_apex_manifest_information(
+ ctx,
+ apex_toolchain,
+ requires_native_libs,
+ provides_native_libs):
+ apex_manifest_json = ctx.file.manifest
+ apex_manifest_full_json = ctx.actions.declare_file(ctx.attr.name + "_apex_manifest_full.json")
+
+ args = ctx.actions.args()
+ args.add(apex_manifest_json)
+ args.add_all(["-a", "requireNativeLibs"])
+ args.add_all(requires_native_libs, map_each = _add_so) # e.g. turn "//foo/bar:baz" to "baz.so"
+ args.add_all(["-a", "provideNativeLibs"])
+ args.add_all(provides_native_libs, map_each = _add_so)
+
+ manifest_version = ctx.attr._override_apex_manifest_default_version[BuildSettingInfo].value
+ if not manifest_version:
+ manifest_version = default_manifest_version
+ args.add_all(["-se", "version", "0", manifest_version])
+
+ # TODO: support other optional flags like -v name and -a jniLibs
+ args.add_all(["-o", apex_manifest_full_json])
+
+ ctx.actions.run(
+ inputs = [apex_manifest_json],
+ outputs = [apex_manifest_full_json],
+ executable = apex_toolchain.jsonmodify[DefaultInfo].files_to_run,
+ arguments = [args],
+ mnemonic = "ApexManifestModify",
+ )
+
+ return apex_manifest_full_json
+
+# conv_apex_manifest - Convert the JSON APEX manifest to protobuf, which is needed by apexer.
+def _convert_apex_manifest_json_to_pb(ctx, apex_toolchain, apex_manifest_json):
+ apex_manifest_pb = ctx.actions.declare_file(ctx.attr.name + "_apex_manifest.pb")
+
+ ctx.actions.run(
+ outputs = [apex_manifest_pb],
+ inputs = [apex_manifest_json],
+ executable = apex_toolchain.conv_apex_manifest[DefaultInfo].files_to_run,
+ arguments = [
+ "proto",
+ apex_manifest_json.path,
+ "-o",
+ apex_manifest_pb.path,
+ ],
+ mnemonic = "ConvApexManifest",
+ )
+
+ return apex_manifest_pb
+
+def _generate_canned_fs_config(ctx, filepaths):
+ """Generate filesystem config.
+
+ This encodes the filemode, uid, and gid of each file in the APEX,
+ including apex_manifest.json and apex_manifest.pb.
+ NOTE: every file must have an entry.
+ """
+
+ # Ensure all paths don't start with / and are normalized
+ filepaths = [paths.normalize(f).lstrip("/") for f in filepaths]
+
+ # Soong also sorts the config lines to be consistent with bazel
+ filepaths = sorted([f for f in filepaths if f])
+
+ # First, collect a set of all the directories in the apex
+ apex_subdirs_set = {}
+ for f in filepaths:
+ d = paths.dirname(f)
+ if d != "": # The root dir is handled manually below
+ # Make sure all the parent dirs of the current subdir are in the set, too
+ dirs = d.split("/")
+ for i in range(1, len(dirs) + 1):
+ apex_subdirs_set["/".join(dirs[:i])] = True
+
+ config_lines = []
+ config_lines.append("/ 1000 1000 0755")
+ config_lines.append("/apex_manifest.json 1000 1000 0644")
+ config_lines.append("/apex_manifest.pb 1000 1000 0644")
+
+ # Readonly if not executable. filepaths is already sorted.
+ config_lines += ["/" + f + " 1000 1000 0644" for f in filepaths if not f.startswith("bin/")]
+
+ # Mark all binaries as executable. filepaths is already sorted.
+ config_lines += ["/" + f + " 0 2000 0755" for f in filepaths if f.startswith("bin/")]
+
+ # All directories have the same permission.
+ config_lines += ["/" + d + " 0 2000 0755" for d in sorted(apex_subdirs_set.keys())]
+
+ output = ctx.actions.declare_file(ctx.attr.name + "_canned_fs_config.txt")
+
+ config_lines = "\n".join(config_lines) + "\n"
+ ctx.actions.write(output, config_lines)
+
+ if ctx.attr.canned_fs_config:
+ # Append the custom fs config content to the existing file
+ combined_output = ctx.actions.declare_file(ctx.attr.name + "_combined_canned_fs_config.txt")
+ ctx.actions.run_shell(
+ inputs = [ctx.file.canned_fs_config, output],
+ outputs = [combined_output],
+ mnemonic = "AppendCustomFsConfig",
+ command = "cat {i} {canned_fs_config} > {o}".format(
+ i = output.path,
+ o = combined_output.path,
+ canned_fs_config = ctx.file.canned_fs_config.path,
+ ),
+ )
+ output = combined_output
+
+ return output
+
+# Append an entry for apex_manifest.pb to the file_contexts file for this APEX,
+# which is either from /system/sepolicy/apex/<apexname>-file_contexts (set in
+# the apex macro) or custom file_contexts attribute value of this APEX. This
+# ensures that the manifest file is correctly labeled as system_file.
+def _generate_file_contexts(ctx):
+ file_contexts = ctx.actions.declare_file(ctx.attr.name + "-file_contexts")
+
+ ctx.actions.run_shell(
+ inputs = [ctx.file.file_contexts],
+ outputs = [file_contexts],
+ mnemonic = "GenerateApexFileContexts",
+ command = "cat {i} > {o} && echo >> {o} && echo /apex_manifest\\\\.pb u:object_r:system_file:s0 >> {o} && echo / u:object_r:system_file:s0 >> {o}"
+ .format(i = ctx.file.file_contexts.path, o = file_contexts.path),
+ )
+
+ return file_contexts
+
+# TODO(b/255592586): This can be reused by Java rules later.
+def _mark_manifest_as_test_only(ctx, apex_toolchain):
+ if ctx.file.android_manifest == None:
+ return None
+
+ android_manifest = ctx.file.android_manifest
+ dir_name = android_manifest.dirname
+ base_name = android_manifest.basename
+ android_manifest_fixed = ctx.actions.declare_file(paths.join(dir_name, "manifest_fixer", base_name))
+
+ args = ctx.actions.args()
+ args.add("--test-only")
+ args.add(android_manifest)
+ args.add(android_manifest_fixed)
+
+ ctx.actions.run(
+ inputs = [android_manifest],
+ outputs = [android_manifest_fixed],
+ executable = apex_toolchain.manifest_fixer[DefaultInfo].files_to_run,
+ arguments = [args],
+ mnemonic = "MarkAndroidManifestTestOnly",
+ )
+
+ return android_manifest_fixed
+
+# Generate <APEX>_backing.txt file which lists all libraries used by the APEX.
+def _generate_apex_backing_file(ctx, backing_libs):
+ backing_file = ctx.actions.declare_file(ctx.attr.name + "_backing.txt")
+ ctx.actions.write(
+ output = backing_file,
+ content = " ".join(backing_libs) + "\n",
+ )
+ return backing_file
+
+# Generate installed-files.txt which lists all files installed by the APEX.
+def _generate_installed_files_list(ctx, file_mapping):
+ installed_files = ctx.actions.declare_file(ctx.attr.name + "-installed-files.txt")
+ command = []
+ for device_path, bazel_file in file_mapping.items():
+ command.append("echo $(stat -L -c %%s %s) ./%s" % (bazel_file.path, device_path))
+ ctx.actions.run_shell(
+ inputs = file_mapping.values(),
+ outputs = [installed_files],
+ mnemonic = "GenerateApexInstalledFileList",
+ command = "(" + "; ".join(command) + ") | sort -nr > " + installed_files.path,
+ )
+ return installed_files
+
+def _generate_notices(ctx, apex_toolchain):
+ licensees = license_map(ctx.attr.binaries + ctx.attr.prebuilts + ctx.attr.native_shared_libs_32 + ctx.attr.native_shared_libs_64)
+ licenses_file = ctx.actions.declare_file(ctx.attr.name + "_licenses.json")
+ ctx.actions.write(licenses_file, "[\n%s\n]\n" % ",\n".join(license_map_to_json(licensees)))
+
+ # Run HTML notice file generator.
+ notice_file = ctx.actions.declare_file(ctx.attr.name + "_notice_dir/NOTICE.html.gz")
+ notice_generator = apex_toolchain.notice_generator[DefaultInfo].files_to_run
+
+ args = ctx.actions.args()
+ args.add_all(["-o", notice_file, licenses_file])
+
+ # TODO(asmundak): should we extend it with license info for self
+ # (the case when APEX itself has applicable_licenses attribute)?
+ inputs = license_map_notice_files(licensees) + [licenses_file]
+ ctx.actions.run(
+ mnemonic = "GenerateNoticeFile",
+ inputs = inputs,
+ outputs = [notice_file],
+ executable = notice_generator,
+ tools = [notice_generator],
+ arguments = [args],
+ )
+ return notice_file
+
+def _use_api_fingerprint(ctx):
+ product_vars = ctx.attr._product_variables[ProductVariablesInfo]
+ if not product_vars.Unbundled_build:
+ return False
+ if product_vars.Always_use_prebuilt_sdks:
+ return False
+ if not ctx.attr._unbundled_build_target_sdk_with_api_fingerprint[BuildSettingInfo].value:
+ return False
+ return True
+
+# apexer - generate the APEX file.
+def _run_apexer(ctx, apex_toolchain):
+ # Inputs
+ apex_key_info = ctx.attr.key[ApexKeyInfo]
+ privkey = apex_key_info.private_key
+ pubkey = apex_key_info.public_key
+ android_jar = apex_toolchain.android_jar
+
+ file_mapping, requires_native_libs, provides_native_libs, backing_libs, make_modules_to_install, make_files_info, metadata_file_mapping = _create_file_mapping(ctx)
+ canned_fs_config = _generate_canned_fs_config(ctx, file_mapping.keys())
+ file_contexts = _generate_file_contexts(ctx)
+ full_apex_manifest_json = _add_apex_manifest_information(ctx, apex_toolchain, requires_native_libs, provides_native_libs)
+ apex_manifest_pb = _convert_apex_manifest_json_to_pb(ctx, apex_toolchain, full_apex_manifest_json)
+ notices_file = _generate_notices(ctx, apex_toolchain)
+ api_fingerprint_file = None
+
+ file_mapping_file = ctx.actions.declare_file(ctx.attr.name + "_apex_file_mapping.json")
+ ctx.actions.write(file_mapping_file, json.encode({k: v.path for k, v in file_mapping.items()}))
+
+ # Outputs
+ apex_output_file = ctx.actions.declare_file(ctx.attr.name + ".apex.unsigned")
+
+ apexer_files = apex_toolchain.apexer[DefaultInfo].files_to_run
+
+ # Arguments
+ command = [ctx.executable._staging_dir_builder.path, file_mapping_file.path]
+
+ # NOTE: When used as inputs to another sandboxed action, this directory
+ # artifact's inner files will be made up of symlinks. Ensure that the
+ # aforementioned action handles symlinks correctly (e.g. following
+ # symlinks).
+ staging_dir = ctx.actions.declare_directory(ctx.attr.name + "_staging_dir")
+
+ command.append(staging_dir.path)
+
+ # start of apexer cmd
+ command.append(apexer_files.executable.path)
+ if ctx.attr._apexer_verbose[BuildSettingInfo].value:
+ command.append("--verbose")
+
+ command.append("--force")
+ command.append("--include_build_info")
+ command.extend(["--canned_fs_config", canned_fs_config.path])
+ command.extend(["--manifest", apex_manifest_pb.path])
+ command.extend(["--file_contexts", file_contexts.path])
+ command.extend(["--key", privkey.path])
+ command.extend(["--pubkey", pubkey.path])
+ command.extend(["--payload_type", "image"])
+ command.extend(["--payload_fs_type", "ext4"])
+ command.extend(["--assets_dir", notices_file.dirname])
+
+    # Override the package name, if it's explicitly specified
+ if ctx.attr.package_name:
+ command.extend(["--override_apk_package_name", ctx.attr.package_name])
+ else:
+ override_package_name = _override_manifest_package_name(ctx)
+ if override_package_name:
+ command.extend(["--override_apk_package_name", override_package_name])
+
+ if ctx.attr.logging_parent:
+ command.extend(["--logging_parent", ctx.attr.logging_parent])
+
+ use_api_fingerprint = _use_api_fingerprint(ctx)
+
+ target_sdk_version = str(api.final_or_future(api.default_app_target_sdk()))
+ if use_api_fingerprint:
+ api_fingerprint_file = ctx.file._api_fingerprint_txt
+ sdk_version_suffix = ".$(cat {})".format(api_fingerprint_file.path)
+ target_sdk_version = ctx.attr._platform_sdk_codename[BuildSettingInfo].value + sdk_version_suffix
+
+ command.extend(["--target_sdk_version", target_sdk_version])
+
+ # TODO(b/215339575): This is a super rudimentary way to convert "current" to a numerical number.
+ # Generalize this to API level handling logic in a separate Starlark utility, preferably using
+ # API level maps dumped from api_levels.go
+ min_sdk_version = ctx.attr.min_sdk_version
+ if min_sdk_version == "current":
+ min_sdk_version = "10000"
+
+ override_min_sdk_version = ctx.attr._apex_global_min_sdk_version_override[BuildSettingInfo].value
+ min_sdk_version = str(maybe_override_min_sdk_version(min_sdk_version, override_min_sdk_version))
+
+ if min_sdk_version == "10000" and use_api_fingerprint:
+ min_sdk_version = ctx.attr._platform_sdk_codename[BuildSettingInfo].value + sdk_version_suffix
+ command.append(api_fingerprint_file.path)
+ command.extend(["--min_sdk_version", min_sdk_version])
+
+    # apexer needs the list of directories containing all auxiliary tools invoked during
+    # the creation of an apex
+ avbtool_files = apex_toolchain.avbtool[DefaultInfo].files_to_run
+ e2fsdroid_files = apex_toolchain.e2fsdroid[DefaultInfo].files_to_run
+ mke2fs_files = apex_toolchain.mke2fs[DefaultInfo].files_to_run
+ resize2fs_files = apex_toolchain.resize2fs[DefaultInfo].files_to_run
+ sefcontext_compile_files = apex_toolchain.sefcontext_compile[DefaultInfo].files_to_run
+ staging_dir_builder_files = ctx.attr._staging_dir_builder[DefaultInfo].files_to_run
+ apexer_tool_paths = [
+ apex_toolchain.aapt2.dirname,
+ apexer_files.executable.dirname,
+ avbtool_files.executable.dirname,
+ e2fsdroid_files.executable.dirname,
+ mke2fs_files.executable.dirname,
+ resize2fs_files.executable.dirname,
+ sefcontext_compile_files.executable.dirname,
+ ]
+
+ command.extend(["--apexer_tool_path", ":".join(apexer_tool_paths)])
+
+ android_manifest = ctx.file.android_manifest
+ if android_manifest != None:
+ if ctx.attr.testonly:
+ android_manifest = _mark_manifest_as_test_only(ctx, apex_toolchain)
+ command.extend(["--android_manifest", android_manifest.path])
+ elif ctx.attr.testonly:
+ command.append("--test_only")
+
+ command.append(staging_dir.path)
+ command.append(apex_output_file.path)
+
+ inputs = [
+ ctx.executable._staging_dir_builder,
+ file_mapping_file,
+ canned_fs_config,
+ apex_manifest_pb,
+ file_contexts,
+ notices_file,
+ privkey,
+ pubkey,
+ android_jar,
+ ] + file_mapping.values()
+ if use_api_fingerprint:
+ inputs.append(api_fingerprint_file)
+
+ if android_manifest != None:
+ inputs.append(android_manifest)
+
+ tools = [
+ apexer_files,
+ avbtool_files,
+ e2fsdroid_files,
+ mke2fs_files,
+ resize2fs_files,
+ sefcontext_compile_files,
+ apex_toolchain.aapt2,
+ staging_dir_builder_files,
+ ]
+
+ # This is run_shell instead of run because --target_sdk_version may
+    # reference the API fingerprint file contents via bash expansion,
+ # and only run_shell can support that by executing the whole command with
+ # /bin/bash -c. Regular run would quote the --target_sdk_version value with
+ # single quotes ('--target_sdk_version=ABC.$(cat version.txt)'), preventing
+ # bash expansion.
+ ctx.actions.run_shell(
+ inputs = inputs,
+ tools = tools,
+ outputs = [apex_output_file, staging_dir],
+ command = " ".join(command),
+ mnemonic = "Apexer",
+ )
+ return struct(
+ unsigned_apex = apex_output_file,
+ requires_native_libs = requires_native_libs,
+ provides_native_libs = provides_native_libs,
+ backing_libs = _generate_apex_backing_file(ctx, backing_libs),
+ symbols_used_by_apex = _generate_symbols_used_by_apex(ctx, apex_toolchain, staging_dir),
+ java_symbols_used_by_apex = _generate_java_symbols_used_by_apex(ctx, apex_toolchain),
+ installed_files = _generate_installed_files_list(ctx, file_mapping),
+ make_modules_to_install = make_modules_to_install,
+ make_files_info = make_files_info,
+ file_mapping = file_mapping,
+ metadata_file_mapping = metadata_file_mapping,
+ )
+
+def _run_signapk(ctx, unsigned_file, signed_file, private_key, public_key, mnemonic):
+ """Sign a file with signapk."""
+
+ # Arguments
+ args = ctx.actions.args()
+ args.add_all(["-a", 4096])
+ args.add_all(["--align-file-size"])
+ args.add_all([public_key, private_key])
+ args.add_all([unsigned_file, signed_file])
+
+ ctx.actions.run(
+ inputs = [
+ unsigned_file,
+ private_key,
+ public_key,
+ ctx.executable._signapk,
+ ],
+ outputs = [signed_file],
+ executable = ctx.executable._signapk,
+ arguments = [args],
+ mnemonic = mnemonic,
+ )
+
+ return signed_file
+
+# See also getOverrideManifestPackageName
+# https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/builder.go;l=1000;drc=241e738c7156d928e9a993b15993cb3297face45
+def _override_manifest_package_name(ctx):
+ apex_name = ctx.attr.name
+ overrides = ctx.attr._product_variables[ProductVariablesInfo].ManifestPackageNameOverrides
+ if not overrides:
+ return None
+
+ matches = [o for o in overrides if o.split(":")[0] == apex_name]
+
+ if not matches:
+ return None
+
+ if len(matches) > 1:
+ fail("unexpected multiple manifest package overrides for %s, %s" % (apex_name, matches))
+
+ return matches[0].split(":")[1]
+
+# https://cs.android.com/android/platform/superproject/+/master:build/soong/android/config.go;drc=5ca657189aac546af0aafaba11bbc9c5d889eab3;l=1501
+# In Soong, we don't check whether the current apex is part of Unbundled_apps.
+# Hence, we might simplify the logic by just checking product_vars["Unbundled_build"]
+# TODO(b/271474456): Eventually we might default to unbundled mode in bazel-only mode
+# so that we don't need to check Unbundled_apps.
+def _compression_enabled(ctx):
+ product_vars = ctx.attr._product_variables[ProductVariablesInfo]
+
+ return product_vars.CompressedApex and len(product_vars.Unbundled_apps) == 0
+
+# Compress a file with apex_compression_tool.
+def _run_apex_compression_tool(ctx, apex_toolchain, input_file, output_file_name):
+ avbtool_files = apex_toolchain.avbtool[DefaultInfo].files_to_run
+ apex_compression_tool_files = apex_toolchain.apex_compression_tool[DefaultInfo].files_to_run
+
+ # Outputs
+ compressed_file = ctx.actions.declare_file(output_file_name)
+
+ # Arguments
+ args = ctx.actions.args()
+ args.add_all(["compress"])
+ tool_dirs = [apex_toolchain.soong_zip.dirname, avbtool_files.executable.dirname]
+ args.add_all(["--apex_compression_tool", ":".join(tool_dirs)])
+ args.add_all(["--input", input_file])
+ args.add_all(["--output", compressed_file])
+
+ ctx.actions.run(
+ inputs = [input_file],
+ tools = [
+ avbtool_files,
+ apex_compression_tool_files,
+ apex_toolchain.soong_zip,
+ ],
+ outputs = [compressed_file],
+ executable = apex_compression_tool_files,
+ arguments = [args],
+ mnemonic = "BazelApexCompressing",
+ )
+ return compressed_file
+
+# Generate <module>_using.txt, which contains a list of versioned NDK symbols
+# dynamically linked to by this APEX's contents. This is used for coverage
+# checks.
+def _generate_symbols_used_by_apex(ctx, apex_toolchain, staging_dir):
+ symbols_used_by_apex = ctx.actions.declare_file(ctx.attr.name + "_using.txt")
+ ctx.actions.run(
+ outputs = [symbols_used_by_apex],
+ inputs = [staging_dir],
+ tools = [
+ apex_toolchain.readelf.files_to_run,
+ apex_toolchain.gen_ndk_usedby_apex.files_to_run,
+ ],
+ executable = apex_toolchain.gen_ndk_usedby_apex.files_to_run,
+ arguments = [
+ staging_dir.path,
+ apex_toolchain.readelf.files_to_run.executable.path,
+ symbols_used_by_apex.path,
+ ],
+ progress_message = "Generating dynamic NDK symbol list used by the %s apex" % ctx.attr.name,
+ mnemonic = "ApexUsingNDKSymbolsForCoverage",
+ )
+ return symbols_used_by_apex
+
+# Generate <module>_using.xml, which contains a list of java API metadata used
+# by this APEX's contents. This is used for coverage checks.
+#
+# TODO(b/257954111): Add JARs and APKs as inputs to this action when we start
+# building Java mainline modules.
+def _generate_java_symbols_used_by_apex(ctx, apex_toolchain):
+ java_symbols_used_by_apex = ctx.actions.declare_file(ctx.attr.name + "_using.xml")
+ ctx.actions.run(
+ outputs = [java_symbols_used_by_apex],
+ inputs = [],
+ tools = [
+ apex_toolchain.dexdeps.files_to_run,
+ apex_toolchain.gen_java_usedby_apex.files_to_run,
+ ],
+ executable = apex_toolchain.gen_java_usedby_apex.files_to_run,
+ arguments = [
+ apex_toolchain.dexdeps.files_to_run.executable.path,
+ java_symbols_used_by_apex.path,
+ ],
+ progress_message = "Generating Java symbol list used by the %s apex" % ctx.attr.name,
+ mnemonic = "ApexUsingJavaSymbolsForCoverage",
+ )
+ return java_symbols_used_by_apex
+
+def _validate_apex_deps(ctx):
+ transitive_deps = depset(
+ transitive = [
+ d[ApexDepsInfo].transitive_deps
+ for d in (
+ ctx.attr.native_shared_libs_32 +
+ ctx.attr.native_shared_libs_64 +
+ ctx.attr.binaries +
+ ctx.attr.prebuilts
+ )
+ ],
+ )
+ validation_files = []
+ if not ctx.attr._unsafe_disable_apex_allowed_deps_check[BuildSettingInfo].value:
+ validation_files.append(validate_apex_deps(ctx, transitive_deps, ctx.file.allowed_apex_deps_manifest))
+
+ transitive_unvalidated_targets = []
+ transitive_invalid_targets = []
+ for _, attr_deps in get_dep_targets(ctx.attr, predicate = lambda target: ApexAvailableInfo in target).items():
+ for dep in attr_deps:
+ transitive_unvalidated_targets.append(dep[ApexAvailableInfo].transitive_unvalidated_targets)
+ transitive_invalid_targets.append(dep[ApexAvailableInfo].transitive_invalid_targets)
+
+ invalid_targets = depset(transitive = transitive_invalid_targets).to_list()
+ if len(invalid_targets) > 0:
+ invalid_targets_msg = "\n ".join([
+ "{label}; apex_available tags: {tags}".format(label = target.label, tags = list(apex_available_tags))
+ for target, apex_available_tags in invalid_targets
+ ])
+ msg = ("`{apex_name}` apex has transitive dependencies that do not include the apex in " +
+ "their apex_available tags:\n {invalid_targets_msg}").format(
+ apex_name = ctx.label,
+ invalid_targets_msg = invalid_targets_msg,
+ )
+ fail(msg)
+
+ transitive_unvalidated_targets_output_file = ctx.actions.declare_file(ctx.attr.name + "_unvalidated_deps.txt")
+ ctx.actions.write(
+ transitive_unvalidated_targets_output_file,
+ "\n".join([
+ str(label) + ": " + str(reason)
+ for label, reason in depset(transitive = transitive_unvalidated_targets).to_list()
+ ]),
+ )
+ return transitive_deps, transitive_unvalidated_targets_output_file, validation_files
+
+def _verify_updatability(ctx):
+ # TODO(b/274732759): Add these checks as more APEXes are converted to Bazel.
+ #
+ # Keep this in sync with build/soong/apex/apex.go#checkUpdatable.
+ #
+ # - Cannot use platform APIs.
+ # - Cannot use external VNDK libs.
+ # - Does not set future_updatable.
+
+ if not ctx.attr.min_sdk_version:
+ fail("updatable APEXes must set min_sdk_version.")
+
+def _generate_sbom(ctx, file_mapping, metadata_file_mapping, apex_file):
+ apex_filename = paths.basename(apex_file.path)
+ sbom_metadata_csv = ctx.actions.declare_file(apex_filename + "-sbom-metadata.csv")
+ command = []
+ metadata_files = []
+ command.append("echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path")
+ command.append("echo %s,%s,,,,,,%s" % (apex_filename, ctx.label.package, apex_file.path))
+ for installed_file, bazel_output_file in file_mapping.items():
+ if metadata_file_mapping[installed_file]:
+ metadata_files.append(metadata_file_mapping[installed_file])
+ command.append("echo %s,%s,,,,,,%s" % (installed_file, paths.dirname(bazel_output_file.short_path), bazel_output_file.path))
+ ctx.actions.run_shell(
+ inputs = file_mapping.values(),
+ outputs = [sbom_metadata_csv],
+ mnemonic = "GenerateSBOMMetadata",
+ command = "(" + "; ".join(command) + ") > " + sbom_metadata_csv.path,
+ )
+
+ sbom_file = ctx.actions.declare_file(apex_filename + ".spdx.json")
+ sbom_fragment_file = ctx.actions.declare_file(apex_filename + "-fragment.spdx")
+ inputs = [
+ apex_file,
+ sbom_metadata_csv,
+ ctx.executable._generate_sbom,
+ ]
+ inputs += file_mapping.values()
+ inputs += metadata_files
+
+ product_vars = ctx.attr._product_variables[ProductVariablesInfo]
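+    # Assemble a build fingerprint of the shape
+    # brand/product/device:version/build_id/<incremental>:variant/tags;
+    # the incremental slot is left empty here.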
+ build_fingerprint = "%s/%s/%s:%s/%s/%s:%s/%s" % (
+ product_vars.ProductBrand,
+ product_vars.DeviceProduct,
+ product_vars.DeviceName,
+ product_vars.Platform_version_name,
+ product_vars.BuildId,
+ "",
+ product_vars.TargetBuildVariant,
+ "_".join(product_vars.BuildVersionTags),
+ )
+ ctx.actions.run(
+ inputs = inputs,
+ outputs = [sbom_file, sbom_fragment_file],
+ arguments = [
+ "--output_file",
+ sbom_file.path,
+ "--metadata",
+ sbom_metadata_csv.path,
+ "--build_version",
+ build_fingerprint,
+ "--product_mfr",
+ product_vars.ProductManufacturer,
+ "--json",
+ "--unbundled_apex",
+ ],
+ mnemonic = "GenerateSBOM",
+ executable = ctx.executable._generate_sbom,
+ )
+ return [sbom_file, sbom_fragment_file]
+
+# See the APEX section in the README on how to use this rule.
+def _apex_rule_impl(ctx):
+ verify_toolchain_exists(ctx, "//build/bazel/rules/apex:apex_toolchain_type")
+ if ctx.attr.updatable:
+ _verify_updatability(ctx)
+
+ apex_toolchain = ctx.toolchains["//build/bazel/rules/apex:apex_toolchain_type"].toolchain_info
+
+ apexer_outputs = _run_apexer(ctx, apex_toolchain)
+ unsigned_apex = apexer_outputs.unsigned_apex
+
+ apex_cert_info = ctx.attr.certificate[0][AndroidAppCertificateInfo]
+ private_key = apex_cert_info.pk8
+ public_key = apex_cert_info.pem
+
+ signed_apex = ctx.actions.declare_file(ctx.attr.name + ".apex")
+ signed_capex = None
+
+ _run_signapk(ctx, unsigned_apex, signed_apex, private_key, public_key, "BazelApexSigning")
+
+ if ctx.attr.compressible and _compression_enabled(ctx):
+ compressed_apex_output_file = _run_apex_compression_tool(ctx, apex_toolchain, signed_apex, ctx.attr.name + ".capex.unsigned")
+ signed_capex = ctx.actions.declare_file(ctx.attr.name + ".capex")
+ _run_signapk(ctx, compressed_apex_output_file, signed_capex, private_key, public_key, "BazelCompressedApexSigning")
+
+ apex_key_info = ctx.attr.key[ApexKeyInfo]
+
+ arch = platforms.get_target_arch(ctx.attr._platform_utils)
+ zip_files = apex_zip_files(
+ actions = ctx.actions,
+ name = ctx.label.name,
+ tools = struct(
+ aapt2 = apex_toolchain.aapt2,
+ zip2zip = ctx.executable._zip2zip,
+ merge_zips = ctx.executable._merge_zips,
+ soong_zip = apex_toolchain.soong_zip,
+ ),
+ apex_file = signed_apex,
+ arch = arch,
+ secondary_arch = platforms.get_target_secondary_arch(ctx.attr._platform_utils),
+ )
+
+ transitive_apex_deps, transitive_unvalidated_targets_output_file, apex_deps_validation_files = _validate_apex_deps(ctx)
+
+ optional_output_groups = {}
+ if signed_capex:
+ optional_output_groups["signed_compressed_output"] = [signed_capex]
+
+ return [
+ DefaultInfo(files = depset([signed_apex])),
+ ApexInfo(
+ signed_output = signed_apex,
+ signed_compressed_output = signed_capex,
+ unsigned_output = unsigned_apex,
+ requires_native_libs = apexer_outputs.requires_native_libs,
+ provides_native_libs = apexer_outputs.provides_native_libs,
+ bundle_key_info = apex_key_info,
+ container_key_info = apex_cert_info,
+ package_name = ctx.attr.package_name,
+ backing_libs = apexer_outputs.backing_libs,
+ symbols_used_by_apex = apexer_outputs.symbols_used_by_apex,
+ installed_files = apexer_outputs.installed_files,
+ java_symbols_used_by_apex = apexer_outputs.java_symbols_used_by_apex,
+ base_file = zip_files.apex_only,
+ base_with_config_zip = zip_files.apex_with_config,
+ ),
+ OutputGroupInfo(
+ coverage_files = [apexer_outputs.symbols_used_by_apex],
+ java_coverage_files = [apexer_outputs.java_symbols_used_by_apex],
+ backing_libs = depset([apexer_outputs.backing_libs]),
+ installed_files = depset([apexer_outputs.installed_files]),
+ transitive_unvalidated_targets = depset([transitive_unvalidated_targets_output_file]),
+ apex_sbom = depset(_generate_sbom(ctx, apexer_outputs.file_mapping, apexer_outputs.metadata_file_mapping, signed_apex)),
+ capex_sbom = depset(_generate_sbom(ctx, apexer_outputs.file_mapping, apexer_outputs.metadata_file_mapping, signed_capex) if signed_capex else []),
+ _validation = apex_deps_validation_files,
+ **optional_output_groups
+ ),
+ ApexDepsInfo(transitive_deps = transitive_apex_deps),
+ ApexMkInfo(
+ make_modules_to_install = apexer_outputs.make_modules_to_install,
+ files_info = apexer_outputs.make_files_info,
+ ),
+ collect_deps_clang_tidy_info(ctx),
+ ]
+
+# These are the standard aspects that should be applied on all edges that
+# contribute to an APEX's payload.
+STANDARD_PAYLOAD_ASPECTS = [
+ license_aspect,
+ apex_available_aspect,
+ apex_deps_validation_aspect,
+]
+
+_apex = rule(
+ implementation = _apex_rule_impl,
+ attrs = {
+ # Attributes that configure the APEX container.
+ "manifest": attr.label(allow_single_file = [".json"]),
+ "android_manifest": attr.label(allow_single_file = [".xml"]),
+ "package_name": attr.string(),
+ "logging_parent": attr.string(),
+ "file_contexts": attr.label(allow_single_file = True, mandatory = True),
+ "canned_fs_config": attr.label(
+ allow_single_file = True,
+ doc = """Path to the canned fs config file for customizing file's
+uid/gid/mod/capabilities. The content of this file is appended to the
+default config, so that the custom entries are preferred.
+
+The format is /<path_or_glob> <uid> <gid> <mode> [capabilities=0x<cap>], where
+path_or_glob is a path or glob pattern for a file or set of files, uid/gid
+are numerial values of user ID and group ID, mode is octal value for the
+file mode, and cap is hexadecimal value for the capability.""",
+ ),
+ "key": attr.label(providers = [ApexKeyInfo], mandatory = True),
+ "certificate": attr.label(
+ providers = [AndroidAppCertificateInfo],
+ mandatory = True,
+ cfg = apex_transition,
+ ),
+ "min_sdk_version": attr.string(
+ default = "current",
+ doc = """The minimum SDK version that this APEX must support at minimum. This is usually set to
+the SDK version that the APEX was first introduced.
+
+When not set, defaults to "10000" (or "current").""",
+ ),
+ "updatable": attr.bool(default = True, doc = """Whether this APEX is considered updatable or not.
+
+When set to true, this will enforce additional rules for making sure that the
+APEX is truly updatable. To be updatable, min_sdk_version should be set as well."""),
+ "installable": attr.bool(default = True),
+ "compressible": attr.bool(default = False),
+ "base_apex_name": attr.string(
+ default = "",
+ doc = "The name of the base apex of this apex. For example, the AOSP variant of this apex.",
+ ),
+
+ # Attributes that contribute to the payload.
+ "native_shared_libs_32": attr.label_list(
+ providers = [ApexCcInfo, ApexCcMkInfo, RuleLicensedDependenciesInfo],
+ aspects = STANDARD_PAYLOAD_ASPECTS + [apex_cc_aspect],
+ cfg = shared_lib_transition_32,
+ doc = "The libs compiled for 32-bit",
+ ),
+ "native_shared_libs_64": attr.label_list(
+ providers = [ApexCcInfo, ApexCcMkInfo, RuleLicensedDependenciesInfo],
+ aspects = STANDARD_PAYLOAD_ASPECTS + [apex_cc_aspect],
+ cfg = shared_lib_transition_64,
+ doc = "The libs compiled for 64-bit",
+ ),
+ "binaries": attr.label_list(
+ providers = [
+ # The dependency must produce _all_ of the providers in _one_ of these lists.
+ [ShBinaryInfo, RuleLicensedDependenciesInfo], # sh_binary
+ [StrippedCcBinaryInfo, CcInfo, ApexCcInfo, ApexCcMkInfo, RuleLicensedDependenciesInfo], # cc_binary (stripped)
+ ],
+ cfg = apex_transition,
+ aspects = STANDARD_PAYLOAD_ASPECTS + [apex_cc_aspect],
+ ),
+ "prebuilts": attr.label_list(
+ providers = [PrebuiltFileInfo, RuleLicensedDependenciesInfo],
+ cfg = apex_transition,
+ aspects = STANDARD_PAYLOAD_ASPECTS,
+ ),
+
+        # Required to use apex_transition. This is an acknowledgement of the risks of memory bloat when using transitions.
+ "_allowlist_function_transition": attr.label(default = "@bazel_tools//tools/allowlists/function_transition_allowlist"),
+
+ # Tools that are not part of the apex_toolchain.
+ "_staging_dir_builder": attr.label(
+ cfg = "exec",
+ doc = "The staging dir builder to avoid the problem where symlinks are created inside apex image.",
+ executable = True,
+ default = "//build/bazel/rules:staging_dir_builder",
+ ),
+ "_signapk": attr.label(
+ cfg = "exec",
+ doc = "The signapk tool.",
+ executable = True,
+ default = "//build/make/tools/signapk",
+ ),
+ "_zip2zip": attr.label(
+ cfg = "exec",
+ allow_single_file = True,
+ doc = "The tool zip2zip. Used to convert apex file to the expected directory structure.",
+ default = "//build/soong/cmd/zip2zip:zip2zip",
+ executable = True,
+ ),
+ "_merge_zips": attr.label(
+ cfg = "exec",
+ allow_single_file = True,
+ doc = "The tool merge_zips. Used to combine base zip and config file into a single zip for mixed build aab creation.",
+ default = "//prebuilts/build-tools:linux-x86/bin/merge_zips",
+ executable = True,
+ ),
+ "_platform_utils": attr.label(
+ default = Label("//build/bazel/platforms:platform_utils"),
+ ),
+ "_generate_sbom": attr.label(
+ cfg = "exec",
+ doc = "SBOM generation tool",
+ executable = True,
+ default = "//build/make/tools/sbom:generate-sbom",
+ ),
+
+ # allowed deps check
+ "_unsafe_disable_apex_allowed_deps_check": attr.label(
+ default = "//build/bazel/rules/apex:unsafe_disable_apex_allowed_deps_check",
+ ),
+ "allowed_apex_deps_manifest": attr.label(
+ allow_single_file = True,
+ default = "//packages/modules/common/build:allowed_deps.txt",
+ ),
+
+ # Build settings.
+ "_apexer_verbose": attr.label(
+ default = "//build/bazel/rules/apex:apexer_verbose",
+ doc = "If enabled, make apexer log verbosely.",
+ ),
+ "_override_apex_manifest_default_version": attr.label(
+ default = "//build/bazel/rules/apex:override_apex_manifest_default_version",
+ doc = "If specified, override 'version: 0' in apex_manifest.json with this value instead of the branch default. Non-zero versions will not be changed.",
+ ),
+ "_apex_global_min_sdk_version_override": attr.label(
+ default = "//build/bazel/rules/apex:apex_global_min_sdk_version_override",
+ doc = "If specified, override the min_sdk_version of this apex and in the transition and checks for dependencies.",
+ ),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
+ ),
+
+ # Api_fingerprint
+ "_unbundled_build_target_sdk_with_api_fingerprint": attr.label(
+ default = "//build/bazel/rules/apex:unbundled_build_target_sdk_with_api_fingerprint",
+ ),
+ "_platform_sdk_codename": attr.label(
+ default = "//build/bazel/rules/apex:platform_sdk_codename",
+ ),
+ "_api_fingerprint_txt": attr.label(
+ default = "//frameworks/base/api:api_fingerprint",
+ allow_single_file = True,
+ ),
+ },
+ # The apex toolchain is not mandatory so that we don't get toolchain resolution errors even
+ # when the apex is not compatible with the current target (via target_compatible_with).
+ toolchains = [config_common.toolchain_type("//build/bazel/rules/apex:apex_toolchain_type", mandatory = False)],
+ fragments = ["platform"],
+)
+
+def apex(
+ name,
+ manifest = "apex_manifest.json",
+ android_manifest = None,
+ file_contexts = None,
+ key = None,
+ certificate = None,
+ certificate_name = None,
+ min_sdk_version = None,
+ updatable = True,
+ installable = True,
+ compressible = False,
+ native_shared_libs_32 = [],
+ native_shared_libs_64 = [],
+ binaries = [],
+ prebuilts = [],
+ package_name = None,
+ logging_parent = None,
+ canned_fs_config = None,
+ testonly = False,
+ # TODO(b/255400736): tests are not fully supported yet.
+ tests = [],
+ target_compatible_with = [],
+ **kwargs):
+ "Bazel macro to correspond with the APEX bundle Soong module."
+
+ # If file_contexts is not specified, then use the default from //system/sepolicy/apex.
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/builder.go;l=259-263;drc=b02043b84d86fe1007afef1ff012a2155172215c
+ if file_contexts == None:
+ file_contexts = "//system/sepolicy/apex:" + name + "-file_contexts"
+
+ if testonly:
+ compressible = False
+ elif tests:
+ fail("Apex with tests attribute needs to be testonly.")
+
+ if certificate and certificate_name:
+ fail("Cannot use both certificate_name and certificate attributes together. Use only one of them.")
+ app_cert_name = name + "_app_certificate"
+ if certificate_name:
+ # use the name key in the default cert dir
+ android_app_certificate_with_default_cert(
+ name = app_cert_name,
+ cert_name = certificate_name,
+ )
+ certificate_label = ":" + app_cert_name
+ elif certificate:
+ certificate_label = certificate
+ else:
+ # use the default testkey
+ android_app_certificate_with_default_cert(name = app_cert_name)
+ certificate_label = ":" + app_cert_name
+
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:android": [],
+ "//conditions:default": ["@platforms//:incompatible"],
+ }) + target_compatible_with
+
+ _apex(
+ name = name,
+ manifest = manifest,
+ android_manifest = android_manifest,
+ file_contexts = file_contexts,
+ key = key,
+ certificate = certificate_label,
+ min_sdk_version = min_sdk_version,
+ updatable = updatable,
+ installable = installable,
+ compressible = compressible,
+ native_shared_libs_32 = native_shared_libs_32,
+ native_shared_libs_64 = native_shared_libs_64,
+ binaries = binaries,
+ prebuilts = prebuilts,
+ package_name = package_name,
+ logging_parent = logging_parent,
+ canned_fs_config = canned_fs_config,
+ testonly = testonly,
+ target_compatible_with = target_compatible_with,
+ **kwargs
+ )
diff --git a/rules/apex/apex_aab.bzl b/rules/apex/apex_aab.bzl
new file mode 100644
index 00000000..258e7ea6
--- /dev/null
+++ b/rules/apex/apex_aab.bzl
@@ -0,0 +1,434 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//build/bazel/rules:toolchain_utils.bzl", "verify_toolchain_exists")
+load(":apex_info.bzl", "ApexInfo")
+load(":bundle.bzl", "build_bundle_config")
+
+def _arch_transition_impl(settings, _attr):
+ """Implementation of arch_transition.
+
+ Six arch products are included for mainline modules: x86, x86_64, x86_64only, arm, arm64, arm64only.
+ """
+ old_platform = str(settings["//command_line_option:platforms"][0])
+
+ # We can't use platforms alone to differentiate between x86_64 and x86_64
+ # with a secondary arch, which is significant for apex packaging that can
+ # optionally include the secondary arch's libs. That is currently determined
+ # by DeviceSecondaryArch in apex's lib inclusion logic, so we explicitly set
+ # DeviceSecondaryArch to "" for the 64bit only cases.
+
+ # TODO(b/249685973) Instead of using these __internal_x86 platforms, use
+ # the mainline_modules_<arch> android products
+ return {
+ # these key names must correspond to mainline_modules_<arch> product name suffixes.
+ "arm": {
+ "//command_line_option:platforms": old_platform + "__internal_arm",
+ },
+ "arm64": {
+ "//command_line_option:platforms": old_platform + "__internal_arm64",
+ },
+ "arm64only": {
+ "//command_line_option:platforms": old_platform + "__internal_arm64only",
+ },
+ "x86": {
+ "//command_line_option:platforms": old_platform + "__internal_x86",
+ },
+ "x86_64": {
+ "//command_line_option:platforms": old_platform + "__internal_x86_64",
+ },
+ "x86_64only": {
+ "//command_line_option:platforms": old_platform + "__internal_x86_64only",
+ },
+ }
+
+# Multi-arch transition.
+arch_transition = transition(
+ implementation = _arch_transition_impl,
+ inputs = [
+ "//command_line_option:platforms",
+ ],
+ outputs = [
+ "//command_line_option:platforms",
+ ],
+)
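+
+# For example (hypothetical platform label): if the incoming platform is
+# "//build/bazel/platforms:android_target", the "arm64" split builds against
+# "//build/bazel/platforms:android_target__internal_arm64"; the split keys match
+# the mainline_modules_<arch> product name suffixes referenced above.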
+
+def _merge_base_files(ctx, module_name, base_files):
+ """Run merge_zips to merge all files created for each arch by _apex_base_file."""
+ merged_base_file = ctx.actions.declare_file(module_name + "/" + module_name + ".zip")
+
+ # Arguments
+ args = ctx.actions.args()
+ args.add("--ignore-duplicates")
+ args.add(merged_base_file)
+ args.add_all(base_files)
+
+ ctx.actions.run(
+ inputs = base_files,
+ outputs = [merged_base_file],
+ executable = ctx.executable._merge_zips,
+ arguments = [args],
+ mnemonic = "ApexMergeBaseFiles",
+ )
+ return merged_base_file
+
+def _apex_bundle(ctx, module_name, merged_base_file, bundle_config_file):
+ """Run bundletool to create the aab file."""
+
+ # Outputs
+ bundle_file = ctx.actions.declare_file(module_name + "/" + module_name + ".aab")
+
+ # Arguments
+ args = ctx.actions.args()
+ args.add("build-bundle")
+ args.add_all(["--config", bundle_config_file])
+ args.add_all(["--modules", merged_base_file])
+ args.add_all(["--output", bundle_file])
+
+ ctx.actions.run(
+ inputs = [
+ bundle_config_file,
+ merged_base_file,
+ ],
+ outputs = [bundle_file],
+ executable = ctx.executable._bundletool,
+ arguments = [args],
+ mnemonic = "ApexBundleFile",
+ )
+ return bundle_file
+
+def _sign_bundle(ctx, aapt2, avbtool, module_name, bundle_file, apex_info):
+ """ Run dev_sign_bundle to sign the bundle_file."""
+
+ # Python3 interpreter for dev_sign_bundle to run other python scripts.
+ python_interpreter = ctx.toolchains["@bazel_tools//tools/python:toolchain_type"].py3_runtime.interpreter
+ if python_interpreter.basename != "python3":
+ python3 = ctx.actions.declare_file("python3")
+ ctx.actions.symlink(
+ output = python3,
+ target_file = python_interpreter,
+ is_executable = True,
+ )
+ python_interpreter = python3
+
+ # Input directory for dev_sign_bundle.
+ input_bundle_file = ctx.actions.declare_file(module_name + "/sign_bundle/input_dir/" + bundle_file.basename)
+ ctx.actions.symlink(
+ output = input_bundle_file,
+ target_file = bundle_file,
+ )
+
+ # Output directory for dev_sign_bundle
+ output_dir = ctx.actions.declare_directory(module_name + "/sign_bundle/output_dir")
+
+ # Temporary directory for dev_sign_bundle
+ tmp_dir = ctx.actions.declare_directory(module_name + "/sign_bundle/tmp_dir")
+
+ # Jar file of prebuilts/bundletool
+ bundletool_jarfile = ctx.attr._bundletool_lib.files.to_list()[0]
+
+ # Keystore file
+ keystore_file = ctx.attr.dev_keystore.files.to_list()[0]
+
+ # ANDROID_HOST_OUT environment
+ debugfs_static = ctx.actions.declare_file(module_name + "/sign_bundle/android_host_out/bin/debugfs_static")
+ ctx.actions.symlink(
+ output = debugfs_static,
+ target_file = ctx.executable._debugfs,
+ is_executable = True,
+ )
+ fsck_erofs = ctx.actions.declare_file(module_name + "/sign_bundle/android_host_out/bin/fsck.erofs")
+ ctx.actions.symlink(
+ output = fsck_erofs,
+ target_file = ctx.executable._fsck_erofs,
+ is_executable = True,
+ )
+ signapk_jar = ctx.actions.declare_file(module_name + "/sign_bundle/android_host_out/framework/signapk.jar")
+ ctx.actions.symlink(
+ output = signapk_jar,
+ target_file = ctx.attr._signapk_jar.files.to_list()[0],
+ is_executable = False,
+ )
+ libconscrypt_openjdk_jni_so = ctx.actions.declare_file(module_name + "/sign_bundle/android_host_out/lib64/libconscrypt_openjdk_jni.so")
+ ctx.actions.symlink(
+ output = libconscrypt_openjdk_jni_so,
+ target_file = ctx.attr._libconscrypt_openjdk_jni.files.to_list()[1],
+ is_executable = False,
+ )
+
+ java_runtime = ctx.attr._java_runtime[java_common.JavaRuntimeInfo]
+
+ # Tools
+ tools = [
+ ctx.executable.dev_sign_bundle,
+ ctx.executable._deapexer,
+ ctx.executable._sign_apex,
+ ctx.executable._openssl,
+ ctx.executable._zip2zip,
+ ctx.executable._blkid,
+ aapt2,
+ avbtool.files_to_run.executable,
+ python_interpreter,
+ debugfs_static,
+ fsck_erofs,
+ bundletool_jarfile,
+ signapk_jar,
+ libconscrypt_openjdk_jni_so,
+ java_runtime.files,
+ ]
+
+ # Inputs
+ inputs = [
+ input_bundle_file,
+ keystore_file,
+ apex_info.bundle_key_info.private_key,
+ apex_info.container_key_info.pem,
+ apex_info.container_key_info.pk8,
+ ]
+
+ # Outputs
+ outputs = [output_dir, tmp_dir]
+
+ # Arguments
+ java_bin = paths.join(java_runtime.java_home, "bin")
+ args = ctx.actions.args()
+ args.add_all(["--input_dir", input_bundle_file.dirname])
+ args.add_all(["--output_dir", output_dir.path])
+ args.add_all(["--temp_dir", tmp_dir.path])
+ args.add_all(["--aapt2_path", aapt2.path])
+ args.add_all(["--bundletool_path", bundletool_jarfile.path])
+ args.add_all(["--deapexer_path", ctx.executable._deapexer.path])
+ args.add_all(["--blkid_path", ctx.executable._blkid.path])
+ args.add_all(["--debugfs_path", ctx.executable._debugfs.path])
+ args.add_all(["--java_binary_path", paths.join(java_bin, "java")])
+ args.add_all(["--apex_signer_path", ctx.executable._sign_apex])
+
+ ctx.actions.run(
+ inputs = inputs,
+ outputs = outputs,
+ executable = ctx.executable.dev_sign_bundle,
+ arguments = [args],
+ tools = tools,
+ env = {
+ # necessary for dev_sign_bundle.
+ "BAZEL_ANDROID_HOST_OUT": paths.dirname(debugfs_static.dirname),
+ "PATH": ":".join(
+ [
+ python_interpreter.dirname,
+ ctx.executable._deapexer.dirname,
+ avbtool.files_to_run.executable.dirname,
+ ctx.executable._openssl.dirname,
+ java_bin,
+ ],
+ ),
+ },
+ mnemonic = "ApexSignBundleFile",
+ )
+
+ apks_file = ctx.actions.declare_file(module_name + "/" + module_name + ".apks")
+ cert_info_file = ctx.actions.declare_file(module_name + "/" + module_name + ".cert_info.txt")
+ ctx.actions.run_shell(
+ inputs = [output_dir],
+ outputs = [apks_file, cert_info_file],
+ command = " ".join(["cp", output_dir.path + "/" + module_name + "/*", apks_file.dirname]),
+ )
+
+ return [apks_file, cert_info_file]
+
+def _apex_aab_impl(ctx):
+ """Implementation of apex_aab rule.
+
+    This drives the process of creating an .aab file from the .apex files created for each arch."""
+ verify_toolchain_exists(ctx, "//build/bazel/rules/apex:apex_toolchain_type")
+ apex_toolchain = ctx.toolchains["//build/bazel/rules/apex:apex_toolchain_type"].toolchain_info
+
+ prefixed_apex_files = []
+ apex_base_files = []
+ bundle_config_file = None
+ module_name = ctx.attr.mainline_module[0].label.name
+ for arch in ctx.split_attr.mainline_module:
+ apex_info = ctx.split_attr.mainline_module[arch][ApexInfo]
+ apex_base_files.append(apex_info.base_file)
+
+ arch_subdir = "mainline_modules_%s" % arch
+
+ # A mapping of files to a prefix directory they should be copied to.
+ # These files will be accessible with the apex_files output_group.
+ mapping = {
+ apex_info.base_file: arch_subdir,
+ apex_info.signed_output: arch_subdir,
+ apex_info.symbols_used_by_apex: arch_subdir + "/ndk_apis_usedby_apex",
+ apex_info.backing_libs: arch_subdir + "/ndk_apis_backedby_apex",
+ apex_info.java_symbols_used_by_apex: arch_subdir + "/java_apis_usedby_apex",
+ # TODO(b/262267680): create licensetexts
+ # TODO(b/262267551): create shareprojects
+ }
+
+ # Forward the individual files for all variants in an additional output group,
+ # so dependents can easily access the multi-arch base APEX files by building
+ # this target with --output_groups=apex_files.
+ #
+ # Copy them into an arch-specific directory, since they have the same basename.
+ for _file, _dir in mapping.items():
+ _out = ctx.actions.declare_file(_dir + "/" + _file.basename)
+ ctx.actions.run_shell(
+ inputs = [_file],
+ outputs = [_out],
+ command = " ".join(["cp", _file.path, _out.path]),
+ )
+ prefixed_apex_files.append(_out)
+
+ # Create .aab file
+ bundle_config_file = build_bundle_config(ctx.actions, ctx.label.name)
+ merged_base_file = _merge_base_files(ctx, module_name, apex_base_files)
+ bundle_file = _apex_bundle(ctx, module_name, merged_base_file, bundle_config_file)
+
+ # Create .apks file
+ apex_info = ctx.attr.mainline_module[0][ApexInfo]
+ package_name = apex_info.package_name
+
+ if ctx.attr.dev_sign_bundle and ctx.attr.dev_keystore and (package_name.startswith("com.google.android") or package_name.startswith("com.google.mainline")):
+ signed_files = _sign_bundle(ctx, apex_toolchain.aapt2, apex_toolchain.avbtool, module_name, bundle_file, apex_info)
+ return [
+ DefaultInfo(files = depset([bundle_file] + signed_files)),
+ OutputGroupInfo(apex_files = depset(prefixed_apex_files), signed_files = signed_files),
+ ]
+
+ return [
+ DefaultInfo(files = depset([bundle_file])),
+ OutputGroupInfo(apex_files = depset(prefixed_apex_files)),
+ ]
+
+# The apex_aab rule creates multi-arch outputs of a Mainline module, such as the
+# Android App Bundle (.aab) file of the APEX specified in mainline_module.
+# There is no equivalent Soong module; today this is done by a shell script
+# that invokes Soong multiple times.
+_apex_aab = rule(
+ implementation = _apex_aab_impl,
+ toolchains = [
+ # The apex toolchain is not mandatory so that we don't get toolchain resolution errors
+ # even when the aab is not compatible with the current target (via target_compatible_with).
+ config_common.toolchain_type("//build/bazel/rules/apex:apex_toolchain_type", mandatory = False),
+ "@bazel_tools//tools/python:toolchain_type",
+ ],
+ attrs = {
+ "dev_keystore": attr.label(
+ cfg = "exec",
+ executable = False,
+ ),
+ "dev_sign_bundle": attr.label(
+ cfg = "exec",
+ executable = True,
+ ),
+ "mainline_module": attr.label(
+ mandatory = True,
+ cfg = arch_transition,
+ providers = [ApexInfo],
+ doc = "The label of a mainline module target",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ doc = "Allow transition.",
+ ),
+ "_blkid": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//external/e2fsprogs/misc:blkid",
+ ),
+ "_bundletool": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//prebuilts/bundletool",
+ ),
+ "_bundletool_lib": attr.label(
+ cfg = "exec",
+ executable = False,
+ default = "//prebuilts/bundletool:bundletool-lib",
+ ),
+ "_deapexer": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//system/apex/tools:deapexer",
+ ),
+ "_debugfs": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//external/e2fsprogs/debugfs:debugfs_static",
+ ),
+ "_fsck_erofs": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//external/erofs-utils:fsck.erofs",
+ ),
+ "_java_runtime": attr.label(
+ default = Label("@bazel_tools//tools/jdk:current_java_runtime"),
+ cfg = "exec",
+ providers = [java_common.JavaRuntimeInfo],
+ ),
+ "_libconscrypt_openjdk_jni": attr.label(
+ cfg = "exec",
+ executable = False,
+ default = "//external/conscrypt:libconscrypt_openjdk_jni",
+ ),
+ "_merge_zips": attr.label(
+ allow_single_file = True,
+ cfg = "exec",
+ executable = True,
+ default = "//prebuilts/build-tools:linux-x86/bin/merge_zips",
+ ),
+ "_openssl": attr.label(
+ allow_single_file = True,
+ cfg = "exec",
+ executable = True,
+ default = "//prebuilts/build-tools:linux-x86/bin/openssl",
+ ),
+ "_sign_apex": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//build/make/tools/releasetools:sign_apex",
+ ),
+ "_signapk_jar": attr.label(
+ cfg = "exec",
+ executable = False,
+ default = "//build/bazel/rules/apex:signapk_deploy_jar",
+ ),
+ "_zip2zip": attr.label(
+ allow_single_file = True,
+ cfg = "exec",
+ executable = True,
+ default = "//build/soong/cmd/zip2zip:zip2zip",
+ ),
+ "_zipper": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "@bazel_tools//tools/zip:zipper",
+ ),
+ },
+)
+
+def apex_aab(name, mainline_module, dev_sign_bundle = None, dev_keystore = None, target_compatible_with = [], **kwargs):
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:android": [],
+ "//conditions:default": ["@platforms//:incompatible"],
+ }) + target_compatible_with
+
+ _apex_aab(
+ name = name,
+ mainline_module = mainline_module,
+ dev_sign_bundle = dev_sign_bundle,
+ dev_keystore = dev_keystore,
+ target_compatible_with = target_compatible_with,
+ **kwargs
+ )
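+
+# A minimal usage sketch (the module label is hypothetical); dev_sign_bundle and
+# dev_keystore only matter when a signed .apks output is wanted:
+#
+#     apex_aab(
+#         name = "com.android.example.aab",
+#         mainline_module = ":com.android.example",
+#     )
+#
+# Building the target produces <module>/<module>.aab; building with
+# --output_groups=apex_files additionally copies the per-arch .apex and base
+# files into mainline_modules_<arch>/ directories.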
diff --git a/rules/apex/apex_aab_test.bzl b/rules/apex/apex_aab_test.bzl
new file mode 100644
index 00000000..275dc8c9
--- /dev/null
+++ b/rules/apex/apex_aab_test.bzl
@@ -0,0 +1,151 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":apex_aab.bzl", "apex_aab")
+load(":apex_test_helpers.bzl", "test_apex")
+
+def _apex_aab_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.true(
+ env,
+ len(target_under_test.files.to_list()) == len(ctx.attr.expected_paths),
+ )
+ for i in range(0, len(ctx.attr.expected_paths)):
+ asserts.equals(
+ env,
+ ctx.attr.expected_paths[i],
+ target_under_test.files.to_list()[i].short_path,
+ )
+ return analysistest.end(env)
+
+apex_aab_test = analysistest.make(
+ _apex_aab_test,
+ attrs = {
+ "expected_paths": attr.string_list(mandatory = True),
+ },
+)
+
+def _test_apex_aab_generates_aab():
+ name = "apex_aab_simple"
+ test_name = name + "_test"
+ apex_name = name + "_apex"
+
+ test_apex(name = apex_name)
+
+ apex_aab(
+ name = name,
+ mainline_module = apex_name,
+ tags = ["manual"],
+ )
+
+ apex_aab_test(
+ name = test_name,
+ target_under_test = name,
+ expected_paths = ["/".join([native.package_name(), apex_name, apex_name + ".aab"])],
+ )
+
+ return test_name
+
+def _apex_aab_output_group_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actual_paths = sorted([
+ f.short_path
+ for f in target_under_test[OutputGroupInfo].apex_files.to_list()
+ ])
+ asserts.equals(
+ env,
+ sorted(ctx.attr.expected_paths),
+ sorted(actual_paths),
+ )
+ return analysistest.end(env)
+
+apex_aab_output_group_test = analysistest.make(
+ _apex_aab_output_group_test,
+ attrs = {"expected_paths": attr.string_list(mandatory = True)},
+)
+
+def _test_apex_aab_apex_files_output_group():
+ name = "apex_aab_apex_files"
+ test_name = name + "_test"
+ apex_name = name + "_apex"
+
+ test_apex(name = apex_name)
+
+ apex_aab(
+ name = name,
+ mainline_module = apex_name,
+ tags = ["manual"],
+ )
+
+ expected_paths = []
+ for arch in ["arm", "arm64", "x86", "x86_64", "arm64only", "x86_64only"]:
+ paths = [
+ "/".join([native.package_name(), "mainline_modules_" + arch, basename])
+ for basename in [
+ apex_name + ".apex",
+ apex_name + "-base.zip",
+ "java_apis_usedby_apex/" + apex_name + "_using.xml",
+ "ndk_apis_usedby_apex/" + apex_name + "_using.txt",
+ "ndk_apis_backedby_apex/" + apex_name + "_backing.txt",
+ ]
+ ]
+ expected_paths.extend(paths)
+
+ apex_aab_output_group_test(
+ name = test_name,
+ target_under_test = name,
+ expected_paths = expected_paths,
+ )
+
+ return test_name
+
+def _test_apex_aab_generates_aab_and_apks():
+ name = "apex_aab_apks"
+ test_name = name + "_test"
+ apex_name = name + "_apex"
+
+ test_apex(name = apex_name, package_name = "com.google.android." + apex_name)
+
+ apex_aab(
+ name = name,
+ mainline_module = apex_name,
+ dev_sign_bundle = "//build/make/tools/releasetools:sign_apex",
+ dev_keystore = "//build/bazel/rules/apex/testdata:dev-keystore",
+ tags = ["manual"],
+ )
+
+ apex_aab_test(
+ name = test_name,
+ target_under_test = name,
+ expected_paths = [
+ "/".join([native.package_name(), apex_name, apex_name + ".aab"]),
+ "/".join([native.package_name(), apex_name, apex_name + ".apks"]),
+ "/".join([native.package_name(), apex_name, apex_name + ".cert_info.txt"]),
+ ],
+ )
+
+ return test_name
+
+def apex_aab_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_apex_aab_generates_aab(),
+ _test_apex_aab_apex_files_output_group(),
+ _test_apex_aab_generates_aab_and_apks(),
+ ],
+ )
diff --git a/rules/apex/apex_available.bzl b/rules/apex/apex_available.bzl
new file mode 100644
index 00000000..eb33d29e
--- /dev/null
+++ b/rules/apex/apex_available.bzl
@@ -0,0 +1,159 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@soong_injection//apex_toolchain:constants.bzl", "apex_available_baseline")
+load("//build/bazel/rules:common.bzl", "get_dep_targets", "strip_bp2build_label_suffix")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo")
+load("//build/bazel/rules/apex:cc.bzl", "CC_ATTR_ASPECTS")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "CcStaticLibraryInfo")
+load("//build/bazel/rules/cc:cc_stub_library.bzl", "CcStubLibrarySharedInfo")
+
+ApexAvailableInfo = provider(
+ "ApexAvailableInfo collects APEX availability metadata.",
+ fields = {
+ "apex_available_names": "names of APEXs that this target is available to",
+ "platform_available": "whether this target is available for the platform",
+ "transitive_invalid_targets": "list of targets that had an invalid apex_available attribute",
+ "transitive_unvalidated_targets": "list of targets that were skipped in the apex_available_validation function",
+ },
+)
+
+# Validates if a target is made available as a transitive dependency of an APEX. The return
+# value is tri-state: True, False, string. Strings are used when a target is _not checked_
+# and the string itself contains the reason.
+def _validate_apex_available(target, ctx, *, apex_available_tags, apex_name, base_apex_name):
+ # testonly apexes aren't checked.
+ if ctx.attr.testonly:
+ return "testonly"
+
+ # Macro-internal manual targets aren't checked.
+ if "manual" in ctx.rule.attr.tags and "apex_available_checked_manual_for_testing" not in ctx.rule.attr.tags:
+ return "manual"
+
+ # prebuilt_file targets don't specify apex_available, and aren't checked.
+ if PrebuiltFileInfo in target:
+ return "prebuilt"
+
+    # stubs are APIs; they don't specify apex_available and aren't checked.
+ if CcStubLibrarySharedInfo in target:
+ return "stubs"
+
+ if "//apex_available:anyapex" in apex_available_tags:
+ return "//apex_available:anyapex"
+
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/apex.go;l=2910;drc=862c0d68fff500d7fe59bc2fcfc9c7d75596e5b5
+    # A bp2build-generated cc_library_static target from a stubs-providing lib
+    # doesn't have an apex_available tag.
+    # If its shared variant is directly in the apex, skip validation;
+    # otherwise, it will be invalidated.
+ direct_deps = ctx.attr._direct_deps[BuildSettingInfo].value
+ if CcStaticLibraryInfo in target and str(target.label).removesuffix("_bp2build_cc_library_static") in direct_deps:
+ return "has shared variant directly included"
+
+ if base_apex_name in apex_available_tags or apex_name in apex_available_tags:
+ return True
+
+ target_name = strip_bp2build_label_suffix(target.label.name)
+ baselines = [
+ apex_available_baseline.get(base_apex_name, []),
+ apex_available_baseline.get(apex_name, []),
+ apex_available_baseline.get("//apex_available:anyapex", []),
+ ]
+ if any([target_name in b for b in baselines]):
+ return True
+
+ return False
+
+_IGNORED_ATTRS = [
+ "certificate",
+ "key",
+ "android_manifest",
+ "applicable_licenses",
+ "androidmk_static_deps",
+ "androidmk_whole_archive_deps",
+ "androidmk_dynamic_deps",
+ "androidmk_deps",
+]
+
+def _apex_available_aspect_impl(target, ctx):
+ apex_available_tags = [
+ t.removeprefix("apex_available=")
+ for t in ctx.rule.attr.tags
+ if t.startswith("apex_available=")
+ ]
+ platform_available = (
+ "//apex_available:platform" in apex_available_tags or
+ len(apex_available_tags) == 0
+ )
+ apex_name = ctx.attr._apex_name[BuildSettingInfo].value
+
+ dep_targets = get_dep_targets(
+ ctx.rule.attr,
+ predicate = lambda target: ApexAvailableInfo in target,
+ )
+ transitive_unvalidated_targets = []
+ transitive_invalid_targets = []
+ for attr, attr_targets in dep_targets.items():
+ for t in attr_targets:
+ info = t[ApexAvailableInfo]
+ transitive_unvalidated_targets.append(info.transitive_unvalidated_targets)
+ if attr in CC_ATTR_ASPECTS:
+ transitive_invalid_targets.append(info.transitive_invalid_targets)
+ if attr not in _IGNORED_ATTRS:
+ if info.platform_available != None:
+ platform_available = platform_available and info.platform_available
+
+ if "manual" in ctx.rule.attr.tags and "apex_available_checked_manual_for_testing" not in ctx.rule.attr.tags:
+ platform_available = None
+
+ if CcStubLibrarySharedInfo in target:
+        # stub libraries are always available to the platform
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/cc.go;l=3670;drc=89ff729d1d65fb0ce2945ec6b8c4777a9d78dcab
+ platform_available = True
+
+ skipped_reason = _validate_apex_available(
+ target,
+ ctx,
+ apex_available_tags = apex_available_tags,
+ apex_name = apex_name,
+ base_apex_name = ctx.attr._base_apex_name[BuildSettingInfo].value,
+ )
+
+ return [
+ ApexAvailableInfo(
+ platform_available = platform_available,
+ apex_available_names = apex_available_tags,
+ transitive_unvalidated_targets = depset(
+ direct = [(ctx.label, skipped_reason)] if type(skipped_reason) == type("") else None,
+ transitive = transitive_unvalidated_targets,
+ ),
+ transitive_invalid_targets = depset(
+ direct = [(target, tuple(apex_available_tags))] if skipped_reason == False else None,
+ transitive = transitive_invalid_targets,
+ ),
+ ),
+ ]
+
+apex_available_aspect = aspect(
+ implementation = _apex_available_aspect_impl,
+ provides = [ApexAvailableInfo],
+ attr_aspects = ["*"],
+ attrs = {
+ "testonly": attr.bool(default = False), # propagated from the apex
+ "_apex_name": attr.label(default = "//build/bazel/rules/apex:apex_name"),
+ "_base_apex_name": attr.label(default = "//build/bazel/rules/apex:base_apex_name"),
+ "_direct_deps": attr.label(default = "//build/bazel/rules/apex:apex_direct_deps"),
+ },
+)
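+
+# For reference, the aspect reads availability from tags of the form
+# "apex_available=<name>" on each dependency. A library available to one APEX
+# and to the platform might carry (illustrative values):
+#
+#     tags = [
+#         "apex_available=com.android.example",
+#         "apex_available=//apex_available:platform",
+#     ],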
diff --git a/rules/apex/apex_deps_validation.bzl b/rules/apex/apex_deps_validation.bzl
new file mode 100644
index 00000000..f69b30e3
--- /dev/null
+++ b/rules/apex/apex_deps_validation.bzl
@@ -0,0 +1,261 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//build/bazel/rules:common.bzl", "get_dep_targets", "strip_bp2build_label_suffix")
+load("//build/bazel/rules/android:android_app_certificate.bzl", "AndroidAppCertificateInfo")
+load(":apex_available.bzl", "ApexAvailableInfo")
+load(":apex_info.bzl", "ApexInfo")
+load(":apex_key.bzl", "ApexKeyInfo")
+load(":cc.bzl", "get_min_sdk_version")
+
+ApexDepsInfo = provider(
+ "ApexDepsInfo collects transitive deps for dependency validation.",
+ fields = {
+ "transitive_deps": "Labels of targets that are depended on by this APEX.",
+ },
+)
+
+ApexDepInfo = provider(
+ "ApexDepInfo collects metadata about dependencies of APEXs.",
+ fields = {
+ "is_external": "True if this target is an external dep to the APEX.",
+ "label": "Label of target",
+ "min_sdk_version": "min_sdk_version of target",
+ },
+)
+
+_IGNORED_PACKAGES = [
+ "build/bazel/platforms",
+]
+_IGNORED_REPOSITORIES = [
+ "bazel_tools",
+]
+_IGNORED_RULE_KINDS = [
+    # No validation for language-agnostic targets. In general, language-agnostic
+    # rules supporting AIDL, HIDL, and Sysprop do not have an analogous
+ # module type in Soong and do not have an apex_available property, often
+ # relying on language-specific apex_available properties. Because a
+ # language-specific rule is required for a language-agnostic rule to be
+ # within the transitive deps of an apex and impact the apex contents, this
+ # is safe.
+ "aidl_library",
+ "hidl_library",
+ "sysprop_library",
+
+    # Build settings; these have no built artifact and thus will not be
+ # included in an apex.
+ "string_list_setting",
+ "string_setting",
+
+ # These rule kinds cannot be skipped by checking providers because most
+ # targets have a License provider
+ "_license",
+ "_license_kind",
+]
+_IGNORED_PROVIDERS = [
+ AndroidAppCertificateInfo,
+ ApexKeyInfo,
+ ProtoInfo,
+]
+_IGNORED_ATTRS = [
+ "androidmk_static_deps",
+ "androidmk_whole_archive_deps",
+ "androidmk_dynamic_deps",
+ "androidmk_deps",
+]
+_IGNORED_TARGETS = [
+ "default_metadata_file",
+]
+
+def _should_skip_apex_dep(target, ctx):
+ # Ignore Bazel-specific targets like platform/os/arch constraints,
+    # anything from @bazel_tools, and rule types that we don't care about
+    # for dependency validation, like licenses, certificates, etc.
+    # TODO(b/261715581): update allowed_deps.txt to include Bazel-specific targets
+ return (
+ ctx.label.workspace_name in _IGNORED_REPOSITORIES or
+ ctx.label.package in _IGNORED_PACKAGES or
+ ctx.rule.kind in _IGNORED_RULE_KINDS or
+ True in [p in target for p in _IGNORED_PROVIDERS] or
+ target.label.name in _IGNORED_TARGETS
+ )
+
+def _apex_dep_validation_aspect_impl(target, ctx):
+ transitive_deps = []
+ for attr, attr_deps in get_dep_targets(ctx.rule.attr, predicate = lambda target: ApexDepsInfo in target).items():
+ if attr in _IGNORED_ATTRS:
+ continue
+ for dep in attr_deps:
+ transitive_deps.append(dep[ApexDepsInfo].transitive_deps)
+
+ if _should_skip_apex_dep(target, ctx):
+ return ApexDepsInfo(
+ transitive_deps = depset(
+ transitive = transitive_deps,
+ ),
+ )
+
+ is_external = False
+ include_self_in_transitive_deps = True
+
+ if "manual" in ctx.rule.attr.tags and "apex_available_checked_manual_for_testing" not in ctx.rule.attr.tags:
+ include_self_in_transitive_deps = False
+ else:
+ apex_available_names = target[ApexAvailableInfo].apex_available_names
+ apex_name = ctx.attr._apex_name[BuildSettingInfo].value
+ base_apex_name = ctx.attr._base_apex_name[BuildSettingInfo].value
+ if not (
+ "//apex_available:anyapex" in apex_available_names or
+ base_apex_name in apex_available_names or
+ apex_name in apex_available_names
+ ):
+            # APEX deps validation stops when the dependency graph crosses the APEX boundary.
+            # Record that this is a boundary target, so that we can exclude it later from validation.
+ is_external = True
+ transitive_deps = []
+
+ if not target[ApexAvailableInfo].platform_available:
+ # Skip dependencies that are only available to APEXes; they are
+ # developed with updatability in mind and don't need manual approval.
+ include_self_in_transitive_deps = False
+
+ if ApexInfo in target:
+ include_self_in_transitive_deps = False
+
+ direct_deps = []
+ if include_self_in_transitive_deps:
+ direct_deps = [
+ ApexDepInfo(
+ label = ctx.label,
+ is_external = is_external,
+ min_sdk_version = get_min_sdk_version(ctx),
+ ),
+ ]
+
+ return ApexDepsInfo(
+ transitive_deps = depset(
+ direct = direct_deps,
+ transitive = transitive_deps,
+ ),
+ )
+
+apex_deps_validation_aspect = aspect(
+ doc = "apex_deps_validation_aspect walks the deps of an APEX and records" +
+ " its transitive dependencies so that they can be validated against" +
+ " allowed_deps.txt.",
+ implementation = _apex_dep_validation_aspect_impl,
+ attr_aspects = ["*"],
+ apply_to_generating_rules = True,
+ attrs = {
+ "_apex_name": attr.label(default = "//build/bazel/rules/apex:apex_name"),
+ "_base_apex_name": attr.label(default = "//build/bazel/rules/apex:base_apex_name"),
+ "_direct_deps": attr.label(default = "//build/bazel/rules/apex:apex_direct_deps"),
+ },
+ required_aspect_providers = [ApexAvailableInfo],
+ provides = [ApexDepsInfo],
+)
+
+def _min_sdk_version_string(version):
+ if version.apex_inherit:
+ return "apex_inherit"
+ elif version.min_sdk_version == None:
+ return "(no version)"
+ return version.min_sdk_version
+
+def _apex_dep_to_string(apex_dep_info):
+ return "{name}(minSdkVersion:{min_sdk_version})".format(
+ name = strip_bp2build_label_suffix(apex_dep_info.label.name),
+ min_sdk_version = _min_sdk_version_string(apex_dep_info.min_sdk_version),
+ )
+
+def apex_dep_infos_to_allowlist_strings(apex_dep_infos):
+ """apex_dep_infos_to_allowlist_strings converts outputs a string that can be compared against allowed_deps.txt
+
+ Args:
+ apex_dep_infos (list[ApexDepInfo]): list of deps to convert
+ Returns:
+ a list of strings conforming to the format of allowed_deps.txt
+ """
+ return [
+ _apex_dep_to_string(d)
+ for d in apex_dep_infos
+ if not d.is_external
+ ]
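+
+# For illustration: a (hypothetical) dep named "libexample" with min_sdk_version
+# "29" renders as "libexample(minSdkVersion:29)", one inheriting the APEX's
+# version renders as "libexample(minSdkVersion:apex_inherit)", and deps marked
+# is_external are dropped.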
+
+def validate_apex_deps(ctx, transitive_deps, allowed_deps_manifest):
+ """validate_apex_deps generates actions to validate that all deps in transitive_deps exist in the allowed_deps file
+
+ Args:
+ ctx (rule context): a rule context
+        transitive_deps (depset[ApexDepInfo]): depset of transitive dependencies
+ of an APEX. This is most likely generated by collecting the output
+ of apex_deps_validation_aspect
+ allowed_deps_manifest (File): a file containing an allowlist of modules
+ that can be included in an APEX. This is expected to be in the format
+ of //packages/modules/common/build/allowed_deps.txt
+ Returns:
+ validation_marker (File): an empty file created if validation succeeds
+ """
+ apex_deps_file = ctx.actions.declare_file(ctx.label.name + ".current_deps")
+ ctx.actions.write(
+ apex_deps_file,
+ "\n".join(apex_dep_infos_to_allowlist_strings(transitive_deps.to_list())),
+ )
+ validation_marker = ctx.actions.declare_file(ctx.label.name + ".allowed_deps")
+ shell_command = """
+ export module_diff=$(
+ cat {allowed_deps_manifest} |
+ sed -e 's/^prebuilt_//g' |
+ sort |
+ comm -23 <(sort -u {apex_deps_file}) -
+ );
+ export diff_size=$(echo "$module_diff" | wc -w);
+ if [[ $diff_size -eq 0 ]]; then
+ touch {validation_marker};
+ else
+ echo -e "\n******************************";
+ echo "ERROR: go/apex-allowed-deps-error contains more information";
+ echo "******************************";
+ echo "Detected changes to allowed dependencies in updatable modules.";
+ echo "There are $diff_size dependencies of APEX {target_label} on modules not in {allowed_deps_manifest}:";
+ echo "$module_diff";
+ echo "To fix and update packages/modules/common/build/allowed_deps.txt, please run:";
+ echo -e "$ (croot && packages/modules/common/build/update-apex-allowed-deps.sh)\n";
+ echo "When submitting the generated CL, you must include the following information";
+ echo "in the commit message if you are adding a new dependency:";
+ echo "Apex-Size-Increase: Expected binary size increase for affected APEXes (or the size of the .jar / .so file of the new library)";
+ echo "Previous-Platform-Support: Are the maintainers of the new dependency committed to supporting previous platform releases?";
+ echo "Aosp-First: Is the new dependency being developed AOSP-first or internal?";
+ echo "Test-Info: What’s the testing strategy for the new dependency? Does it have its own tests, and are you adding integration tests? How/when are the tests run?";
+ echo "You do not need OWNERS approval to submit the change, but mainline-modularization@";
+ echo "will periodically review additions and may require changes.";
+ echo -e "******************************\n";
+ exit 1;
+ fi;
+ """.format(
+ allowed_deps_manifest = allowed_deps_manifest.path,
+ apex_deps_file = apex_deps_file.path,
+ validation_marker = validation_marker.path,
+ target_label = ctx.label,
+ )
+ ctx.actions.run_shell(
+ inputs = [allowed_deps_manifest, apex_deps_file],
+ outputs = [validation_marker],
+ command = shell_command,
+ mnemonic = "ApexDepValidation",
+ progress_message = "Validating APEX dependencies",
+ )
+
+ return validation_marker
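+
+# A rough wiring sketch (attribute names here are illustrative, not the actual
+# apex rule): a rule that attaches apex_deps_validation_aspect to its deps can
+# merge the collected ApexDepsInfo and surface the marker through a validation
+# output group so the check runs whenever the APEX is built, e.g.:
+#
+#     transitive_deps = depset(transitive = [
+#         dep[ApexDepsInfo].transitive_deps
+#         for dep in ctx.attr.native_shared_libs_64 + ctx.attr.binaries
+#     ])
+#     marker = validate_apex_deps(ctx, transitive_deps, ctx.file._allowed_deps_manifest)
+#     # ... and include `marker` in OutputGroupInfo(_validation = depset([marker])).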
diff --git a/rules/apex/apex_info.bzl b/rules/apex/apex_info.bzl
new file mode 100644
index 00000000..554ec378
--- /dev/null
+++ b/rules/apex/apex_info.bzl
@@ -0,0 +1,41 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ApexInfo = provider(
+ "ApexInfo exports metadata about this apex.",
+ fields = {
+ "backing_libs": "File containing libraries used by the APEX.",
+ "base_file": "A zip file used to create aab files.",
+ "base_with_config_zip": "A zip file used to create aab files within mixed builds.",
+ "bundle_key_info": "APEX bundle signing public/private key pair (the value of the key: attribute).",
+ "container_key_info": "Info of the container key provided as AndroidAppCertificateInfo.",
+ "installed_files": "File containing all files installed by the APEX",
+ "java_symbols_used_by_apex": "Java symbol list used by this APEX.",
+ "package_name": "APEX package name.",
+ "provides_native_libs": "Labels of native shared libs that this apex provides.",
+ "requires_native_libs": "Labels of native shared libs that this apex requires.",
+ "signed_compressed_output": "Signed .capex file.",
+ "signed_output": "Signed .apex file.",
+ "symbols_used_by_apex": "Symbol list used by this APEX.",
+ "unsigned_output": "Unsigned .apex file.",
+ },
+)
+
+ApexMkInfo = provider(
+ "ApexMkInfo exports metadata about this apex for Android.mk integration / bundled builds.",
+ fields = {
+ "make_modules_to_install": "Make module names that should be installed to the system along with this APEX.",
+ "files_info": "Metadata about the files included in the APEX payload. Used for generating Make code for final packaging step (e.g. coverage zip files).",
+ },
+)
diff --git a/rules/apex/apex_key.bzl b/rules/apex/apex_key.bzl
new file mode 100644
index 00000000..45e83f01
--- /dev/null
+++ b/rules/apex/apex_key.bzl
@@ -0,0 +1,95 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//build/bazel/product_config:product_variables_providing_rule.bzl", "ProductVariablesDepsInfo", "ProductVariablesInfo")
+
+ApexKeyInfo = provider(
+ "Info needed to sign APEX bundles",
+ fields = {
+ "private_key": "File containing the private key",
+ "public_key": "File containing the public_key",
+ },
+)
+
+def _apex_key_rule_impl(ctx):
+ public_key = ctx.file.public_key
+ private_key = ctx.file.private_key
+
+ # If the DefaultAppCertificate directory is specified, then look for this
+ # key in that directory instead, with the exact same basenames for both the
+ # avbpubkey and pem files.
+ product_var_cert = ctx.attr._product_variables[ProductVariablesInfo].DefaultAppCertificate
+ cert_files_to_search = ctx.attr._product_variables[ProductVariablesDepsInfo].DefaultAppCertificateFiles
+ if product_var_cert and cert_files_to_search:
+ for f in cert_files_to_search:
+ if f.basename == ctx.file.public_key.basename:
+ public_key = f
+ elif f.basename == ctx.file.private_key.basename:
+ private_key = f
+
+ public_keyname = paths.split_extension(public_key.basename)[0]
+ private_keyname = paths.split_extension(private_key.basename)[0]
+ if public_keyname != private_keyname:
+ fail("public_key %s (keyname:%s) and private_key %s (keyname:%s) do not have same keyname" % (
+ ctx.attr.public_key.label,
+ public_keyname,
+ ctx.attr.private_key.label,
+ private_keyname,
+ ))
+
+ return [
+ ApexKeyInfo(
+ public_key = public_key,
+ private_key = private_key,
+ ),
+ ]
+
+_apex_key = rule(
+ implementation = _apex_key_rule_impl,
+ attrs = {
+ "private_key": attr.label(mandatory = True, allow_single_file = True),
+ "public_key": attr.label(mandatory = True, allow_single_file = True),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
+ ),
+ },
+)
+
+def _get_key_label(label, name):
+ if label and name:
+ fail("Cannot use both {public,private}_key_name and {public,private}_key attributes together. " +
+ "Use only one of them.")
+
+ if label:
+ return label
+
+ # Ensure that the name references the calling package's local BUILD target
+ return ":" + name
+
+def apex_key(
+ name,
+ public_key = None,
+ private_key = None,
+ public_key_name = None,
+ private_key_name = None,
+ **kwargs):
+ # The keys are labels that point to either a file, or a target that provides
+ # a single file (e.g. a filegroup or rule that provides the key itself only).
+ _apex_key(
+ name = name,
+ public_key = _get_key_label(public_key, public_key_name),
+ private_key = _get_key_label(private_key, private_key_name),
+ **kwargs
+ )
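+
+# A usage sketch with hypothetical key files; the two files must share the same
+# keyname (basename without extension):
+#
+#     apex_key(
+#         name = "com.android.example.key",
+#         public_key = "com.android.example.avbpubkey",
+#         private_key = "com.android.example.pem",
+#     )
+#
+# Alternatively, public_key_name/private_key_name refer to key file targets by
+# name in the calling package.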
diff --git a/rules/apex/apex_key_test.bzl b/rules/apex/apex_key_test.bzl
new file mode 100644
index 00000000..fdd31a1b
--- /dev/null
+++ b/rules/apex/apex_key_test.bzl
@@ -0,0 +1,163 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":apex_key.bzl", "ApexKeyInfo", "apex_key")
+
+def _apex_key_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(
+ env,
+ ctx.attr.expected_private_key_short_path,
+ target_under_test[ApexKeyInfo].private_key.short_path,
+ )
+ asserts.equals(
+ env,
+ ctx.attr.expected_public_key_short_path,
+ target_under_test[ApexKeyInfo].public_key.short_path,
+ )
+ return analysistest.end(env)
+
+apex_key_test = analysistest.make(
+ _apex_key_test,
+ attrs = {
+ "expected_private_key_short_path": attr.string(mandatory = True),
+ "expected_public_key_short_path": attr.string(mandatory = True),
+ },
+)
+
+apex_key_with_default_app_cert_test = analysistest.make(
+ _apex_key_test,
+ attrs = {
+ "expected_private_key_short_path": attr.string(mandatory = True),
+ "expected_public_key_short_path": attr.string(mandatory = True),
+ },
+ config_settings = {
+ # This product sets DefaultAppCertificate to build/bazel/rules/apex/testdata/devkey,
+ # so we expect the apex_key to look for key_name in build/bazel/rules/apex/testdata.
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_with_overrides_and_app_cert",
+ },
+)
+
+def _test_apex_key_file_targets_with_key_name_attribute():
+ name = "apex_key_file_targets_with_key_name_attribute"
+ test_name = name + "_test"
+ private_key = name + ".pem"
+ public_key = name + ".avbpubkey"
+
+ apex_key(
+ name = name,
+ private_key_name = private_key,
+ public_key_name = public_key,
+ )
+
+ apex_key_test(
+ name = test_name,
+ target_under_test = name,
+ expected_private_key_short_path = native.package_name() + "/" + private_key,
+ expected_public_key_short_path = native.package_name() + "/" + public_key,
+ )
+
+ return test_name
+
+def _test_apex_key_file_targets_with_key_name_attribute_with_default_app_cert():
+ name = "apex_key_file_targets_with_key_attribute_with_default_app_cert"
+ test_name = name + "_test"
+ private_key = "devkey.pem"
+ public_key = "devkey.avbpubkey"
+
+ apex_key(
+ name = name,
+ private_key_name = private_key,
+ public_key_name = public_key,
+ )
+
+ apex_key_with_default_app_cert_test(
+ name = test_name,
+ target_under_test = name,
+ expected_private_key_short_path = "build/bazel/rules/apex/testdata/" + private_key,
+ expected_public_key_short_path = "build/bazel/rules/apex/testdata/" + public_key,
+ )
+
+ return test_name
+
+def _test_apex_key_file_targets_with_key_attribute():
+ name = "apex_key_file_targets_with_key_attribute"
+ test_name = name + "_test"
+ private_key = name + ".pem"
+ public_key = name + ".avbpubkey"
+
+ apex_key(
+ name = name,
+        # Referring to file targets with plain strings works as well, as Bazel
+        # will parse these labels as file targets in the same package.
+ private_key = private_key,
+ public_key = public_key,
+ )
+
+ apex_key_test(
+ name = test_name,
+ target_under_test = name,
+ expected_private_key_short_path = native.package_name() + "/" + private_key,
+ expected_public_key_short_path = native.package_name() + "/" + public_key,
+ )
+
+ return test_name
+
+def _test_apex_key_generated_keys():
+ name = "apex_key_generated_keys"
+ test_name = name + "_test"
+ private_key = name + ".pem"
+ public_key = name + ".avbpubkey"
+
+ native.genrule(
+ name = private_key,
+ outs = ["priv/" + name + ".generated"],
+ cmd = "noop",
+ tags = ["manual"],
+ )
+
+ native.genrule(
+ name = public_key,
+ outs = ["pub/" + name + ".generated"],
+ cmd = "noop",
+ tags = ["manual"],
+ )
+
+ apex_key(
+ name = name,
+ private_key = private_key,
+ public_key = public_key,
+ )
+
+ apex_key_test(
+ name = test_name,
+ target_under_test = name,
+ expected_private_key_short_path = native.package_name() + "/priv/" + name + ".generated",
+ expected_public_key_short_path = native.package_name() + "/pub/" + name + ".generated",
+ )
+
+ return test_name
+
+def apex_key_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_apex_key_file_targets_with_key_name_attribute(),
+ _test_apex_key_file_targets_with_key_name_attribute_with_default_app_cert(),
+ _test_apex_key_file_targets_with_key_attribute(),
+ _test_apex_key_generated_keys(),
+ ],
+ )
diff --git a/rules/apex/apex_mk_test.bzl b/rules/apex/apex_mk_test.bzl
new file mode 100644
index 00000000..9a915a82
--- /dev/null
+++ b/rules/apex/apex_mk_test.bzl
@@ -0,0 +1,199 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":apex_info.bzl", "ApexInfo", "ApexMkInfo")
+load(":apex_test_helpers.bzl", "test_apex")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:prebuilt_file.bzl", "prebuilt_file")
+load("//build/bazel/rules/cc:cc_binary.bzl", "cc_binary")
+load("//build/bazel/rules:sh_binary.bzl", "sh_binary")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+
+def _apex_files_info_test(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+
+    # there's no attr.string_keyed_string_dict_list, so we'll have to make do :-)
+ expected_files_info = [json.decode(i) for i in ctx.attr.expected_files_info]
+ actual_files_info = target[ApexMkInfo].files_info
+
+ asserts.equals(env, len(expected_files_info), len(actual_files_info))
+
+ for idx, expected in enumerate(expected_files_info):
+ actual = actual_files_info[idx]
+
+ asserts.equals(env, len(expected), len(actual))
+ for k, v in expected.items():
+ if k in ["built_file", "unstripped_built_file"]:
+ # don't test the part that contains the configuration hash, which is sensitive to changes.
+ expected_path_without_config = v.split("bazel-out/")[-1]
+ asserts.true(env, actual[k].endswith(expected_path_without_config))
+ else:
+ asserts.equals(env, v, actual[k])
+ return analysistest.end(env)
+
+apex_files_info_test = analysistest.make(
+ _apex_files_info_test,
+ attrs = {
+ "expected_files_info": attr.string_list(
+ doc = "expected files info",
+ ),
+ },
+)
+
+def _test_apex_files_info_basic():
+ name = "apex_files_info_basic"
+ test_name = name + "_test"
+
+ test_apex(name = name)
+
+ apex_files_info_test(
+ name = test_name,
+ target_under_test = name,
+ expected_files_info = [
+ # deliberately empty.
+ ],
+ )
+
+ return test_name
+
+def _test_apex_files_info_complex():
+ name = "apex_files_info_complex"
+ test_name = name + "_test"
+
+ prebuilt_file(
+ name = name + "_file",
+ src = name + "_file.txt",
+ dir = "etc",
+ tags = ["manual"],
+ )
+
+ sh_binary(
+ name = name + "_bin_sh",
+ srcs = [name + "_bin.sh"],
+ tags = ["manual"],
+ )
+
+ cc_binary(
+ name = name + "_bin_cc",
+ srcs = [name + "_bin.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bin_sh", name + "_bin_cc"],
+ prebuilts = [name + "_file"],
+ native_shared_libs_32 = [name + "_lib_cc"],
+ native_shared_libs_64 = [name + "_lib2_cc"],
+ )
+
+ apex_files_info_test(
+ name = test_name,
+ target_under_test = name,
+ target_compatible_with = ["//build/bazel/platforms/os:android", "//build/bazel/platforms/arch:arm64"],
+ expected_files_info = [json.encode(i) for i in [
+ {
+ "built_file": "bazel-out/bin/build/bazel/rules/apex/apex_files_info_complex_bin_cc",
+ "class": "nativeExecutable",
+ "install_dir": "bin",
+ "basename": "apex_files_info_complex_bin_cc",
+ "package": "build/bazel/rules/apex",
+ "make_module_name": "apex_files_info_complex_bin_cc",
+ "arch": "arm64",
+ "unstripped_built_file": "bazel-out/build/bazel/rules/apex/apex_files_info_complex_bin_cc_unstripped",
+ },
+ {
+ "built_file": "bazel-out/bin/build/bazel/rules/apex/apex_files_info_complex_bin_sh",
+ "class": "shBinary",
+ "install_dir": "bin",
+ "basename": "apex_files_info_complex_bin_sh",
+ "package": "build/bazel/rules/apex",
+ "make_module_name": "apex_files_info_complex_bin_sh",
+ "arch": "arm64",
+ },
+ {
+ "built_file": "build/bazel/rules/apex/apex_files_info_complex_file.txt",
+ "class": "etc",
+ "install_dir": "etc",
+ "basename": "apex_files_info_complex_file",
+ "package": "build/bazel/rules/apex",
+ "make_module_name": "apex_files_info_complex_file",
+ "arch": "arm64",
+ },
+ {
+ "built_file": "bazel-out/bin/build/bazel/rules/apex/apex_files_info_complex_lib2_cc.so",
+ "class": "nativeSharedLib",
+ "install_dir": "lib64",
+ "basename": "apex_files_info_complex_lib2_cc.so",
+ "package": "build/bazel/rules/apex",
+ "make_module_name": "apex_files_info_complex_lib2_cc",
+ "arch": "arm64",
+ "unstripped_built_file": "bazel-out/bin/build/bazel/rules/apex/libapex_files_info_complex_lib2_cc_unstripped.so",
+ },
+ {
+ "built_file": "bazel-out/bin/build/bazel/rules/apex/apex_files_info_complex_lib_cc.so",
+ "class": "nativeSharedLib",
+ "install_dir": "lib",
+ "basename": "apex_files_info_complex_lib_cc.so",
+ "package": "build/bazel/rules/apex",
+ "make_module_name": "apex_files_info_complex_lib_cc",
+ "arch": "arm",
+ "unstripped_built_file": "bazel-out/bin/build/bazel/rules/apex/libapex_files_info_complex_lib_cc_unstripped.so",
+ },
+ {
+ "built_file": "bazel-out/bin/external/libcxx/libc++.so",
+ "class": "nativeSharedLib",
+ "install_dir": "lib",
+ "basename": "libc++.so",
+ "package": "external/libcxx",
+ "make_module_name": "libc++",
+ "arch": "arm",
+ "unstripped_built_file": "bazel-out/bin/external/libcxx/liblibc++_unstripped.so",
+ },
+ {
+ "built_file": "bazel-out/bin/external/libcxx/libc++.so",
+ "class": "nativeSharedLib",
+ "install_dir": "lib64",
+ "basename": "libc++.so",
+ "package": "external/libcxx",
+ "make_module_name": "libc++",
+ "arch": "arm64",
+ "unstripped_built_file": "bazel-out/bin/external/libcxx/liblibc++_unstripped.so",
+ },
+ ]],
+ )
+
+ return test_name
+
+def apex_mk_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_apex_files_info_basic(),
+ _test_apex_files_info_complex(),
+ ],
+ )
diff --git a/rules/apex/apex_test.bzl b/rules/apex/apex_test.bzl
new file mode 100644
index 00000000..e155bf58
--- /dev/null
+++ b/rules/apex/apex_test.bzl
@@ -0,0 +1,2830 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("@soong_injection//apex_toolchain:constants.bzl", "default_manifest_version")
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+load("//build/bazel/rules:common.bzl", "get_dep_targets")
+load("//build/bazel/rules:prebuilt_file.bzl", "prebuilt_file")
+load("//build/bazel/rules:sh_binary.bzl", "sh_binary")
+load("//build/bazel/rules/aidl:aidl_interface.bzl", "aidl_interface")
+load("//build/bazel/rules/android:android_app_certificate.bzl", "android_app_certificate")
+load("//build/bazel/rules/cc:cc_binary.bzl", "cc_binary")
+load("//build/bazel/rules/cc:cc_library_headers.bzl", "cc_library_headers")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/cc:cc_stub_library.bzl", "cc_stub_suite")
+load("//build/bazel/rules/test_common:rules.bzl", "expect_failure_test", "target_under_test_exist_test")
+load(":apex_deps_validation.bzl", "ApexDepsInfo", "apex_dep_infos_to_allowlist_strings")
+load(":apex_info.bzl", "ApexInfo", "ApexMkInfo")
+load(":apex_test_helpers.bzl", "test_apex")
+
+ActionArgsInfo = provider(
+ fields = {
+ "argv": "The link action arguments.",
+ },
+)
+
+def _canned_fs_config_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ found_canned_fs_config_action = False
+
+ def pretty_print_list(the_list):
+ if not the_list:
+ return "[]"
+ result = "[\n"
+ for item in the_list:
+ result += " \"%s\",\n" % item
+ return result + "]"
+
+ if ctx.attr.expected_extra_cat:
+ append_custom_fs_config = [a for a in actions if a.mnemonic == "AppendCustomFsConfig"]
+ asserts.true(env, len(append_custom_fs_config) == 1, "could not find the AppendCustomFsConfig action")
+ a = append_custom_fs_config[0]
+ args = a.argv[2].split(" ") # first 2 are "/bin/bash" and "-c"
+ asserts.equals(env, args[0], "cat")
+ asserts.true(env, args[1].endswith("_canned_fs_config.txt"))
+ asserts.true(env, args[2].endswith(ctx.attr.expected_extra_cat), "expected %s, but got %s" % (ctx.attr.expected_extra_cat, args[2]))
+ asserts.equals(env, args[3], ">")
+ asserts.true(env, args[4].endswith("_combined_canned_fs_config.txt"))
+
+ for a in actions:
+ if a.mnemonic != "FileWrite":
+ # The canned_fs_config uses ctx.actions.write.
+ continue
+
+ outputs = a.outputs.to_list()
+ if len(outputs) != 1:
+ continue
+ if not outputs[0].basename.endswith("_canned_fs_config.txt"):
+ continue
+
+ found_canned_fs_config_action = True
+
+ # Don't sort -- the order is significant.
+ actual_entries = a.content.split("\n")
+ replacement = "64" if platforms.get_target_bitness(ctx.attr._platform_utils) == 64 else ""
+ expected_entries = [x.replace("{64_OR_BLANK}", replacement) for x in ctx.attr.expected_entries]
+ asserts.equals(env, pretty_print_list(expected_entries), pretty_print_list(actual_entries))
+
+ break
+
+ # Ensures that we actually found the canned_fs_config.txt generation action.
+ asserts.true(env, found_canned_fs_config_action, "did not find the canned fs config generating action")
+
+ return analysistest.end(env)
+
+canned_fs_config_test = analysistest.make(
+ _canned_fs_config_test,
+ attrs = {
+ "expected_entries": attr.string_list(
+ doc = "Expected lines in the canned_fs_config.txt",
+ ),
+ "expected_extra_cat": attr.string(
+ doc = "Filename of the custom canned fs config to be found in the AppendCustomFsConfig action",
+ ),
+ "_platform_utils": attr.label(
+ default = Label("//build/bazel/platforms:platform_utils"),
+ ),
+ },
+)
+
+def _test_canned_fs_config_basic():
+ name = "apex_canned_fs_config_basic"
+ test_name = name + "_test"
+
+ test_apex(name = name)
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "", # ends with a newline
+ ],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_custom():
+ name = "apex_canned_fs_config_custom"
+ test_name = name + "_test"
+
+ native.genrule(
+ name = name + ".custom_config",
+ outs = [name + ".custom.config"],
+ cmd = "echo -e \"/2.bin 0 1000 0750\n/1.bin 0 1000 0777\n\" > $@",
+ )
+
+ test_apex(
+ name = name,
+ canned_fs_config = name + "_custom.config",
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "", # ends with a newline
+            # Unfortunately, because Bazel analysis cannot read the contents of
+            # inputs (i.e. dynamic dependencies), we cannot test for the
+            # contents of the custom config here. But we can test that the
+            # custom config is concatenated in the action command via
+            # 'expected_extra_cat' below.
+ ],
+ expected_extra_cat = name + "_custom.config",
+ )
+
+ return test_name
+
+def _test_canned_fs_config_binaries():
+ name = "apex_canned_fs_config_binaries"
+ test_name = name + "_test"
+
+ sh_binary(
+ name = "bin_sh",
+ srcs = ["bin.sh"],
+ tags = ["manual"],
+ )
+
+ cc_binary(
+ name = "bin_cc",
+ srcs = ["bin.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ binaries = ["bin_sh", "bin_cc"],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/lib{64_OR_BLANK}/libc++.so 1000 1000 0644",
+ "/bin/bin_cc 0 2000 0755",
+ "/bin/bin_sh 0 2000 0755",
+ "/bin 0 2000 0755",
+ "/lib{64_OR_BLANK} 0 2000 0755",
+ "", # ends with a newline
+ ],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_native_shared_libs_arm():
+ name = "apex_canned_fs_config_native_shared_libs_arm"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ native_shared_libs_64 = [name + "_lib2_cc"],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/lib/apex_canned_fs_config_native_shared_libs_arm_lib_cc.so 1000 1000 0644",
+ "/lib/libc++.so 1000 1000 0644",
+ "/lib 0 2000 0755",
+ "", # ends with a newline
+ ],
+ target_compatible_with = ["//build/bazel/platforms/arch:arm"],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_native_shared_libs_arm64():
+ name = "apex_canned_fs_config_native_shared_libs_arm64"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ native_shared_libs_64 = [name + "_lib2_cc"],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/lib/apex_canned_fs_config_native_shared_libs_arm64_lib_cc.so 1000 1000 0644",
+ "/lib/libc++.so 1000 1000 0644",
+ "/lib64/apex_canned_fs_config_native_shared_libs_arm64_lib2_cc.so 1000 1000 0644",
+ "/lib64/libc++.so 1000 1000 0644",
+ "/lib 0 2000 0755",
+ "/lib64 0 2000 0755",
+ "", # ends with a newline
+ ],
+ target_compatible_with = ["//build/bazel/platforms/arch:arm64"],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_prebuilts():
+ name = "apex_canned_fs_config_prebuilts"
+ test_name = name + "_test"
+
+ prebuilt_file(
+ name = "file",
+ src = "file.txt",
+ dir = "etc",
+ tags = ["manual"],
+ )
+
+ prebuilt_file(
+ name = "nested_file_in_dir",
+ src = "file2.txt",
+ dir = "etc/nested",
+ tags = ["manual"],
+ )
+
+ prebuilt_file(
+ name = "renamed_file_in_dir",
+ src = "file3.txt",
+ dir = "etc",
+ filename = "renamed_file3.txt",
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ prebuilts = [
+ ":file",
+ ":nested_file_in_dir",
+ ":renamed_file_in_dir",
+ ],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/etc/file 1000 1000 0644",
+ "/etc/nested/nested_file_in_dir 1000 1000 0644",
+ "/etc/renamed_file3.txt 1000 1000 0644",
+ "/etc 0 2000 0755",
+ "/etc/nested 0 2000 0755",
+ "", # ends with a newline
+ ],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_prebuilts_sort_order():
+ name = "apex_canned_fs_config_prebuilts_sort_order"
+ test_name = name + "_test"
+
+ prebuilt_file(
+ name = "file_a",
+ src = "file_a.txt",
+ dir = "etc/a",
+ tags = ["manual"],
+ )
+
+ prebuilt_file(
+ name = "file_b",
+ src = "file_b.txt",
+ dir = "etc/b",
+ tags = ["manual"],
+ )
+
+ prebuilt_file(
+ name = "file_a_c",
+ src = "file_a_c.txt",
+ dir = "etc/a/c",
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ prebuilts = [
+ ":file_a",
+ ":file_b",
+ ":file_a_c",
+ ],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/etc/a/c/file_a_c 1000 1000 0644",
+ "/etc/a/file_a 1000 1000 0644",
+ "/etc/b/file_b 1000 1000 0644",
+ "/etc 0 2000 0755",
+ "/etc/a 0 2000 0755",
+ "/etc/a/c 0 2000 0755",
+ "/etc/b 0 2000 0755",
+ "", # ends with a newline
+ ],
+ )
+
+ return test_name
+
+def _test_canned_fs_config_runtime_deps():
+ name = "apex_canned_fs_config_runtime_deps"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_runtime_dep_3",
+ srcs = ["lib2.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name + "_static_lib",
+ srcs = ["lib3.cc"],
+ runtime_deps = [name + "_runtime_dep_3"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_runtime_dep_2",
+ srcs = ["lib2.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_runtime_dep_1",
+ srcs = ["lib.cc"],
+ runtime_deps = [name + "_runtime_dep_2"],
+ tags = ["manual"],
+ )
+
+ cc_binary(
+ name = name + "_bin_cc",
+ srcs = ["bin.cc"],
+ runtime_deps = [name + "_runtime_dep_1"],
+ deps = [name + "_static_lib"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bin_cc"],
+ )
+
+ canned_fs_config_test(
+ name = test_name,
+ target_under_test = name,
+ expected_entries = [
+ "/ 1000 1000 0755",
+ "/apex_manifest.json 1000 1000 0644",
+ "/apex_manifest.pb 1000 1000 0644",
+ "/lib{64_OR_BLANK}/%s_runtime_dep_1.so 1000 1000 0644" % name,
+ "/lib{64_OR_BLANK}/%s_runtime_dep_2.so 1000 1000 0644" % name,
+ "/lib{64_OR_BLANK}/%s_runtime_dep_3.so 1000 1000 0644" % name,
+ "/lib{64_OR_BLANK}/libc++.so 1000 1000 0644",
+ "/bin/%s_bin_cc 0 2000 0755" % name,
+ "/bin 0 2000 0755",
+ "/lib{64_OR_BLANK} 0 2000 0755",
+ "", # ends with a newline
+ ],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+
+ return test_name
+
+def _apex_manifest_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ conv_apex_manifest_action = [a for a in actions if a.mnemonic == "ConvApexManifest"][0]
+
+ apexer_action = [a for a in actions if a.mnemonic == "Apexer"][0]
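+ # The final element of argv is a single space-joined string of trailing
+ # arguments, so split it to make individual flags like --manifest indexable.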
+ argv = apexer_action.argv[:-1] + apexer_action.argv[-1].split(" ")
+ manifest_index = argv.index("--manifest")
+ manifest_path = argv[manifest_index + 1]
+
+ asserts.equals(
+ env,
+ conv_apex_manifest_action.outputs.to_list()[0].path,
+ manifest_path,
+ "the generated apex manifest protobuf is used as input to apexer",
+ )
+ asserts.true(
+ env,
+ manifest_path.endswith(".pb"),
+ "the generated apex manifest should be a .pb file",
+ )
+
+ if ctx.attr.expected_min_sdk_version != "":
+ flag_index = argv.index("--min_sdk_version")
+ min_sdk_version_argv = argv[flag_index + 1]
+ asserts.equals(
+ env,
+ ctx.attr.expected_min_sdk_version,
+ min_sdk_version_argv,
+ )
+
+ return analysistest.end(env)
+
+apex_manifest_test_attr = dict(
+ impl = _apex_manifest_test,
+ attrs = {
+ "expected_min_sdk_version": attr.string(),
+ },
+)
+
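+ # The same test implementation is instantiated several times below because
+ # analysistest config_settings are fixed when the test rule is created with
+ # analysistest.make, not at the test callsite.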
+apex_manifest_test = analysistest.make(
+ **apex_manifest_test_attr
+)
+
+apex_manifest_global_min_sdk_current_test = analysistest.make(
+ config_settings = {
+ "@//build/bazel/rules/apex:unbundled_build_target_sdk_with_api_fingerprint": False,
+ },
+ **apex_manifest_test_attr
+)
+
+apex_manifest_global_min_sdk_override_tiramisu_test = analysistest.make(
+ config_settings = {
+ "@//build/bazel/rules/apex:apex_global_min_sdk_version_override": "Tiramisu",
+ "@//build/bazel/rules/apex:unbundled_build_target_sdk_with_api_fingerprint": False,
+ },
+ **apex_manifest_test_attr
+)
+
+def _test_apex_manifest():
+ name = "apex_manifest"
+ test_name = name + "_test"
+
+ test_apex(name = name)
+
+ apex_manifest_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_apex_manifest_min_sdk_version():
+ name = "apex_manifest_min_sdk_version"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ min_sdk_version = "30",
+ )
+
+ apex_manifest_test(
+ name = test_name,
+ target_under_test = name,
+ expected_min_sdk_version = "30",
+ )
+
+ return test_name
+
+def _test_apex_manifest_min_sdk_version_current():
+ name = "apex_manifest_min_sdk_version_current"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ min_sdk_version = "current",
+ )
+
+ # this test verifies min_sdk_version without use_api_fingerprint
+ apex_manifest_global_min_sdk_current_test(
+ name = test_name,
+ target_under_test = name,
+ expected_min_sdk_version = "10000",
+ )
+
+ return test_name
+
+def _test_apex_manifest_min_sdk_version_override():
+ name = "apex_manifest_min_sdk_version_override"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ min_sdk_version = "30",
+ )
+
+ # this test verifies min_sdk_version without use_api_fingerprint
+ apex_manifest_global_min_sdk_override_tiramisu_test(
+ name = test_name,
+ target_under_test = name,
+ expected_min_sdk_version = "33", # overriden to 33
+ )
+
+ return test_name
+
+def _apex_native_libs_requires_provides_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(
+ env,
+ [t.label for t in ctx.attr.requires_native_libs], # expected
+ target_under_test[ApexInfo].requires_native_libs, # actual
+ "did not get expected requires_native_libs",
+ )
+ asserts.equals(
+ env,
+ [t.label for t in ctx.attr.provides_native_libs],
+ target_under_test[ApexInfo].provides_native_libs,
+ "did not get expected provides_native_libs",
+ )
+ asserts.equals(
+ env,
+ ctx.attr.make_modules_to_install,
+ target_under_test[ApexMkInfo].make_modules_to_install,
+ "did not get expected make_modules_to_install",
+ )
+
+ # Compare the argv of the jsonmodify action that updates the apex
+ # manifest with information about provided and required libs.
+ actions = analysistest.target_actions(env)
+ action = [a for a in actions if a.mnemonic == "ApexManifestModify"][0]
+ requires_argv_index = action.argv.index("requireNativeLibs") + 1
+ provides_argv_index = action.argv.index("provideNativeLibs") + 1
+
+ for idx, requires in enumerate(ctx.attr.requires_native_libs):
+ asserts.equals(
+ env,
+ requires.label.name + ".so", # expected
+ action.argv[requires_argv_index + idx], # actual
+ )
+
+ for idx, provides in enumerate(ctx.attr.provides_native_libs):
+ asserts.equals(
+ env,
+ provides.label.name + ".so",
+ action.argv[provides_argv_index + idx],
+ )
+
+ return analysistest.end(env)
+
+apex_native_libs_requires_provides_test = analysistest.make(
+ _apex_native_libs_requires_provides_test,
+ attrs = {
+ "make_modules_to_install": attr.string_list(doc = "make module names that should be installed to system"),
+ "provides_argv": attr.string_list(),
+ "provides_native_libs": attr.label_list(doc = "bazel target names of libs provided for dynamic linking"),
+ "requires_argv": attr.string_list(),
+ "requires_native_libs": attr.label_list(doc = "bazel target names of libs required for dynamic linking"),
+ },
+)
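+ # In these tests, requires_native_libs are libraries the apex links against
+ # via stubs and expects to be provided outside the apex at runtime, while
+ # provides_native_libs are the apex's own stub-providing libraries. Both are
+ # recorded in the apex manifest as "<name>.so" entries under
+ # requireNativeLibs/provideNativeLibs, which the ApexManifestModify argv
+ # checks above verify.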
+
+def _test_apex_manifest_dependencies_nodep():
+ name = "apex_manifest_dependencies_nodep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_nodep",
+ stl = "none",
+ system_dynamic_deps = [],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_nodep"],
+ native_shared_libs_64 = [name + "_lib_nodep"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [],
+ provides_native_libs = [],
+ make_modules_to_install = [],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_cc_library_shared_bionic_deps():
+ name = "apex_manifest_dependencies_cc_library_shared_bionic_deps"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib",
+ # implicit bionic system_dynamic_deps
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib"],
+ native_shared_libs_64 = [name + "_lib"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [
+ "//bionic/libc",
+ "//bionic/libdl",
+ "//bionic/libm",
+ ],
+ provides_native_libs = [],
+ make_modules_to_install = [],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_cc_binary_bionic_deps():
+ name = "apex_manifest_dependencies_cc_binary_bionic_deps"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name + "_bin",
+ # implicit bionic system_deps
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bin"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [
+ "//bionic/libc",
+ "//bionic/libdl",
+ "//bionic/libm",
+ ],
+ provides_native_libs = [],
+ make_modules_to_install = [],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_requires():
+ name = "apex_manifest_dependencies_requires"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_with_dep",
+ system_dynamic_deps = [],
+ stl = "none",
+ implementation_dynamic_deps = select({
+ "//build/bazel/rules/apex:android-in_apex": [name + "_libfoo_stub_libs_current"],
+ "//build/bazel/rules/apex:android-non_apex": [name + "_libfoo"],
+ }),
+ tags = ["manual"],
+ stubs_symbol_file = name + "_lib_with_dep" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_lib_with_dep_map_txt",
+ outs = [name + "_lib_with_dep.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_lib_with_dep_stub_libs",
+ soname = name + "_lib_with_dep.so",
+ source_library_label = ":" + name + "_lib_with_dep",
+ symbol_file = name + "_lib_with_dep.map.txt",
+ versions = ["30"],
+ )
+
+ cc_library_shared(
+ name = name + "_libfoo",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libfoo" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_libfoo_map_txt",
+ outs = [name + "_libfoo.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_libfoo_stub_libs",
+ soname = name + "_libfoo.so",
+ source_library_label = ":" + name + "_libfoo",
+ symbol_file = name + "_libfoo.map.txt",
+ versions = ["30"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_with_dep"],
+ native_shared_libs_64 = [name + "_lib_with_dep"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [name + "_libfoo"],
+ provides_native_libs = [name + "_lib_with_dep"],
+ make_modules_to_install = [name + "_libfoo"],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_provides():
+ name = "apex_manifest_dependencies_provides"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_libfoo",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libfoo" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_libfoo_map_txt",
+ outs = [name + "_libfoo.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_libfoo_stub_libs",
+ soname = name + "_libfoo.so",
+ source_library_label = ":" + name + "_libfoo",
+ symbol_file = name + "_libfoo.map.txt",
+ versions = ["30"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_libfoo"],
+ native_shared_libs_64 = [name + "_libfoo"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [],
+ provides_native_libs = [name + "_libfoo"],
+ make_modules_to_install = [],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_selfcontained():
+ name = "apex_manifest_dependencies_selfcontained"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_with_dep",
+ system_dynamic_deps = [],
+ stl = "none",
+ implementation_dynamic_deps = select({
+ "//build/bazel/rules/apex:android-in_apex": [name + "_libfoo_stub_libs_current"],
+ "//build/bazel/rules/apex:android-non_apex": [name + "_libfoo"],
+ }),
+ tags = ["manual"],
+ stubs_symbol_file = name + "_lib_with_dep" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_lib-with_dep_map_txt",
+ outs = [name + "_lib_with_dep.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_lib_with_dep_stub_libs",
+ soname = name + "_lib_with_dep.so",
+ source_library_label = ":" + name + "_lib_with_dep",
+ symbol_file = name + "_lib_with_dep.map.txt",
+ versions = ["30"],
+ )
+
+ cc_library_shared(
+ name = name + "_libfoo",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libfoo" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_libfoo_map_txt",
+ outs = [name + "_libfoo.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_libfoo_stub_libs",
+ soname = name + "_libfoo.so",
+ source_library_label = ":" + name + "_libfoo",
+ symbol_file = name + "_libfoo.map.txt",
+ versions = ["30"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [
+ name + "_lib_with_dep",
+ name + "_libfoo",
+ ],
+ native_shared_libs_64 = [
+ name + "_lib_with_dep",
+ name + "_libfoo",
+ ],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [],
+ provides_native_libs = [
+ name + "_lib_with_dep",
+ name + "_libfoo",
+ ],
+ make_modules_to_install = [],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+
+ return test_name
+
+def _test_apex_manifest_dependencies_cc_binary():
+ name = "apex_manifest_dependencies_cc_binary"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name + "_bin",
+ stl = "none",
+ system_deps = [],
+ dynamic_deps = [
+ name + "_lib_with_dep",
+ ] + select({
+ "//build/bazel/rules/apex:android-in_apex": [name + "_librequires2_stub_libs_current"],
+ "//build/bazel/rules/apex:android-non_apex": [name + "_librequires2"],
+ }),
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib_with_dep",
+ system_dynamic_deps = [],
+ stl = "none",
+ implementation_dynamic_deps = select({
+ "//build/bazel/rules/apex:android-in_apex": [name + "_librequires_stub_libs_current"],
+ "//build/bazel/rules/apex:android-non_apex": [name + "_librequires"],
+ }),
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_librequires",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_librequires" + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_librequires_map_txt",
+ outs = [name + "_librequires.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_librequires_stub_libs",
+ soname = name + "_librequires.so",
+ source_library_label = ":" + name + "_librequires",
+ symbol_file = name + "_librequires.map.txt",
+ versions = ["30"],
+ )
+
+ cc_library_shared(
+ name = name + "_librequires2",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_librequires2.map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_librequires2_map_txt",
+ outs = [name + "_librequires2.map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_librequires2_stub_libs",
+ soname = name + "_librequires2.so",
+ source_library_label = ":" + name + "_librequires2",
+ symbol_file = name + "_librequires2.map.txt",
+ versions = ["30"],
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bin"],
+ )
+
+ apex_native_libs_requires_provides_test(
+ name = test_name,
+ target_under_test = name,
+ requires_native_libs = [
+ name + "_librequires",
+ name + "_librequires2",
+ ],
+ make_modules_to_install = [
+ name + "_librequires",
+ name + "_librequires2",
+ ],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+
+ return test_name
+
+def _action_args_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ action = [a for a in actions if a.mnemonic == ctx.attr.action_mnemonic][0]
+ argv = action.argv[:-1] + action.argv[-1].split(" ")
+ flag_idx = argv.index(ctx.attr.expected_args[0])
+
+ for i, expected_arg in enumerate(ctx.attr.expected_args):
+ asserts.equals(
+ env,
+ expected_arg,
+ argv[flag_idx + i],
+ )
+
+ return analysistest.end(env)
+
+_action_args_test_attrs = {
+ "action_mnemonic": attr.string(mandatory = True),
+ "expected_args": attr.string_list(mandatory = True),
+}
+
+action_args_test = analysistest.make(
+ _action_args_test,
+ attrs = _action_args_test_attrs,
+)
+
+def _test_logging_parent_flag():
+ name = "logging_parent"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ logging_parent = "logging.parent",
+ )
+
+ action_args_test(
+ name = test_name,
+ target_under_test = name,
+ action_mnemonic = "Apexer",
+ expected_args = [
+ "--logging_parent",
+ "logging.parent",
+ ],
+ )
+
+ return test_name
+
+def _test_default_apex_manifest_version():
+ name = "default_apex_manifest_version"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ action_args_test(
+ name = test_name,
+ target_under_test = name,
+ action_mnemonic = "ApexManifestModify",
+ expected_args = [
+ "-se",
+ "version",
+ "0",
+ str(default_manifest_version),
+ ],
+ )
+
+ return test_name
+
+action_args_with_overrides_test = analysistest.make(
+ _action_args_test,
+ attrs = _action_args_test_attrs,
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_with_overrides_and_app_cert",
+ },
+)
+
+def _test_package_name():
+ name = "package_name"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ package_name = "my.package.name",
+ )
+
+ action_args_test(
+ name = test_name,
+ target_under_test = name,
+ action_mnemonic = "Apexer",
+ expected_args = [
+ "--override_apk_package_name",
+ "my.package.name",
+ ],
+ )
+
+ return test_name
+
+def _test_package_name_override_from_config():
+ name = "package_name_override_from_config"
+ test_name = name + "_test"
+
+ test_apex(name = name)
+
+ action_args_with_overrides_test(
+ name = test_name,
+ target_under_test = name,
+ action_mnemonic = "Apexer",
+ expected_args = [
+ "--override_apk_package_name",
+ "another.package",
+ ],
+ )
+
+ return test_name
+
+action_args_with_override_apex_manifest_default_version_test = analysistest.make(
+ _action_args_test,
+ attrs = _action_args_test_attrs,
+ # Wouldn't it be nice if it were possible to set the config_settings from the test callsite...
+ config_settings = {
+ "@//build/bazel/rules/apex:override_apex_manifest_default_version": "1234567890",
+ },
+)
+
+def _test_override_apex_manifest_version():
+ name = "override_apex_manifest_version"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ action_args_with_override_apex_manifest_default_version_test(
+ name = test_name,
+ target_under_test = name,
+ action_mnemonic = "ApexManifestModify",
+ expected_args = [
+ "-se",
+ "version",
+ "0",
+ "1234567890",
+ ],
+ )
+
+ return test_name
+
+def _file_contexts_args_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ file_contexts_action = [a for a in actions if a.mnemonic == "GenerateApexFileContexts"][0]
+
+ # GenerateApexFileContexts is a run_shell action.
+ # ["/bin/bash", "c", "<args>"]
+ cmd = file_contexts_action.argv[2]
+
+ for expected_arg in ctx.attr.expected_args:
+ asserts.true(
+ env,
+ expected_arg in cmd,
+ "failed to find '%s' in '%s'" % (expected_arg, cmd),
+ )
+
+ return analysistest.end(env)
+
+file_contexts_args_test = analysistest.make(
+ _file_contexts_args_test,
+ attrs = {
+ "expected_args": attr.string_list(mandatory = True),
+ },
+)
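+ # file_contexts entries map a path regex to an SELinux label, e.g.
+ # "/apex_manifest\.pb u:object_r:system_file:s0". The test only checks that
+ # each expected entry appears somewhere in the generated shell command.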
+
+def _test_generate_file_contexts():
+ name = "apex_manifest_pb_file_contexts"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ file_contexts_args_test(
+ name = test_name,
+ target_under_test = name,
+ expected_args = [
+ "/apex_manifest\\\\.pb u:object_r:system_file:s0",
+ "/ u:object_r:system_file:s0",
+ ],
+ )
+
+ return test_name
+
+def _min_sdk_version_failure_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ asserts.expect_failure(
+ env,
+ "min_sdk_version %s cannot be lower than the dep's min_sdk_version %s" %
+ (ctx.attr.apex_min, ctx.attr.dep_min),
+ )
+
+ return analysistest.end(env)
+
+min_sdk_version_failure_test = analysistest.make(
+ _min_sdk_version_failure_test_impl,
+ expect_failure = True,
+ attrs = {
+ "apex_min": attr.string(),
+ "dep_min": attr.string(),
+ },
+)
+
+def _test_min_sdk_version_failure():
+ name = "min_sdk_version_failure"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ min_sdk_version = "32",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ min_sdk_version = "30",
+ )
+
+ min_sdk_version_failure_test(
+ name = test_name,
+ target_under_test = name,
+ apex_min = "30",
+ dep_min = "32",
+ )
+
+ return test_name
+
+def _test_min_sdk_version_failure_transitive():
+ name = "min_sdk_version_failure_transitive"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ dynamic_deps = [name + "_lib2_cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = ["manual"],
+ min_sdk_version = "32",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ min_sdk_version = "30",
+ )
+
+ min_sdk_version_failure_test(
+ name = test_name,
+ target_under_test = name,
+ apex_min = "30",
+ dep_min = "32",
+ )
+
+ return test_name
+
+def _apex_certificate_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ container_key_info = target_under_test[ApexInfo].container_key_info
+
+ asserts.equals(env, ctx.attr.expected_pem_path, container_key_info.pem.path)
+ asserts.equals(env, ctx.attr.expected_pk8_path, container_key_info.pk8.path)
+
+ return analysistest.end(env)
+
+apex_certificate_test = analysistest.make(
+ _apex_certificate_test,
+ attrs = {
+ "expected_pem_path": attr.string(),
+ "expected_pk8_path": attr.string(),
+ },
+)
+
+apex_certificate_with_overrides_test = analysistest.make(
+ _apex_certificate_test,
+ attrs = {
+ "expected_pem_path": attr.string(),
+ "expected_pk8_path": attr.string(),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_with_overrides_and_app_cert",
+ },
+)
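+ # Certificate resolution, as exercised by the tests below: no certificate
+ # falls back to the default testkey under build/make/target/product/security,
+ # certificate_name picks a differently named key from that same directory, an
+ # explicit android_app_certificate label is used as-is, and a product-level
+ # override (the *_with_overrides_and_app_cert platform) replaces the
+ # certificate regardless of what the target declares.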
+
+def _test_apex_certificate_none():
+ name = "apex_certificate_none"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ certificate = None,
+ )
+
+ apex_certificate_test(
+ name = test_name,
+ target_under_test = name,
+ expected_pem_path = "build/make/target/product/security/testkey.x509.pem",
+ expected_pk8_path = "build/make/target/product/security/testkey.pk8",
+ )
+
+ return test_name
+
+def _test_apex_certificate_name():
+ name = "apex_certificate_name"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ certificate = None,
+ certificate_name = "shared", # use something other than testkey
+ )
+
+ apex_certificate_test(
+ name = test_name,
+ target_under_test = name,
+ expected_pem_path = "build/make/target/product/security/shared.x509.pem",
+ expected_pk8_path = "build/make/target/product/security/shared.pk8",
+ )
+
+ return test_name
+
+def _test_apex_certificate_label():
+ name = "apex_certificate_label"
+ test_name = name + "_test"
+
+ android_app_certificate(
+ name = name + "_cert",
+ certificate = name,
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ certificate = name + "_cert",
+ )
+
+ apex_certificate_test(
+ name = test_name,
+ target_under_test = name,
+ expected_pem_path = "build/bazel/rules/apex/apex_certificate_label.x509.pem",
+ expected_pk8_path = "build/bazel/rules/apex/apex_certificate_label.pk8",
+ )
+
+ return test_name
+
+def _test_apex_certificate_label_with_overrides():
+ name = "apex_certificate_label_with_overrides"
+ test_name = name + "_test"
+
+ android_app_certificate(
+ name = name + "_cert",
+ certificate = name,
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ certificate = name + "_cert",
+ )
+
+ apex_certificate_with_overrides_test(
+ name = test_name,
+ target_under_test = name,
+ expected_pem_path = "build/bazel/rules/apex/testdata/another.x509.pem",
+ expected_pk8_path = "build/bazel/rules/apex/testdata/another.pk8",
+ )
+
+ return test_name
+
+def _min_sdk_version_apex_inherit_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ argv = target_under_test[ActionArgsInfo].argv
+
+ found = False
+ for arg in argv:
+ if arg.startswith("--target="):
+ found = True
+ asserts.true(
+ env,
+ arg.endswith(ctx.attr.apex_min),
+ "Incorrect --target flag: %s %s" % (arg, ctx.attr.apex_min),
+ )
+
+ asserts.true(
+ env,
+ found,
+ "No --target flag found: %s" % argv,
+ )
+
+ return analysistest.end(env)
+
+def _feature_check_aspect_impl(target, ctx):
+ rules_propagate_src = [
+ "_bssl_hash_injection",
+ "stripped_shared_library",
+ "versioned_shared_library",
+ ]
+
+ argv = []
+ if ctx.rule.kind == "cc_shared_library" and target.label.name == ctx.attr.cc_target:
+ link_actions = [a for a in target.actions if a.mnemonic == "CppLink"]
+ argv = link_actions[0].argv
+ elif ctx.rule.kind in rules_propagate_src and hasattr(ctx.rule.attr, "src"):
+ argv = ctx.rule.attr.src[ActionArgsInfo].argv
+ elif ctx.rule.kind == "_cc_library_shared_proxy" and hasattr(ctx.rule.attr, "shared"):
+ argv = ctx.rule.attr.shared[0][ActionArgsInfo].argv
+ elif ctx.rule.kind == "_apex" and hasattr(ctx.rule.attr, "native_shared_libs_32"):
+ argv = ctx.rule.attr.native_shared_libs_32[0][ActionArgsInfo].argv
+
+ return [
+ ActionArgsInfo(
+ argv = argv,
+ ),
+ ]
+
+feature_check_aspect = aspect(
+ implementation = _feature_check_aspect_impl,
+ attrs = {
+ "cc_target": attr.string(values = [
+ # This has to mirror the test impl library names
+ "min_sdk_version_apex_inherit_lib_cc_unstripped",
+ "min_sdk_version_apex_inherit_override_min_sdk_tiramisu_lib_cc_unstripped",
+ ]),
+ },
+ attr_aspects = ["native_shared_libs_32", "shared", "src"],
+)
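+ # The aspect propagates along native_shared_libs_32 -> shared -> src so it
+ # can reach the unstripped cc_shared_library beneath the apex and surface its
+ # CppLink argv, where the --target flag carrying the effective
+ # min_sdk_version appears.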
+
+min_sdk_version_apex_inherit_test_attrs = dict(
+ impl = _min_sdk_version_apex_inherit_test_impl,
+ attrs = {
+ "apex_min": attr.string(),
+ "cc_target": attr.string(),
+ },
+ # We need an aspect to examine the actions of the apex target's dependencies
+ # as they are configured after the transition; checking the dependencies
+ # directly by name would give the info from before the transition takes effect.
+ extra_target_under_test_aspects = [feature_check_aspect],
+)
+
+min_sdk_version_apex_inherit_test = analysistest.make(
+ **min_sdk_version_apex_inherit_test_attrs
+)
+
+min_sdk_version_apex_inherit_override_min_sdk_tiramisu_test = analysistest.make(
+ config_settings = {
+ "@//build/bazel/rules/apex:apex_global_min_sdk_version_override": "Tiramisu",
+ },
+ **min_sdk_version_apex_inherit_test_attrs
+)
+
+def _test_min_sdk_version_apex_inherit():
+ name = "min_sdk_version_apex_inherit"
+ test_name = name + "_test"
+ cc_name = name + "_lib_cc"
+ apex_min = "29"
+
+ cc_library_shared(
+ name = cc_name,
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ min_sdk_version = "apex_inherit",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [cc_name],
+ min_sdk_version = apex_min,
+ )
+
+ min_sdk_version_apex_inherit_test(
+ name = test_name,
+ target_under_test = name,
+ apex_min = apex_min,
+ cc_target = cc_name + "_unstripped",
+ )
+
+ return test_name
+
+def _test_min_sdk_version_apex_inherit_override_min_sdk_tiramisu():
+ name = "min_sdk_version_apex_inherit_override_min_sdk_tiramisu"
+ test_name = name + "_test"
+ cc_name = name + "_lib_cc"
+
+ cc_library_shared(
+ name = cc_name,
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ min_sdk_version = "apex_inherit",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [cc_name],
+ min_sdk_version = "29",
+ )
+
+ min_sdk_version_apex_inherit_override_min_sdk_tiramisu_test(
+ name = test_name,
+ target_under_test = name,
+ apex_min = "33", # the apex transition forced the apex min_sdk_version to be 33
+ cc_target = cc_name + "_unstripped",
+ )
+
+ return test_name
+
+def _apex_provides_base_zip_files_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+
+ # The particular name of the file isn't important as it just gets zipped with the other apex files for other architectures
+ asserts.true(
+ env,
+ target_under_test[ApexInfo].base_file != None,
+ "Expected base_file to exist, but found None %s" % target_under_test[ApexInfo].base_file,
+ )
+
+ asserts.equals(
+ env,
+ target_under_test[ApexInfo].base_with_config_zip.basename,
+ # name is important here because the file gets disted and then referenced by name
+ ctx.attr.apex_name + ".apex-base.zip",
+ "Expected base file with config zip to have name ending with , but found %s" % target_under_test[ApexInfo].base_with_config_zip.basename,
+ )
+
+ return analysistest.end(env)
+
+apex_provides_base_zip_files_test = analysistest.make(
+ _apex_provides_base_zip_files_test_impl,
+ attrs = {
+ "apex_name": attr.string(),
+ },
+)
+
+def _test_apex_provides_base_zip_files():
+ name = "apex_provides_base_zip_files"
+ test_name = name + "_test"
+
+ test_apex(name = name)
+
+ apex_provides_base_zip_files_test(
+ name = test_name,
+ target_under_test = name,
+ apex_name = name,
+ )
+
+ return test_name
+
+def _apex_testonly_with_manifest_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "Apexer"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No apexer action found: %s" % actions,
+ )
+ argv = actions[0].argv
+
+ asserts.false(
+ env,
+ "--test_only" in argv,
+ "Calling apexer with --test_only when manifest file is specified: %s" % argv,
+ )
+
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "MarkAndroidManifestTestOnly"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No MarkAndroidManifestTestOnly action found: %s" % actions,
+ )
+ argv = actions[0].argv
+
+ asserts.true(
+ env,
+ "--test-only" in argv,
+ "Calling manifest_fixer without --test-only: %s" % argv,
+ )
+
+ return analysistest.end(env)
+
+apex_testonly_with_manifest_test = analysistest.make(
+ _apex_testonly_with_manifest_test_impl,
+)
+
+def _test_apex_testonly_with_manifest():
+ name = "apex_testonly_with_manifest"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ min_sdk_version = "32",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ # This will not cause the validation failure because it is testonly.
+ min_sdk_version = "30",
+ testonly = True,
+ tests = [name + "_cc_test"],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ # It shouldn't complain that the dep's min_sdk_version is too low.
+ apex_testonly_with_manifest_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_testonly_without_manifest_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "Apexer"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No apexer action found: %s" % actions,
+ )
+ argv = actions[0].argv[:-1] + actions[0].argv[-1].split(" ")
+
+ asserts.true(
+ env,
+ "--test_only" in argv,
+ "Calling apexer without --test_only when manifest file is not specified: %s" % argv,
+ )
+
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "MarkAndroidManifestTestOnly"]
+ asserts.true(
+ env,
+ len(actions) == 0,
+ "MarkAndroidManifestTestOnly shouldn't be called when manifest file is not specified: %s" % actions,
+ )
+
+ return analysistest.end(env)
+
+apex_testonly_without_manifest_test = analysistest.make(
+ _apex_testonly_without_manifest_test_impl,
+)
+
+def _test_apex_testonly_without_manifest():
+ name = "apex_testonly_without_manifest"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ testonly = True,
+ )
+
+ apex_testonly_without_manifest_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_backing_file_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "FileWrite" and a.outputs.to_list()[0].basename.endswith("_backing.txt")]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No FileWrite action found for creating <apex>_backing.txt file: %s" % actions,
+ )
+
+ asserts.equals(env, ctx.attr.expected_content, actions[0].content)
+ return analysistest.end(env)
+
+apex_backing_file_test = analysistest.make(
+ _apex_backing_file_test,
+ attrs = {
+ "expected_content": attr.string(),
+ },
+)
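+ # Here the backing file is a single space-separated, newline-terminated line
+ # listing the libraries bundled in the apex, as checked by expected_content
+ # below.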
+
+def _test_apex_backing_file():
+ name = "apex_backing_file"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ apex_backing_file_test(
+ name = test_name,
+ target_under_test = name,
+ expected_content = "apex_backing_file_lib_cc.so libc++.so\n",
+ )
+
+ return test_name
+
+def _apex_installed_files_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "GenerateApexInstalledFileList"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No GenerateApexInstalledFileList action found for creating <apex>-installed-files.txt file: %s" % actions,
+ )
+
+ asserts.equals(
+ env,
+ len(ctx.attr.expected_inputs),
+ len(actions[0].inputs.to_list()),
+ "Expected inputs length: %d, actual inputs length: %d" % (len(ctx.attr.expected_inputs), len(actions[0].inputs.to_list())),
+ )
+ for file in actions[0].inputs.to_list():
+ asserts.true(
+ env,
+ file.basename in ctx.attr.expected_inputs,
+ "Unexpected input: %s" % file.basename,
+ )
+ asserts.equals(env, ctx.attr.expected_output, actions[0].outputs.to_list()[0].basename)
+ return analysistest.end(env)
+
+apex_installed_files_test = analysistest.make(
+ _apex_installed_files_test,
+ attrs = {
+ "expected_inputs": attr.string_list(),
+ "expected_output": attr.string(),
+ },
+)
+
+def _test_apex_installed_files():
+ name = "apex_installed_files"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ apex_installed_files_test(
+ name = test_name,
+ target_under_test = name,
+ expected_inputs = ["libc++.so", "apex_installed_files_lib_cc.so"],
+ expected_output = "apex_installed_files-installed-files.txt",
+ )
+
+ return test_name
+
+def _apex_symbols_used_by_apex_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actual = target_under_test[ApexInfo].symbols_used_by_apex
+
+ asserts.equals(env, ctx.attr.expected_path, actual.short_path)
+
+ return analysistest.end(env)
+
+apex_symbols_used_by_apex_test = analysistest.make(
+ _apex_symbols_used_by_apex_test,
+ attrs = {
+ "expected_path": attr.string(),
+ },
+)
+
+def _test_apex_symbols_used_by_apex():
+ name = "apex_with_symbols_used_by_apex"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ apex_symbols_used_by_apex_test(
+ name = test_name,
+ target_under_test = name,
+ expected_path = "build/bazel/rules/apex/apex_with_symbols_used_by_apex_using.txt",
+ )
+
+ return test_name
+
+def _apex_java_symbols_used_by_apex_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actual = target_under_test[ApexInfo].java_symbols_used_by_apex
+
+ asserts.equals(env, ctx.attr.expected_path, actual.short_path)
+
+ return analysistest.end(env)
+
+apex_java_symbols_used_by_apex_test = analysistest.make(
+ _apex_java_symbols_used_by_apex_test,
+ attrs = {
+ "expected_path": attr.string(),
+ },
+)
+
+def _test_apex_java_symbols_used_by_apex():
+ name = "apex_with_java_symbols_used_by_apex"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ apex_java_symbols_used_by_apex_test(
+ name = test_name,
+ target_under_test = name,
+ expected_path = "build/bazel/rules/apex/apex_with_java_symbols_used_by_apex_using.xml",
+ )
+
+ return test_name
+
+def _generate_notice_file_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "GenerateNoticeFile"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "apex target should have a single GenerateNoticeFile action, found %s" % actions,
+ )
+ input_json = [f for f in actions[0].inputs.to_list() if f.basename.endswith("_licenses.json")]
+ asserts.true(
+ env,
+ len(input_json) == 1,
+ "apex GenerateNoticeFile should have a single input *_license.json file, got %s" % input_json,
+ )
+ outs = actions[0].outputs.to_list()
+ asserts.true(
+ env,
+ len(outs) == 1 and outs[0].basename == "NOTICE.html.gz",
+ "apex GenerateNoticeFile should generate a single NOTICE.html.gz file, got %s" % [o.short_path for o in outs],
+ )
+ return analysistest.end(env)
+
+apex_generate_notice_file_test = analysistest.make(_generate_notice_file_test)
+
+def _test_apex_generate_notice_file():
+ name = "apex_notice_file"
+ test_name = name + "_test"
+ test_apex(name = name)
+ apex_generate_notice_file_test(name = test_name, target_under_test = name)
+ return test_name
+
+def _analysis_success_test(ctx):
+ env = analysistest.begin(ctx)
+
+ # An empty analysis test that just ensures the target_under_test can be analyzed.
+ return analysistest.end(env)
+
+analysis_success_test = analysistest.make(_analysis_success_test)
+
+def _test_apex_available():
+ name = "apex_available"
+ test_name = name + "_test"
+ static_lib_name = name + "_lib_cc_static"
+ lib_headers_name = name + "_lib_cc_headers"
+
+ cc_library_static(
+ name = static_lib_name,
+ srcs = ["src.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # anyapex.
+ "apex_available=//apex_available:anyapex",
+ ],
+ )
+ cc_library_headers(
+ name = lib_headers_name,
+ absolute_includes = ["include_dir"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=//apex_available:anyapex",
+ ],
+ )
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ deps = [
+ static_lib_name,
+ lib_headers_name,
+ ],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # Explicit name.
+ "apex_available=" + name,
+ ],
+ )
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # anyapex.
+ "apex_available=//apex_available:anyapex",
+ ],
+ )
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [
+ name + "_lib_cc",
+ name + "_lib2_cc",
+ ],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ analysis_success_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_apex_available_failure():
+ name = "apex_available_failure"
+ test_name = name + "_test"
+ static_lib_name = name + "_lib_cc_static"
+ lib_headers_name = name + "_lib_cc_headers"
+
+ cc_library_static(
+ name = static_lib_name,
+ srcs = ["src.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ ],
+ )
+ cc_library_headers(
+ name = lib_headers_name,
+ absolute_includes = ["include_dir"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ ],
+ )
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ deps = [
+ static_lib_name,
+ lib_headers_name,
+ ],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ ],
+ )
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # anyapex.
+ "apex_available=//apex_available:anyapex",
+ ],
+ )
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [
+ name + "_lib_cc",
+ name + "_lib2_cc",
+ ],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ expect_failure_test(
+ name = test_name,
+ target_under_test = name,
+ failure_message = """
+Error in fail: `@//build/bazel/rules/apex:apex_available_failure` apex has transitive dependencies that do not include the apex in their apex_available tags:
+ @//build/bazel/rules/apex:apex_available_failure_lib_cc_static; apex_available tags: []
+ @//build/bazel/rules/apex:apex_available_failure_lib_cc_headers; apex_available tags: []
+ @//build/bazel/rules/apex:apex_available_failure_lib_cc; apex_available tags: []""",
+ )
+ return test_name
+
+def _test_apex_available_with_base_apex():
+ name = "apex_available_with_base_apex"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # Explicit name.
+ "apex_available=" + name + "_base",
+ ],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ # anyapex.
+ "apex_available=//apex_available:anyapex",
+ ],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [
+ name + "_lib_cc",
+ name + "_lib2_cc",
+ ],
+ base_apex_name = name + "_base",
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ analysis_success_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_deps_validation_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ target_under_test = analysistest.target_under_test(env)
+ asserts.new_set_equals(
+ env,
+ sets.make(ctx.attr.allowed_deps_manifest + ctx.attr._default_apex_deps),
+ sets.make(apex_dep_infos_to_allowlist_strings(
+ target_under_test[ApexDepsInfo].transitive_deps.to_list(),
+ )),
+ )
+
+ return analysistest.end(env)
+
+_apex_deps_validation_test = analysistest.make(
+ _apex_deps_validation_test_impl,
+ attrs = {
+ "allowed_deps_manifest": attr.string_list(),
+ "_default_apex_deps": attr.string_list(
+ default = [
+ "libc_llndk_headers(minSdkVersion:apex_inherit)",
+ "libc_headers(minSdkVersion:apex_inherit)",
+ "libc++abi(minSdkVersion:apex_inherit)",
+ "libc++_static(minSdkVersion:apex_inherit)",
+ "libc++(minSdkVersion:apex_inherit)",
+ "libc++demangle(minSdkVersion:apex_inherit)",
+ ],
+ ),
+ },
+ config_settings = {
+ "@//build/bazel/rules/apex:unsafe_disable_apex_allowed_deps_check": True,
+ },
+)
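+ # allowed_deps_manifest entries use the allowed-deps list format
+ # "<module>(minSdkVersion:<version>)", the same strings produced by
+ # apex_dep_infos_to_allowlist_strings above.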
+
+def _test_apex_deps_validation():
+ name = "apex_deps_validation"
+ test_name = name + "_test"
+
+ aidl_interface_name = name + "_aidl_interface"
+ aidl_interface(
+ name = aidl_interface_name,
+ ndk_config = {
+ "enabled": True,
+ "min_sdk_version": "28",
+ },
+ srcs = ["Foo.aidl"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=" + name,
+ "apex_available=//apex_available:platform",
+ ],
+ )
+
+ specific_apex_available_name = name + "_specific_apex_available"
+ cc_library_shared(
+ name = specific_apex_available_name,
+ srcs = [name + "_lib.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=" + name,
+ "apex_available=//apex_available:platform",
+ ],
+ min_sdk_version = "30",
+ )
+
+ any_apex_available_name = name + "_any_apex_available"
+ cc_library_shared(
+ name = any_apex_available_name,
+ srcs = [name + "_lib.cc"],
+ implementation_dynamic_deps = [aidl_interface_name + "-V1-ndk"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=//apex_available:anyapex",
+ "apex_available=//apex_available:platform",
+ ],
+ min_sdk_version = "30",
+ )
+
+ no_platform_available_name = name + "_no_platform_available"
+ cc_library_shared(
+ name = no_platform_available_name,
+ srcs = [name + "_lib.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=//apex_available:anyapex",
+ ],
+ min_sdk_version = "30",
+ )
+
+ no_platform_available_transitive_dep_name = name + "_no_platform_available_transitive_dep"
+ cc_library_shared(
+ name = no_platform_available_transitive_dep_name,
+ srcs = [name + "_lib.cc"],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=//apex_available:anyapex",
+ ],
+ min_sdk_version = "30",
+ )
+
+ platform_available_but_dep_with_no_platform_available_name = name + "_shared_platform_available_but_dep_with_no_platform_available"
+ cc_library_shared(
+ name = platform_available_but_dep_with_no_platform_available_name,
+ srcs = [name + "_lib.cc"],
+ deps = [no_platform_available_transitive_dep_name],
+ tags = [
+ "manual",
+ "apex_available_checked_manual_for_testing",
+ "apex_available=//apex_available:anyapex",
+ "apex_available=//apex_available:platform",
+ ],
+ min_sdk_version = "30",
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [
+ specific_apex_available_name,
+ any_apex_available_name,
+ no_platform_available_name,
+ platform_available_but_dep_with_no_platform_available_name,
+ ],
+ android_manifest = "AndroidManifest.xml",
+ min_sdk_version = "30",
+ )
+
+ _apex_deps_validation_test(
+ name = test_name,
+ target_under_test = name,
+ allowed_deps_manifest = [
+ specific_apex_available_name + "(minSdkVersion:30)",
+ any_apex_available_name + "(minSdkVersion:30)",
+ platform_available_but_dep_with_no_platform_available_name + "(minSdkVersion:30)",
+ aidl_interface_name + "-V1-ndk(minSdkVersion:28)",
+ "jni_headers(minSdkVersion:29)",
+ ],
+ tags = ["manual"],
+ )
+
+ return test_name
+
+_MarchInfo = provider(fields = {"march": "list of march values found in the cc deps of this apex"})
+
+def _apex_transition_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ march_values = target_under_test[_MarchInfo].march
+
+ asserts.equals(env, ctx.attr.expected, march_values.to_list())
+
+ return analysistest.end(env)
+
+def _cc_compile_test_aspect_impl(target, ctx):
+ transitive_march = []
+ for attr_deps in get_dep_targets(ctx.rule.attr, predicate = lambda target: _MarchInfo in target).values():
+ for dep in attr_deps:
+ transitive_march.append(dep[_MarchInfo].march)
+ march_values = []
+ if (target.label.name).startswith("apex_transition_lib"):
+ for a in target.actions:
+ if a.mnemonic == "CppCompile":
+ march_values += [arg for arg in a.argv if "march" in arg]
+ return [
+ _MarchInfo(
+ march = depset(
+ direct = march_values,
+ transitive = transitive_march,
+ ),
+ ),
+ ]
+
+_cc_compile_test_aspect = aspect(
+ implementation = _cc_compile_test_aspect_impl,
+ attr_aspects = ["*"],
+)
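+ # The aspect walks every dependency edge ("*") and collects the -march flags
+ # from CppCompile actions of the apex_transition_lib* targets, letting the
+ # tests below assert which architecture the apex transition selected.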
+
+apex_transition_test = analysistest.make(
+ _apex_transition_test,
+ attrs = {
+ "expected": attr.string_list(),
+ },
+ extra_target_under_test_aspects = [_cc_compile_test_aspect],
+)
+
+def _test_apex_transition():
+ name = "apex_transition"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib2_cc",
+ srcs = [name + "_lib2.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ native_shared_libs_64 = [name + "_lib2_cc"],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ apex_transition_test(
+ name = test_name + "_32",
+ target_under_test = name,
+ target_compatible_with = ["//build/bazel/platforms/os:android", "//build/bazel/platforms/arch:arm"],
+ expected = ["-march=armv7-a"],
+ )
+
+ apex_transition_test(
+ name = test_name + "_64",
+ target_under_test = name,
+ target_compatible_with = ["//build/bazel/platforms/os:android", "//build/bazel/platforms/arch:arm64"],
+ expected = ["-march=armv8-a"],
+ )
+
+ return [test_name + "_32", test_name + "_64"]
+
+def _test_no_static_linking_for_stubs_lib():
+ name = "no_static_linking_for_stubs_lib"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = name + "_static_unavailable_to_apex",
+ tags = [
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ cc_library_shared(
+ name = name + "_shared",
+ deps = [name + "_static_unavailable_to_apex"],
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_shared"],
+ )
+
+ expect_failure_test(
+ name = test_name,
+ target_under_test = name,
+ failure_message = """
+Error in fail: `@//build/bazel/rules/apex:no_static_linking_for_stubs_lib` apex has transitive dependencies that do not include the apex in their apex_available tags:
+ @//build/bazel/rules/apex:no_static_linking_for_stubs_lib_static_unavailable_to_apex; apex_available tags: []""",
+ )
+
+ return test_name
+
+def _test_directly_included_stubs_lib_with_indirectly_static_variant():
+ name = "directly_included_stubs_lib_with_indirectly_static_variant"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name + "bar",
+ deps = [name + "_shared_bp2build_cc_library_static"],
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ cc_library_shared(
+ name = name + "foo",
+ deps = [name + "_shared_bp2build_cc_library_static"],
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ # This target is not available to the apex, but it is still allowed to be
+ # required by cc_binary bar and cc_library_shared foo because its shared
+ # variant is included directly in the apex.
+ cc_library_static(
+ name = name + "_shared_bp2build_cc_library_static",
+ tags = [
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ cc_library_shared(
+ name = name + "_shared",
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_shared", name + "foo"],
+ binaries = [name + "bar"],
+ )
+
+ target_under_test_exist_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def cc_library_shared_with_stubs(name):
+ cc_library_shared(
+ name = name,
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + ".map.txt",
+ )
+
+ native.genrule(
+ name = name + "_genrule_map_txt",
+ outs = [name + ".map.txt"],
+ cmd = "touch $@",
+ tags = ["manual"],
+ )
+
+ cc_stub_suite(
+ name = name + "_stub_libs",
+ soname = name + ".so",
+ source_library_label = ":" + name,
+ symbol_file = name + ".map.txt",
+ versions = ["30"],
+ tags = ["manual"],
+ )
+
+ return [
+ name,
+ name + "_stub_libs",
+ ]
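+ # Helper returning both the implementation library and its stub suite, so
+ # callers can select the stubs inside an apex and the implementation outside
+ # of it, as the bundled/unbundled build tests below do.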
+
+def _apex_in_unbundled_build_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ mk_modules_to_install = target_under_test[ApexMkInfo].make_modules_to_install
+ asserts.true(
+ env,
+ "apex_in_unbundled_build_libfoo" not in mk_modules_to_install,
+ "stub libs apex_in_unbundled_build_libfoo should not be propagated " +
+ "to make for installation in unbundled mode",
+ )
+ return analysistest.end(env)
+
+apex_in_unbundled_build_test = analysistest.make(
+ _apex_in_unbundled_build_test,
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_unbundled_build",
+ },
+)
+
+def _test_apex_in_unbundled_build():
+ name = "apex_in_unbundled_build"
+ test_name = name + "_test"
+
+ [cc_library_shared_name, cc_stub_suite_name] = cc_library_shared_with_stubs(name + "_libfoo")
+
+ cc_binary(
+ name = name + "_bar",
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ dynamic_deps = select({
+ "//build/bazel/rules/apex:android-in_apex": [cc_stub_suite_name + "_current"],
+ "//build/bazel/rules/apex:android-non_apex": [cc_library_shared_name],
+ }),
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bar"],
+ )
+
+ apex_in_unbundled_build_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_in_bundled_build_test(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ mk_modules_to_install = target_under_test[ApexMkInfo].make_modules_to_install
+ asserts.true(
+ env,
+ "apex_in_bundled_build_libfoo" in mk_modules_to_install,
+ "stub libs apex_in_unbundled_build_libfoo should be propagated " +
+ "to make for installation in unbundled mode",
+ )
+
+ return analysistest.end(env)
+
+apex_in_bundled_build_test = analysistest.make(
+ _apex_in_bundled_build_test,
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing",
+ },
+)
+
+def _test_apex_in_bundled_build():
+ name = "apex_in_bundled_build"
+ test_name = name + "_test"
+
+ [cc_library_shared_name, cc_stub_suite_name] = cc_library_shared_with_stubs(name + "_libfoo")
+
+ cc_binary(
+ name = name + "_bar",
+ tags = [
+ "apex_available=" + name,
+ "apex_available_checked_manual_for_testing",
+ "manual",
+ ],
+ dynamic_deps = select({
+ "//build/bazel/rules/apex:android-in_apex": [cc_stub_suite_name + "_current"],
+ "//build/bazel/rules/apex:android-non_apex": [cc_library_shared_name],
+ }),
+ )
+
+ test_apex(
+ name = name,
+ binaries = [name + "_bar"],
+ )
+
+ apex_in_bundled_build_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_compression_test(ctx):
+ env = analysistest.begin(ctx)
+
+ target = analysistest.target_under_test(env)
+ asserts.true(
+ env,
+ target[ApexInfo].signed_compressed_output != None,
+ "ApexInfo.signed_compressed_output should exist from compressible apex",
+ )
+
+ return analysistest.end(env)
+
+apex_compression_test = analysistest.make(
+ _apex_compression_test,
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing",
+ },
+)
+
+def _test_apex_compression():
+ name = "apex_compression"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ compressible = True,
+ )
+
+ apex_compression_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_no_compression_test(ctx):
+ env = analysistest.begin(ctx)
+
+ target = analysistest.target_under_test(env)
+ asserts.true(
+ env,
+ target[ApexInfo].signed_compressed_output == None,
+ "ApexInfo.signed_compressed_output should not exist when compression_enabled is not specified",
+ )
+
+ return analysistest.end(env)
+
+apex_no_compression_test = analysistest.make(
+ _apex_no_compression_test,
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_no_compression",
+ },
+)
+
+def _test_apex_no_compression():
+ name = "apex_no_compression"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ apex_no_compression_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _min_target_sdk_version_api_fingerprint_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ apexer_action = None
+ for action in actions:
+ if action.argv == None:
+ continue
+ for a in action.argv:
+ if "--min_sdk_version" in a:
+ apexer_action = action
+ break
+ if apexer_action != None:
+ break
+
+ asserts.true(
+ env,
+ apexer_action != None,
+ "There is no apexer action in all the actions",
+ )
+
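+    # The last argv element packs the apexer flags into one space-separated string;
+    # split it so individual flags and their values can be looked up by index.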
+ argv = apexer_action.argv[:-1] + apexer_action.argv[-1].split(" ")
+ api_fingerprint_in_input = False
+ api_fingerprint_path = None
+ for f in apexer_action.inputs.to_list():
+ if f.basename == "api_fingerprint.txt":
+ api_fingerprint_in_input = True
+ api_fingerprint_path = f.path
+ break
+
+ asserts.true(
+ env,
+ api_fingerprint_in_input,
+ "api_fingerprint.txt is not in the input files",
+ )
+
+ expected_target_sdk_version = "123" + ".$(cat {})".format(api_fingerprint_path)
+ target_sdk_version_index = argv.index("--target_sdk_version")
+ asserts.equals(
+ env,
+ expected = expected_target_sdk_version,
+ actual = argv[target_sdk_version_index + 1] + " " + argv[target_sdk_version_index + 2],
+ )
+
+ min_sdk_version_index = argv.index("--min_sdk_version")
+ if ctx.attr.min_sdk_version in ["current", "10000"]:
+ expected_min_sdk_version = "123" + ".$(cat {})".format(api_fingerprint_path)
+ actual_min_sdk_version = argv[min_sdk_version_index + 1] + " " + argv[min_sdk_version_index + 2]
+ else:
+ expected_min_sdk_version = ctx.attr.min_sdk_version
+ actual_min_sdk_version = argv[min_sdk_version_index + 1]
+ asserts.equals(
+ env,
+ expected = expected_min_sdk_version,
+ actual = actual_min_sdk_version,
+ )
+
+ return analysistest.end(env)
+
+min_target_sdk_version_api_fingerprint_test = analysistest.make(
+ _min_target_sdk_version_api_fingerprint_test,
+ attrs = {
+ "min_sdk_version": attr.string(
+ default = "current",
+ ),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_unbundled_build",
+ "@//build/bazel/rules/apex:unbundled_build_target_sdk_with_api_fingerprint": True,
+ "@//build/bazel/rules/apex:platform_sdk_codename": "123",
+ },
+)
+
+def _test_min_target_sdk_version_api_fingerprint_min_sdk_version_specified():
+ name = "min_target_sdk_version_api_fingerprint_min_sdk_version_specified"
+ test_name = name + "_test"
+ min_sdk_version = "30"
+
+ test_apex(
+ name = name,
+ min_sdk_version = min_sdk_version,
+ )
+
+ min_target_sdk_version_api_fingerprint_test(
+ name = test_name,
+ target_under_test = name,
+ min_sdk_version = min_sdk_version,
+ )
+
+ return test_name
+
+def _test_min_target_sdk_version_api_fingerprint_min_sdk_version_not_specified():
+ name = "min_target_sdk_version_api_fingerprint_min_sdk_version_not_specified"
+ test_name = name + "_test"
+
+ test_apex(
+ name = name,
+ )
+
+ min_target_sdk_version_api_fingerprint_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _apex_sbom_test(ctx):
+ env = analysistest.begin(ctx)
+
+ # Action GenerateSBOMMetadata
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "GenerateSBOMMetadata"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No GenerateSBOMMetadata action found for creating <apex>-sbom-metadata.csv file: %s" % actions,
+ )
+
+ input_files = [input.basename for input in actions[0].inputs.to_list()]
+ asserts.true(
+ env,
+ "apex_sbom_lib_cc.so" in input_files,
+ "No expected file in inputs of GenerateSBOMMetadata action",
+ )
+
+ output_files = [output.basename for output in actions[0].outputs.to_list()]
+ asserts.true(
+ env,
+ "apex_sbom.apex-sbom-metadata.csv" in output_files,
+ "No expected file in outputs of GenerateSBOMMetadata action",
+ )
+
+ # Action GenerateSBOM
+ actions = [a for a in analysistest.target_actions(env) if a.mnemonic == "GenerateSBOM"]
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "No GenerateSBOM action found for creating sbom.spdx.json file: %s" % actions,
+ )
+ input_files = [input.short_path for input in actions[0].inputs.to_list()]
+ expected_input_files = [
+ "build/bazel/rules/apex/apex_sbom.apex",
+ "build/bazel/rules/apex/apex_sbom.apex-sbom-metadata.csv",
+ "build/make/tools/sbom/generate-sbom",
+ "build/bazel/rules/apex/apex_sbom_lib_cc.so",
+ "build/bazel/rules/apex/METADATA",
+ ]
+ asserts.true(
+ env,
+ all([f in input_files for f in expected_input_files]),
+ "Missing input files: %s" % input_files,
+ )
+
+ output_files = [output.basename for output in actions[0].outputs.to_list()]
+ expected_output_files = [
+ "apex_sbom.apex.spdx.json",
+ "apex_sbom.apex-fragment.spdx",
+ ]
+ asserts.true(
+ env,
+ all([f in output_files for f in expected_output_files]),
+ "Missing output files: %s" % input_files,
+ )
+
+ return analysistest.end(env)
+
+apex_sbom_test = analysistest.make(
+ _apex_sbom_test,
+)
+
+def _test_apex_sbom():
+ name = "apex_sbom"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_lib_cc",
+ srcs = [name + "_lib.cc"],
+ tags = ["manual"],
+ )
+
+ test_apex(
+ name = name,
+ native_shared_libs_32 = [name + "_lib_cc"],
+ android_manifest = "AndroidManifest.xml",
+ )
+
+ apex_sbom_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def apex_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_canned_fs_config_basic(),
+ _test_canned_fs_config_custom(),
+ _test_canned_fs_config_binaries(),
+ _test_canned_fs_config_native_shared_libs_arm(),
+ _test_canned_fs_config_native_shared_libs_arm64(),
+ _test_canned_fs_config_prebuilts(),
+ _test_canned_fs_config_prebuilts_sort_order(),
+ _test_canned_fs_config_runtime_deps(),
+ _test_apex_manifest(),
+ _test_apex_manifest_min_sdk_version(),
+ _test_apex_manifest_min_sdk_version_current(),
+ _test_apex_manifest_min_sdk_version_override(),
+ _test_apex_manifest_dependencies_nodep(),
+ _test_apex_manifest_dependencies_cc_binary_bionic_deps(),
+ _test_apex_manifest_dependencies_cc_library_shared_bionic_deps(),
+ _test_apex_manifest_dependencies_requires(),
+ _test_apex_manifest_dependencies_provides(),
+ _test_apex_manifest_dependencies_selfcontained(),
+ _test_apex_manifest_dependencies_cc_binary(),
+ _test_logging_parent_flag(),
+ _test_package_name(),
+ _test_package_name_override_from_config(),
+ _test_generate_file_contexts(),
+ _test_default_apex_manifest_version(),
+ _test_override_apex_manifest_version(),
+ _test_min_sdk_version_failure(),
+ _test_min_sdk_version_failure_transitive(),
+ _test_apex_certificate_none(),
+ _test_apex_certificate_name(),
+ _test_apex_certificate_label(),
+ _test_apex_certificate_label_with_overrides(),
+ _test_min_sdk_version_apex_inherit(),
+ _test_min_sdk_version_apex_inherit_override_min_sdk_tiramisu(),
+ _test_apex_testonly_with_manifest(),
+ _test_apex_provides_base_zip_files(),
+ _test_apex_testonly_without_manifest(),
+ _test_apex_backing_file(),
+ _test_apex_symbols_used_by_apex(),
+ _test_apex_installed_files(),
+ _test_apex_java_symbols_used_by_apex(),
+ _test_apex_generate_notice_file(),
+ _test_apex_available(),
+ _test_apex_available_failure(),
+ _test_apex_available_with_base_apex(),
+ _test_apex_deps_validation(),
+ _test_no_static_linking_for_stubs_lib(),
+ _test_directly_included_stubs_lib_with_indirectly_static_variant(),
+ _test_apex_in_unbundled_build(),
+ _test_apex_in_bundled_build(),
+ _test_apex_compression(),
+ _test_apex_no_compression(),
+ _test_min_target_sdk_version_api_fingerprint_min_sdk_version_specified(),
+ _test_min_target_sdk_version_api_fingerprint_min_sdk_version_not_specified(),
+ _test_apex_sbom(),
+ ] + _test_apex_transition(),
+ )
diff --git a/rules/apex/apex_test_helpers.bzl b/rules/apex/apex_test_helpers.bzl
new file mode 100644
index 00000000..a9583f25
--- /dev/null
+++ b/rules/apex/apex_test_helpers.bzl
@@ -0,0 +1,81 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/android:android_app_certificate.bzl", "android_app_certificate")
+load(":apex.bzl", "apex")
+load(":apex_key.bzl", "apex_key")
+
+# Set up test-local dependencies required for every apex.
+def _setup_apex_required_deps(
+ file_contexts,
+ key,
+ manifest,
+ certificate):
+ # Use the same shared common deps for all test apexes.
+ if file_contexts and not native.existing_rule(file_contexts):
+ native.genrule(
+ name = file_contexts,
+ outs = [file_contexts + ".out"],
+ cmd = "echo unused && exit 1",
+ tags = ["manual"],
+ )
+
+ if manifest and not native.existing_rule(manifest):
+ native.genrule(
+ name = manifest,
+ outs = [manifest + ".json"],
+ cmd = "echo unused && exit 1",
+ tags = ["manual"],
+ )
+
+ # Required for ApexKeyInfo provider
+ if key and not native.existing_rule(key):
+ apex_key(
+ name = key,
+ private_key = key + ".pem",
+ public_key = key + ".avbpubkey",
+ tags = ["manual"],
+ )
+
+ # Required for AndroidAppCertificate provider
+ if certificate and not native.existing_rule(certificate):
+ android_app_certificate(
+ name = certificate,
+ certificate = certificate + ".cert",
+ tags = ["manual"],
+ )
+
+def test_apex(
+ name,
+ file_contexts = "test_file_contexts",
+ key = "test_key",
+ manifest = "test_manifest",
+ certificate = "test_certificate",
+ **kwargs):
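+    """Test-only wrapper around apex() that creates placeholder file_contexts, key, manifest, and
+    certificate dependencies if they do not already exist, and marks the apex as manual."""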
+ _setup_apex_required_deps(
+ file_contexts = file_contexts,
+ key = key,
+ manifest = manifest,
+ certificate = certificate,
+ )
+
+ apex(
+ name = name,
+ file_contexts = file_contexts,
+ key = key,
+ manifest = manifest,
+ certificate = certificate,
+ tags = ["manual"],
+ **kwargs
+ )
diff --git a/rules/apex/bazel_apexer_wrapper.py b/rules/apex/bazel_apexer_wrapper.py
deleted file mode 100644
index f1c14dbd..00000000
--- a/rules/apex/bazel_apexer_wrapper.py
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-def _create_apex(args, work_dir):
-
- image_apex_dir = "image.apex"
-
- # Used for creating canned_fs_config, since every file and dir in the APEX are represented
- # by an entry in the fs_config.
- apex_subdirs = []
- apex_filepaths = []
-
- input_dir = os.path.join(work_dir, image_apex_dir)
- os.makedirs(input_dir, exist_ok=True)
- bazel_apexer_wrapper_manifest = open(args.bazel_apexer_wrapper_manifest, 'r')
- file_lines = bazel_apexer_wrapper_manifest.readlines()
- for line in file_lines:
- line = line.strip()
- if (len(line) == 0):
- continue
- apex_dirname, apex_filename, bazel_input_file = line.split(",")
- full_apex_dirname = "/".join([input_dir, apex_dirname])
- os.makedirs(full_apex_dirname, exist_ok=True)
-
- apex_filepath = "/".join([apex_dirname, apex_filename])
- apex_filepaths.append(apex_filepath)
- apex_subdirs.append(apex_dirname)
-
- full_apex_filepath = "/".join([input_dir, apex_filepath])
- # Because Bazel execution root is a symlink forest, all the input files are symlinks, these
- # include the dependency files declared in the BUILD files as well as the files declared
- # and created in the bzl files. For sandbox runs the former are two or more level symlinks and
- # latter are one level symlinks. For non-sandbox runs, the former are one level symlinks
- # and the latter are actual files. Here are some examples:
- #
- # Two level symlinks:
- # system/timezone/output_data/version/tz_version ->
- # /usr/local/google/home/...out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
- # execroot/__main__/system/timezone/output_data/version/tz_version ->
- # /usr/local/google/home/.../system/timezone/output_data/version/tz_version
- #
- # Three level symlinks:
- # bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/libcrypto.so ->
- # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
- # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/libcrypto.so ->
- # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
- # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/
- # liblibcrypto_stripped.so ->
- # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
- # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/
- # liblibcrypto_unstripped.so
- #
- # One level symlinks:
- # bazel-out/android_target-fastbuild/bin/system/timezone/apex/apex_manifest.pb ->
- # /usr/local/google/home/.../out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
- # execroot/__main__/bazel-out/android_target-fastbuild/bin/system/timezone/apex/
- # apex_manifest.pb
-
- if os.path.islink(bazel_input_file):
- bazel_input_file = os.readlink(bazel_input_file)
-
- # For sandbox run these are the 2nd level symlinks and we need to resolve
- while os.path.islink(bazel_input_file) and 'execroot/__main__' in bazel_input_file:
- bazel_input_file = os.readlink(bazel_input_file)
-
- shutil.copyfile(bazel_input_file, full_apex_filepath, follow_symlinks=False)
-
- # Make sure subdirs are unique
- apex_subdirs_set = set()
- for d in apex_subdirs:
- apex_subdirs_set.add(d)
-
- # Make sure all the parent dirs of the current subdir are in the set, too
- dirs = d.split("/")
- for i in range(0, len(dirs)):
- apex_subdirs_set.add("/".join(dirs[:i]))
-
- canned_fs_config = _generate_canned_fs_config(work_dir, apex_subdirs_set, apex_filepaths)
-
- # Construct the main apexer command.
- cmd = [args.apexer_path]
- cmd.append('--verbose')
- cmd.append('--force')
- cmd.append('--include_build_info')
- cmd.extend(['--file_contexts', args.file_contexts])
- cmd.extend(['--canned_fs_config', canned_fs_config])
- cmd.extend(['--key', args.key])
- cmd.extend(['--payload_type', 'image'])
- cmd.extend(['--target_sdk_version', '10000'])
- cmd.extend(['--payload_fs_type', 'ext4'])
- cmd.extend(['--apexer_tool_path', args.apexer_tool_paths])
-
- if args.android_manifest != None:
- cmd.extend(['--android_manifest', args.android_manifest])
-
- if args.pubkey != None:
- cmd.extend(['--pubkey', args.pubkey])
-
- if args.manifest != None:
- cmd.extend(['--manifest', args.manifest])
-
- if args.min_sdk_version != None:
- cmd.extend(['--min_sdk_version', args.min_sdk_version])
-
- if args.android_jar_path != None:
- cmd.extend(['--android_jar_path', args.android_jar_path])
-
- cmd.append(input_dir)
- cmd.append(args.apex_output_file)
-
- popen = subprocess.Popen(cmd)
- popen.wait()
-
- return True
-
-# Generate filesystem config. This encodes the filemode, uid, and gid of each
-# file in the APEX, including apex_manifest.json and apex_manifest.pb.
-#
-# NOTE: every file must have an entry.
-def _generate_canned_fs_config(work_dir, dirs, filepaths):
- with tempfile.NamedTemporaryFile(mode = 'w+', dir=work_dir, delete=False) as canned_fs_config:
- config_lines = []
- config_lines += ["/ 1000 1000 0755"]
- config_lines += ["/apex_manifest.json 1000 1000 0644"]
- config_lines += ["/apex_manifest.pb 1000 1000 0644"]
- config_lines += ["/" + filepath + " 1000 1000 0644" for filepath in filepaths]
- config_lines += ["/" + d + " 0 2000 0755" for d in dirs]
- canned_fs_config.write("\n".join(config_lines))
-
- return canned_fs_config.name
-
-def _parse_args(argv):
- parser = argparse.ArgumentParser(description='Build an APEX file')
-
- parser.add_argument(
- '--manifest',
- help='path to the APEX manifest file (.pb)')
- parser.add_argument(
- '--apex_output_file',
- required=True,
- help='path to the APEX image file')
- parser.add_argument(
- '--bazel_apexer_wrapper_manifest',
- required=True,
- help='path to the manifest file that stores the info about the files to be packaged by apexer')
- parser.add_argument(
- '--android_manifest',
- help='path to the AndroidManifest file. If omitted, a default one is created and used')
- parser.add_argument(
- '--file_contexts',
- required=True,
- help='selinux file contexts file.')
- parser.add_argument(
- '--key',
- required=True,
- help='path to the private key file.')
- parser.add_argument(
- '--pubkey',
- help='path to the public key file. Used to bundle the public key in APEX for testing.')
- parser.add_argument(
- '--apexer_path',
- required=True,
- help='Path to the apexer binary.')
- parser.add_argument(
- '--apexer_tool_paths',
- required=True,
- help='Directories containing all the tools used by apexer, separated by ":" character.')
- parser.add_argument(
- '--min_sdk_version',
- help='Default Min SDK version to use for AndroidManifest.xml')
- parser.add_argument(
- '--android_jar_path',
- help='path to use as the source of the android API.')
-
- return parser.parse_args(argv)
-
-def main(argv):
- args = _parse_args(argv)
-
- with tempfile.TemporaryDirectory() as work_dir:
- success = _create_apex(args, work_dir)
-
- if not success:
- sys.exit(1)
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/rules/apex/bundle.bzl b/rules/apex/bundle.bzl
new file mode 100644
index 00000000..85022ffc
--- /dev/null
+++ b/rules/apex/bundle.bzl
@@ -0,0 +1,157 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+# Arch to ABI map
+_arch_abi_map = {
+ "arm": "armeabi-v7a",
+ "arm64": "arm64-v8a",
+ "x86": "x86",
+ "x86_64": "x86_64",
+}
+
+def _proto_convert(actions, name, aapt2, apex_file):
+ """Run 'aapt2 convert' to convert resource files to protobuf format."""
+
+ root, ext = paths.split_extension(apex_file.basename)
+ output_file = actions.declare_file(paths.join(
+ name,
+ root + ".pb" + ext,
+ ))
+
+ # Arguments
+ args = actions.args()
+ args.add("convert")
+ args.add("--output-format", "proto")
+ args.add(apex_file)
+ args.add("-o", output_file)
+
+ actions.run(
+ inputs = [apex_file],
+ outputs = [output_file],
+ executable = aapt2,
+ arguments = [args],
+ mnemonic = "ApexProtoConvert",
+ )
+ return output_file
+
+def _base_file(actions, name, zip2zip, arch, secondary_arch, apex_proto_file):
+ """Transforms the apex file to the expected directory structure with all files that will be included in the base module of aab file."""
+
+ output_file = actions.declare_file(name + "-base.zip")
+
+ # Arguments
+ args = actions.args()
+ args.add("-i", apex_proto_file)
+ args.add("-o", output_file)
+ abi = _arch_abi_map[arch]
+ if secondary_arch:
+ abi += "." + _arch_abi_map[secondary_arch]
+ args.add_all([
+ "apex_payload.img:apex/%s.img" % abi,
+ "apex_build_info.pb:apex/%s.build_info.pb" % abi,
+ "apex_manifest.json:root/apex_manifest.json",
+ "apex_manifest.pb:root/apex_manifest.pb",
+ "AndroidManifest.xml:manifest/AndroidManifest.xml",
+ "assets/NOTICE.html.gz:assets/NOTICE.html.gz",
+ ])
+
+ actions.run(
+ inputs = [apex_proto_file],
+ outputs = [output_file],
+ executable = zip2zip,
+ arguments = [args],
+ mnemonic = "ApexBaseFile",
+ )
+ return output_file
+
+def build_bundle_config(actions, name):
+ """Create bundle_config.json as configuration for running bundletool.
+
+ Args:
+ actions: ctx.actions from a rule, used to declare outputs and actions.
+ name: name of target creating action
+ Returns:
+ The bundle_config.json file
+ """
+ file_content = {
+ # TODO(b/257459237): Config should collect manifest names and paths of android apps if their manifest name is overridden.
+ "apex_config": {},
+ "compression": {
+ "uncompressed_glob": [
+ "apex_payload.img",
+ "apex_manifest.*",
+ ],
+ },
+ }
+ bundle_config_file = actions.declare_file(paths.join(name, "bundle_config.json"))
+
+ actions.write(bundle_config_file, json.encode(file_content))
+
+ return bundle_config_file
+
+def _merge_apex_zip_with_config(actions, name, soong_zip, merge_zips, apex_zip, apex_file):
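+    """Packages bundle_config.json with soong_zip and merges it into the apex zip with merge_zips."""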
+    # TODO(): Only used for compatibility with mixed builds
+ bundle_config = build_bundle_config(actions, name)
+ apex_config_zip = actions.declare_file(name + ".config")
+
+ args = actions.args()
+ args.add("-o", apex_config_zip)
+ args.add("-C", bundle_config.dirname)
+ args.add("-f", bundle_config)
+ actions.run(
+ inputs = [bundle_config],
+ outputs = [apex_config_zip],
+ executable = soong_zip,
+ arguments = [args],
+ mnemonic = "ApexBaseConfigZip",
+ )
+
+ merged_zip = actions.declare_file(apex_file.basename + "-base.zip")
+ merge_args = actions.args()
+ merge_args.add(merged_zip)
+ merge_args.add(apex_zip)
+ merge_args.add(apex_config_zip)
+ actions.run(
+ inputs = [apex_config_zip, apex_zip],
+ outputs = [merged_zip],
+ executable = merge_zips,
+ arguments = [merge_args],
+ mnemonic = "ApexMergeBaseFileAndConfig",
+ )
+ return merged_zip
+
+def apex_zip_files(actions, name, tools, apex_file, arch, secondary_arch):
+ """Create apex zip files used to create an APEX bundle.
+
+ Args:
+ actions: Actions, ctx.actions from a rule, used to declare outputs and actions.
+ name: string, name of the target creating the action
+ tools: struct containing fields with executables: aapt2, zip2zip, soong_zip, merge_zips
+        apex_file: File, the APEX file to package
+        arch: string, the arch of the target configuration requesting the action
+        secondary_arch: string, the secondary arch of the target configuration, if any (may be empty)
+ Returns:
+ A struct with these fields:
+ apex_only: the regular "base" apex zip
+ apex_with_config: a zipfile that's identical to apex_only, but with the addition of bundle_config.json
+ """
+ apex_proto = _proto_convert(actions, name, tools.aapt2, apex_file)
+ apex_zip = _base_file(actions, name, tools.zip2zip, arch, secondary_arch, apex_proto)
+ merged_zip = _merge_apex_zip_with_config(actions, name, tools.soong_zip, tools.merge_zips, apex_zip, apex_file)
+
+ return struct(
+ apex_only = apex_zip,
+ apex_with_config = merged_zip,
+ )
diff --git a/rules/apex/cc.bzl b/rules/apex/cc.bzl
index 2ef4bda0..b47b29b0 100644
--- a/rules/apex/cc.bzl
+++ b/rules/apex/cc.bzl
@@ -1,130 +1,306 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-load("//build/bazel/rules/cc:cc_library_shared.bzl", "CcStubLibrariesInfo")
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//build/bazel/product_config:product_variables_providing_rule.bzl", "ProductVariablesInfo")
+load("//build/bazel/rules:metadata.bzl", "MetadataFileInfo")
+load("//build/bazel/rules/cc:cc_library_common.bzl", "parse_apex_sdk_version")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "CcSharedLibraryOutputInfo", "CcStubLibrariesInfo")
+load("//build/bazel/rules/cc:cc_stub_library.bzl", "CcStubLibrarySharedInfo")
+load("//build/bazel/rules/cc:stripped_cc_common.bzl", "CcUnstrippedInfo")
+load("//build/bazel/rules/license:license_aspect.bzl", "license_aspect")
ApexCcInfo = provider(
"Info needed to use CC targets in APEXes",
fields = {
+ "provides_native_libs": "Labels of native shared libs that this apex provides.",
+ "requires_native_libs": "Labels of native shared libs that this apex requires.",
"transitive_shared_libs": "File references to transitive .so libs produced by the CC targets and should be included in the APEX.",
},
)
-# Return True if this target provides stubs that is equal to, or below, the
-# APEX's min_sdk_level.
+ApexCcMkInfo = provider(
+ "AndroidMk data about CC targets in APEXes",
+ fields = {
+ "make_modules_to_install": "List of module names that should be installed into the system, along with this APEX",
+ },
+)
+
+# Special libraries that are installed to the bootstrap subdirectory. Bionic
+# libraries are assumed to be provided by the system, and installed automatically
+# as a symlink to the runtime APEX.
+#
+# This list is from https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/cc.go;l=1439-1452;drc=9c667416ded33b93a44c5f1894ea23cae6699a17
+#
+# NOTE: Keep this list in sync with the Soong list.
#
-# These stable ABI libraries are intentionally omitted from APEXes as they are
-# provided from another APEX or the platform. By omitting them from APEXes, we
-# ensure that there are no multiple copies of such libraries on a device.
-def has_cc_stubs(target, ctx):
- if ctx.rule.kind != "_cc_library_shared_proxy":
- # only _cc_library_shared_proxy contains merged CcStubLibrariesInfo providers
- # (a provider aggregating CcStubInfo and CcSharedLibraryInfo)
- return False
-
- if len(target[CcStubLibrariesInfo].infos) == 0:
- # Not all shared library targets have stubs
- return False
-
- # Minimum SDK version supported by the APEX that transitively depends on
- # this target.
- min_sdk_version = ctx.attr._min_sdk_version[BuildSettingInfo].value
- apex_name = ctx.attr._apex_name[BuildSettingInfo].value
-
- available_versions = []
-
- # Check that the shared library has stubs built for (at least) the
- # min_sdk_version of the APEX
- for stub_info in target[CcStubLibrariesInfo].infos:
- stub_version = stub_info["CcStubInfo"].version
- available_versions.append(stub_version)
- if stub_version <= min_sdk_version:
- return True
-
- fail("cannot find a stub lib version for min_sdk_level %s (%s apex)\navailable versions: %s (%s)" %
- (min_sdk_version, apex_name, available_versions, target.label))
+# See cc/binary.go#install for more information.
+def _installed_to_bootstrap(label):
+ label = str(label)
+
+ # hwasan
+ if label == "@//prebuilts/clang/host/linux-x86:libclang_rt.hwasan":
+ return True
+
+ # bionic libs
+ if label in [
+ "@//bionic/libc:libc",
+ "@//bionic/libc:libc_hwasan", # For completeness, but no one should be depending on this.
+ "@//bionic/libm:libm",
+ "@//bionic/libdl:libdl",
+ "@//bionic/libdl_android:libdl_android",
+ "@//bionic/linker:linker",
+ ]:
+ return True
+
+ return False
+
+def has_cc_stubs(target):
+ """
+ Return True if this target provides stubs.
+
+ There is no need to check versions of stubs any more, see aosp/1609533.
+
+ These stable ABI libraries are intentionally omitted from APEXes as they are
+ provided from another APEX or the platform. By omitting them from APEXes, we
+ ensure that there are no multiple copies of such libraries on a device.
+
+ Args:
+ target: The target to check for stubs on.
+ Returns:
+        True if the target provides cc stubs, False otherwise.
+ """
+ if CcStubLibrarySharedInfo in target:
+ # This is a stub lib (direct or transitive).
+ return True
+
+ if CcStubLibrariesInfo in target and target[CcStubLibrariesInfo].has_stubs:
+ # Direct deps of the apex. The apex would depend on the source lib, not stub lib,
+ # so check for CcStubLibrariesInfo.has_stubs.
+ return True
+
+ return False
# Check if this target is specified as a direct dependency of the APEX,
# as opposed to a transitive dependency, as the transitivity impacts
# the files that go into an APEX.
-def is_apex_direct_dep(target, ctx):
+def is_apex_direct_dep(label, ctx):
apex_direct_deps = ctx.attr._apex_direct_deps[BuildSettingInfo].value
- return str(target.label) in apex_direct_deps
+ return str(label) in apex_direct_deps
+
+MinSdkVersionInfo = provider(
+ "MinSdkVersionInfo provides metadata about the min_sdk_version attribute of a target",
+ fields = {
+ "apex_inherit": "true if min_sdk_version: \"apex_inherit\" is present on the module",
+ "min_sdk_version": "value of min_sdk_version",
+ },
+)
+
+def get_min_sdk_version(ctx):
+ """get_min_sdk_version returns the min_sdk_version for the existing target
+
+ Args:
+ ctx (rule context): a rule context
+ Returns:
+ MinSdkVersionInfo
+ """
+ min_sdk_version = None
+ apex_inherit = False
+ if hasattr(ctx.rule.attr, "min_sdk_version"):
+ if ctx.rule.attr.min_sdk_version == "apex_inherit":
+ apex_inherit = True
+ elif ctx.rule.attr.min_sdk_version:
+ min_sdk_version = parse_apex_sdk_version(ctx.rule.attr.min_sdk_version)
+ else:
+ # min_sdk_version in cc targets are represented as features
+ for f in ctx.rule.attr.features:
+ if f.startswith("sdk_version_"):
+ # e.g. sdk_version_29 or sdk_version_10000 or sdk_version_apex_inherit
+ sdk_version = f.removeprefix("sdk_version_")
+ if sdk_version == "apex_inherit":
+ apex_inherit = True
+ elif min_sdk_version == None:
+ min_sdk_version = int(sdk_version)
+ else:
+                    fail(
+                        "found more than one sdk_version feature on {target}; features = {features}".format(
+                            target = ctx.label,
+                            features = ctx.rule.attr.features,
+                        ),
+                    )
+ return MinSdkVersionInfo(
+ min_sdk_version = min_sdk_version,
+ apex_inherit = apex_inherit,
+ )
+
+def _validate_min_sdk_version(ctx):
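+    """Fails if a dependency declares a min_sdk_version higher than this apex's min_sdk_version."""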
+ dep_min_version = get_min_sdk_version(ctx).min_sdk_version
+ apex_min_version = parse_apex_sdk_version(ctx.attr._min_sdk_version[BuildSettingInfo].value)
+ if dep_min_version and apex_min_version < dep_min_version:
+ fail("The apex %s's min_sdk_version %s cannot be lower than the dep's min_sdk_version %s" %
+ (ctx.attr._apex_name[BuildSettingInfo].value, apex_min_version, dep_min_version))
def _apex_cc_aspect_impl(target, ctx):
+ # Ensure that dependencies are compatible with this apex's min_sdk_level
+ if not ctx.attr.testonly:
+ _validate_min_sdk_version(ctx)
+
# Whether this dep is a direct dep of an APEX or makes a difference in dependency
# traversal, and aggregation of libs that are required from the platform/other APEXes,
# and libs that this APEX will provide to others.
- is_direct_dep = is_apex_direct_dep(target, ctx)
+ is_direct_dep = is_apex_direct_dep(target.label, ctx)
+
+ provides = []
+ requires = []
+ make_modules_to_install = []
- if has_cc_stubs(target, ctx):
+ # The APEX manifest records the stub-providing libs (ABI-stable) in its
+ # direct and transitive deps.
+ #
+ # If a stub-providing lib is in the direct deps of an apex, then the apex
+ # provides the symbols.
+ #
+ # If a stub-providing lib is in the transitive deps of an apex, then the
+ # apex requires the symbols from the platform or other apexes.
+ if has_cc_stubs(target):
if is_direct_dep:
- # TODO(b/215500321): Mark these libraries as "stub-providing" exports
- # of this APEX, which the system and other APEXes can depend on,
- # and propagate this list.
- pass
+ # Mark this target as "stub-providing" exports of this APEX,
+ # which the system and other APEXes can depend on, and propagate
+ # this list.
+ provides.append(target.label)
else:
- # If this is not a direct dep, and stubs are available, don't propagate
- # the libraries.
- #
- # TODO(b/215500321): In a bundled build, ensure that these libraries are
- # available on the system either via the system partition, or another APEX
- # and propagate this list.
- return [ApexCcInfo(transitive_shared_libs = depset())]
+        # If this is not a direct dep and stubs are available, don't propagate
+        # the libraries.
+
+ # Mark this target as required from the system either via
+ # the system partition, or another APEX, and propagate this list.
+ source_library_label = target[CcStubLibrarySharedInfo].source_library_label
+
+ # If a stub library is in the "provides" of the apex, it doesn't need to be in the "requires"
+ if not is_apex_direct_dep(source_library_label, ctx):
+ requires.append(source_library_label)
+ if not ctx.attr._product_variables[ProductVariablesInfo].Unbundled_build and not _installed_to_bootstrap(source_library_label):
+ # It's sufficient to pass the make module name, not the fully qualified bazel label.
+ make_modules_to_install.append(source_library_label.name)
+
+ return [
+ ApexCcInfo(
+ transitive_shared_libs = depset(),
+ requires_native_libs = depset(direct = requires),
+ provides_native_libs = depset(direct = provides),
+ ),
+ ApexCcMkInfo(
+ make_modules_to_install = depset(direct = make_modules_to_install),
+ ),
+ ]
shared_object_files = []
# Transitive deps containing shared libraries to be propagated the apex.
transitive_deps = []
- rules_propagate_src = ["_bssl_hash_injection", "stripped_shared_library", "versioned_shared_library"]
+ rules_propagate_src = [
+ "_bssl_hash_injection",
+ "stripped_shared_library",
+ "versioned_shared_library",
+ "stripped_binary",
+ "versioned_binary",
+ ]
# Exclude the stripped and unstripped so files
if ctx.rule.kind == "_cc_library_shared_proxy":
- for output_file in target[DefaultInfo].files.to_list():
- if output_file.extension == "so":
- shared_object_files.append(output_file)
+ shared_object_files.append(struct(
+ stripped = target[CcSharedLibraryOutputInfo].output_file,
+ unstripped = target[CcUnstrippedInfo].unstripped,
+ metadata_file = target[MetadataFileInfo].metadata_file,
+ ))
if hasattr(ctx.rule.attr, "shared"):
- transitive_deps.append(ctx.rule.attr.shared)
- elif ctx.rule.kind == "cc_shared_library" and hasattr(ctx.rule.attr, "dynamic_deps"):
- # Propagate along the dynamic_deps edge
- for dep in ctx.rule.attr.dynamic_deps:
- transitive_deps.append(dep)
+ transitive_deps.append(ctx.rule.attr.shared[0])
+ elif ctx.rule.kind in ["cc_shared_library", "cc_binary"]:
+ # Propagate along the dynamic_deps and deps edges for binaries and shared libs
+ if hasattr(ctx.rule.attr, "dynamic_deps"):
+ for dep in ctx.rule.attr.dynamic_deps:
+ transitive_deps.append(dep)
+ if hasattr(ctx.rule.attr, "deps"):
+ for dep in ctx.rule.attr.deps:
+ transitive_deps.append(dep)
elif ctx.rule.kind in rules_propagate_src and hasattr(ctx.rule.attr, "src"):
# Propagate along the src edge
- transitive_deps.append(ctx.rule.attr.src)
+ if ctx.rule.kind == "stripped_binary":
+ transitive_deps.append(ctx.rule.attr.src[0])
+ else:
+ transitive_deps.append(ctx.rule.attr.src)
+
+ if ctx.rule.kind in ["stripped_binary", "_cc_library_shared_proxy", "_cc_library_combiner"] and hasattr(ctx.rule.attr, "runtime_deps"):
+ for dep in ctx.rule.attr.runtime_deps:
+ unstripped = None
+ if CcUnstrippedInfo in dep:
+ unstripped = dep[CcUnstrippedInfo].unstripped
+ for output_file in dep[DefaultInfo].files.to_list():
+ if output_file.extension == "so":
+ shared_object_files.append(struct(
+ stripped = output_file,
+ unstripped = unstripped,
+ metadata_file = dep[MetadataFileInfo].metadata_file,
+ ))
+ transitive_deps.append(dep)
return [
ApexCcInfo(
- # TODO: Rely on a split transition across arches to happen earlier
transitive_shared_libs = depset(
shared_object_files,
- transitive = [dep[ApexCcInfo].transitive_shared_libs for dep in transitive_deps],
+ transitive = [info[ApexCcInfo].transitive_shared_libs for info in transitive_deps],
+ ),
+ requires_native_libs = depset(
+ [],
+ transitive = [info[ApexCcInfo].requires_native_libs for info in transitive_deps],
+ ),
+ provides_native_libs = depset(
+ provides,
+ transitive = [info[ApexCcInfo].provides_native_libs for info in transitive_deps],
+ ),
+ ),
+ ApexCcMkInfo(
+ make_modules_to_install = depset(
+ [],
+ transitive = [info[ApexCcMkInfo].make_modules_to_install for info in transitive_deps],
),
),
]
+# The list of attributes in a cc dep graph where this aspect will traverse on.
+CC_ATTR_ASPECTS = [
+ "dynamic_deps",
+ "deps",
+ "shared",
+ "src",
+ "runtime_deps",
+ "static_deps",
+ "whole_archive_deps",
+]
+
# This aspect is intended to be applied on a apex.native_shared_libs attribute
apex_cc_aspect = aspect(
implementation = _apex_cc_aspect_impl,
+ provides = [ApexCcInfo, ApexCcMkInfo],
attrs = {
- "_min_sdk_version": attr.label(default = "//build/bazel/rules/apex:min_sdk_version"),
- "_apex_name": attr.label(default = "//build/bazel/rules/apex:apex_name"),
+ # This is propagated from the apex
+ "testonly": attr.bool(default = False),
"_apex_direct_deps": attr.label(default = "//build/bazel/rules/apex:apex_direct_deps"),
+ "_apex_name": attr.label(default = "//build/bazel/rules/apex:apex_name"),
+ "_min_sdk_version": attr.label(default = "//build/bazel/rules/apex:min_sdk_version"),
+ "_product_variables": attr.label(default = "//build/bazel/product_config:product_vars"),
},
- attr_aspects = ["dynamic_deps", "shared", "src"],
+ attr_aspects = CC_ATTR_ASPECTS,
+ requires = [license_aspect],
# TODO: Have this aspect also propagate along attributes of native_shared_libs?
)
diff --git a/rules/apex/mainline_modules.bzl b/rules/apex/mainline_modules.bzl
deleted file mode 100644
index c17782e2..00000000
--- a/rules/apex/mainline_modules.bzl
+++ /dev/null
@@ -1,260 +0,0 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-load("//build/bazel/rules:apex.bzl", "ApexInfo")
-
-def _arch_transition_impl(settings, attr):
- """Implementation of arch_transition.
- Four archs are included for mainline modules: x86, x86_64, arm and arm64.
- """
- return {
- "x86": {
- "//command_line_option:platforms": "//build/bazel/platforms:android_x86",
- },
- "x86_64": {
- "//command_line_option:platforms": "//build/bazel/platforms:android_x86_64",
- },
- "arm": {
- "//command_line_option:platforms": "//build/bazel/platforms:android_arm",
- },
- "arm64": {
- "//command_line_option:platforms": "//build/bazel/platforms:android_arm64",
- },
- }
-
-# Multi-arch transition.
-arch_transition = transition(
- implementation = _arch_transition_impl,
- inputs = [],
- outputs = [
- "//command_line_option:platforms",
- ],
-)
-
-# Arch to ABI map
-_arch_abi_map = {
- "arm64": "arm64-v8a",
- "arm": "armeabi-v7a",
- "x86_64": "x86_64",
- "x86": "x86",
-}
-
-def _apex_proto_convert(ctx, arch, module_name, apex_file):
- """Run 'aapt2 convert' to convert resource files to protobuf format."""
- # Inputs
- inputs = [
- apex_file,
- ctx.executable._aapt2,
- ]
-
- # Outputs
- filename = apex_file.basename
- pos_dot = filename.rindex(".")
- proto_convert_file = ctx.actions.declare_file("/".join([
- module_name,
- arch,
- filename[:pos_dot] + ".pb" + filename[pos_dot:]]))
- outputs = [proto_convert_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["convert"])
- args.add_all(["--output-format", "proto"])
- args.add_all([apex_file])
- args.add_all(["-o", proto_convert_file.path])
-
- ctx.actions.run(
- inputs = inputs,
- outputs = outputs,
- executable = ctx.executable._aapt2,
- arguments = [args],
- mnemonic = "ApexProtoConvert",
- )
- return proto_convert_file
-
-def _apex_base_file(ctx, arch, module_name, apex_proto_file):
- """Run zip2zip to transform the apex file the expected directory structure
- with all files that will be included in the base module of aab file."""
-
- # Inputs
- inputs = [
- apex_proto_file,
- ctx.executable._zip2zip,
- ]
-
- # Outputs
- base_file = ctx.actions.declare_file("/".join([module_name, arch, module_name + ".base"]))
- outputs = [base_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["-i", apex_proto_file])
- args.add_all(["-o", base_file])
- abi = _arch_abi_map[arch]
- args.add_all([
- "apex_payload.img:apex/%s.img" % abi,
- "apex_build_info.pb:apex/%s.build_info.pb" % abi,
- "apex_manifest.json:root/apex_manifest.json",
- "apex_manifest.pb:root/apex_manifest.pb",
- "AndroidManifest.xml:manifest/AndroidManifest.xml",
- "assets/NOTICE.html.gz:assets/NOTICE.html.gz",
- ])
-
- ctx.actions.run(
- inputs = inputs,
- outputs = outputs,
- executable = ctx.executable._zip2zip,
- arguments = [args],
- mnemonic = "ApexBaseFile",
- )
- return base_file
-
-def _build_bundle_config(ctx, arch, module_name):
- """Create bundle_config.json as configuration for running bundletool."""
- file_content = {
- "compression": {
- "uncompressed_glob": [
- "apex_payload.img",
- "apex_manifest.*",
- ],
- },
- "apex_config": {},
- }
- bundle_config_file = ctx.actions.declare_file("/".join([module_name, "bundle_config.json"]))
- ctx.actions.write(bundle_config_file, json.encode(file_content))
-
- return bundle_config_file
-
-def _merge_base_files(ctx, module_name, base_files):
- """Run merge_zips to merge all files created for each arch by _apex_base_file."""
-
- # Inputs
- inputs = base_files + [ctx.executable._merge_zips]
-
- # Outputs
- merged_base_file = ctx.actions.declare_file(module_name + "/" + module_name + ".zip")
- outputs = [merged_base_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["--ignore-duplicates"])
- args.add_all([merged_base_file])
- args.add_all(base_files)
-
- ctx.actions.run(
- inputs = inputs,
- outputs = outputs,
- executable = ctx.executable._merge_zips,
- arguments = [args],
- mnemonic = "ApexMergeBaseFiles",
- )
- return merged_base_file
-
-def _apex_bundle(ctx, module_name, merged_base_file, bundle_config_file):
- """Run bundletool to create the aab file."""
-
- # Inputs
- inputs = [
- bundle_config_file,
- merged_base_file,
- ctx.executable._bundletool,
- ]
-
- # Outputs
- bundle_file = ctx.actions.declare_file(module_name + "/" + module_name + ".aab")
- outputs = [bundle_file]
-
- # Arguments
- args = ctx.actions.args()
- args.add_all(["build-bundle"])
- args.add_all(["--config", bundle_config_file])
- args.add_all(["--modules", merged_base_file])
- args.add_all(["--output", bundle_file])
-
- ctx.actions.run(
- inputs = inputs,
- outputs = outputs,
- executable = ctx.executable._bundletool,
- arguments = [args],
- mnemonic = "ApexBundleFile",
- )
- return bundle_file
-
-def _apex_aab_impl(ctx):
- """Implementation of apex_aab rule, which drives the process of creating aab
- file from apex files created for each arch."""
- apex_base_files = []
- bundle_config_file = None
- module_name = ctx.attr.mainline_module[0].label.name
- for arch in ctx.split_attr.mainline_module:
- apex_file = ctx.split_attr.mainline_module[arch].files.to_list()[0]
- proto_convert_file = _apex_proto_convert(ctx, arch, module_name, apex_file)
- base_file = _apex_base_file(ctx, arch, module_name, proto_convert_file)
- apex_base_files.append(base_file)
- # It is assumed that the bundle config is the same for all products.
- if bundle_config_file == None:
- bundle_config_file = _build_bundle_config(ctx, arch, module_name)
-
- merged_base_file = _merge_base_files(ctx, module_name, apex_base_files)
- bundle_file = _apex_bundle(ctx, module_name, merged_base_file, bundle_config_file)
-
- return [DefaultInfo(files = depset([bundle_file]))]
-
-# apex_aab rule creates Android Apk Bundle (.aab) file of the APEX specified in mainline_module.
-# There is no equivalent Soong module, and it is currently done in shell script by
-# invoking Soong multiple times.
-apex_aab = rule(
- implementation = _apex_aab_impl,
- attrs = {
- "mainline_module": attr.label(
- mandatory = True,
- cfg = arch_transition,
- providers = [ApexInfo],
- doc = "The label of a mainline module target",
- ),
- "_allowlist_function_transition": attr.label(
- default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
- doc = "Allow transition.",
- ),
- "_zipper": attr.label(
- cfg = "host",
- executable = True,
- default = "@bazel_tools//tools/zip:zipper",
- ),
- "_aapt2": attr.label(
- allow_single_file = True,
- cfg = "host",
- executable = True,
- default = "//prebuilts/sdk/tools:linux/bin/aapt2",
- ),
- "_merge_zips": attr.label(
- allow_single_file = True,
- cfg = "host",
- executable = True,
- default = "//prebuilts/build-tools:linux-x86/bin/merge_zips",
- ),
- "_zip2zip": attr.label(
- allow_single_file = True,
- cfg = "host",
- executable = True,
- default = "//prebuilts/build-tools:linux-x86/bin/zip2zip",
- ),
- "_bundletool": attr.label(
- cfg = "host",
- executable = True,
- default = "//prebuilts/bundletool",
- ),
- },
-)
diff --git a/rules/apex/sdk_versions.bzl b/rules/apex/sdk_versions.bzl
new file mode 100644
index 00000000..c81b3d0d
--- /dev/null
+++ b/rules/apex/sdk_versions.bzl
@@ -0,0 +1,47 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/common:api.bzl", "api")
+
+def maybe_override_min_sdk_version(min_sdk_version, override_min_sdk_version):
+ """
+ Override a min_sdk_version with another if higher.
+
+ Normalizes string codenames to API ints for direct comparisons.
+
+ Args:
+ min_sdk_version: The min_sdk_version to potentially be overridden, as a string.
+ Can be "current", or a number.
+ override_min_sdk_version: The version to potentially override min_sdk_version with, as a string.
+          Can be a number, or a known api level codename.
+ Returns:
+ Either min_sdk_version or override_min_sdk_version, converted to a string representation of a number.
+ """
+ if min_sdk_version == "current":
+ min_sdk_version = "10000"
+ if not str(min_sdk_version).isdigit():
+ fail("%s must only contain digits." % min_sdk_version)
+
+ min_api_level = int(min_sdk_version)
+
+ if str(override_min_sdk_version).isdigit():
+ override_api_level = int(override_min_sdk_version)
+ else:
+ override_api_level = api.api_levels.get(override_min_sdk_version, -1)
+
+ # Only override version numbers upwards.
+ if override_api_level > min_api_level:
+ min_api_level = override_api_level
+
+ return str(min_api_level)
diff --git a/rules/apex/testdata/BUILD b/rules/apex/testdata/BUILD
new file mode 100644
index 00000000..88dc8ea4
--- /dev/null
+++ b/rules/apex/testdata/BUILD
@@ -0,0 +1,20 @@
+exports_files([
+ "devkey.priv",
+ "devkey.pub",
+])
+
+filegroup(
+ name = "android_certificate_directory",
+ srcs = glob([
+ "*.pk8",
+ "*.pem",
+ "*.avbpubkey",
+ ]),
+ visibility = ["//build/bazel:__subpackages__"],
+)
+
+filegroup(
+ name = "dev-keystore",
+ srcs = ["devkey.keystore"],
+ visibility = ["//visibility:public"],
+)
diff --git a/rules/apex/testdata/another.pk8 b/rules/apex/testdata/another.pk8
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/another.pk8
diff --git a/rules/apex/testdata/another.x509.pem b/rules/apex/testdata/another.x509.pem
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/another.x509.pem
diff --git a/rules/apex/testdata/devkey.avbpubkey b/rules/apex/testdata/devkey.avbpubkey
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/devkey.avbpubkey
diff --git a/rules/apex/testdata/devkey.keystore b/rules/apex/testdata/devkey.keystore
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/devkey.keystore
diff --git a/rules/apex/testdata/devkey.pem b/rules/apex/testdata/devkey.pem
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/devkey.pem
diff --git a/rules/apex/testdata/devkey.priv b/rules/apex/testdata/devkey.priv
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/devkey.priv
diff --git a/rules/apex/testdata/devkey.pub b/rules/apex/testdata/devkey.pub
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/apex/testdata/devkey.pub
diff --git a/rules/apex/toolchain.bzl b/rules/apex/toolchain.bzl
index 04c7f4d8..e23f906b 100644
--- a/rules/apex/toolchain.bzl
+++ b/rules/apex/toolchain.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
ApexToolchainInfo = provider(
doc = "APEX toolchain",
@@ -28,6 +26,13 @@ ApexToolchainInfo = provider(
"android_jar",
"apex_compression_tool",
"soong_zip",
+ "jsonmodify",
+ "manifest_fixer",
+ "gen_ndk_usedby_apex",
+ "readelf",
+ "gen_java_usedby_apex",
+ "dexdeps",
+ "notice_generator",
],
)
@@ -36,15 +41,22 @@ def _apex_toolchain_impl(ctx):
toolchain_info = ApexToolchainInfo(
aapt2 = ctx.file.aapt2,
avbtool = ctx.attr.avbtool,
- apexer = ctx.file.apexer,
+ apexer = ctx.attr.apexer,
mke2fs = ctx.attr.mke2fs,
resize2fs = ctx.attr.resize2fs,
e2fsdroid = ctx.attr.e2fsdroid,
- sefcontext_compile = ctx.file.sefcontext_compile,
- conv_apex_manifest = ctx.file.conv_apex_manifest,
+ sefcontext_compile = ctx.attr.sefcontext_compile,
+ conv_apex_manifest = ctx.attr.conv_apex_manifest,
android_jar = ctx.file.android_jar,
- apex_compression_tool = ctx.file.apex_compression_tool,
+ apex_compression_tool = ctx.attr.apex_compression_tool,
soong_zip = ctx.file.soong_zip,
+ jsonmodify = ctx.attr.jsonmodify,
+ manifest_fixer = ctx.attr.manifest_fixer,
+ gen_ndk_usedby_apex = ctx.attr.gen_ndk_usedby_apex,
+ readelf = ctx.attr.readelf,
+ gen_java_usedby_apex = ctx.attr.gen_java_usedby_apex,
+ dexdeps = ctx.attr.dexdeps,
+ notice_generator = ctx.attr.notice_generator,
),
)
return [toolchain_info]
@@ -52,19 +64,26 @@ def _apex_toolchain_impl(ctx):
apex_toolchain = rule(
implementation = _apex_toolchain_impl,
attrs = {
- "aapt2": attr.label(allow_single_file = True, cfg = "host", executable = True),
- "avbtool": attr.label(cfg = "host", executable = True),
- "apexer": attr.label(allow_single_file = True, cfg = "host", executable = True),
- "mke2fs": attr.label(cfg = "host", executable = True),
- "resize2fs": attr.label(cfg = "host", executable = True),
- "e2fsdroid": attr.label(cfg = "host", executable = True),
- "sefcontext_compile": attr.label(allow_single_file = True, cfg = "host", executable = True),
- "conv_apex_manifest": attr.label(allow_single_file = True, cfg = "host", executable = True),
- "android_jar": attr.label(allow_single_file = True, cfg = "host"),
- "apex_compression_tool": attr.label(allow_single_file = True, cfg = "host", executable = True),
+ "aapt2": attr.label(allow_single_file = True, cfg = "exec", executable = True, mandatory = True),
+ "android_jar": attr.label(allow_single_file = True, cfg = "exec", mandatory = True),
+ "apex_compression_tool": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "apexer": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "avbtool": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "conv_apex_manifest": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "dexdeps": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "e2fsdroid": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "gen_java_usedby_apex": attr.label(cfg = "exec", executable = True, mandatory = True, allow_single_file = [".sh"]),
+ "gen_ndk_usedby_apex": attr.label(cfg = "exec", executable = True, mandatory = True, allow_single_file = [".sh"]),
+ "jsonmodify": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "manifest_fixer": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "mke2fs": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "notice_generator": attr.label(allow_single_file = True, cfg = "exec", executable = True, mandatory = True),
+ "readelf": attr.label(cfg = "exec", executable = True, mandatory = True, allow_single_file = True),
+ "resize2fs": attr.label(cfg = "exec", executable = True, mandatory = True),
+ "sefcontext_compile": attr.label(cfg = "exec", executable = True, mandatory = True),
# soong_zip is added as a dependency of apex_compression_tool which uses
# soong_zip to compress APEX files. avbtool is also used in apex_compression tool
# and has been added to apex toolchain previously.
- "soong_zip": attr.label(allow_single_file = True, cfg = "host", executable = True),
+ "soong_zip": attr.label(allow_single_file = True, cfg = "exec", executable = True, mandatory = True),
},
)
diff --git a/rules/apex/transition.bzl b/rules/apex/transition.bzl
index e6ffa374..408d939b 100644
--- a/rules/apex/transition.bzl
+++ b/rules/apex/transition.bzl
@@ -1,85 +1,124 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
"""
-Copyright (C) 2021 The Android Open Source Project
+Configuration transitions for APEX rules.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
+Transitions are a Bazel mechanism to analyze/build dependencies in a different
+configuration (i.e. options and flags). The APEX transition is applied from a
+top level APEX rule to its dependencies via an outgoing edge, so that the
+dependencies can be built specially for APEXes (vs the platform).
- http://www.apache.org/licenses/LICENSE-2.0
+e.g. if an apex A depends on some target T, building T directly as a top level target
+will use a different configuration from building T indirectly as a dependency of A. The
+latter will contain APEX specific configuration settings that its rule or an aspect can
+use to create different actions or providers for APEXes specifically.
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+The outgoing transitions are similar to ApexInfo propagation in Soong's
+top-down ApexInfoMutator:
+https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/apex.go;l=948-962;drc=539d41b686758eeb86236c0e0dcf75478acb77f3
"""
-# Configuration transitions for APEX rules.
-#
-# Transitions are a Bazel mechanism to analyze/build dependencies in a different
-# configuration (i.e. options and flags). The APEX transition is applied from a
-# top level APEX rule to its dependencies via an outgoing edge, so that the
-# dependencies can be built specially for APEXes (vs the platform).
-#
-# e.g. if an apex A depends on some target T, building T directly as a top level target
-# will use a different configuration from building T indirectly as a dependency of A. The
-# latter will contain APEX specific configuration settings that its rule or an aspect can
-# use to create different actions or providers for APEXes specifically..
-#
-# The outgoing transitions are similar to ApexInfo propagation in Soong's
-# top-down ApexInfoMutator:
-# https://cs.android.com/android/platform/superproject/+/master:build/soong/apex/apex.go;l=948-962;drc=539d41b686758eeb86236c0e0dcf75478acb77f3
-
load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("//build/bazel/rules/apex:sdk_versions.bzl", "maybe_override_min_sdk_version")
+
+def _create_apex_configuration(settings, attr, additional = {}):
+ min_sdk_version = maybe_override_min_sdk_version(
+ attr.min_sdk_version,
+ settings["//build/bazel/rules/apex:apex_global_min_sdk_version_override"],
+ )
-def _create_apex_configuration(attr, additional = {}):
return dicts.add({
"//build/bazel/rules/apex:apex_name": attr.name, # Name of the APEX
- "//build/bazel/rules/apex:min_sdk_version": attr.min_sdk_version, # Min SDK version of the APEX
+ "//build/bazel/rules/apex:base_apex_name": attr.base_apex_name, # Name of the base APEX, if it exists
+ "//build/bazel/rules/apex:min_sdk_version": min_sdk_version,
+ "//build/bazel/rules/apex:within_apex": True, # Building an APEX
}, additional)
def _impl(settings, attr):
# Perform a transition to apply APEX specific build settings on the
# destination target (i.e. an APEX dependency).
- return _create_apex_configuration(attr)
+
+ # At this point, the configurable attributes native_shared_libs_32 and
+ # native_shared_libs_64 are already resolved according to the lunch target
+ direct_deps = [str(dep) for dep in attr.native_shared_libs_32]
+ direct_deps += [str(dep) for dep in attr.native_shared_libs_64]
+ direct_deps += [str(dep) for dep in attr.binaries]
+
+ return _create_apex_configuration(settings, attr, {
+ "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
+ })
+
+_TRANSITION_INPUTS = [
+ "//build/bazel/rules/apex:apex_global_min_sdk_version_override",
+]
+
+_TRANSITION_OUTPUTS = [
+ "//build/bazel/rules/apex:apex_name",
+ "//build/bazel/rules/apex:base_apex_name",
+ "//build/bazel/rules/apex:within_apex",
+ "//build/bazel/rules/apex:min_sdk_version",
+ "//build/bazel/rules/apex:apex_direct_deps",
+]
apex_transition = transition(
implementation = _impl,
- inputs = [],
- outputs = [
- "//build/bazel/rules/apex:apex_name",
- "//build/bazel/rules/apex:min_sdk_version",
- ],
+ inputs = _TRANSITION_INPUTS,
+ outputs = _TRANSITION_OUTPUTS,
)
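As a rough, self-contained sketch of the pattern used here (all labels, rule names, and the build setting below are hypothetical, not part of this change), an outgoing-edge transition is defined and then attached to a dependency attribute:

def _example_transition_impl(settings, attr):
    # Build settings applied to dependencies reached through the attribute below.
    # "//build/bazel/examples:within_example" is a hypothetical bool build setting.
    return {"//build/bazel/examples:within_example": True}

example_transition = transition(
    implementation = _example_transition_impl,
    inputs = [],
    outputs = ["//build/bazel/examples:within_example"],
)

def _example_rule_impl(ctx):
    return []

example_rule = rule(
    implementation = _example_rule_impl,
    attrs = {
        # cfg = <transition> applies the transition on the outgoing edge to deps.
        "deps": attr.label_list(cfg = example_transition),
        # Allowlist required for rules that use Starlark transitions.
        "_allowlist_function_transition": attr.label(
            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
        ),
    },
)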
+# The following table describes how target platform of shared_lib_transition_32 and shared_lib_transition_64
+# look like when building APEXes for different primary/secondary architecture.
+#
+# |---------------------------+----------------------------------------------------+----------------------------------------------------|
+# | Primary arch | Platform for | Platform for |
+# | / Secondary arch | 32b libs transition | 64b libs transition |
+# |---------------------------+----------------------------------------------------+----------------------------------------------------|
+# | 32bit / N/A | android_target | android_target |
+# | (android_target is 32bit) | | (wrong target platform indicates the transition |
+# | | | is not needed, and the 64bit libs are not included |
+# | | | in APEXes for 32bit devices, see |
+# | | | _create_file_mapping() in apex.bzl) |
+# |---------------------------+----------------------------------------------------+----------------------------------------------------|
+# | 64bit / 32bit | android_target_secondary | android_target |
+# | (android_target is 64bit) | | |
+# |---------------------------+----------------------------------------------------+----------------------------------------------------|
+# | 64bit / N/A | android_target | android_target |
+# | (android_target is 64bit) | (wrong target platform indicates the transition | |
+# | | is not needed, and the 32bit libs are not included | |
+# | | in APEXes for 64bit ONLY devices, see | |
+# | | _create_file_mapping() in apex.bzl) | |
+# |---------------------------+----------------------------------------------------+----------------------------------------------------|
+
def _impl_shared_lib_transition_32(settings, attr):
# Perform a transition to apply APEX specific build settings on the
# destination target (i.e. an APEX dependency).
direct_deps = [str(dep) for dep in attr.native_shared_libs_32]
+ direct_deps += [str(dep) for dep in attr.binaries]
+
+ old_platform = str(settings["//command_line_option:platforms"][0])
- # TODO: We need to check if this is a x86 or arm arch then only set one platform
- # instead of this 1:2 split to avoid performance hit.
- return {
- "x86": _create_apex_configuration(attr, {
- "//command_line_option:platforms": "//build/bazel/platforms:android_x86",
- "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
- }),
- "arm": _create_apex_configuration(attr, {
- "//command_line_option:platforms": "//build/bazel/platforms:android_arm",
- "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
- }),
- }
+ return _create_apex_configuration(settings, attr, {
+ "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
+ "//command_line_option:platforms": old_platform + "_secondary",
+ })
shared_lib_transition_32 = transition(
implementation = _impl_shared_lib_transition_32,
- inputs = [],
- outputs = [
- "//build/bazel/rules/apex:apex_name",
- "//build/bazel/rules/apex:min_sdk_version",
- "//build/bazel/rules/apex:apex_direct_deps",
- "//command_line_option:platforms",
- ],
+ inputs = _TRANSITION_INPUTS + ["//command_line_option:platforms"],
+ outputs = _TRANSITION_OUTPUTS + ["//command_line_option:platforms"],
)
def _impl_shared_lib_transition_64(settings, attr):
@@ -87,27 +126,17 @@ def _impl_shared_lib_transition_64(settings, attr):
# destination target (i.e. an APEX dependency).
direct_deps = [str(dep) for dep in attr.native_shared_libs_64]
+ direct_deps += [str(dep) for dep in attr.binaries]
- # TODO: We need to check if this is a x86 or arm arch then only set one platform
- # instead of this 1:2 split to avoid performance hit.
- return {
- "x86_64": _create_apex_configuration(attr, {
- "//command_line_option:platforms": "//build/bazel/platforms:android_x86_64",
- "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
- }),
- "arm64": _create_apex_configuration(attr, {
- "//command_line_option:platforms": "//build/bazel/platforms:android_arm64",
- "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
- }),
- }
+ # For the 64 bit transition, we don't actually change the arch, because
+ # we only read the value of native_shared_libs_64 when the target
+ # is 64-bit already
+ return _create_apex_configuration(settings, attr, {
+ "//build/bazel/rules/apex:apex_direct_deps": direct_deps,
+ })
shared_lib_transition_64 = transition(
implementation = _impl_shared_lib_transition_64,
- inputs = [],
- outputs = [
- "//build/bazel/rules/apex:apex_name",
- "//build/bazel/rules/apex:min_sdk_version",
- "//build/bazel/rules/apex:apex_direct_deps",
- "//command_line_option:platforms",
- ],
+ inputs = _TRANSITION_INPUTS,
+ outputs = _TRANSITION_OUTPUTS,
)
diff --git a/rules/apex_key.bzl b/rules/apex_key.bzl
deleted file mode 100644
index be31da67..00000000
--- a/rules/apex_key.bzl
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-ApexKeyInfo = provider(
- "Info needed to sign APEX bundles",
- fields = {
- "public_key": "File containing the public_key",
- "private_key": "File containing the private key",
- },
-)
-
-def _apex_key_rule_impl(ctx):
- return [
- ApexKeyInfo(public_key = ctx.file.public_key, private_key = ctx.file.private_key),
- ]
-
-apex_key = rule(
- implementation = _apex_key_rule_impl,
- attrs = {
- "public_key": attr.label(mandatory = True, allow_single_file = True),
- "private_key": attr.label(mandatory = True, allow_single_file = True),
- },
-)
diff --git a/rules/api_surfaces_injection.bzl b/rules/api_surfaces_injection.bzl
new file mode 100644
index 00000000..9d10e9ad
--- /dev/null
+++ b/rules/api_surfaces_injection.bzl
@@ -0,0 +1,26 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/apis:api_surface.bzl", "ALL_API_SURFACES")
+
+def _impl(rctx):
+ rctx.file("WORKSPACE", "")
+ synthetic_build_dir = str(rctx.path(Label("//:BUILD")).dirname)
+ api_surfaces_dir = synthetic_build_dir + "/build/bazel/api_surfaces"
+ for api_surface in ALL_API_SURFACES:
+ rctx.symlink(api_surfaces_dir + "/" + api_surface, api_surface)
+
+api_surfaces_repository = repository_rule(
+ implementation = _impl,
+)
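A possible WORKSPACE usage of the repository rule above, assuming it is loaded from this file (the repository name is illustrative only):

load("//build/bazel/rules:api_surfaces_injection.bzl", "api_surfaces_repository")

api_surfaces_repository(
    name = "api_surfaces",
)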
diff --git a/rules/apis/BUILD b/rules/apis/BUILD
new file mode 100644
index 00000000..17844111
--- /dev/null
+++ b/rules/apis/BUILD
@@ -0,0 +1,21 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":api_domain_test.bzl", "api_domain_test_suite")
+load(":cc_api_contribution_test.bzl", "cc_api_test_suite")
+
+cc_api_test_suite(name = "cc_api_test_suite")
+
+api_domain_test_suite(name = "api_domain_test_suite")
diff --git a/rules/apis/README.md b/rules/apis/README.md
new file mode 100644
index 00000000..8c95adf5
--- /dev/null
+++ b/rules/apis/README.md
@@ -0,0 +1,9 @@
+# Bazel rules for API export
+This package contains Bazel rules for declaring API contributions of API
+domains to API surfaces (go/android-build-api-domains)
+
+## WARNING:
+API export is expected to run in **Standalone Bazel mode**
+(go/multi-tree-api-export). As such, rules defined in this package should not
+have any dependencies on bp2build (most notably the generated `@soong_injection`
+workspace).
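As a rough illustration of how the rules in this package are expected to fit together, a hypothetical BUILD file (all target and file names below are made up) could declare contributions and group them into an API domain:

load("//build/bazel/rules/apis:api_domain.bzl", "api_domain")
load("//build/bazel/rules/apis:cc_api_contribution.bzl", "cc_api_contribution")
load("//build/bazel/rules/apis:java_api_contribution.bzl", "java_api_contribution")

cc_api_contribution(
    name = "libfoo.contribution",
    api = "libfoo.map.txt",
    api_surfaces = ["publicapi", "module-libapi"],
)

java_api_contribution(
    name = "framework-foo.contribution",
    api = "current.txt",
    api_surface = "publicapi",
)

api_domain(
    name = "system",
    cc_api_contributions = [":libfoo.contribution"],
    java_api_contributions = [":framework-foo.contribution"],
)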
diff --git a/rules/apis/api_domain.bzl b/rules/apis/api_domain.bzl
new file mode 100644
index 00000000..74b712ed
--- /dev/null
+++ b/rules/apis/api_domain.bzl
@@ -0,0 +1,56 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bazel rules for generating the metadata of API domain contributions to an API surface"""
+
+load(":api_surface.bzl", "ALL_API_SURFACES")
+load(":cc_api_contribution.bzl", "CcApiContributionInfo")
+load(":java_api_contribution.bzl", "JavaApiContributionInfo")
+
+def _api_domain_impl(ctx):
+ """Implementation of the api_domain rule
+ Currently it only supports exporting the API surface contributions of the API domain
+ """
+ out = []
+ for api_surface in ALL_API_SURFACES:
+ # TODO(b/220938703): Add other contributions (e.g. resource_api_contribution)
+ # cc
+ cc_libraries = [cc[CcApiContributionInfo] for cc in ctx.attr.cc_api_contributions if api_surface in cc[CcApiContributionInfo].api_surfaces]
+
+ # java
+ java_libraries = [java[JavaApiContributionInfo] for java in ctx.attr.java_api_contributions if api_surface in java[JavaApiContributionInfo].api_surfaces]
+
+ # The contributions of an API domain are always at ver=current
+ # Contributions of an API domain to previous Android SDKs will be snapshotted and imported into the build graph by a separate Bazel rule
+ api_surface_metadata = struct(
+ name = api_surface,
+ version = "current",
+ api_domain = ctx.attr.name,
+ cc_libraries = cc_libraries,
+ java_libraries = java_libraries,
+ )
+ api_surface_filestem = "-".join([api_surface, "current", ctx.attr.name])
+ api_surface_file = ctx.actions.declare_file(api_surface_filestem + ".json")
+ ctx.actions.write(api_surface_file, json.encode(api_surface_metadata))
+ out.append(api_surface_file)
+
+ return [DefaultInfo(files = depset(out))]
+
+api_domain = rule(
+ implementation = _api_domain_impl,
+ attrs = {
+ "cc_api_contributions": attr.label_list(providers = [CcApiContributionInfo]),
+ "java_api_contributions": attr.label_list(providers = [JavaApiContributionInfo]),
+ },
+)
diff --git a/rules/apis/api_domain_test.bzl b/rules/apis/api_domain_test.bzl
new file mode 100644
index 00000000..c28b2d4c
--- /dev/null
+++ b/rules/apis/api_domain_test.bzl
@@ -0,0 +1,161 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":api_domain.bzl", "api_domain")
+load(":cc_api_contribution.bzl", "cc_api_contribution")
+load(":java_api_contribution.bzl", "java_api_contribution")
+
+# Check that a .json file is created
+def _json_output_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.equals(
+ env,
+ expected = 9, # union of cc and java api surfaces
+ actual = len(actions),
+ )
+ asserts.equals(
+ env,
+ expected = 1,
+ actual = len(actions[0].outputs.to_list()),
+ )
+ asserts.equals(
+ env,
+ expected = "json",
+ actual = actions[0].outputs.to_list()[0].extension,
+ )
+ return analysistest.end(env)
+
+json_output_test = analysistest.make(_json_output_test_impl)
+
+def _json_output_test():
+ test_name = "json_output_test"
+ subject_name = test_name + "_subject"
+ api_domain(
+ name = subject_name,
+ cc_api_contributions = [],
+ tags = ["manual"],
+ )
+ json_output_test(
+ name = test_name,
+ target_under_test = subject_name,
+ )
+ return test_name
+
+# Check that output contains contribution information
+# e.g. cc_libraries, java_libraries
+def _json_output_contains_contributions_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.equals(
+ env,
+ expected = 9, # union of cc and java api surfaces
+ actual = len(actions),
+ )
+
+ output = json.decode(actions[0].content.replace("'", "")) # Trim the surrounding '
+
+ # cc
+ asserts.true(env, "cc_libraries" in output)
+ cc_contributions_in_output = output.get("cc_libraries")
+ asserts.equals(
+ env,
+ expected = 1,
+ actual = len(cc_contributions_in_output),
+ )
+ test_contribution = cc_contributions_in_output[0]
+ asserts.equals(
+ env,
+ expected = ctx.attr.expected_cc_library_name,
+ actual = test_contribution.get("name"),
+ )
+ asserts.equals(
+ env,
+ expected = paths.join(
+ paths.dirname(ctx.build_file_path),
+ ctx.attr.expected_symbolfile,
+ ),
+ actual = test_contribution.get("api"),
+ )
+
+ # java
+ asserts.true(env, "java_libraries" in output)
+ java_contributions_in_output = output.get("java_libraries")
+ asserts.equals(
+ env,
+ expected = 1,
+ actual = len(java_contributions_in_output),
+ )
+ test_java_contribution = java_contributions_in_output[0]
+ asserts.equals(
+ env,
+ expected = paths.join(
+ paths.dirname(ctx.build_file_path),
+ ctx.attr.expected_java_apifile,
+ ),
+ actual = test_java_contribution.get("api"),
+ )
+ return analysistest.end(env)
+
+json_output_contains_contributions_test = analysistest.make(
+ impl = _json_output_contains_contributions_test_impl,
+ attrs = {
+ "expected_cc_library_name": attr.string(),
+ "expected_symbolfile": attr.string(),
+ "expected_java_apifile": attr.string(),
+ },
+)
+
+def _json_output_contains_contributions_test():
+ test_name = "json_output_contains_cc_test"
+ subject_name = test_name + "_subject"
+ cc_subject_name = subject_name + "_cc"
+ java_subject_name = subject_name + "_java"
+ symbolfile = "libfoo.map.txt"
+ java_apifile = "current.txt"
+ cc_api_contribution(
+ name = cc_subject_name,
+ api = symbolfile,
+ tags = ["manual"],
+ )
+ java_api_contribution(
+ name = java_subject_name,
+ api = java_apifile,
+ tags = ["manual"],
+ )
+ api_domain(
+ name = subject_name,
+ cc_api_contributions = [cc_subject_name],
+ java_api_contributions = [java_subject_name],
+ tags = ["manual"],
+ )
+ json_output_contains_contributions_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_cc_library_name = cc_subject_name,
+ expected_symbolfile = symbolfile,
+ expected_java_apifile = java_apifile,
+ )
+ return test_name
+
+def api_domain_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _json_output_test(),
+ _json_output_contains_contributions_test(),
+ ],
+ )
diff --git a/rules/apis/api_surface.bzl b/rules/apis/api_surface.bzl
new file mode 100644
index 00000000..12af911e
--- /dev/null
+++ b/rules/apis/api_surface.bzl
@@ -0,0 +1,44 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Constants for Android API surfaces"""
+
+PUBLIC_API = "publicapi"
+SYSTEM_API = "systemapi"
+TEST_API = "testapi"
+MODULE_LIB_API = "module-libapi"
+SYSTEM_SERVER_API = "system-serverapi"
+INTRA_CORE_API = "intracoreapi"
+CORE_PLATFORM_API = "core_platformapi"
+
+# VENDOR_API is the API surface provided by the system to vendors
+# Also known as LLNDK.
+VENDOR_API = "vendorapi"
+
+# TOOLCHAIN_API is a special API surface provided by ART to compile other API domains
+# (e.g. core-lambda-stubs required to compile java files containing lambdas)
+# This is not part of go/android-api-types, and is not available to apps at runtime
+TOOLCHAIN_API = "toolchainapi"
+
+ALL_API_SURFACES = [
+ PUBLIC_API,
+ SYSTEM_API,
+ TEST_API,
+ MODULE_LIB_API,
+ SYSTEM_SERVER_API,
+ INTRA_CORE_API,
+ CORE_PLATFORM_API,
+ VENDOR_API,
+ TOOLCHAIN_API,
+]
diff --git a/rules/apis/cc_api_contribution.bzl b/rules/apis/cc_api_contribution.bzl
new file mode 100644
index 00000000..ca5ac37a
--- /dev/null
+++ b/rules/apis/cc_api_contribution.bzl
@@ -0,0 +1,235 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bazel rules for exporting API contributions of CC libraries"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:sets.bzl", "sets")
+load("//build/bazel/rules/cc:cc_constants.bzl", "constants")
+load(":api_surface.bzl", "MODULE_LIB_API", "PUBLIC_API", "VENDOR_API")
+
+"""A Bazel provider that encapsulates the headers presented to an API surface"""
+CcApiHeaderInfo = provider(
+ fields = {
+ "name": "Name identifying the header files",
+ "root": "Directory containing the header files, relative to workspace root. This will become the -I parameter in consuming API domains. This defaults to the current Bazel package",
+ "headers": "The header (.h) files presented by the library to an API surface",
+ "system": "bool, determines whether the include path will be -I or -isystem",
+ "arch": "Target arch of devices that use these header files to compile. The default is empty, which means that it is arch-agnostic",
+ },
+)
+
+def _cc_api_header_impl(ctx):
+ """Implementation for the cc_api_headers rule.
+ This rule does not have any build actions, but returns a `CcApiHeaderInfo` provider object"""
+ headers_filepath = [header.path for header in ctx.files.hdrs]
+ root = paths.dirname(ctx.build_file_path)
+ if ctx.attr.include_dir:
+ root = paths.join(root, ctx.attr.include_dir)
+ info = CcApiHeaderInfo(
+ name = ctx.label.name,
+ root = root,
+ headers = headers_filepath,
+ system = ctx.attr.system,
+ arch = ctx.attr.arch,
+ )
+
+ # TODO: Use depset for CcApiHeaderInfoList to optimize merges in `_cc_api_contribution_impl`
+ return [
+ info,
+ CcApiHeaderInfoList(
+ headers_list = [info],
+ ),
+ ]
+
+"""A bazel rule that encapsulates the header contributions of a CC library to an API surface
+This rule does not contain the API symbolfile (.map.txt). The API symbolfile is part of the cc_api_contribution rule
+This layering is necessary since the symbols present in a single .map.txt file can be defined in different include directories
+e.g.
+├── Android.bp
+├── BUILD
+├── include <-- cc_api_headers
+├── include_other <-- cc_api_headers
+├── libfoo.map.txt
+"""
+cc_api_headers = rule(
+ implementation = _cc_api_header_impl,
+ attrs = {
+ "include_dir": attr.string(
+ mandatory = False,
+ doc = "Directory containing the header files, relative to the Bazel package. This relative path will be joined with the Bazel package path to become the -I parameter in the consuming API domain",
+ ),
+ "hdrs": attr.label_list(
+ mandatory = True,
+ allow_files = constants.hdr_dot_exts,
+ doc = "List of .h files presented to the API surface. Glob patterns are allowed",
+ ),
+ "system": attr.bool(
+ default = False,
+ doc = "Boolean to indicate whether these are system headers",
+ ),
+ "arch": attr.string(
+ mandatory = False,
+ values = ["arm", "arm64", "x86", "x86_64"],
+ doc = "Arch of the target device. The default is empty, which means that the headers are arch-agnostic",
+ ),
+ },
+)
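Given the layering described above, a hypothetical BUILD file for that directory tree might wire the two header targets into a single contribution as follows (names are illustrative):

load("//build/bazel/rules/apis:cc_api_contribution.bzl", "cc_api_contribution", "cc_api_headers")

cc_api_headers(
    name = "libfoo_headers",
    include_dir = "include",
    hdrs = glob(["include/**/*.h"]),
)

cc_api_headers(
    name = "libfoo_headers_other",
    include_dir = "include_other",
    hdrs = glob(["include_other/**/*.h"]),
)

cc_api_contribution(
    name = "libfoo.contribution",
    api = "libfoo.map.txt",
    hdrs = [
        ":libfoo_headers",
        ":libfoo_headers_other",
    ],
)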
+
+"""List container for multiple CcApiHeaderInfo providers"""
+CcApiHeaderInfoList = provider(
+ fields = {
+ "headers_list": "List of CcApiHeaderInfo providers presented by a target",
+ },
+)
+
+def _cc_api_library_headers_impl(ctx):
+ hdrs_info = []
+ for hdr in ctx.attr.hdrs:
+ for hdr_info in hdr[CcApiHeaderInfoList].headers_list:
+ hdrs_info.append(hdr_info)
+
+ return [
+ CcApiHeaderInfoList(
+ headers_list = hdrs_info,
+ ),
+ ]
+
+_cc_api_library_headers = rule(
+ implementation = _cc_api_library_headers_impl,
+ attrs = {
+ "hdrs": attr.label_list(
+ mandatory = True,
+ providers = [CcApiHeaderInfoList],
+ ),
+ },
+)
+
+# Internal header library targets created by cc_api_library_headers macro
+# Bazel does not allow target name to end with `/`
+def _header_target_name(name, include_dir):
+ return name + "_" + paths.normalize(include_dir)
+
+def cc_api_library_headers(
+ name,
+ hdrs = [], # @unused
+ export_includes = [],
+ export_system_includes = [],
+ arch = None,
+ deps = [],
+ **kwargs):
+ header_deps = []
+ for include in export_includes:
+ _name = _header_target_name(name, include)
+
+ # export_include = "." causes the following error in glob
+ # Error in glob: segment '.' not permitted
+ # Normalize path before globbing
+ fragments = [include, "**/*.h"]
+ normpath = paths.normalize(paths.join(*fragments))
+
+ cc_api_headers(
+ name = _name,
+ include_dir = include,
+ hdrs = native.glob([normpath]),
+ system = False,
+ arch = arch,
+ )
+ header_deps.append(_name)
+
+ for system_include in export_system_includes:
+ _name = _header_target_name(name, system_include)
+ cc_api_headers(
+ name = _name,
+ include_dir = system_include,
+ hdrs = native.glob([paths.join(system_include, "**/*.h")]),
+ system = True,
+ arch = arch,
+ )
+ header_deps.append(_name)
+
+ # deps should be exported
+ header_deps.extend(deps)
+
+ _cc_api_library_headers(
+ name = name,
+ hdrs = header_deps,
+ **kwargs
+ )
+
+"""A Bazel provider that encapsulates the contributions of a CC library to an API surface"""
+CcApiContributionInfo = provider(
+ fields = {
+ "name": "Name of the cc library",
+ "api": "Path of map.txt describing the stable APIs of the library. Path is relative to workspace root",
+ "headers": "metadata of the header files of the cc library",
+ "api_surfaces": "API surface(s) this library contributes to",
+ },
+)
+
+VALID_CC_API_SURFACES = [
+ PUBLIC_API,
+ MODULE_LIB_API, # API surface provided by platform and mainline modules to other mainline modules
+ VENDOR_API,
+]
+
+def _validate_api_surfaces(api_surfaces):
+ for api_surface in api_surfaces:
+ if api_surface not in VALID_CC_API_SURFACES:
+ fail(api_surface, " is not a valid API surface. Acceptable values: ", VALID_CC_API_SURFACES)
+
+def _cc_api_contribution_impl(ctx):
+ """Implementation for the cc_api_contribution rule
+ This rule does not have any build actions, but returns a `CcApiContributionInfo` provider object"""
+ api_filepath = ctx.file.api.path
+ hdrs_info = sets.make()
+ for hdr in ctx.attr.hdrs:
+ for hdr_info in hdr[CcApiHeaderInfoList].headers_list:
+ sets.insert(hdrs_info, hdr_info)
+
+ name = ctx.attr.library_name or ctx.label.name
+ _validate_api_surfaces(ctx.attr.api_surfaces)
+
+ return [
+ CcApiContributionInfo(
+ name = name,
+ api = api_filepath,
+ headers = sets.to_list(hdrs_info),
+ api_surfaces = ctx.attr.api_surfaces,
+ ),
+ ]
+
+cc_api_contribution = rule(
+ implementation = _cc_api_contribution_impl,
+ attrs = {
+ "library_name": attr.string(
+ mandatory = False,
+ doc = "Name of the library. This can be different from `name` to prevent name collision with the implementation of the library in the same Bazel package. Defaults to label.name",
+ ),
+ "api": attr.label(
+ mandatory = True,
+ allow_single_file = [".map.txt", ".map"],
+ doc = ".map.txt file of the library",
+ ),
+ "hdrs": attr.label_list(
+ mandatory = False,
+ providers = [CcApiHeaderInfoList],
+ doc = "Header contributions of the cc library. This should return a `CcApiHeaderInfo` provider",
+ ),
+ "api_surfaces": attr.string_list(
+ doc = "API surface(s) this library contributes to. See VALID_CC_API_SURFACES in cc_api_contribution.bzl for valid values for API surfaces",
+ default = ["publicapi"],
+ ),
+ },
+)
diff --git a/rules/apis/cc_api_contribution_test.bzl b/rules/apis/cc_api_contribution_test.bzl
new file mode 100644
index 00000000..e9aba9b1
--- /dev/null
+++ b/rules/apis/cc_api_contribution_test.bzl
@@ -0,0 +1,294 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":cc_api_contribution.bzl", "CcApiContributionInfo", "CcApiHeaderInfo", "CcApiHeaderInfoList", "cc_api_contribution", "cc_api_headers", "cc_api_library_headers")
+
+def _empty_include_dir_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(env, paths.dirname(ctx.build_file_path), target_under_test[CcApiHeaderInfo].root)
+ return analysistest.end(env)
+
+empty_include_dir_test = analysistest.make(_empty_include_dir_test_impl)
+
+def _empty_include_dir_test():
+ test_name = "empty_include_dir_test"
+ subject_name = test_name + "_subject"
+ cc_api_headers(
+ name = subject_name,
+ hdrs = ["hdr.h"],
+ tags = ["manual"],
+ )
+ empty_include_dir_test(
+ name = test_name,
+ target_under_test = subject_name,
+ )
+ return test_name
+
+def _nonempty_include_dir_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ expected_root = paths.join(paths.dirname(ctx.build_file_path), ctx.attr.expected_include_dir)
+ asserts.equals(env, expected_root, target_under_test[CcApiHeaderInfo].root)
+ return analysistest.end(env)
+
+nonempty_include_dir_test = analysistest.make(
+ impl = _nonempty_include_dir_test_impl,
+ attrs = {
+ "expected_include_dir": attr.string(),
+ },
+)
+
+def _nonempty_include_dir_test():
+ test_name = "nonempty_include_dir_test"
+ subject_name = test_name + "_subject"
+ include_dir = "my/include"
+ cc_api_headers(
+ name = subject_name,
+ include_dir = include_dir,
+ hdrs = ["my/include/hdr.h"],
+ tags = ["manual"],
+ )
+ nonempty_include_dir_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_include_dir = include_dir,
+ )
+ return test_name
+
+def _api_library_headers_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.true(env, CcApiHeaderInfoList in target_under_test)
+ headers_list = target_under_test[CcApiHeaderInfoList].headers_list
+ actual_includes = sorted([headers.root for headers in headers_list if not headers.system])
+ actual_system_includes = sorted([headers.root for headers in headers_list if headers.system])
+ asserts.equals(env, ctx.attr.expected_includes, actual_includes)
+ asserts.equals(env, ctx.attr.expected_system_includes, actual_system_includes)
+ return analysistest.end(env)
+
+api_library_headers_test = analysistest.make(
+ impl = _api_library_headers_test_impl,
+ attrs = {
+ "expected_includes": attr.string_list(),
+ "expected_system_includes": attr.string_list(),
+ },
+)
+
+def _api_library_headers_test():
+ test_name = "api_library_headers_test"
+ subject_name = test_name + "_subject"
+ cc_api_library_headers(
+ name = subject_name,
+ hdrs = [],
+ export_includes = ["include1", "include2"],
+ export_system_includes = ["system_include1"],
+ deps = [":other_api_library_headers", "other_api_headers"],
+ tags = ["manual"],
+ )
+ cc_api_library_headers(
+ name = "other_api_library_headers",
+ hdrs = [],
+ export_includes = ["otherinclude1"],
+ tags = ["manual"],
+ )
+ cc_api_headers(
+ name = "other_api_headers",
+ hdrs = [],
+ include_dir = "otherinclude2",
+ tags = ["manual"],
+ )
+ api_library_headers_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_includes = ["build/bazel/rules/apis/include1", "build/bazel/rules/apis/include2", "build/bazel/rules/apis/otherinclude1", "build/bazel/rules/apis/otherinclude2"],
+ expected_system_includes = ["build/bazel/rules/apis/system_include1"],
+ )
+ return test_name
+
+def _api_path_is_relative_to_workspace_root_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ expected_path = paths.join(paths.dirname(ctx.build_file_path), ctx.attr.expected_symbolfile)
+ asserts.equals(env, expected_path, target_under_test[CcApiContributionInfo].api)
+ return analysistest.end(env)
+
+api_path_is_relative_to_workspace_root_test = analysistest.make(
+ impl = _api_path_is_relative_to_workspace_root_test_impl,
+ attrs = {
+ "expected_symbolfile": attr.string(),
+ },
+)
+
+def _api_path_is_relative_to_workspace_root_test():
+ test_name = "api_path_is_relative_workspace_root"
+ subject_name = test_name + "_subject"
+ symbolfile = "libfoo.map.txt"
+ cc_api_contribution(
+ name = subject_name,
+ api = symbolfile,
+ tags = ["manual"],
+ )
+ api_path_is_relative_to_workspace_root_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_symbolfile = symbolfile,
+ )
+ return test_name
+
+def _empty_library_name_gets_label_name_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(env, target_under_test.label.name, target_under_test[CcApiContributionInfo].name)
+ return analysistest.end(env)
+
+empty_library_name_gets_label_name_test = analysistest.make(_empty_library_name_gets_label_name_impl)
+
+def _empty_library_name_gets_label_name_test():
+ test_name = "empty_library_name_gets_label_name"
+ subject_name = test_name + "_subject"
+ cc_api_contribution(
+ name = subject_name,
+ api = ":libfoo.map.txt",
+ tags = ["manual"],
+ )
+ empty_library_name_gets_label_name_test(
+ name = test_name,
+ target_under_test = subject_name,
+ )
+ return test_name
+
+def _nonempty_library_name_preferred_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(env, ctx.attr.expected_library_name, target_under_test[CcApiContributionInfo].name)
+ return analysistest.end(env)
+
+nonempty_library_name_preferred_test = analysistest.make(
+ impl = _nonempty_library_name_preferred_impl,
+ attrs = {
+ "expected_library_name": attr.string(),
+ },
+)
+
+def _nonempty_library_name_preferred_test():
+ test_name = "nonempty_library_name_preferred_test"
+ subject_name = test_name + "_subject"
+ library_name = "mylibrary"
+ cc_api_contribution(
+ name = subject_name,
+ library_name = library_name,
+ api = ":libfoo.map.txt",
+ tags = ["manual"],
+ )
+ nonempty_library_name_preferred_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_library_name = library_name,
+ )
+ return test_name
+
+def _api_surfaces_attr_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(env, ctx.attr.expected_api_surfaces, target_under_test[CcApiContributionInfo].api_surfaces)
+ return analysistest.end(env)
+
+api_surfaces_attr_test = analysistest.make(
+ impl = _api_surfaces_attr_test_impl,
+ attrs = {
+ "expected_api_surfaces": attr.string_list(),
+ },
+)
+
+def _api_surfaces_attr_test():
+ test_name = "api_surfaces_attr_test"
+ subject_name = test_name + "_subject"
+ cc_api_contribution(
+ name = subject_name,
+ api = "libfoo.map.txt",
+ api_surfaces = ["publicapi", "module-libapi"],
+ tags = ["manual"],
+ )
+ api_surfaces_attr_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_api_surfaces = ["publicapi", "module-libapi"],
+ )
+ return test_name
+
+def _api_headers_contribution_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(env, ctx.attr.expected_include_dirs, [hdr_info.root for hdr_info in target_under_test[CcApiContributionInfo].headers])
+ return analysistest.end(env)
+
+api_headers_contribution_test = analysistest.make(
+ impl = _api_headers_contribution_test_impl,
+ attrs = {
+ "expected_include_dirs": attr.string_list(),
+ },
+)
+
+def _api_headers_contribution_test():
+ test_name = "api_headers_contribution_test"
+ subject_name = test_name + "_subject"
+ cc_api_contribution(
+ name = subject_name,
+ api = ":libfoo.map.txt",
+ hdrs = [
+ subject_name + "_headers",
+ subject_name + "_library_headers",
+ ],
+ tags = ["manual"],
+ )
+ cc_api_headers(
+ name = subject_name + "_headers",
+ hdrs = [],
+ include_dir = "dir1",
+ tags = ["manual"],
+ )
+ cc_api_library_headers(
+ name = subject_name + "_library_headers",
+ export_includes = ["dir2", "dir3"],
+ tags = ["manual"],
+ )
+ api_headers_contribution_test(
+ name = test_name,
+ target_under_test = subject_name,
+ expected_include_dirs = [
+ "build/bazel/rules/apis/dir1",
+ "build/bazel/rules/apis/dir2",
+ "build/bazel/rules/apis/dir3",
+ ],
+ )
+ return test_name
+
+def cc_api_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _empty_include_dir_test(),
+ _nonempty_include_dir_test(),
+ _api_library_headers_test(),
+ _api_path_is_relative_to_workspace_root_test(),
+ _empty_library_name_gets_label_name_test(),
+ _nonempty_library_name_preferred_test(),
+ _api_surfaces_attr_test(),
+ _api_headers_contribution_test(),
+ ],
+ )
diff --git a/rules/apis/java_api_contribution.bzl b/rules/apis/java_api_contribution.bzl
new file mode 100644
index 00000000..5baa44e1
--- /dev/null
+++ b/rules/apis/java_api_contribution.bzl
@@ -0,0 +1,95 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bazel rules for exporting API contributions of Java libraries"""
+
+load(":api_surface.bzl", "CORE_PLATFORM_API", "INTRA_CORE_API", "MODULE_LIB_API", "PUBLIC_API", "SYSTEM_API", "SYSTEM_SERVER_API", "TEST_API", "TOOLCHAIN_API")
+
+"""A Bazel provider that encapsulates the contributions of a Java library to an API surface"""
+JavaApiContributionInfo = provider(
+ fields = {
+ "name": "Name of the contribution target",
+ "api": "Path of partial current.txt file describing the stable APIs of the library. Path is relative to workspace root",
+ "api_surfaces": "List of API surfaces that this partial api file contributes to",
+ },
+)
+
+# Java API surfaces are hierarchical.
+# This hierarchy map was created by looking at the stub definitions in frameworks/base/StubLibraries.bp
+# Key is the full api surface
+# Values are the partial metalava signature files that are combined to generate the full api surface stubs.
+_JAVA_FULLAPISURFACE_TO_PARTIALSIGNATUREFILE = {
+ PUBLIC_API: [PUBLIC_API],
+ SYSTEM_API: [PUBLIC_API, SYSTEM_API],
+ TEST_API: [PUBLIC_API, SYSTEM_API, TEST_API],
+ MODULE_LIB_API: [PUBLIC_API, SYSTEM_API, MODULE_LIB_API],
+ SYSTEM_SERVER_API: [PUBLIC_API, SYSTEM_API, MODULE_LIB_API, SYSTEM_SERVER_API],
+ # intracore is publicapi + "@IntraCoreApi".
+ # e.g. art.module.intra.core.api uses the following `droiddoc_option`
+ # [<hide>, --show-single-annotation libcore.api.IntraCoreApi"]
+ # conscrypt and icu4j use similar droiddoc_options
+ INTRA_CORE_API: [PUBLIC_API, INTRA_CORE_API],
+ # CorePlatformApi does not extend PublicApi
+ # Each core module is at a different stage of transition
+ # The status quo in Soong today is
+ # 1. conscrypt - Still provides CorePlatformApis
+ # 2. i18n - APIs have migrated to Public API surface
+ # 3. art - APIs have migrated to ModuleLib API surface
+ # This layering complexity will be handled by the build orchestrator and not by API export.
+ CORE_PLATFORM_API: [CORE_PLATFORM_API],
+ # coreapi does not have an entry here, it really is the public stubs of the 3 core modules
+ # (art, conscrypt, i18n)
+ TOOLCHAIN_API: [TOOLCHAIN_API],
+}
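For instance, under this map a partial file tagged "systemapi" contributes to the systemapi, testapi, module-libapi, and system-serverapi full surfaces. A sketch of that expansion, mirroring the loop in _java_api_contribution_impl below (the helper name is hypothetical):

def _full_surfaces_for(partial_surface):
    # Every full surface whose partial-file list includes the given partial surface.
    return [
        full
        for full, partials in _JAVA_FULLAPISURFACE_TO_PARTIALSIGNATUREFILE.items()
        if partial_surface in partials
    ]

# _full_surfaces_for(SYSTEM_API) ==
#     [SYSTEM_API, TEST_API, MODULE_LIB_API, SYSTEM_SERVER_API]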
+
+VALID_JAVA_API_SURFACES = _JAVA_FULLAPISURFACE_TO_PARTIALSIGNATUREFILE.keys()
+
+def _java_api_contribution_impl(ctx):
+ """Implementation for the java_api_contribution rule
+ This rule does not have any build actions, but returns a `JavaApiContributionInfo` provider object"""
+
+ full_api_surfaces = []
+
+ # The checked-in signature files are partial signatures. e.g. SystemAPI surface
+ # (android_system_stubs_current.jar) contains the classes
+ # and methods present in current.txt and system-current.txt.
+ # The jar representing the full api surface is created by combining these partial signature files.
+ for full_api_surface, partials in _JAVA_FULLAPISURFACE_TO_PARTIALSIGNATUREFILE.items():
+ if ctx.attr.api_surface in partials:
+ full_api_surfaces.append(full_api_surface)
+
+ return [
+ JavaApiContributionInfo(
+ name = ctx.label.name,
+ api = ctx.file.api.path,
+ api_surfaces = full_api_surfaces,
+ ),
+ ]
+
+java_api_contribution = rule(
+ implementation = _java_api_contribution_impl,
+ attrs = {
+ "api": attr.label(
+ mandatory = True,
+ allow_single_file = [".txt"],
+ doc = "The partial signature file describing the APIs of this module",
+ ),
+ # TODO: Better name for this
+ "api_surface": attr.string(
+ doc = "The partial api surface signature represented by this file. See _JAVA_FULLAPISURFACE_TO_PARTIALSIGNATUREFILE in java_api_contribution.bzl for relationship between partial signature files and full API surfaces",
+ default = "publicapi",
+ values = VALID_JAVA_API_SURFACES,
+ ),
+ },
+)
diff --git a/rules/bpf/BUILD b/rules/bpf/BUILD
new file mode 100644
index 00000000..98849e95
--- /dev/null
+++ b/rules/bpf/BUILD
@@ -0,0 +1,19 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":bpf_test.bzl", "bpf_test_suite")
+
+bpf_test_suite(name = "bpf_tests")
diff --git a/rules/bpf/bpf.bzl b/rules/bpf/bpf.bzl
new file mode 100644
index 00000000..c241d0dd
--- /dev/null
+++ b/rules/bpf/bpf.bzl
@@ -0,0 +1,151 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+
+def _get_clang_cmd_output(ctx):
+ copts = [
+ "--target=bpf",
+ "-nostdlibinc",
+ "-no-canonical-prefixes",
+ "-O2",
+ ]
+ copts.extend(ctx.attr.copts)
+ if ctx.attr.btf:
+ copts.append("-g")
+
+ includes = [
+ "frameworks/libs/net/common/native/bpf_headers/include/bpf",
+ # TODO(b/149785767): only give access to specific file with AID_* constants
+ "system/core/libcutils/include",
+ "external/musl/src/env",
+ ctx.label.package,
+ ]
+ includes.extend(ctx.attr.absolute_includes)
+
+ system_includes = [
+ "bionic/libc/include",
+ "bionic/libc/kernel/uapi",
+ # The architecture doesn't matter here, but asm/types.h is included by linux/types.h.
+ "bionic/libc/kernel/uapi/asm-arm64",
+ "bionic/libc/kernel/android/uapi",
+ ]
+
+ toolchain = find_cpp_toolchain(ctx)
+ extra_features = [
+ "dependency_file",
+ "bpf_compiler_flags",
+ ]
+ extra_disabled_features = [
+ "sdk_version_flag",
+ "pie",
+ "non_external_compiler_flags",
+ "common_compiler_flags",
+ "asm_compiler_flags",
+ "cpp_compiler_flags",
+ "c_compiler_flags",
+ "external_compiler_flags",
+ "arm_isa_arm",
+ "arm_isa_thumb",
+ "no_override_clang_global_copts",
+ ]
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = toolchain,
+ requested_features = ctx.features + extra_features,
+ unsupported_features = ctx.disabled_features + extra_disabled_features,
+ )
+
+ compilation_context = []
+ dir_name = ctx.label.name
+ if ctx.attr.btf:
+ # If btf is true, intermediate dir ("unstripped") is added when
+ # If btf is true, an intermediate dir ("unstripped") is added when the
+ # clang command is executed, because ctx.actions.run used by the strip
+ # command does not allow the same input and output names.
+ # "unstripped" is removed when the strip command is executed.
+ (compilation_context, compilation_outputs) = cc_common.compile(
+ name = dir_name,
+ actions = ctx.actions,
+ feature_configuration = feature_configuration,
+ cc_toolchain = toolchain,
+ srcs = ctx.files.srcs,
+ system_includes = system_includes,
+ includes = includes,
+ user_compile_flags = copts,
+ compilation_contexts = compilation_context,
+ )
+
+ return compilation_outputs.objects
+
+def _declare_stripped_cmd_output_file(ctx, src):
+ file_path = paths.join("_objs", src.basename, src.basename)
+ return ctx.actions.declare_file(file_path)
+
+def _get_stripped_cmd_output(ctx, srcs):
+ out_files = [_declare_stripped_cmd_output_file(ctx, src) for src in srcs]
+
+ args = ctx.actions.args()
+ args.add("--strip-unneeded")
+ args.add("--remove-section=.rel.BTF")
+ args.add("--remove-section=.rel.BTF.ext")
+ args.add("--remove-section=.BTF.ext")
+
+ for in_file, out_file in zip(srcs, out_files):
+ ctx.actions.run(
+ inputs = [in_file],
+ outputs = [out_file],
+ executable = ctx.executable._strip,
+ arguments = [args] + [in_file.path, "-o", out_file.path],
+ )
+
+ return out_files
+
+def _bpf_impl(ctx):
+ for src in ctx.files.srcs:
+ if "_" in src.basename:
+ fail("Invalid character '_' in source name")
+
+ clang_outfiles = _get_clang_cmd_output(ctx)
+
+ if not ctx.attr.btf:
+ return [DefaultInfo(files = depset(clang_outfiles))]
+ else:
+ stripped_outfiles = _get_stripped_cmd_output(ctx, clang_outfiles)
+ return [DefaultInfo(files = depset(stripped_outfiles))]
+
+bpf = rule(
+ implementation = _bpf_impl,
+ attrs = {
+ "srcs": attr.label_list(
+ mandatory = True,
+ allow_files = True,
+ ),
+ "copts": attr.string_list(),
+ "absolute_includes": attr.string_list(),
+ "btf": attr.bool(
+ default = True,
+ doc = "if set to true, generate BTF debug info for maps & programs",
+ ),
+ "_strip": attr.label(
+ cfg = "exec",
+ executable = True,
+ default = "//prebuilts/clang/host/linux-x86:llvm-strip",
+ allow_files = True,
+ ),
+ },
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+ fragments = ["cpp"],
+)
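A hedged usage example of the bpf rule above (the target, source, and include paths below are made up; note that source basenames may not contain underscores):

load("//build/bazel/rules/bpf:bpf.bzl", "bpf")

bpf(
    name = "timeInState.o",
    srcs = ["timeInState.c"],
    btf = True,
    absolute_includes = ["packages/modules/example/include"],
)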
diff --git a/rules/bpf/bpf_test.bzl b/rules/bpf/bpf_test.bzl
new file mode 100644
index 00000000..ccbb19e5
--- /dev/null
+++ b/rules/bpf/bpf_test.bzl
@@ -0,0 +1,157 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":bpf.bzl", "bpf")
+
+def _basic_bpf_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ bpf_target = analysistest.target_under_test(env)
+
+ if len(ctx.attr.expected_flags) > 0:
+ for flag in ctx.attr.expected_flags:
+ asserts.true(
+ env,
+ flag in actions[0].argv,
+ "Expected flag (%s) is not in actual flags" % (flag),
+ )
+
+ if len(ctx.attr.unexpected_flags) > 0:
+ for flag in ctx.attr.unexpected_flags:
+ asserts.true(
+ env,
+ flag not in actions[0].argv,
+ "Unexpected flag (%s) is in actual flags" % (flag),
+ )
+
+ if len(ctx.attr.includes) > 0:
+ for dir in ctx.attr.includes:
+ index = actions[0].argv.index(dir)
+ asserts.true(
+ env,
+ actions[0].argv[index - 1] == "-I",
+ "Directory %s is not after '-I' tag in clang command" % (dir),
+ )
+
+ asserts.equals(
+ env,
+ expected = 2 if ctx.attr.expect_strip else 1,
+ actual = len(actions),
+ )
+
+ if ctx.attr.expect_strip:
+ asserts.true(
+ env,
+ actions[-1].argv[0].endswith("llvm-strip"),
+ "No strip action is executed when btf is True",
+ )
+
+ asserts.true(
+ env,
+ "unstripped" not in bpf_target[DefaultInfo].files.to_list()[0].path,
+ "'unstripped' is in the output file path",
+ )
+
+ return analysistest.end(env)
+
+basic_bpf_test = analysistest.make(
+ _basic_bpf_test_impl,
+ attrs = {
+ "expected_flags": attr.string_list(),
+ "unexpected_flags": attr.string_list(),
+ "includes": attr.string_list(),
+ "expect_strip": attr.bool(),
+ },
+)
+
+def bpf_fail_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ asserts.expect_failure(
+ env,
+ "Invalid character '_' in source name",
+ )
+
+ return analysistest.end(env)
+
+bpf_fail_test = analysistest.make(
+ bpf_fail_test_impl,
+ expect_failure = True,
+)
+
+def test_all_attrs_btf_true():
+ name = "all_attrs_btf_true_test"
+ copts = ["cflag1", "cflag2"]
+ absolute_includes = ["foo/bar1", "foo/bar2"]
+ bpf(
+ name = name + "_target",
+ srcs = ["testAllAttrsBtfTrueSrc.c"],
+ copts = copts,
+ absolute_includes = absolute_includes,
+ btf = True,
+ tags = ["manual"],
+ )
+ basic_bpf_test(
+ name = name,
+ target_under_test = name + "_target",
+ expected_flags = ["-g"] + copts,
+ includes = absolute_includes,
+ expect_strip = True,
+ )
+ return name
+
+def test_btf_false():
+ name = "btf_false_test"
+ bpf(
+ name = name + "_target",
+ srcs = ["testBtfFalse.c"],
+ copts = ["copts1", "copts2"],
+ absolute_includes = ["foo/bar1", "foo/bar2"],
+ btf = False,
+ tags = ["manual"],
+ )
+ basic_bpf_test(
+ name = name,
+ target_under_test = name + "_target",
+ unexpected_flags = ["-g"],
+ expect_strip = False,
+ )
+ return name
+
+def test_invalid_src_name():
+ name = "invalid_src_name_test"
+ bpf(
+ name = name + "_target",
+ srcs = [name + "_src.c"],
+ copts = ["copts1", "copts2"],
+ absolute_includes = ["foo/bar1", "foo/bar2"],
+ btf = True,
+ tags = ["manual"],
+ )
+ bpf_fail_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+ return name
+
+def bpf_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_all_attrs_btf_true(),
+ test_btf_false(),
+ test_invalid_src_name(),
+ ],
+ )
diff --git a/rules/cc/BUILD.bazel b/rules/cc/BUILD.bazel
index 8b137891..36be0555 100644
--- a/rules/cc/BUILD.bazel
+++ b/rules/cc/BUILD.bazel
@@ -1 +1,93 @@
+"""Copyright (C) 2022 The Android Open Source Project
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load(":cc_aidl_library_test.bzl", "cc_aidl_library_test_suite")
+load(":cc_binary_test.bzl", "cc_binary_test_suite")
+load(":cc_hidl_library_test.bzl", "cc_hidl_library_test_suite")
+load(":cc_library_common_test.bzl", "cc_library_common_test_suites")
+load(":cc_library_shared_test.bzl", "cc_library_shared_test_suite")
+load(":cc_library_static_test.bzl", "cc_library_static_test_suite")
+load(":cc_object_test.bzl", "cc_object_test_suite")
+load(":cc_prebuilt_binary_test.bzl", "cc_prebuilt_binary_test_suite")
+load(":cc_prebuilt_library_shared_test.bzl", "cc_prebuilt_library_shared_test_suite")
+load(":cc_prebuilt_library_static_test.bzl", "cc_prebuilt_library_static_test_suite")
+load(":cc_proto_test.bzl", "cc_proto_test_suite")
+load(":cc_sysprop_library_test.bzl", "cc_gen_sysprop_test_suite")
+load(":cc_test_test.bzl", "cc_test_test_suite")
+load(":clang_tidy_test.bzl", "clang_tidy_test_suite")
+load(":flex_test.bzl", "flex_test_suite")
+load(":lto_transitions_test.bzl", "lto_transition_test_suite")
+load(":stl_test.bzl", "stl_test_suite")
+load(":yasm_test.bzl", "yasm_test_suite")
+
+config_setting(
+ name = "coverage_lib_flag",
+ values = {"collect_code_coverage": "True"},
+)
+
+selects.config_setting_group(
+ name = "android_coverage_lib_flag",
+ match_all = [
+ ":coverage_lib_flag",
+ "//build/bazel/platforms/os:android_config_setting",
+ ],
+)
+
+platform(
+ name = "windows_for_testing",
+ constraint_values = ["@//build/bazel/platforms/os:windows"],
+)
+
+platform(
+ name = "darwin_for_testing",
+ constraint_values = ["@//build/bazel/platforms/os:darwin"],
+)
+
+cc_aidl_library_test_suite(name = "cc_aidl_library_tests")
+
+cc_binary_test_suite(name = "cc_binary_tests")
+
+cc_test_test_suite(name = "cc_test_tests")
+
+cc_hidl_library_test_suite(name = "cc_hidl_library_tests")
+
+cc_library_common_test_suites(name = "cc_library_common_tests")
+
+cc_library_shared_test_suite(name = "cc_library_shared_tests")
+
+cc_library_static_test_suite(name = "cc_library_static_tests")
+
+cc_gen_sysprop_test_suite(name = "cc_gen_sysprop_tests")
+
+cc_object_test_suite(name = "cc_object_tests")
+
+cc_prebuilt_binary_test_suite(name = "cc_prebuilt_binary_tests")
+
+cc_prebuilt_library_shared_test_suite(name = "cc_prebuilt_library_shared_tests")
+
+cc_prebuilt_library_static_test_suite(name = "cc_prebuilt_library_static_tests")
+
+cc_proto_test_suite(name = "cc_proto_tests")
+
+clang_tidy_test_suite(name = "clang_tidy_tests")
+
+flex_test_suite(name = "flex_tests")
+
+lto_transition_test_suite(name = "lto_transition_tests")
+
+stl_test_suite(name = "stl_tests")
+
+yasm_test_suite(name = "yasm_tests")
diff --git a/rules/cc/cc_aidl_code_gen.bzl b/rules/cc/cc_aidl_code_gen.bzl
new file mode 100644
index 00000000..2d3dc2b9
--- /dev/null
+++ b/rules/cc/cc_aidl_code_gen.bzl
@@ -0,0 +1,211 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//build/bazel/rules/aidl:aidl_library.bzl", "AidlGenInfo")
+load(":cc_library_common.bzl", "create_ccinfo_for_includes")
+
+_SOURCES = "sources"
+_HEADERS = "headers"
+_INCLUDE_DIR = "include_dir"
+
+def _cc_aidl_code_gen_impl(ctx):
+ """
+ Generate stub C++ code from direct aidl srcs using transitive deps
+
+ Args:
+ ctx: (RuleContext)
+ Returns:
+ (DefaultInfo) Generated .cpp sources and .h headers
+ (CcInfo) Generated headers and their include dirs
+ """
+ generated_srcs, generated_hdrs, include_dirs = [], [], []
+
+ for aidl_info in [d[AidlGenInfo] for d in ctx.attr.deps]:
+ stub = _compile_aidl_srcs(ctx, aidl_info, ctx.attr.lang)
+ generated_srcs.extend(stub[_SOURCES])
+ generated_hdrs.extend(stub[_HEADERS])
+ include_dirs.extend([stub[_INCLUDE_DIR]])
+
+ return [
+ DefaultInfo(files = depset(direct = generated_srcs + generated_hdrs)),
+ create_ccinfo_for_includes(
+ ctx,
+ hdrs = generated_hdrs,
+ includes = include_dirs,
+ ),
+ ]
+
+def _declare_stub_files(ctx, aidl_file, direct_include_dir, lang):
+ """
+ Declare stub files that AIDL compiles to for cc
+
+ Args:
+ ctx: (Context) Used to register declare_file actions.
+ aidl_file: (File) The aidl file
+ direct_include_dir: (String) The path to the given aidl file, minus the aidl package namespace
+ lang: (String) AIDL backend
+ Returns:
+ (dict) Declared stub files, keyed by "sources" (.cpp) and "headers" (.h)
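+
+ For example, a/b/IFoo.aidl with direct_include_dir a (cpp backend) yields IFoo.cpp
+ plus the headers IFoo.h, BpFoo.h and BnFoo.h under <target-name>/b/.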
+ """
+ ret = {}
+ ret[_SOURCES], ret[_HEADERS] = [], []
+ short_basename = paths.replace_extension(aidl_file.basename, "")
+
+ # aidl file path relative to direct include dir
+ short_path = paths.relativize(aidl_file.path, direct_include_dir)
+
+ ret[_SOURCES] = [
+ ctx.actions.declare_file(
+ paths.join(
+ ctx.label.name,
+ paths.dirname(short_path),
+ short_basename + ".cpp",
+ ),
+ ),
+ ]
+
+ headers = [short_basename + ".h"]
+
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/gen.go;bpv=1;bpt=1?q=cc%2Fgen.go
+ # Strip the I prefix before creating basenames for the Bp and Bn headers
+ if len(short_basename) > 2 and short_basename.startswith("I") and short_basename[1].upper() == short_basename[1]:
+ short_basename = short_basename.removeprefix("I")
+
+ headers.extend([
+ "Bp" + short_basename + ".h",
+ "Bn" + short_basename + ".h",
+ ])
+
+ # Headers for the ndk backend are nested under an aidl directory to prevent
+ # C++ namespace collisions with the cpp backend
+ # Context: https://android.googlesource.com/platform/system/tools/aidl/+/7c93337add97ce36f0a35c6705f3a67a441f2ae7
+ out_dir_prefix = ""
+ if lang == "ndk":
+ out_dir_prefix = "aidl"
+
+ for basename in headers:
+ ret[_HEADERS].append(ctx.actions.declare_file(
+ paths.join(ctx.label.name, out_dir_prefix, paths.dirname(short_path), basename),
+ ))
+
+ return ret
+
+def _compile_aidl_srcs(ctx, aidl_info, lang):
+ """
+ Compile AIDL stub code for direct AIDL srcs
+
+ Args:
+ ctx: (Context) Used to register declare_file actions
+ aidl_info: (AidlGenInfo) aidl_info from an aidl library
+ lang: (String) AIDL backend
+
+ Returns:
+ (dict) The generated sources (.cpp), headers (.h), and the include dir for the headers
+ """
+
+ ret = {}
+ ret[_SOURCES], ret[_HEADERS] = [], []
+
+ # transitive_include_dirs is traversed in preorder
+ direct_include_dir = aidl_info.transitive_include_dirs.to_list()[0]
+
+ # Given AIDL file a/b/c/d/Foo.aidl with direct_include_dir a/b
+ # The outputs paths are
+ # cpp backend:
+ # <package-dir>/<target-name>/c/d/*Foo.h
+ # <package-dir>/<target-name>/c/d/Foo.cpp
+ # ndk backend:
+ # <package-dir>/<target-name>/aidl/c/d/*Foo.h
+ # <package-dir>/<target-name>/c/d/Foo.cpp
+ #
+ # where <package-dir> is bazel-bin/<path-to-cc_aidl_library-target>
+ # and <target-name> is <cc_aidl_library-name>_aidl_code_gen
+ # cpp and ndk stubs are created by separate cc_aidl_library targets, so
+ # <target-name> is unique between the cpp and ndk backends
+
+ # Include dir for the generated headers, relative to the package dir
+ ret[_INCLUDE_DIR] = ctx.label.name
+
+ # AIDL needs to know the full path to outputs
+ # <bazel-bin>/<package-dir>/<target-name>
+ out_dir = paths.join(
+ ctx.bin_dir.path,
+ ctx.label.package,
+ ret[_INCLUDE_DIR],
+ )
+
+ outputs = []
+ for aidl_file in aidl_info.srcs.to_list():
+ files = _declare_stub_files(ctx, aidl_file, direct_include_dir, lang)
+ outputs.extend(files[_SOURCES] + files[_HEADERS])
+ ret[_SOURCES].extend(files[_SOURCES])
+ ret[_HEADERS].extend(files[_HEADERS])
+
+ args = ctx.actions.args()
+
+ # Add flags from lang-agnostic aidl_library target
+ args.add_all(aidl_info.flags)
+
+ # Add flags specific to the cpp and ndk backends
+ if ctx.attr.min_sdk_version != "":
+ args.add("--min_sdk_version={}".format(ctx.attr.min_sdk_version))
+
+ if aidl_info.hash_file == None:
+ args.add("--hash=notfrozen")
+ else:
+ args.add("--hash=$(tail -1 {})".format(aidl_info.hash_file))
+
+ args.add_all([
+ "--ninja",
+ "--lang={}".format(lang),
+ "--out={}".format(out_dir),
+ "--header_out={}".format(out_dir),
+ ])
+ args.add_all(["-I {}".format(i) for i in aidl_info.transitive_include_dirs.to_list()])
+ args.add_all(["{}".format(aidl_file.path) for aidl_file in aidl_info.srcs.to_list()])
+
+ ctx.actions.run(
+ inputs = aidl_info.transitive_srcs,
+ outputs = outputs,
+ executable = ctx.executable._aidl,
+ arguments = [args],
+ progress_message = "Compiling AIDL binding",
+ mnemonic = "CcAidlCodeGen",
+ )
+
+ return ret
+
+cc_aidl_code_gen = rule(
+ implementation = _cc_aidl_code_gen_impl,
+ doc = "This rule generates AIDL stub C++ code for the direct srcs in each " +
+ "AidlGenInfo in deps attribute using transitive srcs and transitive_include_dirs.",
+ attrs = {
+ "deps": attr.label_list(
+ providers = [AidlGenInfo],
+ ),
+ "lang": attr.string(
+ mandatory = True,
+ values = ["cpp", "ndk"],
+ ),
+ "min_sdk_version": attr.string(),
+ "_aidl": attr.label(
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/build-tools:linux-x86/bin/aidl"),
+ ),
+ },
+ provides = [CcInfo],
+)
diff --git a/rules/cc/cc_aidl_library.bzl b/rules/cc/cc_aidl_library.bzl
new file mode 100644
index 00000000..c9b7d21a
--- /dev/null
+++ b/rules/cc/cc_aidl_library.bzl
@@ -0,0 +1,66 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/cc:cc_aidl_code_gen.bzl", "cc_aidl_code_gen")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+
+def cc_aidl_library(
+ name,
+ deps = [],
+ lang = "cpp",
+ make_shared = False,
+ **kwargs):
+ """
+ Generate AIDL stub code for C++ and wrap it in a cc_library_static target
+
+ Args:
+ name (str): name of the cc_library_static target
+ deps (list[AidlGenInfo]): list of aidl_library targets that this cc_aidl_library depends on
+ lang (str): AIDL backend to generate stubs for; must be "cpp" or "ndk"
+ make_shared (bool): if true, `name` will refer to a cc_library_shared,
+ and an additional cc_library_static will be created;
+ if false, `name` will refer to a cc_library_static
+ **kwargs: extra arguments that will be passed to cc_library_{static,shared}.
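+
+ Example (illustrative only; the aidl_library target name is hypothetical):
+
+ cc_aidl_library(
+ name = "foo-cpp",
+ deps = [":foo_aidl_library"],
+ lang = "cpp",
+ )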
+ """
+
+ if lang not in ["cpp", "ndk"]:
+ fail("lang {} is unsupported. Allowed lang: ndk, cpp.".format(lang))
+
+ aidl_code_gen = name + "_aidl_code_gen"
+ cc_aidl_code_gen(
+ name = aidl_code_gen,
+ deps = deps,
+ lang = lang,
+ min_sdk_version = kwargs.get("min_sdk_version", None),
+ tags = kwargs.get("tags", []) + ["manual"],
+ )
+
+ arguments_with_kwargs = dict(
+ kwargs,
+ srcs = [":" + aidl_code_gen],
+ deps = [aidl_code_gen],
+ )
+
+ static_name = name
+ if make_shared:
+ cc_library_shared(
+ name = name,
+ **arguments_with_kwargs
+ )
+ static_name = name + "_bp2build_cc_library_static"
+
+ cc_library_static(
+ name = static_name,
+ **arguments_with_kwargs
+ )
diff --git a/rules/cc/cc_aidl_library_test.bzl b/rules/cc/cc_aidl_library_test.bzl
new file mode 100644
index 00000000..74605f80
--- /dev/null
+++ b/rules/cc/cc_aidl_library_test.bzl
@@ -0,0 +1,192 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/aidl:aidl_library.bzl", "aidl_library")
+load("//build/bazel/rules/cc:cc_aidl_library.bzl", "cc_aidl_library")
+load("//build/bazel/rules/test_common:flags.bzl", "action_flags_present_only_for_mnemonic_test")
+
+aidl_library_label_name = "foo_aidl_library"
+aidl_files = [
+ "a/b/A.aidl",
+ "a/b/B.aidl",
+]
+
+def _cc_aidl_code_gen_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "expected to have one action per aidl_library target",
+ )
+ cc_aidl_code_gen_target = analysistest.target_under_test(env)
+ action = actions[0]
+ argv = action.argv
+
+ # Check inputs are correctly added to command
+ aidl_input_path_template = paths.join(
+ ctx.genfiles_dir.path,
+ ctx.label.package,
+ "_virtual_imports",
+ aidl_library_label_name,
+ ) + "/{}"
+ expected_inputs = [
+ aidl_input_path_template.format(file)
+ for file in aidl_files
+ ]
+ for expected_input in expected_inputs:
+ asserts.true(
+ env,
+ expected_input in argv,
+ "expect {} to be passed to aidl command".format(expected_input),
+ )
+
+ # Check generated outputs
+ output_path = paths.join(
+ ctx.genfiles_dir.path,
+ ctx.label.package,
+ cc_aidl_code_gen_target.label.name,
+ )
+ expected_outputs = []
+ expected_outputs.extend(
+ [
+ paths.join(output_path, "a/b/BpA.h"),
+ paths.join(output_path, "a/b/BnA.h"),
+ paths.join(output_path, "a/b/A.h"),
+ paths.join(output_path, "a/b/A.cpp"),
+ paths.join(output_path, "a/b/BpB.h"),
+ paths.join(output_path, "a/b/BnB.h"),
+ paths.join(output_path, "a/b/B.h"),
+ paths.join(output_path, "a/b/B.cpp"),
+ ],
+ )
+
+ asserts.set_equals(
+ env,
+ sets.make(expected_outputs),
+ sets.make([output.path for output in action.outputs.to_list()]),
+ )
+
+ # Check the output path is correctly added to includes in CcInfo.compilation_context
+ asserts.true(
+ env,
+ output_path in cc_aidl_code_gen_target[CcInfo].compilation_context.includes.to_list(),
+ "output path is added to CcInfo.compilation_context.includes",
+ )
+
+ return analysistest.end(env)
+
+cc_aidl_code_gen_test = analysistest.make(
+ _cc_aidl_code_gen_test_impl,
+)
+
+def _cc_aidl_code_gen_test():
+ name = "foo"
+ aidl_code_gen_name = name + "_aidl_code_gen"
+ code_gen_test_name = aidl_code_gen_name + "_test"
+
+ aidl_library(
+ name = aidl_library_label_name,
+ srcs = aidl_files,
+ tags = ["manual"],
+ )
+ cc_aidl_library(
+ name = name,
+ deps = [":foo_aidl_library"],
+ tags = ["manual"],
+ )
+ cc_aidl_code_gen_test(
+ name = code_gen_test_name,
+ target_under_test = aidl_code_gen_name,
+ )
+
+ action_flags_present_test_name = name + "_test_action_flags_present"
+ action_flags_present_only_for_mnemonic_test(
+ name = action_flags_present_test_name,
+ target_under_test = name + "_cpp",
+ mnemonics = ["CppCompile"],
+ expected_flags = [
+ "-DDO_NOT_CHECK_MANUAL_BINDER_INTERFACES",
+ ],
+ )
+
+ return [
+ code_gen_test_name,
+ action_flags_present_test_name,
+ ]
+
+def _cc_aidl_hash_notfrozen():
+ aidl_library_name = "cc_aidl_hash_notfrozen"
+ cc_aidl_library_name = aidl_library_name + "cc_aidl_library"
+ aidl_code_gen_name = cc_aidl_library_name + "_aidl_code_gen"
+ test_name = aidl_code_gen_name + "_test"
+
+ aidl_library(
+ name = aidl_library_name,
+ srcs = aidl_files,
+ tags = ["manual"],
+ )
+ cc_aidl_library(
+ name = cc_aidl_library_name,
+ deps = [aidl_library_name],
+ tags = ["manual"],
+ )
+ action_flags_present_only_for_mnemonic_test(
+ name = test_name,
+ target_under_test = aidl_code_gen_name,
+ mnemonics = ["CcAidlCodeGen"],
+ expected_flags = ["--hash=notfrozen"],
+ )
+
+ return test_name
+
+def _cc_aidl_hash_flag_with_hash_file():
+ aidl_library_name = "cc_aidl_hash_flag_with_hash_file"
+ cc_aidl_library_name = aidl_library_name + "cc_aidl_library"
+ aidl_code_gen_name = cc_aidl_library_name + "_aidl_code_gen"
+ test_name = aidl_code_gen_name + "_test"
+
+ aidl_library(
+ name = aidl_library_name,
+ srcs = aidl_files,
+ hash_file = "cc_aidl_hash_flag_with_hash_file/.hash",
+ tags = ["manual"],
+ )
+ cc_aidl_library(
+ name = cc_aidl_library_name,
+ deps = [aidl_library_name],
+ tags = ["manual"],
+ )
+ action_flags_present_only_for_mnemonic_test(
+ name = test_name,
+ target_under_test = aidl_code_gen_name,
+ mnemonics = ["CcAidlCodeGen"],
+ expected_flags = [
+ "--hash=$(tail -1 <source file build/bazel/rules/cc/cc_aidl_hash_flag_with_hash_file/.hash>)",
+ ],
+ )
+
+ return test_name
+
+def cc_aidl_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_aidl_hash_notfrozen(),
+ _cc_aidl_hash_flag_with_hash_file(),
+ ] + _cc_aidl_code_gen_test(),
+ )
diff --git a/rules/cc/cc_binary.bzl b/rules/cc/cc_binary.bzl
index 8ba78590..09ccd598 100644
--- a/rules/cc/cc_binary.bzl
+++ b/rules/cc/cc_binary.bzl
@@ -1,33 +1,33 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load(
":cc_library_common.bzl",
"add_lists_defaulting_to_none",
"parse_sdk_version",
+ "sanitizer_deps",
"system_dynamic_deps_defaults",
"system_static_deps_defaults",
)
load(":cc_library_static.bzl", "cc_library_static")
-load(":stl.bzl", "shared_stl_deps", "static_binary_stl_deps")
-load(":stripped_cc_common.bzl", "stripped_binary")
+load(":stl.bzl", "stl_info_from_attr")
+load(":stripped_cc_common.bzl", "stripped_binary", "stripped_test")
load(":versioned_cc_common.bzl", "versioned_binary")
def cc_binary(
name,
+ suffix = "",
dynamic_deps = [],
srcs = [],
srcs_c = [],
@@ -39,8 +39,9 @@ def cc_binary(
deps = [],
whole_archive_deps = [],
system_deps = None,
- export_includes = [],
- export_system_includes = [],
+ runtime_deps = [],
+ export_includes = [], # @unused
+ export_system_includes = [], # @unused
local_includes = [],
absolute_includes = [],
linkshared = True,
@@ -53,31 +54,37 @@ def cc_binary(
strip = {},
features = [],
target_compatible_with = [],
- sdk_version = "",
+ sdk_version = "", # @unused
min_sdk_version = "",
use_version_lib = False,
+ tags = [],
+ generate_cc_test = False,
+ tidy = None,
+ tidy_checks = None,
+ tidy_checks_as_errors = None,
+ tidy_flags = None,
+ tidy_disabled_srcs = None,
+ tidy_timeout_srcs = None,
+ native_coverage = True,
**kwargs):
"Bazel macro to correspond with the cc_binary Soong module."
- root_name = name + "_root"
+ root_name = name + "__internal_root"
unstripped_name = name + "_unstripped"
toolchain_features = []
- toolchain_features += features
-
+ toolchain_features.extend(["-pic", "pie"])
if linkshared:
toolchain_features.extend(["dynamic_executable", "dynamic_linker"])
else:
toolchain_features.extend(["-dynamic_executable", "-dynamic_linker", "static_executable", "static_flag"])
if not use_libcrt:
- toolchain_features += ["-use_libcrt"]
+ toolchain_features.append("-use_libcrt")
if min_sdk_version:
- toolchain_features += [
- "sdk_version_" + parse_sdk_version(min_sdk_version),
- "-sdk_version_default",
- ]
+ toolchain_features += parse_sdk_version(min_sdk_version) + ["-sdk_version_default"]
+ toolchain_features += features
system_dynamic_deps = []
system_static_deps = []
@@ -92,53 +99,92 @@ def cc_binary(
else:
system_static_deps = system_deps
- stl_static, stl_shared = [], []
-
- if linkshared:
- stl_static, stl_shared = shared_stl_deps(stl)
+ if not native_coverage:
+ toolchain_features.append("-coverage")
else:
- stl_static = static_binary_stl_deps(stl)
-
- # The static library at the root of the shared library.
- # This may be distinct from the static version of the library if e.g.
- # the static-variant srcs are different than the shared-variant srcs.
+ toolchain_features += select({
+ "//build/bazel/rules/cc:android_coverage_lib_flag": ["android_coverage_lib"],
+ "//conditions:default": [],
+ })
+
+ # TODO(b/233660582): deal with the cases where the default lib shouldn't be used
+ whole_archive_deps = whole_archive_deps + select({
+ "//build/bazel/rules/cc:android_coverage_lib_flag": ["//system/extras/toolchain-extras:libprofile-clang-extras"],
+ "//conditions:default": [],
+ })
+
+ stl_info = stl_info_from_attr(stl, linkshared, is_binary = True)
+ linkopts = linkopts + stl_info.linkopts
+ copts = copts + stl_info.cppflags
+
+ # The static library at the root of the cc_binary.
cc_library_static(
name = root_name,
absolute_includes = absolute_includes,
+ # alwayslink = True because the compiled objects from cc_library.srcs are expected
+ # to always be linked into the binary itself later (otherwise, why compile them at
+ # the cc_binary level?).
+ #
+ # Concretely, this causes the static library to be wrapped in the --whole_archive
+ # block when linking the cc_binary later.
alwayslink = True,
asflags = asflags,
conlyflags = conlyflags,
copts = copts,
cpp_std = cpp_std,
cppflags = cppflags,
- deps = deps + whole_archive_deps + stl_static + system_static_deps,
- dynamic_deps = dynamic_deps,
+ deps = deps + stl_info.static_deps + system_static_deps,
+ whole_archive_deps = whole_archive_deps,
+ dynamic_deps = dynamic_deps + stl_info.shared_deps,
features = toolchain_features,
local_includes = local_includes,
rtti = rtti,
srcs = srcs,
srcs_as = srcs_as,
srcs_c = srcs_c,
- stl = stl,
+ stl = "none",
system_dynamic_deps = system_dynamic_deps,
target_compatible_with = target_compatible_with,
- use_version_lib = use_version_lib,
+ tags = ["manual"],
+ tidy = tidy,
+ tidy_checks = tidy_checks,
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_flags = tidy_flags,
+ tidy_disabled_srcs = tidy_disabled_srcs,
+ tidy_timeout_srcs = tidy_timeout_srcs,
+ native_coverage = native_coverage,
)
binary_dynamic_deps = add_lists_defaulting_to_none(
dynamic_deps,
system_dynamic_deps,
- stl_shared,
+ stl_info.shared_deps,
+ )
+
+ sanitizer_deps_name = name + "_sanitizer_deps"
+ sanitizer_deps(
+ name = sanitizer_deps_name,
+ dep = root_name,
+ tags = ["manual"],
)
- native.cc_binary(
+ # cc_test and cc_binary are almost identical rules, so fork the top level
+ # rule classes here.
+ unstripped_cc_rule = native.cc_binary
+ stripped_cc_rule = stripped_binary
+ if generate_cc_test:
+ unstripped_cc_rule = native.cc_test
+ stripped_cc_rule = stripped_test
+
+ unstripped_cc_rule(
name = unstripped_name,
- deps = [root_name] + deps + system_static_deps + stl_static,
+ deps = [root_name, sanitizer_deps_name] + deps + system_static_deps + stl_info.static_deps,
dynamic_deps = binary_dynamic_deps,
features = toolchain_features,
linkopts = linkopts,
additional_linker_inputs = additional_linker_inputs,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
**kwargs
)
@@ -147,11 +193,19 @@ def cc_binary(
name = versioned_name,
src = unstripped_name,
stamp_build_number = use_version_lib,
+ tags = ["manual"],
+ testonly = generate_cc_test,
)
- stripped_binary(
+ stripped_cc_rule(
name = name,
+ suffix = suffix,
src = versioned_name,
+ runtime_deps = runtime_deps,
target_compatible_with = target_compatible_with,
+ tags = tags,
+ unstripped = unstripped_name,
+ testonly = generate_cc_test,
+ androidmk_deps = [root_name],
+ **strip
)
-
diff --git a/rules/cc/cc_binary_test.bzl b/rules/cc/cc_binary_test.bzl
new file mode 100644
index 00000000..eb6e31b8
--- /dev/null
+++ b/rules/cc/cc_binary_test.bzl
@@ -0,0 +1,295 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":cc_binary.bzl", "cc_binary")
+load(":cc_library_common_test.bzl", "target_provides_androidmk_info_test")
+load(":cc_library_shared.bzl", "cc_library_shared")
+load(":cc_library_static.bzl", "cc_library_static")
+
+def strip_test_assert_flags(env, strip_action, strip_flags):
+ # Extract these flags from strip_action (for example):
+ # build/soong/scripts/strip.sh --keep-symbols --add-gnu-debuglink -i <in> -o <out> -d <out>.d
+ # ^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
+ flag_start_idx = 1 # starts after the strip.sh executable
+ flag_end_idx = strip_action.argv.index("-i") # end of the flags
+ asserts.equals(
+ env,
+ strip_action.argv[flag_start_idx:flag_end_idx],
+ strip_flags,
+ )
+
+def _cc_binary_strip_test(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ filtered_actions = [a for a in actions if a.mnemonic == "CcStrip"]
+ on_target = ctx.target_platform_has_constraint(
+ ctx.attr._android_constraint[platform_common.ConstraintValueInfo],
+ )
+ if ctx.attr.strip_flags or on_target:
+ # expected to find strip flags, so look for a CcStrip action.
+ asserts.true(
+ env,
+ len(filtered_actions) == 1,
+ "expected to find an action with CcStrip mnemonic in %s" % actions,
+ )
+ if ctx.attr.strip_flags or not on_target:
+ strip_test_assert_flags(env, filtered_actions[0], ctx.attr.strip_flags)
+ return analysistest.end(env)
+ else:
+ asserts.true(
+ env,
+ len(filtered_actions) == 0,
+ "expected to not find an action with CcStrip mnemonic in %s" % actions,
+ )
+ return analysistest.end(env)
+
+cc_binary_strip_test = analysistest.make(
+ _cc_binary_strip_test,
+ attrs = {
+ "strip_flags": attr.string_list(),
+ "_android_constraint": attr.label(default = Label("//build/bazel/platforms/os:android")),
+ },
+)
+
+def _cc_binary_strip_default():
+ name = "cc_binary_strip_default"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name,
+ srcs = ["main.cc"],
+ tags = ["manual"],
+ )
+
+ cc_binary_strip_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [],
+ )
+
+ return test_name
+
+def _cc_binary_strip_keep_symbols():
+ name = "cc_binary_strip_keep_symbols"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name,
+ srcs = ["main.cc"],
+ tags = ["manual"],
+ strip = {"keep_symbols": True},
+ )
+
+ cc_binary_strip_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--keep-symbols",
+ "--add-gnu-debuglink",
+ ],
+ )
+
+ return test_name
+
+def _cc_binary_strip_keep_symbols_and_debug_frame():
+ name = "cc_binary_strip_keep_symbols_and_debug_frame"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name,
+ srcs = ["main.cc"],
+ tags = ["manual"],
+ strip = {"keep_symbols_and_debug_frame": True},
+ )
+
+ cc_binary_strip_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--keep-symbols-and-debug-frame",
+ "--add-gnu-debuglink",
+ ],
+ )
+
+ return test_name
+
+def _cc_binary_strip_keep_symbols_list():
+ name = "cc_binary_strip_keep_symbols_list"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name,
+ srcs = ["main.cc"],
+ tags = ["manual"],
+ strip = {"keep_symbols_list": ["foo", "bar"]},
+ )
+
+ cc_binary_strip_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "-kfoo,bar",
+ "--add-gnu-debuglink",
+ ],
+ )
+
+ return test_name
+
+def _cc_binary_strip_all():
+ name = "cc_binary_strip_all"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = name,
+ srcs = ["main.cc"],
+ tags = ["manual"],
+ strip = {"all": True},
+ )
+
+ cc_binary_strip_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--add-gnu-debuglink",
+ ],
+ )
+
+ return test_name
+
+def _cc_binary_suffix_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ info = target[DefaultInfo]
+ suffix = ctx.attr.suffix
+
+ outputs = info.files.to_list()
+ asserts.true(
+ env,
+ len(outputs) == 1,
+ "Expected 1 output file; got %s" % outputs,
+ )
+ out = outputs[0].path
+ asserts.true(
+ env,
+ out.endswith(suffix),
+ "Expected output filename to end in `%s`; it was instead %s" % (suffix, out),
+ )
+
+ return analysistest.end(env)
+
+cc_binary_suffix_test = analysistest.make(
+ _cc_binary_suffix_test_impl,
+ attrs = {"suffix": attr.string()},
+)
+
+def _cc_binary_suffix():
+ name = "cc_binary_suffix"
+ test_name = name + "_test"
+ suffix = "-suf"
+
+ cc_binary(
+ name,
+ srcs = ["src.cc"],
+ tags = ["manual"],
+ suffix = suffix,
+ )
+ cc_binary_suffix_test(
+ name = test_name,
+ target_under_test = name,
+ suffix = suffix,
+ )
+ return test_name
+
+def _cc_binary_empty_suffix():
+ name = "cc_binary_empty_suffix"
+ test_name = name + "_test"
+
+ cc_binary(
+ name,
+ srcs = ["src.cc"],
+ tags = ["manual"],
+ )
+ cc_binary_suffix_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _cc_binary_provides_androidmk_info():
+ name = "cc_binary_provides_androidmk_info"
+ dep_name = name + "_static_dep"
+ whole_archive_dep_name = name + "_whole_archive_dep"
+ dynamic_dep_name = name + "_dynamic_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = whole_archive_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = dynamic_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_binary(
+ name = name,
+ srcs = ["foo.cc"],
+ deps = [dep_name],
+ whole_archive_deps = [whole_archive_dep_name],
+ dynamic_deps = [dynamic_dep_name],
+ tags = ["manual"],
+ )
+ android_test_name = test_name + "_android"
+ linux_test_name = test_name + "_linux"
+ target_provides_androidmk_info_test(
+ name = android_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libc++demangle", "libunwind"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++", "libc", "libdl", "libm"],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+ target_provides_androidmk_info_test(
+ name = linux_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++"],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return [
+ android_test_name,
+ linux_test_name,
+ ]
+
+def cc_binary_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_binary_strip_default(),
+ _cc_binary_strip_keep_symbols(),
+ _cc_binary_strip_keep_symbols_and_debug_frame(),
+ _cc_binary_strip_keep_symbols_list(),
+ _cc_binary_strip_all(),
+ _cc_binary_suffix(),
+ _cc_binary_empty_suffix(),
+ ] + _cc_binary_provides_androidmk_info(),
+ )
diff --git a/rules/cc/cc_constants.bzl b/rules/cc/cc_constants.bzl
index 26f56b88..f5605b8f 100644
--- a/rules/cc/cc_constants.bzl
+++ b/rules/cc/cc_constants.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
# Constants for cc_* rules.
# To use, load the constants struct:
@@ -22,17 +20,13 @@ limitations under the License.
_HDR_EXTS = ["h", "hh", "hpp", "hxx", "h++", "inl", "inc", "ipp", "h.generic"]
_C_SRC_EXTS = ["c"]
_CPP_SRC_EXTS = ["cc", "cpp"]
-_AS_SRC_EXTS = ["S"]
+_AS_SRC_EXTS = ["s", "S"]
_SRC_EXTS = _C_SRC_EXTS + _CPP_SRC_EXTS + _AS_SRC_EXTS
_ALL_EXTS = _SRC_EXTS + _HDR_EXTS
_HDR_EXTS_WITH_DOT = ["." + ext for ext in _HDR_EXTS]
_SRC_EXTS_WITH_DOT = ["." + ext for ext in _SRC_EXTS]
_ALL_EXTS_WITH_DOT = ["." + ext for ext in _ALL_EXTS]
-# These are root-relative.
-_GLOBAL_INCLUDE_DIRS_COPTS_ONLY_USED_FOR_SOONG_COMPATIBILITY_DO_NOT_ADD_MORE = [
- "/",
-]
constants = struct(
hdr_exts = _HDR_EXTS,
c_src_exts = _C_SRC_EXTS,
diff --git a/rules/cc/cc_hidl_library.bzl b/rules/cc/cc_hidl_library.bzl
new file mode 100644
index 00000000..addeb3fa
--- /dev/null
+++ b/rules/cc/cc_hidl_library.bzl
@@ -0,0 +1,145 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules:hidl_file_utils.bzl", "LANGUAGE_CC_HEADERS", "LANGUAGE_CC_SOURCES", "hidl_file_utils")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/hidl:hidl_library.bzl", "HidlInfo")
+load(":cc_library_common.bzl", "create_ccinfo_for_includes")
+
+CC_SOURCE_SUFFIX = "_genc++"
+CC_HEADER_SUFFIX = "_genc++_headers"
+CORE_PACKAGES = ["android.hidl.base@", "android.hidl.manager@"]
+
+def _cc_hidl_code_gen_rule_impl(ctx):
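+ """Runs hidl-gen on the hidl_library dep and returns the generated files, plus a
+ CcInfo that exposes this target's output directory as an include path."""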
+ hidl_info = ctx.attr.dep[HidlInfo]
+ outs = hidl_file_utils.generate_hidl_action(
+ hidl_info,
+ ctx.attr.language,
+ ctx,
+ )
+
+ return [
+ DefaultInfo(files = depset(direct = outs)),
+ create_ccinfo_for_includes(ctx, includes = [ctx.label.name]),
+ ]
+
+_cc_hidl_code_gen = rule(
+ implementation = _cc_hidl_code_gen_rule_impl,
+ attrs = {
+ "dep": attr.label(
+ providers = [HidlInfo],
+ doc = "hidl_library that exposes HidlInfo provider with *.hal files",
+ mandatory = True,
+ ),
+ "language": attr.string(
+ mandatory = True,
+ values = ["c++-headers", "c++-sources"],
+ ),
+ "_hidl_gen": attr.label(
+ allow_single_file = True,
+ default = Label("//prebuilts/build-tools:linux-x86/bin/hidl-gen"),
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+)
+
+def cc_hidl_library(
+ name,
+ interface,
+ dynamic_deps = [],
+ min_sdk_version = "",
+ tags = []):
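+ """Generates C++ sources and headers from a hidl_library and wraps them in cc libraries.
+
+ `name` becomes a cc_library_shared; a parallel cc_library_static named
+ `name + "_bp2build_cc_library_static"` is also created. For the core HIDL packages
+ (android.hidl.base@*, android.hidl.manager@*) only the code-gen targets are created,
+ since those interfaces are already part of libhidlbase.
+
+ Args:
+ name (str): name of the cc_library_shared target
+ interface (label): hidl_library target providing HidlInfo for this interface
+ dynamic_deps (list[label]): cc_hidl_library deps of this interface; core packages are filtered out
+ min_sdk_version (str): forwarded to the generated cc library targets
+ tags (list[str]): forwarded to the generated cc library targets
+ """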
+ srcs_name = name + CC_SOURCE_SUFFIX
+ hdrs_name = name + CC_HEADER_SUFFIX
+
+ _cc_hidl_code_gen(
+ name = srcs_name,
+ dep = interface,
+ language = LANGUAGE_CC_SOURCES,
+ tags = ["manual"],
+ )
+
+ _cc_hidl_code_gen(
+ name = hdrs_name,
+ dep = interface,
+ language = LANGUAGE_CC_HEADERS,
+ tags = ["manual"],
+ )
+
+ # Don't generate the cc library targets for the core interfaces; they are part
+ # of libhidlbase
+ if _is_core_package(name):
+ return
+
+ combined_dynamic_deps = [
+ "//system/libhidl:libhidlbase",
+ "//system/core/libutils:libutils",
+ ]
+ implementation_dynamic_deps = [
+ "//system/core/libcutils:libcutils",
+ ] + select({
+ "//build/bazel/rules/apex:android-in_apex": ["//system/logging/liblog:liblog_stub_libs_current"],
+ "//conditions:default": ["//system/logging/liblog:liblog"],
+ })
+
+ for dep in dynamic_deps:
+ # Break up something like: //system/libhidl/transport/base/1.0:android.hidl.base@1.0
+ # and get the interface name such as android.hidl.base@1.0.
+ parts = dep.split(":")
+ dep_name = parts[1] if len(parts) == 2 else dep
+
+ # core packages will be provided by libhidlbase
+ if not _is_core_package(dep_name):
+ combined_dynamic_deps.append(dep)
+
+ common_attrs = dict(
+ [
+ ("srcs", [":" + srcs_name]),
+ ("hdrs", [":" + hdrs_name]),
+ ("dynamic_deps", combined_dynamic_deps),
+ ("implementation_dynamic_deps", implementation_dynamic_deps),
+ ("export_includes", ["."]),
+ ("local_includes", ["."]),
+ ("copts", [
+ "-Wall",
+ "-Werror",
+ "-Wextra-semi",
+ ] + select({
+ "//build/bazel/product_variables:debuggable": ["-D__ANDROID_DEBUGGABLE__"],
+ "//conditions:default": [],
+ })),
+ ("min_sdk_version", min_sdk_version),
+ ("tags", tags),
+ ],
+ )
+
+ cc_library_shared(
+ name = name,
+ **common_attrs
+ )
+
+ cc_library_static(
+ name = name + "_bp2build_cc_library_static",
+ **common_attrs
+ )
+
+def _is_core_package(name):
+ for pkg in CORE_PACKAGES:
+ if name.startswith(pkg):
+ return True
+
+ return False
diff --git a/rules/cc/cc_hidl_library_test.bzl b/rules/cc/cc_hidl_library_test.bzl
new file mode 100644
index 00000000..73b69f74
--- /dev/null
+++ b/rules/cc/cc_hidl_library_test.bzl
@@ -0,0 +1,343 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:hidl_file_utils.bzl", "INTERFACE_HEADER_PREFIXES", "TYPE_HEADER_PREFIXES")
+load("//build/bazel/rules/hidl:hidl_interface.bzl", "INTERFACE_SUFFIX")
+load("//build/bazel/rules/hidl:hidl_library.bzl", "hidl_library")
+load(":cc_hidl_library.bzl", "CC_HEADER_SUFFIX", "cc_hidl_library")
+
+HIDL_GEN = "prebuilts/build-tools/linux-x86/bin/hidl-gen"
+
+SRC_TYPE_NAME_1 = "types_1.hal"
+GEN_TYPE_NAME_1 = "types_1.h"
+SRC_INTERFACE_NAME_1 = "IInterface_1.hal"
+GEN_INTERFACE_NAME_1 = "Interface_1.h"
+ROOT_1 = "android.hardware"
+ROOT_INTERFACE_FILE_LABEL_1 = "//hardware/interfaces:current.txt"
+ROOT_INTERFACE_FILE_1 = "hardware/interfaces/current.txt"
+INTERFACE_PACKAGE_NAME_1 = "android.hardware.int1"
+ROOT_ARGUMENT_1 = "android.hardware:hardware/interfaces"
+
+SRC_TYPE_NAME_2 = "types_2.hal"
+SRC_INTERFACE_NAME_2 = "IInterface_2.hal"
+ROOT_2 = "android.hidl"
+ROOT_INTERFACE_FILE_LABEL_2 = "//system/libhidl/transport:current.txt"
+ROOT_INTERFACE_FILE_2 = "system/libhidl/transport/current.txt"
+ROOT_ARGUMENT_2 = "android.hidl:system/libhidl/transport"
+INTERFACE_PACKAGE_NAME_2 = "android.hidl.int2"
+
+INTERFACE_PACKAGE_NAME_CORE = "android.hidl.base"
+
+INTERFACE_VERSION_1_0 = "1.0"
+INTERFACE_VERSION_1_1 = "1.1"
+
+def _cc_code_gen_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ package_root = paths.dirname(ctx.build_file_path)
+ header_gen_actions = [a for a in actions if a.mnemonic == "HidlGenCcHeader"]
+ asserts.true(
+ env,
+ len(header_gen_actions) == 1,
+ "Cc header gen action not found: %s" % actions,
+ )
+
+ header_gen_action = header_gen_actions[0]
+
+ asserts.equals(
+ env,
+ expected = sorted([
+ paths.join(package_root, SRC_TYPE_NAME_1),
+ paths.join(package_root, SRC_INTERFACE_NAME_1),
+ paths.join(package_root, SRC_TYPE_NAME_2),
+ paths.join(package_root, SRC_INTERFACE_NAME_2),
+ ROOT_INTERFACE_FILE_1,
+ ROOT_INTERFACE_FILE_2,
+ paths.join(HIDL_GEN),
+ ]),
+ actual = sorted([
+ file.short_path
+ for file in header_gen_action.inputs.to_list()
+ ]),
+ )
+
+ path = paths.join(package_root, INTERFACE_PACKAGE_NAME_1.replace(".", "/"), INTERFACE_VERSION_1_0)
+ asserts.equals(
+ env,
+ expected = sorted(
+ [
+ paths.join(path, prefix + GEN_TYPE_NAME_1)
+ for prefix in TYPE_HEADER_PREFIXES
+ ] +
+ [
+ paths.join(path, prefix + GEN_INTERFACE_NAME_1)
+ for prefix in INTERFACE_HEADER_PREFIXES
+ ],
+ ),
+ actual = sorted([
+ file.short_path
+ for file in header_gen_action.outputs.to_list()
+ ]),
+ )
+
+ cmd = header_gen_action.argv
+ asserts.true(
+ env,
+ HIDL_GEN == cmd[0],
+ "hidl-gen is not called: %s" % cmd,
+ )
+
+ asserts.true(
+ env,
+ "-R" in cmd,
+ "Calling hidl-gen without -R: %s" % cmd,
+ )
+
+ index = cmd.index("-p")
+ asserts.true(
+ env,
+ index > 0,
+ "Calling hidl-gen without -p: %s" % cmd,
+ )
+
+ asserts.true(
+ env,
+ cmd[index + 1] == ".",
+ ". needs to follow -p: %s" % cmd,
+ )
+
+ index = cmd.index("-o")
+ asserts.true(
+ env,
+ index > 0,
+ "Calling hidl-gen without -o: %s" % cmd,
+ )
+
+ asserts.true(
+ env,
+ cmd[index + 1].endswith(package_root),
+ "Incorrect output path: %s" % cmd,
+ )
+
+ index = cmd.index("-L")
+ asserts.true(
+ env,
+ index > 0,
+ "Calling hidl-gen without -L: %s" % cmd,
+ )
+
+ asserts.true(
+ env,
+ cmd[index + 1] == "c++-headers",
+ "Incorrect language: %s" % cmd,
+ )
+
+ roots = []
+ cmd_len = len(cmd)
+ for i in range(cmd_len):
+ if cmd[i] == "-r":
+ roots.append(cmd[i + 1])
+
+ asserts.equals(
+ env,
+ expected = sorted([
+ ROOT_ARGUMENT_1,
+ ROOT_ARGUMENT_2,
+ ]),
+ actual = sorted(roots),
+ )
+
+ asserts.true(
+ env,
+ cmd[cmd_len - 1] == INTERFACE_PACKAGE_NAME_1 + "@" + INTERFACE_VERSION_1_0,
+ "The last arg should be the FQ name of the interface: %s" % cmd,
+ )
+
+ return analysistest.end(env)
+
+cc_code_gen_test = analysistest.make(
+ _cc_code_gen_test_impl,
+)
+
+def _test_cc_code_gen():
+ test_name = "cc_code_gen_test"
+ cc_name = INTERFACE_PACKAGE_NAME_1 + "@" + INTERFACE_VERSION_1_0
+ interface_name = cc_name + INTERFACE_SUFFIX
+ cc_name_dep = INTERFACE_PACKAGE_NAME_2 + "@" + INTERFACE_VERSION_1_0
+ interface_name_dep = cc_name_dep + INTERFACE_SUFFIX
+
+ hidl_library(
+ name = interface_name_dep,
+ root = ROOT_2,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL_2,
+ fq_name = cc_name_dep,
+ srcs = [
+ SRC_TYPE_NAME_2,
+ SRC_INTERFACE_NAME_2,
+ ],
+ tags = ["manual"],
+ )
+
+ cc_hidl_library(
+ name = cc_name_dep,
+ interface = interface_name_dep,
+ tags = ["manual"],
+ )
+
+ hidl_library(
+ name = interface_name,
+ deps = [interface_name_dep],
+ root = ROOT_1,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL_1,
+ fq_name = cc_name,
+ srcs = [
+ SRC_TYPE_NAME_1,
+ SRC_INTERFACE_NAME_1,
+ ],
+ tags = ["manual"],
+ )
+
+ cc_hidl_library(
+ name = cc_name,
+ interface = interface_name,
+ dynamic_deps = [cc_name_dep],
+ tags = ["manual"],
+ )
+
+ cc_code_gen_test(
+ name = test_name,
+ target_under_test = cc_name + CC_HEADER_SUFFIX,
+ )
+
+ return test_name
+
+def _cc_interface_dep_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ dynamic_deps = target_under_test[CcSharedLibraryInfo].dynamic_deps
+
+ dep_name = INTERFACE_PACKAGE_NAME_CORE + "@" + INTERFACE_VERSION_1_1
+ package_root = paths.dirname(ctx.build_file_path)
+
+ asserts.false(
+ env,
+ _find_dep(package_root, dep_name, dynamic_deps),
+ "Core package in the dependencies: %s %s" % (dep_name, dynamic_deps),
+ )
+
+ dep_name = INTERFACE_PACKAGE_NAME_2 + "@" + INTERFACE_VERSION_1_1
+ asserts.true(
+ env,
+ _find_dep(package_root, dep_name, dynamic_deps),
+ "Missing valid dependency: %s %s" % (dep_name, dynamic_deps),
+ )
+
+ return analysistest.end(env)
+
+def _find_dep(package_root, name, deps):
+ full_name = "@//" + package_root + ":" + name
+ for lists in deps.to_list():
+ for dep in lists[0]:
+ if dep.startswith(full_name):
+ return True
+
+ return False
+
+cc_interface_dep_test = analysistest.make(
+ _cc_interface_dep_test_impl,
+)
+
+def _test_cc_interface_dep():
+ test_name = "cc_interface_dep_test"
+ cc_name = INTERFACE_PACKAGE_NAME_1 + "@" + INTERFACE_VERSION_1_1
+ interface_name = cc_name + INTERFACE_SUFFIX
+ cc_name_dep = INTERFACE_PACKAGE_NAME_2 + "@" + INTERFACE_VERSION_1_1
+ interface_name_dep = cc_name_dep + INTERFACE_SUFFIX
+ cc_name_core = INTERFACE_PACKAGE_NAME_CORE + "@" + INTERFACE_VERSION_1_1
+ interface_name_core = cc_name_core + INTERFACE_SUFFIX
+
+ hidl_library(
+ name = interface_name_dep,
+ root = ROOT_2,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL_2,
+ fq_name = cc_name_dep,
+ srcs = [
+ SRC_TYPE_NAME_2,
+ SRC_INTERFACE_NAME_2,
+ ],
+ tags = ["manual"],
+ )
+
+ cc_hidl_library(
+ name = cc_name_dep,
+ interface = interface_name_dep,
+ tags = ["manual"],
+ )
+
+ hidl_library(
+ name = interface_name_core,
+ root = ROOT_2,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL_2,
+ fq_name = cc_name_core,
+ srcs = [
+ SRC_TYPE_NAME_2,
+ SRC_INTERFACE_NAME_2,
+ ],
+ tags = ["manual"],
+ )
+
+ cc_hidl_library(
+ name = cc_name_core,
+ interface = interface_name_core,
+ tags = ["manual"],
+ )
+
+ hidl_library(
+ name = interface_name,
+ deps = [interface_name_dep, interface_name_core],
+ root = ROOT_1,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL_1,
+ fq_name = cc_name,
+ srcs = [
+ SRC_TYPE_NAME_1,
+ SRC_INTERFACE_NAME_1,
+ ],
+ tags = ["manual"],
+ )
+
+ cc_hidl_library(
+ name = cc_name,
+ interface = interface_name,
+ dynamic_deps = [cc_name_dep, cc_name_core],
+ tags = ["manual"],
+ )
+
+ cc_interface_dep_test(
+ name = test_name,
+ target_under_test = cc_name,
+ )
+
+ return test_name
+
+def cc_hidl_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_cc_code_gen(),
+ _test_cc_interface_dep(),
+ ],
+ )
diff --git a/rules/cc/cc_library_common.bzl b/rules/cc/cc_library_common.bzl
index 1431ff3c..afd5d978 100644
--- a/rules/cc/cc_library_common.bzl
+++ b/rules/cc/cc_library_common.bzl
@@ -1,40 +1,143 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-load("//build/bazel/product_variables:constants.bzl", "constants")
-load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain")
-load("@soong_injection//api_levels:api_levels.bzl", "api_levels")
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load("@soong_injection//android:constants.bzl", android_constants = "constants")
+load("@soong_injection//api_levels:platform_versions.bzl", "platform_versions")
+load("//build/bazel/rules:common.bzl", "strip_bp2build_label_suffix")
+load("//build/bazel/rules/common:api.bzl", "api")
_bionic_targets = ["//bionic/libc", "//bionic/libdl", "//bionic/libm"]
_static_bionic_targets = ["//bionic/libc:libc_bp2build_cc_library_static", "//bionic/libdl:libdl_bp2build_cc_library_static", "//bionic/libm:libm_bp2build_cc_library_static"]
+# When building an APEX, the stub libraries of libc, libdl, and libm should be used for linking.
+_bionic_stub_targets = [
+ "//bionic/libc:libc_stub_libs_current",
+ "//bionic/libdl:libdl_stub_libs_current",
+ "//bionic/libm:libm_stub_libs_current",
+]
+
# The default system_dynamic_deps value for cc libraries. This value should be
# used if no value for system_dynamic_deps is specified.
system_dynamic_deps_defaults = select({
- constants.ArchVariantToConstraints["linux_bionic"]: _bionic_targets,
- constants.ArchVariantToConstraints["android"]: _bionic_targets,
+ "//build/bazel/rules/apex:android-in_apex": _bionic_stub_targets,
+ "//build/bazel/rules/apex:android-non_apex": _bionic_targets,
+ "//build/bazel/rules/apex:linux_bionic-in_apex": _bionic_stub_targets,
+ "//build/bazel/rules/apex:linux_bionic-non_apex": _bionic_targets,
"//conditions:default": [],
})
system_static_deps_defaults = select({
- constants.ArchVariantToConstraints["linux_bionic"]: _static_bionic_targets,
- constants.ArchVariantToConstraints["android"]: _static_bionic_targets,
+ "//build/bazel/rules/apex:android-in_apex": _bionic_stub_targets,
+ "//build/bazel/rules/apex:android-non_apex": _static_bionic_targets,
+ "//build/bazel/rules/apex:linux_bionic-in_apex": _bionic_stub_targets,
+ "//build/bazel/rules/apex:linux_bionic-non_apex": _static_bionic_targets,
"//conditions:default": [],
})
+# List comes from here:
+# https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/cc.go;l=1441;drc=9fd9129b5728602a4768e8e8e695660b683c405e
+_bionic_libs = ["libc", "libm", "libdl", "libdl_android", "linker", "linkerconfig"]
+
+# Comes from here:
+# https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/cc.go;l=1450;drc=9fd9129b5728602a4768e8e8e695660b683c405e
+_bootstrap_libs = ["libclang_rt.hwasan"]
+
+future_version = 10000
+
+CcSanitizerLibraryInfo = provider(
+ "Denotes which sanitizer libraries to include",
+ fields = {
+ "propagate_ubsan_deps": ("True if any ubsan sanitizers are " +
+ "enabled on any transitive deps, or " +
+ "the current target. False otherwise"),
+ },
+)
+
+# Must be called from within a rule (not a macro) so that the features select
+# has been resolved.
+def get_sanitizer_lib_info(features, deps):
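+ """Returns a CcSanitizerLibraryInfo whose propagate_ubsan_deps is True if this target
+ enables any ubsan_* feature, or if any direct dep already propagates ubsan deps.
+
+ Args:
+ features (list[str]): resolved features of the current target
+ deps (list[Target]): direct dependencies to inspect for CcSanitizerLibraryInfo
+ """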
+ propagate_ubsan_deps = False
+ for feature in features:
+ if feature.startswith("ubsan_"):
+ propagate_ubsan_deps = True
+ break
+ if not propagate_ubsan_deps:
+ for dep in deps:
+ if (CcSanitizerLibraryInfo in dep and
+ dep[CcSanitizerLibraryInfo].propagate_ubsan_deps):
+ propagate_ubsan_deps = True
+ break
+ return CcSanitizerLibraryInfo(
+ propagate_ubsan_deps = propagate_ubsan_deps,
+ )
+
+def _sanitizer_deps_impl(ctx):
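+ """Returns a CcInfo that links in the ubsan minimal runtime when the dep propagates
+ ubsan deps, and an empty CcInfo otherwise."""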
+ if (CcSanitizerLibraryInfo in ctx.attr.dep and
+ ctx.attr.dep[CcSanitizerLibraryInfo].propagate_ubsan_deps):
+ # To operate correctly with native cc_binary and cc_shared_library,
+ # copy the linker inputs and ensure that this target is marked as the
+ # "owner". Otherwise, upstream targets may drop these linker inputs.
+ # See b/264894507.
+ libraries = [
+ lib
+ for input in ctx.attr._ubsan_library[CcInfo].linking_context.linker_inputs.to_list()
+ for lib in input.libraries
+ ]
+ new_linker_input = cc_common.create_linker_input(
+ owner = ctx.label,
+ libraries = depset(direct = libraries),
+ )
+ linking_context = cc_common.create_linking_context(
+ linker_inputs = depset(direct = [new_linker_input]),
+ )
+ return [CcInfo(linking_context = linking_context)]
+ return [CcInfo()]
+
+# This rule is essentially a workaround to be able to add dependencies
+# conditionally based on provider values
+sanitizer_deps = rule(
+ implementation = _sanitizer_deps_impl,
+ doc = "A rule that propagates given sanitizer dependencies if the " +
+ "proper conditions are met",
+ attrs = {
+ "dep": attr.label(
+ mandatory = True,
+ doc = "library to check for sanitizer dependency propagation",
+ ),
+ "_ubsan_library": attr.label(
+ default = "//prebuilts/clang/host/linux-x86:libclang_rt.ubsan_minimal",
+ doc = "The library target corresponding to the undefined " +
+ "behavior sanitizer library to be used",
+ ),
+ },
+ provides = [CcInfo],
+)
+
+def sdk_version_feature_from_parsed_version(version):
+ return "sdk_version_" + str(version)
+
+def _create_sdk_version_features_map():
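+ """Maps each //build/bazel/rules/apex:min_sdk_version_<level> config setting to the
+ corresponding sdk_version_<level> feature, defaulting to the future version (10000)."""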
+ version_feature_map = {}
+ for level in api.api_levels.values():
+ version_feature_map["//build/bazel/rules/apex:min_sdk_version_" + str(level)] = [sdk_version_feature_from_parsed_version(level)]
+ version_feature_map["//conditions:default"] = [sdk_version_feature_from_parsed_version(future_version)]
+
+ return version_feature_map
+
+sdk_version_features = select(_create_sdk_version_features_map())
+
def add_lists_defaulting_to_none(*args):
"""Adds multiple lists, but is well behaved with a `None` default."""
combined = None
@@ -46,10 +149,6 @@ def add_lists_defaulting_to_none(*args):
return combined
-# By default, crtbegin/crtend linking is enabled for shared libraries and cc_binary.
-def disable_crt_link(features):
- return features + ["-link_crt"]
-
# get_includes_paths expects a rule context, a list of directories, and
# whether the directories are package-relative and returns a list of exec
# root-relative paths. This handles the need to search for files both in the
@@ -64,7 +163,12 @@ def get_includes_paths(ctx, dirs, package_relative = True):
execution_rel_dir = ctx.label.package
if len(rel_dir) > 0:
execution_rel_dir = execution_rel_dir + "/" + rel_dir
- execution_relative_dirs.append(execution_rel_dir)
+
+ # Prefix with the workspace root so that this repo can also be used as an external repository.
+ repo_prefix_dir = execution_rel_dir
+ if ctx.label.workspace_root != "":
+ repo_prefix_dir = ctx.label.workspace_root + "/" + execution_rel_dir
+ execution_relative_dirs.append(repo_prefix_dir)
# To support generated files, we also need to export includes relative to the bin directory
if not execution_rel_dir.startswith("/"):
@@ -73,14 +177,14 @@ def get_includes_paths(ctx, dirs, package_relative = True):
def create_ccinfo_for_includes(
ctx,
+ hdrs = [],
includes = [],
absolute_includes = [],
system_includes = [],
deps = []):
- cc_toolchain = find_cpp_toolchain(ctx)
-
# Create a compilation context using the string includes of this target.
compilation_context = cc_common.create_compilation_context(
+ headers = depset(hdrs),
includes = depset(
get_includes_paths(ctx, includes) +
get_includes_paths(ctx, absolute_includes, package_relative = False),
@@ -91,42 +195,229 @@ def create_ccinfo_for_includes(
# Combine this target's compilation context with those of the deps; use only
# the compilation context of the combined CcInfo.
cc_infos = [dep[CcInfo] for dep in deps]
- cc_infos += [CcInfo(compilation_context = compilation_context)]
+ cc_infos.append(CcInfo(compilation_context = compilation_context))
combined_info = cc_common.merge_cc_infos(cc_infos = cc_infos)
return CcInfo(compilation_context = combined_info.compilation_context)
-
def is_external_directory(package_name):
- if package_name.startswith('external'):
- return True
- if package_name.startswith('hardware'):
- paths = package_name.split("/")
- if len(paths) < 2:
- return True
- secondary_path = paths[1]
- if secondary_path in ["google", "interfaces", "ril"]:
- return True
- return secondary_path.startswith("libhardware")
- if package_name.startswith("vendor"):
- paths = package_name.split("/")
- if len(paths) < 2:
- return True
- secondary_path = paths[1]
- return secondary_path.contains("google")
- return False
+ if package_name.startswith("external"):
+ return True
+ if package_name.startswith("hardware"):
+ paths = package_name.split("/")
+ if len(paths) < 2:
+ return True
+ secondary_path = paths[1]
+ if secondary_path in ["google", "interfaces", "ril"]:
+ return False
+ return not secondary_path.startswith("libhardware")
+ if package_name.startswith("vendor"):
+ paths = package_name.split("/")
+ if len(paths) < 2:
+ return True
+ secondary_path = paths[1]
+ return "google" not in secondary_path
+ return False
+# TODO: Move this to a common rule dir, instead of a cc rule dir. Nothing here
+# should be cc specific, except that the current callers are (only) cc rules.
def parse_sdk_version(version):
- future_version = "10000"
+ if version == "apex_inherit":
+ # use the version determined by the transition value.
+ return sdk_version_features + [sdk_version_feature_from_parsed_version("apex_inherit")]
- if version == "" or version == "current":
- return future_version
- elif version.isdigit() and int(version) in api_levels.values():
- return version
- elif version in api_levels.keys():
- return str(api_levels[version])
- # We need to handle this case properly later
- elif version == "apex_inherit":
+ return [sdk_version_feature_from_parsed_version(parse_apex_sdk_version(version))]
+
+def parse_apex_sdk_version(version):
+ if version == "" or version == "current" or version == "10000":
return future_version
- else:
- fail("Unknown sdk version: %s" % (version))
+ elif version in api.api_levels.keys():
+ return api.api_levels[version]
+ elif version.isdigit():
+ version = int(version)
+ if version in api.api_levels.values():
+ return version
+ elif version == platform_versions.platform_sdk_version:
+ # For internal branch states, support parsing a finalized version number
+ # that's also still in
+ # platform_versions.platform_version_active_codenames, but not api.api_levels.
+ #
+ # This happens a few months each year on internal branches where the
+ # internal master branch has a finalized API, but is not released yet,
+ # therefore platform_sdk_version is usually the latest AOSP dessert
+ # version + 1. The generated api.api_levels map sets these to 9000 + i,
+ # where i is the index of the current/future version, so version is not
+ # in the api.api_levels.values() list, but it is a valid sdk version.
+ #
+ # See also b/234321488#comment2
+ return version
+ fail("Unknown sdk version: %s, could not be parsed as " % version +
+ "an integer and/or is not a recognized codename. Valid api levels are:" +
+ str(api.api_levels))
+
+CPP_EXTENSIONS = ["cc", "cpp", "c++"]
+
+C_EXTENSIONS = ["c"]
+
+_HEADER_EXTENSIONS = ["h", "hh", "hpp", "hxx", "h++", "inl", "inc", "ipp", "h.generic"]
+
+def get_non_header_srcs(input_srcs, exclude_srcs = [], source_extensions = None, header_extensions = _HEADER_EXTENSIONS):
+ """get_non_header_srcs returns a list of srcs that do not have header extensions and aren't in the exclude srcs list
+
+ Args:
+ input_srcs (list[File]): list of files to filter
+ exclude_srcs (list[File]): list of files that should be excluded from the returned list
+ source_extensions (list[str]): list of extensions that designate sources.
+ If None, all extensions are valid. Otherwise only sources with these extensions are returned
+ header_extensions (list[str]): list of extensions that designate headers
+ Returns:
+ srcs, hdrs (list[File], list[File]): tuple of lists of files; srcs have non-header extensions and are not excluded,
+ and hdrs are files with header extensions
+ """
+ srcs = []
+ hdrs = []
+ for s in input_srcs:
+ is_source = not source_extensions or s.extension in source_extensions
+ if s.extension in header_extensions:
+ hdrs.append(s)
+ elif is_source and s not in exclude_srcs:
+ srcs.append(s)
+ return srcs, hdrs
+
+def prefix_in_list(str, prefixes):
+ """returns the prefix if any element of prefixes is a prefix of path
+
+ Args:
+ str (str): the string to compare prefixes against
+ prefixes (list[str]): a list of prefixes to check against str
+ Returns:
+ prefix (str or None): the prefix (if any) that str starts with
+ """
+ for prefix in prefixes:
+ if str.startswith(prefix):
+ return prefix
+ return None
+
+_DISALLOWED_INCLUDE_DIRS = android_constants.NeverAllowNotInIncludeDir
+_PACKAGES_DISALLOWED_TO_SPECIFY_INCLUDE_DIRS = android_constants.NeverAllowNoUseIncludeDir
+
+def check_absolute_include_dirs_disabled(target_package, absolute_includes):
+ """checks that absolute include dirs are disabled for some directories
+
+ Args:
+ target_package (str): package of current target
+ absolute_includes (list[str]): list of absolute include directories
+ """
+ if len(absolute_includes) > 0:
+ disallowed_prefix = prefix_in_list(
+ target_package,
+ _PACKAGES_DISALLOWED_TO_SPECIFY_INCLUDE_DIRS,
+ )
+ if disallowed_prefix != None:
+ fail("include_dirs is deprecated, all usages of them in '" +
+ disallowed_prefix + "' have been migrated to use alternate" +
+ " mechanisms and so can no longer be used.")
+
+ for path in absolute_includes:
+ if path in _DISALLOWED_INCLUDE_DIRS:
+ fail("include_dirs is deprecated, all usages of '" + path + "' have" +
+ " been migrated to use alternate mechanisms and so can no longer" +
+ " be used.")
+
+def get_compilation_args(toolchain, feature_config, flags, compilation_ctx, action_name):
+ compilation_vars = cc_common.create_compile_variables(
+ cc_toolchain = toolchain,
+ feature_configuration = feature_config,
+ user_compile_flags = flags,
+ include_directories = compilation_ctx.includes,
+ quote_include_directories = compilation_ctx.quote_includes,
+ system_include_directories = compilation_ctx.system_includes,
+ framework_include_directories = compilation_ctx.framework_includes,
+ )
+
+ return cc_common.get_memory_inefficient_command_line(
+ feature_configuration = feature_config,
+ action_name = action_name,
+ variables = compilation_vars,
+ )
+
+def build_compilation_flags(ctx, deps, user_flags, action_name):
+ cc_toolchain = find_cpp_toolchain(ctx)
+
+ feature_config = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ language = "c++",
+ requested_features = ctx.features,
+ unsupported_features = ctx.disabled_features,
+ )
+
+ cc_info = cc_common.merge_cc_infos(direct_cc_infos = [d[CcInfo] for d in deps])
+
+ compilation_flags = get_compilation_args(
+ toolchain = cc_toolchain,
+ feature_config = feature_config,
+ flags = user_flags,
+ compilation_ctx = cc_info.compilation_context,
+ action_name = action_name,
+ )
+
+ return cc_info.compilation_context, compilation_flags
+
+def is_bionic_lib(name):
+ return name in _bionic_libs
+
+def is_bootstrap_lib(name):
+ return name in _bootstrap_libs
+
+CcAndroidMkInfo = provider(
+ "Provides information to be passed to AndroidMk in Soong",
+ fields = {
+ "local_static_libs": "list of target names passed to LOCAL_STATIC_LIBRARIES AndroidMk variable",
+ "local_whole_static_libs": "list of target names passed to LOCAL_WHOLE_STATIC_LIBRARIES AndroidMk variable",
+ "local_shared_libs": "list of target names passed to LOCAL_SHARED_LIBRARIES AndroidMk variable",
+ },
+)
+
+def create_cc_androidmk_provider(*, static_deps, whole_archive_deps, dynamic_deps):
+ # Since this information is provided to Soong for mixed builds,
+ # we are just taking the Soong module name rather than the Bazel
+ # label.
+ # TODO(b/266197834) consider moving this logic to the mixed builds
+ # handler in Soong
+ local_static_libs = [
+ strip_bp2build_label_suffix(d.label.name)
+ for d in static_deps
+ ]
+ local_whole_static_libs = [
+ strip_bp2build_label_suffix(d.label.name)
+ for d in whole_archive_deps
+ ]
+ local_shared_libs = [
+ strip_bp2build_label_suffix(d.label.name)
+ for d in dynamic_deps
+ ]
+ return CcAndroidMkInfo(
+ local_static_libs = local_static_libs,
+ local_whole_static_libs = local_whole_static_libs,
+ local_shared_libs = local_shared_libs,
+ )
+
+def create_cc_prebuilt_library_info(ctx, lib_to_link):
+ "Create the CcInfo for a prebuilt_library_{shared,static}"
+
+ compilation_context = cc_common.create_compilation_context(
+ includes = depset(get_includes_paths(ctx, ctx.attr.export_includes)),
+ system_includes = depset(get_includes_paths(ctx, ctx.attr.export_system_includes)),
+ )
+ linker_input = cc_common.create_linker_input(
+ owner = ctx.label,
+ libraries = depset(direct = [lib_to_link] if lib_to_link != None else []),
+ )
+ linking_context = cc_common.create_linking_context(
+ linker_inputs = depset(direct = [linker_input]),
+ )
+ return CcInfo(
+ compilation_context = compilation_context,
+ linking_context = linking_context,
+ )
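
For context on the helpers added above, here is a minimal sketch of how a rule implementation might consume get_non_header_srcs, assuming the file keeps its //build/bazel/rules/cc package path; the rule name and attribute are hypothetical and not part of this change.

    load("//build/bazel/rules/cc:cc_library_common.bzl", "CPP_EXTENSIONS", "get_non_header_srcs")

    def _split_srcs_impl(ctx):
        # Partition the mixed srcs attribute: header-extension files go to hdrs,
        # files with a C++ source extension go to srcs.
        srcs, hdrs = get_non_header_srcs(
            ctx.files.srcs,
            source_extensions = CPP_EXTENSIONS,
        )
        return [DefaultInfo(files = depset(srcs + hdrs))]

    split_srcs = rule(
        implementation = _split_srcs_impl,
        attrs = {"srcs": attr.label_list(allow_files = True)},
    )
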
diff --git a/rules/cc/cc_library_common_test.bzl b/rules/cc/cc_library_common_test.bzl
new file mode 100644
index 00000000..22c14228
--- /dev/null
+++ b/rules/cc/cc_library_common_test.bzl
@@ -0,0 +1,157 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "unittest", skylib_asserts = "asserts")
+load("//build/bazel/rules/test_common:asserts.bzl", roboleaf_asserts = "asserts")
+load(":cc_library_common.bzl", "CcAndroidMkInfo", "is_external_directory")
+
+asserts = skylib_asserts + roboleaf_asserts
+
+def _is_external_directory_test(ctx):
+ env = unittest.begin(ctx)
+
+ actual = is_external_directory(ctx.attr.path)
+
+ asserts.equals(env, ctx.attr.expected_value, actual, "expected {path} to be {external}".format(
+ path = ctx.attr.path,
+ external = "external" if ctx.attr.expected_value else "non-external",
+ ))
+
+ return unittest.end(env)
+
+is_external_directory_test = unittest.make(
+ _is_external_directory_test,
+ attrs = {
+ "path": attr.string(),
+ "expected_value": attr.bool(),
+ },
+)
+
+def _is_external_directory_tests():
+ test_cases = {
+ "non_external": struct(
+ path = "path/to/package",
+ expected_value = False,
+ ),
+ "external": struct(
+ path = "external/path/to/package",
+ expected_value = True,
+ ),
+ "hardware": struct(
+ path = "hardware/path/to/package",
+ expected_value = True,
+ ),
+ "only_hardware": struct(
+ path = "hardware",
+ expected_value = True,
+ ),
+ "hardware_google": struct(
+ path = "hardware/google/path/to/package",
+ expected_value = False,
+ ),
+ "hardware_interfaces": struct(
+ path = "hardware/interfaces/path/to/package",
+ expected_value = False,
+ ),
+ "hardware_ril": struct(
+ path = "hardware/ril/path/to/package",
+ expected_value = False,
+ ),
+ "hardware_libhardware_dir": struct(
+ path = "hardware/libhardware/path/to/package",
+ expected_value = False,
+ ),
+ "hardware_libhardware_partial": struct(
+ path = "hardware/libhardware_legacy/path/to/package",
+ expected_value = False,
+ ),
+ "vendor": struct(
+ path = "vendor/path/to/package",
+ expected_value = True,
+ ),
+ "only_vendor": struct(
+ path = "vendor",
+ expected_value = True,
+ ),
+ "vendor_google": struct(
+ path = "vendor/google/path/to/package",
+ expected_value = False,
+ ),
+ "vendor_google_with_prefix": struct(
+ path = "vendor/pre_google/path/to/package",
+ expected_value = False,
+ ),
+ "vendor_google_with_postfix": struct(
+ path = "vendor/google_post/path/to/package",
+ expected_value = False,
+ ),
+ }
+
+ for name, test_case in test_cases.items():
+ is_external_directory_test(
+ name = name,
+ path = test_case.path,
+ expected_value = test_case.expected_value,
+ )
+ return test_cases.keys()
+
+def _target_provides_androidmk_info_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ target_under_test = analysistest.target_under_test(env)
+ mkinfo = target_under_test[CcAndroidMkInfo]
+ asserts.list_equals(
+ env,
+ ctx.attr.expected_static_libs,
+ mkinfo.local_static_libs,
+ "expected static_libs to be %s, but got %s" % (
+ ctx.attr.expected_static_libs,
+ mkinfo.local_static_libs,
+ ),
+ )
+ asserts.list_equals(
+ env,
+ ctx.attr.expected_whole_static_libs,
+ mkinfo.local_whole_static_libs,
+ "expected whole_static_libs to be %s, but got %s" % (
+ ctx.attr.expected_whole_static_libs,
+ mkinfo.local_whole_static_libs,
+ ),
+ )
+ asserts.list_equals(
+ env,
+ ctx.attr.expected_shared_libs,
+ mkinfo.local_shared_libs,
+ "expected shared_libs to be %s, but got %s" % (
+ ctx.attr.expected_shared_libs,
+ mkinfo.local_shared_libs,
+ ),
+ )
+
+ return analysistest.end(env)
+
+target_provides_androidmk_info_test = analysistest.make(
+ _target_provides_androidmk_info_test_impl,
+ attrs = {
+ "expected_static_libs": attr.string_list(),
+ "expected_whole_static_libs": attr.string_list(),
+ "expected_shared_libs": attr.string_list(),
+ },
+)
+
+def cc_library_common_test_suites(name):
+ native.test_suite(
+ name = name,
+ tests = _is_external_directory_tests(),
+ )
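
A hedged sketch of how the suite above would typically be wired into a BUILD file in the same package; the suite target name is illustrative.

    load(":cc_library_common_test.bzl", "cc_library_common_test_suites")

    cc_library_common_test_suites(name = "cc_library_common_tests")
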
diff --git a/rules/cc/cc_library_headers.bzl b/rules/cc/cc_library_headers.bzl
index c54c1413..ce41dad0 100644
--- a/rules/cc/cc_library_headers.bzl
+++ b/rules/cc/cc_library_headers.bzl
@@ -1,24 +1,21 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load(":cc_library_static.bzl", "cc_library_static")
def cc_library_headers(
name,
- implementation_deps = [],
deps = [],
hdrs = [],
export_includes = [],
@@ -32,15 +29,12 @@ def cc_library_headers(
cc_library_static(
name = name,
- implementation_deps = implementation_deps,
deps = deps,
export_includes = export_includes,
export_absolute_includes = export_absolute_includes,
export_system_includes = export_system_includes,
hdrs = hdrs,
native_bridge_supported = native_bridge_supported,
- # do not automatically add libcrt dependency to header libraries
- use_libcrt = False,
stl = "none",
sdk_version = sdk_version,
min_sdk_version = min_sdk_version,
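
For reference, a minimal BUILD-file sketch of the simplified cc_library_headers macro; the target name and paths are hypothetical, and the omitted attributes are assumed to keep their defaults.

    load("//build/bazel/rules/cc:cc_library_headers.bzl", "cc_library_headers")

    cc_library_headers(
        name = "libexample_headers",    # hypothetical header-only module
        hdrs = ["include/example.h"],
        export_includes = ["include"],
    )
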
diff --git a/rules/cc/cc_library_shared.bzl b/rules/cc/cc_library_shared.bzl
index b4367e50..9fed9694 100644
--- a/rules/cc/cc_library_shared.bzl
+++ b/rules/cc/cc_library_shared.bzl
@@ -1,40 +1,46 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load("//build/bazel/rules/abi:abi_dump.bzl", "AbiDiffInfo", "abi_dump")
load(
":cc_library_common.bzl",
+ "CcAndroidMkInfo",
"add_lists_defaulting_to_none",
- "disable_crt_link",
"parse_sdk_version",
+ "sanitizer_deps",
"system_dynamic_deps_defaults",
)
load(":cc_library_static.bzl", "cc_library_static")
-load(":cc_stub_library.bzl", "CcStubInfo", "cc_stub_gen")
-load(":generate_toc.bzl", "shared_library_toc", _CcTocInfo = "CcTocInfo")
-load(":stl.bzl", "shared_stl_deps")
-load(":stripped_cc_common.bzl", "stripped_shared_library")
+load(":clang_tidy.bzl", "collect_deps_clang_tidy_info")
+load(
+ ":composed_transitions.bzl",
+ "lto_and_fdo_profile_incoming_transition",
+)
+load(
+ ":fdo_profile_transitions.bzl",
+ "FDO_PROFILE_ATTR_KEY",
+)
+load(":generate_toc.bzl", "CcTocInfo", "generate_toc")
+load(":lto_transitions.bzl", "lto_deps_transition")
+load(":stl.bzl", "stl_info_from_attr")
+load(":stripped_cc_common.bzl", "CcUnstrippedInfo", "stripped_shared_library")
load(":versioned_cc_common.bzl", "versioned_shared_library")
-load("@rules_cc//examples:experimental_cc_shared_library.bzl", "cc_shared_library", _CcSharedLibraryInfo = "CcSharedLibraryInfo")
-load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain")
-
-CcTocInfo = _CcTocInfo
-CcSharedLibraryInfo = _CcSharedLibraryInfo
def cc_library_shared(
name,
+ suffix = "",
# Common arguments between shared_root and the shared library
features = [],
dynamic_deps = [],
@@ -53,53 +59,98 @@ def cc_library_shared(
implementation_deps = [],
deps = [],
whole_archive_deps = [],
+ implementation_whole_archive_deps = [],
system_dynamic_deps = None,
+ runtime_deps = [],
export_includes = [],
export_absolute_includes = [],
export_system_includes = [],
local_includes = [],
absolute_includes = [],
rtti = False,
- use_libcrt = True, # FIXME: Unused below?
stl = "",
cpp_std = "",
c_std = "",
- link_crt = True,
additional_linker_inputs = None,
# Purely _shared arguments
strip = {},
- soname = "",
# TODO(b/202299295): Handle data attribute.
- data = [],
+ data = [], # @unused
use_version_lib = False,
stubs_symbol_file = None,
- stubs_versions = [],
inject_bssl_hash = False,
- sdk_version = "",
+ sdk_version = "", # @unused
min_sdk_version = "",
+ abi_checker_enabled = None,
+ abi_checker_symbol_file = None,
+ abi_checker_exclude_symbol_versions = [],
+ abi_checker_exclude_symbol_tags = [],
+ abi_checker_check_all_apis = False,
+ abi_checker_diff_flags = [],
+ native_coverage = True,
+ tags = [],
+ fdo_profile = None,
+ tidy = None,
+ tidy_checks = None,
+ tidy_checks_as_errors = None,
+ tidy_flags = None,
+ tidy_disabled_srcs = None,
+ tidy_timeout_srcs = None,
+ tidy_gen_header_filter = None,
**kwargs):
"Bazel macro to correspond with the cc_library_shared Soong module."
- shared_root_name = name + "_root"
+ # There exist modules named 'libtest_missing_symbol' and
+ # 'libtest_missing_symbol_root'. Ensure that the target suffixes are
+ # sufficiently unique.
+ shared_root_name = name + "__internal_root"
unstripped_name = name + "_unstripped"
stripped_name = name + "_stripped"
- toc_name = name + "_toc"
if system_dynamic_deps == None:
system_dynamic_deps = system_dynamic_deps_defaults
- # Force crtbegin and crtend linking unless explicitly disabled (i.e. bionic
- # libraries do this)
- if link_crt == False:
- features = disable_crt_link(features)
-
if min_sdk_version:
- features = features + [
- "sdk_version_" + parse_sdk_version(min_sdk_version),
- "-sdk_version_default",
+ features = features + parse_sdk_version(min_sdk_version) + ["-sdk_version_default"]
+
+ if fdo_profile != None:
+ # FIXME(b/261609769): This is a temporary workaround to add link flags
+ # that require the path to the fdo profile.
+ # This workaround is error-prone because it assumes all the fdo_profile
+ # targets are created in a specific way (e.g. fdo_profile target named foo
+ # uses an afdo profile file named foo.afdo in the same folder).
+ fdo_profile_path = fdo_profile + ".afdo"
+ linkopts = linkopts + [
+ "-funique-internal-linkage-names",
+ "-fprofile-sample-accurate",
+ "-fprofile-sample-use=$(location {})".format(fdo_profile_path),
+ "-Wl,-mllvm,-no-warn-sample-unused=true",
]
+ if additional_linker_inputs != None:
+ additional_linker_inputs = additional_linker_inputs + [fdo_profile_path]
+ else:
+ additional_linker_inputs = [fdo_profile_path]
+
+ stl_info = stl_info_from_attr(stl, True)
+ linkopts = linkopts + stl_info.linkopts
+ copts = copts + stl_info.cppflags
+
+ extra_archive_deps = []
+ if not native_coverage:
+ features = features + ["-coverage"]
+ else:
+ features = features + select({
+ "//build/bazel/rules/cc:android_coverage_lib_flag": ["android_coverage_lib"],
+ "//conditions:default": [],
+ })
+
+ # TODO(b/233660582): deal with the cases where the default lib shouldn't be used
+ extra_archive_deps = select({
+ "//build/bazel/rules/cc:android_coverage_lib_flag": ["//system/extras/toolchain-extras:libprofile-clang-extras"],
+ "//conditions:default": [],
+ })
# The static library at the root of the shared library.
# This may be distinct from the static version of the library if e.g.
@@ -120,20 +171,35 @@ def cc_library_shared(
local_includes = local_includes,
absolute_includes = absolute_includes,
rtti = rtti,
- stl = stl,
+ stl = "none",
cpp_std = cpp_std,
c_std = c_std,
dynamic_deps = dynamic_deps,
- implementation_deps = implementation_deps,
- implementation_dynamic_deps = implementation_dynamic_deps,
+ implementation_deps = implementation_deps + stl_info.static_deps,
+ implementation_dynamic_deps = implementation_dynamic_deps + stl_info.shared_deps,
+ implementation_whole_archive_deps = implementation_whole_archive_deps,
system_dynamic_deps = system_dynamic_deps,
- deps = deps + whole_archive_deps,
+ deps = deps,
+ whole_archive_deps = whole_archive_deps + extra_archive_deps,
features = features,
- use_version_lib = use_version_lib,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
+ native_coverage = native_coverage,
+ tidy = tidy,
+ tidy_checks = tidy_checks,
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_flags = tidy_flags,
+ tidy_disabled_srcs = tidy_disabled_srcs,
+ tidy_timeout_srcs = tidy_timeout_srcs,
+ tidy_gen_header_filter = tidy_gen_header_filter,
)
- stl_static, stl_shared = shared_stl_deps(stl)
+ sanitizer_deps_name = name + "_sanitizer_deps"
+ sanitizer_deps(
+ name = sanitizer_deps_name,
+ dep = shared_root_name,
+ tags = ["manual"],
+ )
# implementation_deps and deps are to be linked into the shared library via
# --no-whole-archive. In order to do so, they need to be dependencies of
@@ -144,39 +210,44 @@ def cc_library_shared(
deps_stub = name + "_deps"
native.cc_library(
name = imp_deps_stub,
- deps = implementation_deps + stl_static,
+ deps = (
+ implementation_deps +
+ implementation_whole_archive_deps +
+ stl_info.static_deps +
+ implementation_dynamic_deps +
+ system_dynamic_deps +
+ stl_info.shared_deps +
+ [sanitizer_deps_name]
+ ),
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
)
native.cc_library(
name = deps_stub,
- deps = deps,
+ deps = deps + dynamic_deps,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
)
shared_dynamic_deps = add_lists_defaulting_to_none(
dynamic_deps,
system_dynamic_deps,
implementation_dynamic_deps,
- stl_shared,
+ stl_info.shared_deps,
)
- if len(soname) == 0:
- soname = name + ".so"
+ soname = name + suffix + ".so"
soname_flag = "-Wl,-soname," + soname
- cc_shared_library(
+ native.cc_shared_library(
name = unstripped_name,
user_link_flags = linkopts + [soname_flag],
- # b/184806113: Note this is a workaround so users don't have to
- # declare all transitive static deps used by this target. It'd be great
- # if a shared library could declare a transitive exported static dep
- # instead of needing to declare each target transitively.
- static_deps = ["//:__subpackages__"] + [shared_root_name, imp_deps_stub, deps_stub],
dynamic_deps = shared_dynamic_deps,
additional_linker_inputs = additional_linker_inputs,
- roots = [shared_root_name, imp_deps_stub, deps_stub] + whole_archive_deps,
+ deps = [shared_root_name, imp_deps_stub, deps_stub],
features = features,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
**kwargs
)
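
A quick illustration of the new soname computation above, using hypothetical values; the output name is always derived from the target name plus the new suffix argument.

    # Illustration only, with hypothetical values.
    name = "libfoo"
    suffix = "-v2"
    soname = name + suffix + ".so"          # "libfoo-v2.so"
    soname_flag = "-Wl,-soname," + soname   # "-Wl,-soname,libfoo-v2.so"
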
@@ -185,6 +256,7 @@ def cc_library_shared(
name = hashed_name,
src = unstripped_name,
inject_bssl_hash = inject_bssl_hash,
+ tags = ["manual"],
)
versioned_name = name + "_versioned"
@@ -192,126 +264,68 @@ def cc_library_shared(
name = versioned_name,
src = hashed_name,
stamp_build_number = use_version_lib,
+ tags = ["manual"],
)
stripped_shared_library(
name = stripped_name,
src = versioned_name,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
**strip
)
- shared_library_toc(
- name = toc_name,
- src = stripped_name,
- target_compatible_with = target_compatible_with,
+ # The logic here is based on shouldCreateSourceAbiDumpForLibrary() in sabi.go.
+ # abi_root is used to control whether abi_dump aspects should be run on the static
+ # deps, because there is no way to control the aspects directly from the rule.
+ abi_root = shared_root_name
+
+ # explicitly disabled
+ if abi_checker_enabled == False:
+ abi_root = None
+ elif abi_checker_enabled == True or stubs_symbol_file:
+ # The logic comes from here:
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/library.go;l=2288;drc=73feba33308bf9432aea43e069ed24a2f0312f1b
+ if not abi_checker_symbol_file and stubs_symbol_file:
+ abi_checker_symbol_file = stubs_symbol_file
+ else:
+ abi_root = None
+
+ abi_checker_explicitly_disabled = abi_checker_enabled == False
+
+ abi_dump_name = name + "_abi_dump"
+ abi_dump(
+ name = abi_dump_name,
+ shared = stripped_name,
+ root = abi_root,
+ soname = soname,
+ has_stubs = stubs_symbol_file != None,
+ enabled = abi_checker_enabled,
+ explicitly_disabled = abi_checker_explicitly_disabled,
+ symbol_file = abi_checker_symbol_file,
+ exclude_symbol_versions = abi_checker_exclude_symbol_versions,
+ exclude_symbol_tags = abi_checker_exclude_symbol_tags,
+ check_all_apis = abi_checker_check_all_apis,
+ diff_flags = abi_checker_diff_flags,
+ tags = ["manual"],
)
- # Emit the stub version of this library (e.g. for libraries that are
- # provided by the NDK)
- stub_shared_libraries = []
- if stubs_symbol_file and len(stubs_versions) > 0:
- # TODO(b/193663198): This unconditionally creates stubs for every version, but
- # that's not always true depending on whether this module is available
- # on the host, ramdisk, vendor ramdisk. We currently don't have
- # information about the image variant yet, so we'll create stub targets
- # for all shared libraries with the stubs property for now.
- #
- # See: https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/library.go;l=2316-2377;drc=3d3b35c94ed2a3432b2e5e7e969a3a788a7a80b5
- for version in stubs_versions:
- stubs_library_name = "_".join([name, version, "stubs"])
- cc_stub_library_shared(
- name = stubs_library_name,
- stubs_symbol_file = stubs_symbol_file,
- version = version,
- target_compatible_with = target_compatible_with,
- features = features,
- )
- stub_shared_libraries.append(stubs_library_name)
-
_cc_library_shared_proxy(
name = name,
shared = stripped_name,
- root = shared_root_name,
- table_of_contents = toc_name,
+ shared_debuginfo = unstripped_name,
+ deps = [shared_root_name],
+ features = features,
output_file = soname,
target_compatible_with = target_compatible_with,
- stub_shared_libraries = stub_shared_libraries,
- )
-
-# cc_stub_library_shared creates a cc_library_shared target, but using stub C source files generated
-# from a library's .map.txt files and ndkstubgen. The top level target returns the same
-# providers as a cc_library_shared, with the addition of a CcStubInfo
-# containing metadata files and versions of the stub library.
-def cc_stub_library_shared(name, stubs_symbol_file, version, target_compatible_with, features):
- # Call ndkstubgen to generate the stub.c source file from a .map.txt file. These
- # are accessible in the CcStubInfo provider of this target.
- cc_stub_gen(
- name = name + "_files",
- symbol_file = stubs_symbol_file,
- version = version,
- target_compatible_with = target_compatible_with,
- )
-
- # The static library at the root of the stub shared library.
- cc_library_static(
- name = name + "_root",
- srcs_c = [name + "_files"], # compile the stub.c file
- features = disable_crt_link(features) + \
- [
- # Enable the stub library compile flags
- "stub_library",
- # Disable all include-related features to avoid including any headers
- # that may cause conflicting type errors with the symbols in the
- # generated stubs source code.
- # e.g.
- # double acos(double); // in header
- # void acos() {} // in the generated source code
- # See https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/library.go;l=942-946;drc=d8a72d7dc91b2122b7b10b47b80cf2f7c65f9049
- "-toolchain_include_directories",
- "-includes",
- "-include_paths",
- ],
- target_compatible_with = target_compatible_with,
- stl = "none",
- system_dynamic_deps = [],
+ has_stubs = stubs_symbol_file != None,
+ runtime_deps = runtime_deps,
+ abi_dump = abi_dump_name,
+ fdo_profile = fdo_profile,
+ tags = tags,
)
- # Create a .so for the stub library. This library is self contained, has
- # no deps, and doesn't link against crt.
- cc_shared_library(
- name = name + "_so",
- roots = [name + "_root"],
- features = disable_crt_link(features),
- target_compatible_with = target_compatible_with,
- )
-
- # Create a target with CcSharedLibraryInfo and CcStubInfo providers.
- _cc_stub_library_shared(
- name = name,
- stub_target = name + "_files",
- library_target = name + "_so",
- )
-
-def _cc_stub_library_shared_impl(ctx):
- return [
- ctx.attr.library_target[DefaultInfo],
- ctx.attr.library_target[CcSharedLibraryInfo],
- ctx.attr.stub_target[CcStubInfo],
- ]
-
-_cc_stub_library_shared = rule(
- implementation = _cc_stub_library_shared_impl,
- doc = "Top level rule to merge CcStubInfo and CcSharedLibraryInfo into a single target",
- attrs = {
- "stub_target": attr.label(mandatory = True),
- "library_target": attr.label(mandatory = True),
- },
-)
-
-def _swap_shared_linker_input(ctx, shared_info, new_output):
- old_library_to_link = shared_info.linker_input.libraries[0]
-
+def _create_dynamic_library_linker_input_for_file(ctx, shared_info, output):
cc_toolchain = find_cpp_toolchain(ctx)
feature_configuration = cc_common.configure_features(
ctx = ctx,
@@ -320,7 +334,7 @@ def _swap_shared_linker_input(ctx, shared_info, new_output):
new_library_to_link = cc_common.create_library_to_link(
actions = ctx.actions,
- dynamic_library = new_output,
+ dynamic_library = output,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
)
@@ -329,10 +343,25 @@ def _swap_shared_linker_input(ctx, shared_info, new_output):
owner = shared_info.linker_input.owner,
libraries = depset([new_library_to_link]),
)
+ return new_linker_input
+
+def _correct_cc_shared_library_linking(ctx, shared_info, new_output, static_root):
+ # we may have done some post-processing of the shared library
+ # replace the linker_input that has not been post-processed with the
+ # library that has been post-processed
+ new_linker_input = _create_dynamic_library_linker_input_for_file(ctx, shared_info, new_output)
+
+ # only export the static internal root, we include other libraries as roots
+ # that should be linked as alwayslink; however, if they remain as exports,
+ # they will be linked dynamically, not statically when they should be
+ static_root_label = str(static_root.label)
+ if static_root_label not in shared_info.exports:
+ fail("Expected %s in exports %s" % (static_root_label, shared_info.exports))
+ exports = [static_root_label]
return CcSharedLibraryInfo(
dynamic_deps = shared_info.dynamic_deps,
- exports = shared_info.exports,
+ exports = exports,
link_once_static_libs = shared_info.link_once_static_libs,
linker_input = new_linker_input,
preloaded_deps = shared_info.preloaded_deps,
@@ -340,61 +369,146 @@ def _swap_shared_linker_input(ctx, shared_info, new_output):
CcStubLibrariesInfo = provider(
fields = {
- "infos": "A list of dict, where each dict contains the CcStubInfo, CcSharedLibraryInfo and DefaultInfo of a version of a stub library.",
+ "has_stubs": "If the shared library has stubs",
},
)
-def _cc_library_shared_proxy_impl(ctx):
- root_files = ctx.attr.root[DefaultInfo].files.to_list()
- shared_files = ctx.attr.shared[DefaultInfo].files.to_list()
+# A provider to propagate shared library output artifacts, primarily useful
+# for root level querying in Soong-Bazel mixed builds.
+# It would be preferable to reuse the existing native
+# CcSharedLibraryInfo provider, but that provider requires that shared library
+# artifacts are wrapped in a linker input. Artifacts retrievable from this linker
+# input are symlinks to the original artifacts, which is problematic when
+# other dependencies expect a real file.
+CcSharedLibraryOutputInfo = provider(
+ fields = {
+ "output_file": "A single .so file, produced by this target.",
+ },
+)
- if len(shared_files) != 1:
- fail("Expected only one shared library file")
+def _cc_library_shared_proxy_impl(ctx):
+ # Using a "deps" label_list instead of a single mandatory label attribute
+ # is a hack to support propagation of the native cc_shared_library's
+ # graph_aspect. The aspect will only be applied and propagated along
+ # a label_list attribute named "deps".
+ if len(ctx.attr.deps) != 1:
+ fail("Exactly one 'deps' must be specified for cc_library_shared_proxy")
+ root_files = ctx.attr.deps[0][DefaultInfo].files.to_list()
+ shared_files = ctx.attr.shared[0][DefaultInfo].files.to_list()
+ shared_debuginfo = ctx.attr.shared_debuginfo[0][DefaultInfo].files.to_list()
+ if len(shared_files) != 1 or len(shared_debuginfo) != 1:
+ fail("Expected only one shared library file and one debuginfo file for it")
shared_lib = shared_files[0]
-
- ctx.actions.symlink(
- output = ctx.outputs.output_file,
- target_file = shared_lib,
+ abi_diff_files = ctx.attr.abi_dump[AbiDiffInfo].diff_files.to_list()
+
+ # Copy the output instead of symlinking. This is because this output
+ # can be directly installed into a system image; this installation treats
+ # symlinks differently from real files (symlinks will be preserved relative
+ # to the image root).
+ ctx.actions.run_shell(
+ # We need to add the abi dump files to the inputs of this copy action even
+ # though they are not used, otherwise not all the abi dump files will be
+ # created. For example, for b build
+ # packages/modules/adb/pairing_connection:libadb_pairing_server, only
+ # libadb_pairing_server.so.lsdump will be created, libadb_pairing_auth.so.lsdump
+ # and libadb_pairing_connection.so.lsdump will not be. The reason is that
+ # even though libadb_pairing_server depends on libadb_pairing_auth and
+ # libadb_pairing_connection, the abi dump files are not explicitly used
+ # by libadb_pairing_server, so bazel won't bother generating them.
+ inputs = depset(direct = [shared_lib] + abi_diff_files),
+ outputs = [ctx.outputs.output_file],
+ command = "cp -f %s %s" % (shared_lib.path, ctx.outputs.output_file.path),
+ mnemonic = "CopyFile",
+ progress_message = "Copying files",
+ use_default_shell_env = True,
)
- files = root_files + [ctx.outputs.output_file, ctx.files.table_of_contents[0]]
+ toc_info = generate_toc(ctx, ctx.attr.name, ctx.outputs.output_file)
- stub_library_infos = []
- for stub_library in ctx.attr.stub_shared_libraries:
- providers = {
- "CcStubInfo": stub_library[CcStubInfo],
- "CcSharedLibraryInfo": stub_library[CcSharedLibraryInfo],
- "DefaultInfo": stub_library[DefaultInfo],
- }
- stub_library_infos.append(providers)
+ files = root_files + [ctx.outputs.output_file, toc_info.toc] + abi_diff_files
return [
DefaultInfo(
files = depset(direct = files),
runfiles = ctx.runfiles(files = [ctx.outputs.output_file]),
),
- _swap_shared_linker_input(ctx, ctx.attr.shared[CcSharedLibraryInfo], ctx.outputs.output_file),
- ctx.attr.table_of_contents[CcTocInfo],
- # Propagate only includes from the root. Do not re-propagate linker inputs.
- CcInfo(compilation_context = ctx.attr.root[CcInfo].compilation_context),
- CcStubLibrariesInfo(infos = stub_library_infos),
+ _correct_cc_shared_library_linking(ctx, ctx.attr.shared[0][CcSharedLibraryInfo], ctx.outputs.output_file, ctx.attr.deps[0]),
+ toc_info,
+ # The _only_ linker_input is the statically linked root itself. We need to propagate this
+ # as cc_shared_library identifies which libraries can be linked dynamically based on the
+ # linker_inputs of the roots
+ ctx.attr.deps[0][CcInfo],
+ ctx.attr.deps[0][CcAndroidMkInfo],
+ CcStubLibrariesInfo(has_stubs = ctx.attr.has_stubs),
+ ctx.attr.shared[0][OutputGroupInfo],
+ CcSharedLibraryOutputInfo(output_file = ctx.outputs.output_file),
+ CcUnstrippedInfo(unstripped = shared_debuginfo[0]),
+ ctx.attr.abi_dump[AbiDiffInfo],
+ collect_deps_clang_tidy_info(ctx),
]
_cc_library_shared_proxy = rule(
implementation = _cc_library_shared_proxy_impl,
+ # Incoming transition to override outgoing transition from rdep
+ cfg = lto_and_fdo_profile_incoming_transition,
attrs = {
- "shared": attr.label(mandatory = True, providers = [CcSharedLibraryInfo]),
- "root": attr.label(mandatory = True, providers = [CcInfo]),
- "output_file": attr.output(mandatory = True),
- "table_of_contents": attr.label(
+ FDO_PROFILE_ATTR_KEY: attr.label(),
+ "shared": attr.label(
mandatory = True,
- # TODO(b/217908237): reenable allow_single_file
- # allow_single_file = True,
- providers = [CcTocInfo],
+ providers = [CcSharedLibraryInfo],
+ cfg = lto_deps_transition,
+ ),
+ "shared_debuginfo": attr.label(
+ mandatory = True,
+ cfg = lto_deps_transition,
+ ),
+ # "deps" should be a single element: the root target of the shared library.
+ # See _cc_library_shared_proxy_impl comment for explanation.
+ "deps": attr.label_list(
+ mandatory = True,
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ ),
+ "output_file": attr.output(mandatory = True),
+ "has_stubs": attr.bool(default = False),
+ "runtime_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "Deps that should be installed along with this target. Read by the apex cc aspect.",
+ ),
+ "abi_dump": attr.label(providers = [AbiDiffInfo]),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ "androidmk_static_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the whole archive deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_STATIC_LIBRARIES.",
+ ),
+ "androidmk_whole_archive_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the whole archive deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_WHOLE_STATIC_LIBRARIES.",
+ ),
+ "androidmk_dynamic_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the dynamic deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_SHARED_LIBRARIES.",
+ ),
+ "_toc_script": attr.label(
+ cfg = "exec",
+ executable = True,
+ allow_single_file = True,
+ default = "//build/soong/scripts:toc.sh",
+ ),
+ "_readelf": attr.label(
+ cfg = "exec",
+ executable = True,
+ allow_single_file = True,
+ default = "//prebuilts/clang/host/linux-x86:llvm-readelf",
),
- "stub_shared_libraries": attr.label_list(providers = [CcStubInfo, CcSharedLibraryInfo]),
},
+ provides = [CcAndroidMkInfo, CcInfo, CcTocInfo],
fragments = ["cpp"],
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)
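
A minimal consumer sketch of the providers the proxy rule advertises; the rule and attribute names are hypothetical, and CcSharedLibraryOutputInfo would need to be loaded from cc_library_shared.bzl if this lived in another file.

    def _shared_lib_output_impl(ctx):
        # Read the copied (non-symlinked) .so produced by the proxy rule.
        out = ctx.attr.lib[CcSharedLibraryOutputInfo].output_file
        return [DefaultInfo(files = depset([out]))]

    shared_lib_output = rule(
        implementation = _shared_lib_output_impl,
        attrs = {"lib": attr.label(providers = [CcSharedLibraryOutputInfo])},
    )
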
@@ -407,7 +521,6 @@ def _bssl_hash_injection_impl(ctx):
if ctx.attr.inject_bssl_hash:
hashed_file = ctx.actions.declare_file("lib" + ctx.attr.name + ".so")
args = ctx.actions.args()
- args.add_all(["-sha256"])
args.add_all(["-in-object", ctx.files.src[0]])
args.add_all(["-o", hashed_file])
@@ -423,6 +536,7 @@ def _bssl_hash_injection_impl(ctx):
return [
DefaultInfo(files = depset([hashed_file])),
ctx.attr.src[CcSharedLibraryInfo],
+ ctx.attr.src[OutputGroupInfo],
]
_bssl_hash_injection = rule(
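
A hedged BUILD-file sketch of the macro with the new fdo_profile argument, following the naming convention the FIXME above assumes (a profile file named <target>.afdo in the same package as the fdo_profile target); all names are illustrative.

    load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")

    fdo_profile(
        name = "libexample_fdo",
        profile = "libexample_fdo.afdo",    # expected next to the target, per the workaround
    )

    cc_library_shared(
        name = "libexample",                # hypothetical module
        srcs = ["example.cpp"],
        fdo_profile = ":libexample_fdo",
    )
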
diff --git a/rules/cc/cc_library_shared_test.bzl b/rules/cc/cc_library_shared_test.bzl
new file mode 100644
index 00000000..041c3dfb
--- /dev/null
+++ b/rules/cc/cc_library_shared_test.bzl
@@ -0,0 +1,785 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/cc:cc_stub_library.bzl", "cc_stub_suite")
+load(
+ "//build/bazel/rules/cc/testing:transitions.bzl",
+ "ActionArgsInfo",
+ "compile_action_argv_aspect_generator",
+)
+load("//build/bazel/rules/test_common:flags.bzl", "action_flags_present_only_for_mnemonic_test")
+load("//build/bazel/rules/test_common:paths.bzl", "get_package_dir_based_path")
+load(":cc_library_common_test.bzl", "target_provides_androidmk_info_test")
+
+def _cc_library_shared_suffix_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ info = target[DefaultInfo]
+ suffix = ctx.attr.suffix
+
+ # NB: There may be more than 1 output file (if e.g. including a TOC)
+ outputs = [so.path for so in info.files.to_list() if so.path.endswith(".so")]
+ asserts.true(
+ env,
+ len(outputs) == 1,
+ "Expected only 1 output file; got %s" % outputs,
+ )
+ out = outputs[0]
+ suffix_ = suffix + ".so"
+ asserts.true(
+ env,
+ out.endswith(suffix_),
+ "Expected output filename to end in `%s`; it was instead %s" % (suffix_, out),
+ )
+
+ return analysistest.end(env)
+
+cc_library_shared_suffix_test = analysistest.make(
+ _cc_library_shared_suffix_test_impl,
+ attrs = {"suffix": attr.string()},
+)
+
+def _cc_library_shared_suffix():
+ name = "cc_library_shared_suffix"
+ test_name = name + "_test"
+ suffix = "-suf"
+
+ cc_library_shared(
+ name,
+ srcs = ["foo.cc"],
+ tags = ["manual"],
+ suffix = suffix,
+ )
+ cc_library_shared_suffix_test(
+ name = test_name,
+ target_under_test = name,
+ suffix = suffix,
+ )
+ return test_name
+
+def _cc_library_shared_empty_suffix():
+ name = "cc_library_shared_empty_suffix"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name,
+ srcs = ["foo.cc"],
+ tags = ["manual"],
+ )
+ cc_library_shared_suffix_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _cc_library_shared_propagating_compilation_context_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ cc_info = target[CcInfo]
+ compilation_context = cc_info.compilation_context
+
+ header_paths = [f.path for f in compilation_context.headers.to_list()]
+ for hdr in ctx.files.expected_hdrs:
+ asserts.true(
+ env,
+ hdr.path in header_paths,
+ "Did not find {hdr} in includes: {hdrs}.".format(hdr = hdr, hdrs = compilation_context.headers),
+ )
+
+ for hdr in ctx.files.expected_absent_hdrs:
+ asserts.true(
+ env,
+ hdr not in header_paths,
+ "Found {hdr} in includes: {hdrs}, should not be present.".format(hdr = hdr, hdrs = compilation_context.headers),
+ )
+
+ for include in ctx.attr.expected_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include in compilation_context.includes.to_list(),
+ "Did not find {include} in includes: {includes}.".format(include = include, includes = compilation_context.includes),
+ )
+
+ for include in ctx.attr.expected_absent_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include not in compilation_context.includes.to_list(),
+ "Found {include} in includes: {includes}, was expected to be absent".format(include = include, includes = compilation_context.includes),
+ )
+
+ for include in ctx.attr.expected_system_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include in compilation_context.system_includes.to_list(),
+ "Did not find {include} in system includes: {includes}.".format(include = include, includes = compilation_context.system_includes),
+ )
+
+ for include in ctx.attr.expected_absent_system_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include not in compilation_context.system_includes.to_list(),
+ "Found {include} in system includes: {includes}, was expected to be absent".format(include = include, includes = compilation_context.system_includes),
+ )
+
+ return analysistest.end(env)
+
+_cc_library_shared_propagating_compilation_context_test = analysistest.make(
+ _cc_library_shared_propagating_compilation_context_test_impl,
+ attrs = {
+ "expected_hdrs": attr.label_list(),
+ "expected_absent_hdrs": attr.label_list(),
+ "expected_includes": attr.string_list(),
+ "expected_absent_includes": attr.string_list(),
+ "expected_system_includes": attr.string_list(),
+ "expected_absent_system_includes": attr.string_list(),
+ },
+)
+
+def _cc_library_shared_propagates_deps():
+ name = "_cc_library_shared_propagates_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":cc_library_shared_hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_propagates_whole_archive_deps():
+ name = "_cc_library_shared_propagates_whole_archive_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":cc_library_shared_hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_propagates_dynamic_deps():
+ name = "_cc_library_shared_propagates_dynamic_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ dynamic_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":cc_library_shared_hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_does_not_propagate_implementation_deps():
+ name = "_cc_library_shared_does_not_propagate_implementation_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ implementation_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":cc_library_shared_hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_does_not_propagate_implementation_whole_archive_deps():
+ name = "_cc_library_shared_does_not_propagate_implementation_whole_archive_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ implementation_whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":cc_library_shared_hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_does_not_propagate_implementation_dynamic_deps():
+ name = "_cc_library_shared_does_not_propagate_implementation_dynamic_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = dep_name,
+ hdrs = [":cc_library_shared_hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ implementation_dynamic_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_shared_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":cc_library_shared_hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_shared_propagating_fdo_profile_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ argv_map = target_under_test[ActionArgsInfo].argv_map
+ for label in ctx.attr.deps_labels_to_check_fdo_profile:
+ asserts.true(
+ env,
+ label in argv_map,
+ "can't find {} in argv map".format(label),
+ )
+ argv = argv_map[label]
+ asserts.true(
+ env,
+ _has_fdo_profile(argv, ctx.attr.fdo_profile_path_basename),
+ "can't find {} in compile action of {}".format(
+ ctx.attr.fdo_profile_path_basename,
+ label,
+ ),
+ )
+ for label in ctx.attr.deps_labels_to_check_no_fdo_profile:
+ asserts.true(
+ env,
+ label in argv_map,
+ "can't find {} in argv_map".format(label),
+ )
+ argv = argv_map[label]
+ asserts.true(
+ env,
+ not _has_fdo_profile(argv, ctx.attr.fdo_profile_path_basename),
+ "{} should not have {} in compile action".format(
+ ctx.attr.fdo_profile_path_basename,
+ label,
+ ),
+ )
+
+ return analysistest.end(env)
+
+_compile_action_argv_aspect = compile_action_argv_aspect_generator({
+ "_cc_library_combiner": ["deps", "roots", "includes"],
+ "_cc_includes": ["deps"],
+ "_cc_library_shared_proxy": ["deps"],
+})
+
+cc_library_shared_propagating_fdo_profile_test = analysistest.make(
+ _cc_library_shared_propagating_fdo_profile_test_impl,
+ attrs = {
+ # FdoProfileInfo isn't exposed to Starlark so we need to test against
+ # the path basename directly
+ "fdo_profile_path_basename": attr.string(),
+ # This has to be a string_list() instead of label_list(). If the deps
+ # are given as labels, the deps are analyzed because the transition is attached
+ "deps_labels_to_check_fdo_profile": attr.string_list(),
+ "deps_labels_to_check_no_fdo_profile": attr.string_list(),
+ },
+ # We need to use an aspect to examine the actions of the dependencies of the
+ # target under test as a result of the transition; checking the dependencies
+ # directly by name would give the info from before the transition takes effect.
+ extra_target_under_test_aspects = [_compile_action_argv_aspect],
+)
+
+# _has_fdo_profile checks whether afdo-specific flag is present in actions.argv
+def _has_fdo_profile(argv, fdo_profile_path_basename):
+ for arg in argv:
+ if fdo_profile_path_basename in arg and "-fprofile-sample-use" in arg:
+ return True
+
+ return False
+
+def _cc_libary_shared_propagate_fdo_profile_to_whole_archive_deps():
+ name = "_cc_libary_shared_propagate_fdo_profile_to_whole_archive_deps"
+ fdo_profile_name = name + "_fdo_profile"
+ dep_name = name + "_dep"
+ transitive_dep_name = name + "_transitive_dep"
+ unexported_dep_name = name + "_exported_dep"
+ transitive_unexported_dep_name = name + "_transitive_unexported_dep"
+ test_name = name + "_test"
+
+ native.fdo_profile(
+ name = fdo_profile_name,
+ profile = fdo_profile_name + ".afdo",
+ )
+
+ cc_library_static(
+ name = transitive_dep_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = transitive_unexported_dep_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = dep_name,
+ whole_archive_deps = [transitive_dep_name],
+ implementation_whole_archive_deps = [transitive_unexported_dep_name],
+ srcs = ["foo.cpp", "bar.cpp"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = unexported_dep_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = name,
+ whole_archive_deps = [dep_name],
+ implementation_whole_archive_deps = [unexported_dep_name],
+ fdo_profile = ":" + fdo_profile_name,
+ tags = ["manual"],
+ )
+
+ cc_library_shared_propagating_fdo_profile_test(
+ name = test_name,
+ target_under_test = name,
+ deps_labels_to_check_fdo_profile = [
+ dep_name + "_cpp",
+ transitive_dep_name + "_cpp",
+ unexported_dep_name + "_cpp",
+ transitive_unexported_dep_name + "_cpp",
+ ],
+ fdo_profile_path_basename = fdo_profile_name + ".afdo",
+ )
+
+ return test_name
+
+def _cc_library_shared_does_not_propagate_fdo_profile_to_dynamic_deps():
+ name = "_cc_library_shared_does_not_propagate_fdo_profile_to_dynamic_deps"
+ fdo_profile_name = name + "_fdo_profile"
+ dep_name = name + "_dep"
+ transitive_shared_dep_name = name + "_transitive_shared_dep"
+ unexported_transitive_shared_dep_name = name + "_unexported_transitive_shared_dep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = transitive_shared_dep_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = unexported_transitive_shared_dep_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = dep_name,
+ srcs = ["foo.cpp"],
+ dynamic_deps = [transitive_shared_dep_name],
+ implementation_dynamic_deps = [unexported_transitive_shared_dep_name],
+ tags = ["manual"],
+ )
+ native.fdo_profile(
+ name = fdo_profile_name,
+ profile = fdo_profile_name + ".afdo",
+ )
+ cc_library_shared(
+ name = name,
+ whole_archive_deps = [dep_name],
+ fdo_profile = fdo_profile_name,
+ stl = "",
+ tags = ["manual"],
+ )
+
+ cc_library_shared_propagating_fdo_profile_test(
+ name = test_name,
+ target_under_test = name,
+ deps_labels_to_check_fdo_profile = [
+ dep_name + "_cpp",
+ ],
+ # make sure dynamic deps don't build with afdo profiles from rdeps
+ deps_labels_to_check_no_fdo_profile = [
+ transitive_shared_dep_name + "__internal_root_cpp",
+ unexported_transitive_shared_dep_name + "__internal_root_cpp",
+ ],
+ fdo_profile_path_basename = fdo_profile_name + ".afdo",
+ )
+
+ return test_name
+
+def _fdo_profile_transition_correctly_set_and_unset_fdo_profile():
+ name = "_fdo_profile_transition_set_and_unset_fdo_profile_correctly"
+ fdo_profile_name = name + "_fdo_profile"
+ dep_with_fdo_profile = name + "_dep_with_fdo_profile"
+ transitive_dep_without_fdo_profile = name + "_transitive_dep_without_fdo_profile"
+ test_name = name + "_test"
+
+ native.fdo_profile(
+ name = fdo_profile_name,
+ profile = fdo_profile_name + ".afdo",
+ )
+
+ cc_library_shared(
+ name = name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ dynamic_deps = [dep_with_fdo_profile],
+ )
+
+ cc_library_shared(
+ name = dep_with_fdo_profile,
+ fdo_profile = fdo_profile_name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ dynamic_deps = [transitive_dep_without_fdo_profile],
+ )
+
+ cc_library_shared(
+ name = transitive_dep_without_fdo_profile,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared_propagating_fdo_profile_test(
+ name = test_name,
+ target_under_test = name,
+ deps_labels_to_check_fdo_profile = [
+ dep_with_fdo_profile + "__internal_root_cpp",
+ ],
+ # make sure dynamic deps don't build with afdo profiles from rdeps
+ deps_labels_to_check_no_fdo_profile = [
+ name + "__internal_root_cpp",
+ transitive_dep_without_fdo_profile + "__internal_root_cpp",
+ ],
+ fdo_profile_path_basename = fdo_profile_name + ".afdo",
+ )
+
+ return test_name
+
+def _cc_library_link_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+
+ for action in target.actions:
+ if action.mnemonic == "CppLink":
+ for flag in ctx.attr.expected_link_flags:
+ if flag not in action.argv:
+ fail("{} is not in list of flags for linking {}".format(flag, action.argv))
+
+ return analysistest.end(env)
+
+cc_library_link_flags_test = analysistest.make(
+ _cc_library_link_flags_test_impl,
+ attrs = {
+ "expected_link_flags": attr.string_list(),
+ },
+)
+
+def _cc_library_with_fdo_profile_link_flags():
+ name = "_cc_library_with_fdo_profile_link_flags"
+ test_name = name + "_test"
+ cc_library_shared(
+ name = name,
+ fdo_profile = name + "_fdo_profile",
+ tags = ["manual"],
+ )
+ cc_library_link_flags_test(
+ name = test_name,
+ target_under_test = name + "_unstripped",
+ expected_link_flags = [
+ "-funique-internal-linkage-names",
+ "-fprofile-sample-accurate",
+ "-fprofile-sample-use=build/bazel/rules/cc/_cc_library_with_fdo_profile_link_flags_fdo_profile.afdo",
+ "-Wl,-mllvm,-no-warn-sample-unused=true",
+ ],
+ )
+ return test_name
+
+def _cc_library_disable_fdo_optimization_if_coverage_is_enabled_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+
+ for action in target.actions:
+ if action.mnemonic == "CppCompile":
+ for arg in action.argv:
+ if "-fprofile-sample-use" in arg:
+ fail("fdo optimization can not be enabled when coverage is enabled")
+
+ return analysistest.end(env)
+
+cc_library_disable_fdo_optimization_if_coverage_is_enabled_test = analysistest.make(
+ _cc_library_disable_fdo_optimization_if_coverage_is_enabled_impl,
+ config_settings = {
+ "//command_line_option:collect_code_coverage": True,
+ },
+)
+
+def _cc_library_disable_fdo_optimization_if_coverage_is_enabled_test():
+ name = "_cc_library_disable_fdo_optimization_if_coverage_is_enabled_test"
+ test_name = name + "_test"
+ cc_library_shared(
+ name = name,
+ fdo_profile = name + "_fdo_profile",
+ srcs = ["foo.cpp"],
+ # Coverage adds an extra lib to every shared lib; avoid that here by
+ # clearing system_dynamic_deps and stl.
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ )
+ cc_library_disable_fdo_optimization_if_coverage_is_enabled_test(
+ name = test_name,
+ target_under_test = name + "__internal_root_cpp",
+ )
+ return test_name
+
+def _cc_library_set_defines_for_stubs():
+ name = "cc_library_set_defines_for_stubs"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = name + "_libfoo",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libfoo.map.txt",
+ )
+
+ cc_stub_suite(
+ name = name + "_libfoo_stub_libs",
+ soname = name + "_libfoo.so",
+ source_library_label = ":" + name + "_libfoo",
+ symbol_file = name + "_libfoo.map.txt",
+ versions = ["30", "40"],
+ )
+
+ cc_library_shared(
+ name = name + "_libbar",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libbar.map.txt",
+ )
+
+ cc_stub_suite(
+ name = name + "_libbar_stub_libs",
+ soname = name + "_libbar.so",
+ source_library_label = ":" + name + "_libbar",
+ symbol_file = name + "_libbar.map.txt",
+ versions = ["current"],
+ )
+
+ cc_library_shared(
+ name = name + "_libbaz",
+ system_dynamic_deps = [],
+ stl = "none",
+ tags = ["manual"],
+ stubs_symbol_file = name + "_libbaz.map.txt",
+ )
+
+ cc_stub_suite(
+ name = name + "_libbaz_stub_libs",
+ soname = name + "_libbaz.so",
+ source_library_label = ":" + name + "_libbaz",
+ symbol_file = name + "_libbaz.map.txt",
+ versions = ["30"],
+ )
+
+ cc_library_shared(
+ name = name + "_lib_with_stub_deps",
+ srcs = ["foo.cpp"],
+ implementation_dynamic_deps = [
+ name + "_libfoo_stub_libs_current",
+ name + "_libbar_stub_libs_current",
+ name + "_libbaz_stub_libs-30", # depend on an old version explicitly
+ ],
+ tags = ["manual"],
+ )
+
+ action_flags_present_only_for_mnemonic_test(
+ name = test_name,
+ target_under_test = name + "_lib_with_stub_deps__internal_root_cpp",
+ mnemonics = ["CppCompile"],
+ expected_flags = [
+ "-D__CC_LIBRARY_SET_DEFINES_FOR_STUBS_LIBFOO_API__=10000",
+ "-D__CC_LIBRARY_SET_DEFINES_FOR_STUBS_LIBBAR_API__=10000",
+ "-D__CC_LIBRARY_SET_DEFINES_FOR_STUBS_LIBBAZ_API__=30",
+ ],
+ )
+ return test_name
+
+def _cc_library_shared_provides_androidmk_info():
+ name = "cc_library_shared_provides_androidmk_info"
+ dep_name = name + "_static_dep"
+ whole_archive_dep_name = name + "_whole_archive_dep"
+ dynamic_dep_name = name + "_dynamic_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = whole_archive_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = dynamic_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = name,
+ srcs = ["foo.cc"],
+ deps = [dep_name],
+ whole_archive_deps = [whole_archive_dep_name],
+ dynamic_deps = [dynamic_dep_name],
+ tags = ["manual"],
+ )
+ android_test_name = test_name + "_android"
+ linux_test_name = test_name + "_linux"
+ target_provides_androidmk_info_test(
+ name = android_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libc++demangle"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++", "libc", "libdl", "libm"],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+ target_provides_androidmk_info_test(
+ name = linux_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++"],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return [
+ android_test_name,
+ linux_test_name,
+ ]
+
+def cc_library_shared_test_suite(name):
+ native.genrule(name = "cc_library_shared_hdr", cmd = "null", outs = ["cc_shared_f.h"], tags = ["manual"])
+
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_library_shared_suffix(),
+ _cc_library_shared_empty_suffix(),
+ _cc_library_shared_propagates_deps(),
+ _cc_library_shared_propagates_whole_archive_deps(),
+ _cc_library_shared_propagates_dynamic_deps(),
+ _cc_library_shared_does_not_propagate_implementation_deps(),
+ _cc_library_shared_does_not_propagate_implementation_whole_archive_deps(),
+ _cc_library_shared_does_not_propagate_implementation_dynamic_deps(),
+ _cc_libary_shared_propagate_fdo_profile_to_whole_archive_deps(),
+ _cc_library_shared_does_not_propagate_fdo_profile_to_dynamic_deps(),
+ _fdo_profile_transition_correctly_set_and_unset_fdo_profile(),
+ _cc_library_with_fdo_profile_link_flags(),
+ _cc_library_disable_fdo_optimization_if_coverage_is_enabled_test(),
+ _cc_library_set_defines_for_stubs(),
+ ] + _cc_library_shared_provides_androidmk_info(),
+ )
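
For orientation, the FDO checks registered above (cc_library_shared_propagating_fdo_profile_test and friends) follow the Skylib analysistest pattern used throughout these test files: an analysis-time rule walks the actions of the target under test and asserts on their command lines. The sketch below is illustrative only; the rule and attribute names are made up and are not the actual implementation loaded by this file.

    load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")

    def _uses_afdo_profile_test_impl(ctx):
        env = analysistest.begin(ctx)
        target = analysistest.target_under_test(env)

        # Pass when at least one compile action references the expected .afdo profile.
        found = False
        for action in target.actions:
            if action.mnemonic != "CppCompile":
                continue
            for arg in action.argv:
                if arg.startswith("-fprofile-sample-use=") and arg.endswith(ctx.attr.expected_profile_basename):
                    found = True
        asserts.true(env, found, "no CppCompile action used %s" % ctx.attr.expected_profile_basename)
        return analysistest.end(env)

    uses_afdo_profile_test = analysistest.make(
        _uses_afdo_profile_test_impl,
        attrs = {"expected_profile_basename": attr.string(mandatory = True)},
    )

The helper macros in this file then instantiate the libraries under test (tagged "manual" so they only build when the test runs) and return the test target names for the test_suite.
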
diff --git a/rules/cc/cc_library_static.bzl b/rules/cc/cc_library_static.bzl
index f994490c..757f943e 100644
--- a/rules/cc/cc_library_static.bzl
+++ b/rules/cc/cc_library_static.bzl
@@ -1,31 +1,46 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES")
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load("//build/bazel/rules:common.bzl", "get_dep_targets")
load(
":cc_library_common.bzl",
+ "CPP_EXTENSIONS",
+ "C_EXTENSIONS",
+ "CcAndroidMkInfo",
+ "check_absolute_include_dirs_disabled",
+ "create_cc_androidmk_provider",
"create_ccinfo_for_includes",
+ "get_non_header_srcs",
+ "get_sanitizer_lib_info",
"is_external_directory",
"parse_sdk_version",
"system_dynamic_deps_defaults",
)
-load(":stl.bzl", "static_stl_deps")
-load("@bazel_skylib//lib:collections.bzl", "collections")
-load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain")
-load("@rules_cc//examples:experimental_cc_shared_library.bzl", "CcSharedLibraryInfo")
-load("//build/bazel/product_variables:constants.bzl", "constants")
+load(":clang_tidy.bzl", "ClangTidyInfo", "clang_tidy_for_dir", "generate_clang_tidy_actions")
+load(":lto_transitions.bzl", "lto_deps_transition")
+load(":stl.bzl", "stl_info_from_attr")
+
+_ALLOWED_MANUAL_INTERFACE_PATHS = [
+ "vendor/",
+ "hardware/",
+ # for testing
+ "build/bazel/rules/cc",
+]
CcStaticLibraryInfo = provider(fields = ["root_static_archive", "objects"])
@@ -38,14 +53,14 @@ def cc_library_static(
whole_archive_deps = [],
implementation_whole_archive_deps = [],
system_dynamic_deps = None,
+ runtime_deps = [],
export_absolute_includes = [],
export_includes = [],
export_system_includes = [],
local_includes = [],
absolute_includes = [],
hdrs = [],
- native_bridge_supported = False, # TODO: not supported yet.
- use_libcrt = True,
+ native_bridge_supported = False, # TODO: not supported yet. @unused
rtti = False,
stl = "",
cpp_std = "",
@@ -62,13 +77,22 @@ def cc_library_static(
srcs_as = [],
asflags = [],
features = [],
+ linkopts = [],
alwayslink = None,
target_compatible_with = [],
# TODO(b/202299295): Handle data attribute.
- data = [],
- sdk_version = "",
+ data = [], # @unused
+ sdk_version = "", # @unused
min_sdk_version = "",
- use_version_lib = False):
+ tags = [],
+ tidy = None,
+ tidy_checks = None,
+ tidy_checks_as_errors = None,
+ tidy_flags = None,
+ tidy_disabled_srcs = None,
+ tidy_timeout_srcs = None,
+ tidy_gen_header_filter = None,
+ native_coverage = True):
"Bazel macro to correspond with the cc_library_static Soong module."
exports_name = "%s_exports" % name
@@ -78,32 +102,38 @@ def cc_library_static(
asm_name = "%s_asm" % name
toolchain_features = []
- toolchain_features += features
+
+ toolchain_features.append("pic")
if is_external_directory(native.package_name()):
toolchain_features += [
"-non_external_compiler_flags",
"external_compiler_flags",
]
-
- if use_version_lib:
- libbuildversionLabel = "//build/soong/cc/libbuildversion:libbuildversion"
- whole_archive_deps = whole_archive_deps + [libbuildversionLabel]
+ else:
+ toolchain_features += [
+ "non_external_compiler_flags",
+ "-external_compiler_flags",
+ ]
if rtti:
- toolchain_features += ["rtti"]
- if not use_libcrt:
- toolchain_features += ["use_libcrt"]
+ toolchain_features.append("rtti")
if cpp_std:
toolchain_features += [cpp_std, "-cpp_std_default"]
if c_std:
toolchain_features += [c_std, "-c_std_default"]
+ for path in _ALLOWED_MANUAL_INTERFACE_PATHS:
+ if native.package_name().startswith(path):
+ toolchain_features += ["do_not_check_manual_binder_interfaces"]
+ break
+
if min_sdk_version:
- toolchain_features += [
- "sdk_version_" + parse_sdk_version(min_sdk_version),
- "-sdk_version_default",
- ]
+ toolchain_features += parse_sdk_version(min_sdk_version) + ["-sdk_version_default"]
+ toolchain_features += features
+
+ if not native_coverage:
+ toolchain_features += ["-coverage"] # buildifier: disable=list-append This could be a select, not a list
if system_dynamic_deps == None:
system_dynamic_deps = system_dynamic_deps_defaults
@@ -116,14 +146,27 @@ def cc_library_static(
# whole archive deps always re-export their includes, etc
deps = deps + whole_archive_deps + dynamic_deps,
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
)
+ stl_info = stl_info_from_attr(stl, False)
+ linkopts = linkopts + stl_info.linkopts
+ copts = copts + stl_info.cppflags
+
_cc_includes(
name = locals_name,
includes = local_includes,
absolute_includes = absolute_includes,
- deps = implementation_deps + implementation_dynamic_deps + system_dynamic_deps + static_stl_deps(stl) + implementation_whole_archive_deps,
+ deps = (
+ implementation_deps +
+ implementation_dynamic_deps +
+ system_dynamic_deps +
+ stl_info.static_deps +
+ stl_info.shared_deps +
+ implementation_whole_archive_deps
+ ),
target_compatible_with = target_compatible_with,
+ tags = ["manual"],
)
# Silently drop these attributes for now:
@@ -138,38 +181,214 @@ def cc_library_static(
("implementation_deps", [locals_name]),
("deps", [exports_name]),
("features", toolchain_features),
- ("toolchains", ["//build/bazel/platforms:android_target_product_vars"]),
- ("alwayslink", alwayslink),
+ ("toolchains", ["//build/bazel/product_config:product_vars"]),
("target_compatible_with", target_compatible_with),
+ ("linkopts", linkopts),
],
)
+ # TODO(b/231574899): restructure this to handle other images
+ copts += select({
+ "//build/bazel/rules/apex:non_apex": [],
+ "//conditions:default": [
+ "-D__ANDROID_APEX__",
+ ],
+ })
+
native.cc_library(
name = cpp_name,
srcs = srcs,
copts = copts + cppflags,
+ tags = ["manual"],
+ alwayslink = True,
**common_attrs
)
native.cc_library(
name = c_name,
srcs = srcs_c,
copts = copts + conlyflags,
+ tags = ["manual"],
+ alwayslink = True,
**common_attrs
)
native.cc_library(
name = asm_name,
srcs = srcs_as,
copts = asflags,
+ tags = ["manual"],
+ alwayslink = True,
**common_attrs
)
# Root target to handle combining of the providers of the language-specific targets.
_cc_library_combiner(
name = name,
- deps = [cpp_name, c_name, asm_name] + whole_archive_deps + implementation_whole_archive_deps,
+ roots = [cpp_name, c_name, asm_name],
+ deps = whole_archive_deps + implementation_whole_archive_deps,
+ additional_sanitizer_deps = (
+ deps +
+ stl_info.static_deps +
+ implementation_deps
+ ),
+ runtime_deps = runtime_deps,
target_compatible_with = target_compatible_with,
+ alwayslink = alwayslink,
+ static_deps = deps + implementation_deps + whole_archive_deps + implementation_whole_archive_deps,
+ androidmk_static_deps = deps + implementation_deps + stl_info.static_deps,
+ androidmk_whole_archive_deps = whole_archive_deps + implementation_whole_archive_deps,
+ androidmk_dynamic_deps = dynamic_deps + implementation_dynamic_deps + system_dynamic_deps + stl_info.shared_deps,
+ exports = exports_name,
+ tags = tags,
+ features = toolchain_features,
+
+ # clang-tidy attributes
+ tidy = tidy,
+ srcs_cpp = srcs,
+ srcs_c = srcs_c,
+ copts_cpp = copts + cppflags,
+ copts_c = copts + conlyflags,
+ hdrs = hdrs,
+ includes = [locals_name, exports_name],
+ tidy_flags = tidy_flags,
+ tidy_checks = tidy_checks,
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_disabled_srcs = tidy_disabled_srcs,
+ tidy_timeout_srcs = tidy_timeout_srcs,
+ tidy_gen_header_filter = tidy_gen_header_filter,
+ )
+
+def _generate_tidy_files(ctx):
+ disabled_srcs = [] + ctx.files.tidy_disabled_srcs
+ tidy_timeout = ctx.attr._tidy_timeout[BuildSettingInfo].value
+ if tidy_timeout != "":
+ disabled_srcs.extend(ctx.attr.tidy_timeout_srcs)
+
+ if ctx.attr.tidy_gen_header_filter:
+ if ctx.attr.tidy_flags:
+ fail("tidy_flags cannot be set when also using tidy_gen_header_filter")
+ tidy_flags = ["-header-filter=" + paths.join(ctx.genfiles_dir.path, ctx.label.package) + ".*"]
+ else:
+ tidy_flags = ctx.attr.tidy_flags
+
+ cpp_srcs, cpp_hdrs = get_non_header_srcs(
+ ctx.files.srcs_cpp,
+ ctx.files.tidy_disabled_srcs,
+ source_extensions = CPP_EXTENSIONS,
+ )
+ c_srcs, c_hdrs = get_non_header_srcs(
+ ctx.files.srcs_cpp + ctx.files.srcs_c,
+ ctx.files.tidy_disabled_srcs,
+ source_extensions = C_EXTENSIONS,
+ )
+ hdrs = ctx.files.hdrs + cpp_hdrs + c_hdrs
+ cpp_tidy_outs = generate_clang_tidy_actions(
+ ctx,
+ ctx.attr.copts_cpp,
+ ctx.attr.deps + ctx.attr.includes,
+ cpp_srcs,
+ hdrs,
+ "c++",
+ tidy_flags,
+ ctx.attr.tidy_checks,
+ ctx.attr.tidy_checks_as_errors,
+ tidy_timeout,
+ )
+ c_tidy_outs = generate_clang_tidy_actions(
+ ctx,
+ ctx.attr.copts_c,
+ ctx.attr.deps + ctx.attr.includes,
+ c_srcs,
+ hdrs,
+ "c",
+ tidy_flags,
+ ctx.attr.tidy_checks,
+ ctx.attr.tidy_checks_as_errors,
+ tidy_timeout,
+ )
+ return cpp_tidy_outs + c_tidy_outs
+
+def _generate_tidy_actions(ctx):
+ transitive_tidy_files = []
+ for ts in get_dep_targets(ctx.attr, predicate = lambda t: ClangTidyInfo in t).values():
+ for t in ts:
+ transitive_tidy_files.append(t[ClangTidyInfo].transitive_tidy_files)
+
+ with_tidy = ctx.attr._with_tidy[BuildSettingInfo].value
+ allow_local_tidy_true = ctx.attr._allow_local_tidy_true[BuildSettingInfo].value
+ tidy_external_vendor = ctx.attr._tidy_external_vendor[BuildSettingInfo].value
+ tidy_enabled = (with_tidy and ctx.attr.tidy != "never") or (allow_local_tidy_true and ctx.attr.tidy == "local")
+ should_run_for_current_package = clang_tidy_for_dir(tidy_external_vendor, ctx.label.package)
+ if tidy_enabled and should_run_for_current_package:
+ direct_tidy_files = _generate_tidy_files(ctx)
+ else:
+ direct_tidy_files = None
+
+ tidy_files = depset(
+ direct = direct_tidy_files,
+ )
+ transitive_tidy_files = depset(
+ direct = direct_tidy_files,
+ transitive = transitive_tidy_files,
+ )
+ return [
+ OutputGroupInfo(
+ _validation = tidy_files,
+ ),
+ ClangTidyInfo(
+ tidy_files = tidy_files,
+ transitive_tidy_files = transitive_tidy_files,
+ ),
+ ]
+
+def _archive_with_prebuilt_libs(ctx, prebuilt_deps, linking_outputs, cc_toolchain):
+ linking_output = linking_outputs.library_to_link.static_library
+ if not prebuilt_deps:
+ return linking_output
+
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ requested_features = ctx.features + ["archive_with_prebuilt_flags"],
+ unsupported_features = ctx.disabled_features + ["linker_flags", "archiver_flags"],
+ )
+
+ output_file = ctx.actions.declare_file("lib" + ctx.label.name + ".a")
+
+ archiver_path = cc_common.get_tool_for_action(
+ feature_configuration = feature_configuration,
+ action_name = ACTION_NAMES.cpp_link_static_library,
+ )
+ archiver_variables = cc_common.create_link_variables(
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ output_file = output_file.path,
+ is_using_linker = False,
+ )
+ command_line = cc_common.get_memory_inefficient_command_line(
+ feature_configuration = feature_configuration,
+ action_name = ACTION_NAMES.cpp_link_static_library,
+ variables = archiver_variables,
+ )
+ args = ctx.actions.args()
+ args.add_all(command_line)
+ args.add(linking_output)
+ args.add_all(prebuilt_deps)
+
+ ctx.actions.run(
+ executable = archiver_path,
+ arguments = [args],
+ inputs = depset(
+ direct = [linking_output] + prebuilt_deps,
+ transitive = [
+ cc_toolchain.all_files,
+ ],
+ ),
+ outputs = [output_file],
+ mnemonic = "CppArchive",
)
+ return output_file
+
# Returns a CcInfo object which combines one or more CcInfo objects, except that all
# linker inputs owned by owners in `old_owner_labels` are relinked and owned by the current target.
#
@@ -181,17 +400,32 @@ def _cc_library_combiner_impl(ctx):
cc_infos = []
for dep in ctx.attr.deps:
old_owner_labels.append(dep.label)
+ cc_info = dep[CcInfo]
+
+ # do not propagate includes, hdrs, etc, already handled by roots
+ cc_infos.append(CcInfo(linking_context = cc_info.linking_context))
+
+ # Roots are handled after deps to mimic Soong, which places objects from whole archive deps before the target's own objects.
+ for dep in ctx.attr.roots:
+ old_owner_labels.append(dep.label)
cc_infos.append(dep[CcInfo])
+
combined_info = cc_common.merge_cc_infos(cc_infos = cc_infos)
objects_to_link = []
+ prebuilt_deps = []
+
# This is not ideal, as it flattens a depset.
for old_linker_input in combined_info.linking_context.linker_inputs.to_list():
if old_linker_input.owner in old_owner_labels:
for lib in old_linker_input.libraries:
# These objects will be recombined into the root archive.
objects_to_link.extend(lib.objects)
+
+ # This is a prebuilt library, we have to handle it separately
+ if not lib.objects and lib.static_library:
+ prebuilt_deps.append(lib.static_library)
else:
# Android macros don't handle transitive linker dependencies because
# it's unsupported in legacy. We may want to change this going forward,
@@ -199,15 +433,28 @@ def _cc_library_combiner_impl(ctx):
fail("cc_static_library %s given transitive linker dependency from %s" % (ctx.label, old_linker_input.owner))
cc_toolchain = find_cpp_toolchain(ctx)
- CPP_LINK_STATIC_LIBRARY_ACTION_NAME = "c++-link-static-library"
+
feature_configuration = cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
- requested_features = ctx.features,
+ requested_features = ctx.features + ["archiver_flags"],
unsupported_features = ctx.disabled_features + ["linker_flags"],
)
- output_file = ctx.actions.declare_file("lib" + ctx.label.name + ".a")
+ out_name = ctx.label.name
+ if prebuilt_deps:
+ out_name += "_objs_only"
+ linking_context, linking_outputs = cc_common.create_linking_context_from_compilation_outputs(
+ actions = ctx.actions,
+ name = out_name,
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ alwayslink = ctx.attr.alwayslink,
+ disallow_dynamic_library = True,
+ compilation_outputs = cc_common.create_compilation_outputs(objects = depset(direct = objects_to_link)),
+ )
+
+ output_file = _archive_with_prebuilt_libs(ctx, prebuilt_deps, linking_outputs, cc_toolchain)
linker_input = cc_common.create_linker_input(
owner = ctx.label,
libraries = depset(direct = [
@@ -217,47 +464,26 @@ def _cc_library_combiner_impl(ctx):
cc_toolchain = cc_toolchain,
static_library = output_file,
objects = objects_to_link,
+ alwayslink = ctx.attr.alwayslink,
),
]),
)
-
linking_context = cc_common.create_linking_context(linker_inputs = depset(direct = [linker_input]))
- archiver_path = cc_common.get_tool_for_action(
- feature_configuration = feature_configuration,
- action_name = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
- )
- archiver_variables = cc_common.create_link_variables(
- feature_configuration = feature_configuration,
- cc_toolchain = cc_toolchain,
- output_file = output_file.path,
- is_using_linker = False,
- )
- command_line = cc_common.get_memory_inefficient_command_line(
- feature_configuration = feature_configuration,
- action_name = CPP_LINK_STATIC_LIBRARY_ACTION_NAME,
- variables = archiver_variables,
- )
- args = ctx.actions.args()
- args.add_all(command_line)
- args.add_all(objects_to_link)
-
- ctx.actions.run(
- executable = archiver_path,
- arguments = [args],
- inputs = depset(
- direct = objects_to_link,
- transitive = [
- cc_toolchain.all_files,
- ],
- ),
- outputs = [output_file],
- )
- return [
+ providers = [
DefaultInfo(files = depset(direct = [output_file]), data_runfiles = ctx.runfiles(files = [output_file])),
CcInfo(compilation_context = combined_info.compilation_context, linking_context = linking_context),
CcStaticLibraryInfo(root_static_archive = output_file, objects = objects_to_link),
+ get_sanitizer_lib_info(ctx.attr.features, ctx.attr.deps + ctx.attr.additional_sanitizer_deps),
+ create_cc_androidmk_provider(
+ static_deps = ctx.attr.androidmk_static_deps,
+ whole_archive_deps = ctx.attr.androidmk_whole_archive_deps,
+ dynamic_deps = ctx.attr.androidmk_dynamic_deps,
+ ),
]
+ providers.extend(_generate_tidy_actions(ctx))
+
+ return providers
# A rule which combines objects of one or more cc_library targets into a single
# static linker input. This outputs a single archive file combining the objects
@@ -271,18 +497,135 @@ def _cc_library_combiner_impl(ctx):
_cc_library_combiner = rule(
implementation = _cc_library_combiner_impl,
attrs = {
- "deps": attr.label_list(providers = [CcInfo]),
+ "roots": attr.label_list(
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ ),
+ "deps": attr.label_list(
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ ),
+ "additional_sanitizer_deps": attr.label_list(
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ doc = "Deps used only to check for sanitizer enablement",
+ ),
+ "runtime_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "Deps that should be installed along with this target. Read by the apex cc aspect.",
+ ),
+ "static_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the static deps of the lib. This is used by" +
+ " abi_dump_aspect to travel along the static_deps edges" +
+ " to create abi dump files.",
+ ),
+ "androidmk_static_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the whole archive deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_STATIC_LIBRARIES.",
+ ),
+ "androidmk_whole_archive_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the whole archive deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_WHOLE_STATIC_LIBRARIES.",
+ ),
+ "androidmk_dynamic_deps": attr.label_list(
+ providers = [CcInfo],
+ doc = "All the dynamic deps of the lib. This is used to propagate" +
+ " information to AndroidMk about LOCAL_SHARED_LIBRARIES." +
+ " The attribute name is prefixed with androidmk to avoid" +
+ " collision with the dynamic_deps attribute used in APEX" +
+ " aspects' propagation.",
+ ),
+ "exports": attr.label(
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ ),
"_cc_toolchain": attr.label(
default = Label("@local_config_cc//:toolchain"),
providers = [cc_common.CcToolchainInfo],
+ doc = "The exported includes used by abi_dump_aspect to retrieve" +
+ " and use as the inputs of abi dumper binary.",
+ ),
+ "alwayslink": attr.bool(
+ doc = """At link time, whether these libraries should be wrapped in
+ the --whole_archive block. This causes all libraries in the static
+ archive to be unconditionally linked, regardless of whether the
+ symbols in these object files are being searched by the linker.""",
+ default = False,
+ ),
+
+ # Clang-tidy attributes
+ "tidy": attr.string(values = ["", "local", "never"]),
+ "srcs_cpp": attr.label_list(allow_files = True),
+ "srcs_c": attr.label_list(allow_files = True),
+ "copts_cpp": attr.string_list(),
+ "copts_c": attr.string_list(),
+ "hdrs": attr.label_list(allow_files = True),
+ "includes": attr.label_list(cfg = lto_deps_transition),
+ "tidy_checks": attr.string_list(),
+ "tidy_checks_as_errors": attr.string_list(),
+ "tidy_flags": attr.string_list(),
+ "tidy_disabled_srcs": attr.label_list(allow_files = True),
+ "tidy_timeout_srcs": attr.label_list(allow_files = True),
+ "tidy_gen_header_filter": attr.bool(),
+ "_clang_tidy_sh": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy.sh"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ doc = "The clang tidy shell wrapper",
+ ),
+ "_clang_tidy": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ doc = "The clang tidy executable",
+ ),
+ "_clang_tidy_real": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy.real"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ ),
+ "_with_tidy": attr.label(
+ default = "//build/bazel/flags/cc/tidy:with_tidy",
+ ),
+ "_allow_local_tidy_true": attr.label(
+ default = "//build/bazel/flags/cc/tidy:allow_local_tidy_true",
+ ),
+ "_with_tidy_flags": attr.label(
+ default = "//build/bazel/flags/cc/tidy:with_tidy_flags",
+ ),
+ "_default_tidy_header_dirs": attr.label(
+ default = "//build/bazel/flags/cc/tidy:default_tidy_header_dirs",
+ ),
+ "_tidy_timeout": attr.label(
+ default = "//build/bazel/flags/cc/tidy:tidy_timeout",
+ ),
+ "_tidy_external_vendor": attr.label(
+ default = "//build/bazel/flags/cc/tidy:tidy_external_vendor",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
),
},
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
- provides = [CcInfo],
+ provides = [CcInfo, CcAndroidMkInfo],
fragments = ["cpp"],
)
def _cc_includes_impl(ctx):
+ check_absolute_include_dirs_disabled(
+ ctx.label.package,
+ ctx.attr.absolute_includes,
+ )
+
return [create_ccinfo_for_includes(
ctx,
includes = ctx.attr.includes,
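
Taken together, the cc_library_static.bzl changes above drop use_libcrt and use_version_lib, split the combiner inputs into roots and deps, and add runtime_deps, linkopts, clang-tidy and native_coverage plumbing. A hypothetical invocation of the updated macro (all target and file names below are made up for illustration, not part of this change) could look like:

    load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")

    cc_library_static(
        name = "libexample",
        srcs = ["example.cpp"],          # compiled by the <name>_cpp sub-target
        srcs_c = ["example.c"],          # compiled by the <name>_c sub-target
        whole_archive_deps = [":libexample_objs"],
        runtime_deps = [":libexample_runtime"],  # read by the apex cc aspect
        min_sdk_version = "30",
        tidy = "local",                  # clang-tidy outputs land in the _validation output group
        native_coverage = False,         # disables the "coverage" toolchain feature
        tags = ["manual"],
    )
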
diff --git a/rules/cc/cc_library_static_test.bzl b/rules/cc/cc_library_static_test.bzl
new file mode 100644
index 00000000..64dda437
--- /dev/null
+++ b/rules/cc/cc_library_static_test.bzl
@@ -0,0 +1,602 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_binary.bzl", "cc_binary")
+load("//build/bazel/rules/cc:cc_library_headers.bzl", "cc_library_headers")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load("//build/bazel/rules/cc:cc_prebuilt_library_static.bzl", "cc_prebuilt_library_static")
+load(
+ "//build/bazel/rules/test_common:flags.bzl",
+ "action_flags_absent_for_mnemonic_test",
+ "action_flags_present_only_for_mnemonic_test",
+)
+load("//build/bazel/rules/test_common:paths.bzl", "get_output_and_package_dir_based_path", "get_package_dir_based_path")
+load("//build/bazel/rules/test_common:rules.bzl", "expect_failure_test")
+load(":cc_library_common_test.bzl", "target_provides_androidmk_info_test")
+
+def _cc_library_static_propagating_compilation_context_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ cc_info = target[CcInfo]
+ compilation_context = cc_info.compilation_context
+
+ header_paths = [f.path for f in compilation_context.headers.to_list()]
+ for hdr in ctx.files.expected_hdrs:
+ asserts.true(
+ env,
+ hdr.path in header_paths,
+ "Did not find {hdr} in includes: {hdrs}.".format(hdr = hdr, hdrs = compilation_context.headers),
+ )
+
+ for hdr in ctx.files.expected_absent_hdrs:
+ asserts.true(
+ env,
+ hdr not in header_paths,
+ "Found {hdr} in includes: {hdrs}, should not be present.".format(hdr = hdr, hdrs = compilation_context.headers),
+ )
+
+ for include in ctx.attr.expected_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include in compilation_context.includes.to_list(),
+ "Did not find {include} in includes: {includes}.".format(include = include, includes = compilation_context.includes),
+ )
+
+ for include in ctx.attr.expected_absent_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include not in compilation_context.includes.to_list(),
+ "Found {include} in includes: {includes}, was expected to be absent".format(include = include, includes = compilation_context.includes),
+ )
+
+ for include in ctx.attr.expected_system_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include in compilation_context.system_includes.to_list(),
+ "Did not find {include} in system includes: {includes}.".format(include = include, includes = compilation_context.system_includes),
+ )
+
+ for include in ctx.attr.expected_absent_system_includes:
+ absolute_include = get_package_dir_based_path(env, include)
+ asserts.true(
+ env,
+ absolute_include not in compilation_context.system_includes.to_list(),
+ "Found {include} in system includes: {includes}, was expected to be absent".format(include = include, includes = compilation_context.system_includes),
+ )
+
+ return analysistest.end(env)
+
+_cc_library_static_propagating_compilation_context_test = analysistest.make(
+ _cc_library_static_propagating_compilation_context_test_impl,
+ attrs = {
+ "expected_hdrs": attr.label_list(),
+ "expected_absent_hdrs": attr.label_list(),
+ "expected_includes": attr.string_list(),
+ "expected_absent_includes": attr.string_list(),
+ "expected_system_includes": attr.string_list(),
+ "expected_absent_system_includes": attr.string_list(),
+ },
+)
+
+def _cc_library_static_propagates_deps():
+ name = "_cc_library_static_propagates_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_static_propagates_whole_archive_deps():
+ name = "_cc_library_static_propagates_whole_archive_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_static_propagates_dynamic_deps():
+ name = "_cc_library_static_propagates_dynamic_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ dynamic_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_hdrs = [":hdr"],
+ expected_includes = ["a/b/c"],
+ expected_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_static_does_not_propagate_implementation_deps():
+ name = "_cc_library_static_does_not_propagate_implementation_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ implementation_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_static_does_not_propagate_implementation_whole_archive_deps():
+ name = "_cc_library_static_does_not_propagate_implementation_whole_archive_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ implementation_whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_library_static_does_not_propagate_implementation_dynamic_deps():
+ name = "_cc_library_static_does_not_propagate_implementation_dynamic_deps"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_shared(
+ name = dep_name,
+ hdrs = [":hdr"],
+ export_includes = ["a/b/c"],
+ export_system_includes = ["d/e/f"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ implementation_dynamic_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_propagating_compilation_context_test(
+ name = test_name,
+ target_under_test = name,
+ expected_absent_hdrs = [":hdr"],
+ expected_absent_includes = ["a/b/c"],
+ expected_absent_system_includes = ["d/e/f"],
+ )
+
+ return test_name
+
+def _cc_rules_do_not_allow_absolute_includes():
+ name = "cc_rules_do_not_allow_absolute_includes"
+ test_names = []
+
+ DISALLOWED_INCLUDE_DIRS = [
+ "art",
+ "art/libnativebridge",
+ "art/libnativeloader",
+ "libcore",
+ "libnativehelper",
+ "external/apache-harmony",
+ "external/apache-xml",
+ "external/boringssl",
+ "external/bouncycastle",
+ "external/conscrypt",
+ "external/icu",
+ "external/okhttp",
+ "external/vixl",
+ "external/wycheproof",
+ ]
+
+ for include_dir in DISALLOWED_INCLUDE_DIRS:
+ binary_name = name + "_binary" + "_" + include_dir
+ library_headers_name = name + "_library_headers" + "_" + include_dir
+ library_shared_name = name + "_library_shared" + "_" + include_dir
+ library_static_name = name + "_library_static" + "_" + include_dir
+
+ cc_binary(
+ name = binary_name,
+ absolute_includes = [include_dir],
+ tags = ["manual"],
+ )
+ cc_library_headers(
+ name = library_headers_name,
+ absolute_includes = [include_dir],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = library_shared_name,
+ absolute_includes = [include_dir],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = library_static_name,
+ absolute_includes = [include_dir],
+ tags = ["manual"],
+ )
+
+ for target in [
+ binary_name,
+ library_headers_name,
+ library_static_name,
+ library_shared_name,
+ ]:
+ test_name = target + "_" + include_dir + "_test"
+ test_names.append(test_name)
+ expect_failure_test(
+ name = test_name,
+ target_under_test = target,
+ )
+
+ return test_names
+
+def _cc_library_static_links_against_prebuilt_library_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(env, 2, len(actions), "Expected actions, got %s" % actions)
+
+ argv = actions[0].argv
+ expected_output_action1 = get_output_and_package_dir_based_path(env, "libcc_library_static_links_against_prebuilt_library_objs_only.a")
+ asserts.equals(env, 5, len(argv))
+ asserts.equals(env, "crsPD", argv[1])
+ asserts.equals(env, expected_output_action1, argv[2])
+ asserts.equals(env, get_output_and_package_dir_based_path(env, paths.join("_objs", "cc_library_static_links_against_prebuilt_library_cpp", "bar.o")), argv[3])
+ asserts.equals(env, "--format=gnu", argv[4])
+
+ argv = actions[1].argv
+ asserts.equals(env, 6, len(argv))
+ asserts.equals(env, "cqsL", argv[1])
+ asserts.equals(env, get_output_and_package_dir_based_path(env, "libcc_library_static_links_against_prebuilt_library.a"), argv[2])
+ asserts.equals(env, "--format=gnu", argv[3])
+ asserts.equals(env, expected_output_action1, argv[4])
+ asserts.equals(env, get_package_dir_based_path(env, "foo.a"), argv[5])
+
+ return analysistest.end(env)
+
+_cc_library_static_links_against_prebuilt_library_test = analysistest.make(_cc_library_static_links_against_prebuilt_library_test_impl)
+
+def _cc_library_static_links_against_prebuilt_library():
+ name = "cc_library_static_links_against_prebuilt_library"
+ test_name = name + "_test"
+ dep_name = name + "_dep"
+
+ cc_prebuilt_library_static(
+ name = dep_name,
+ static_library = "foo.a",
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ srcs = ["bar.c"],
+ whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_links_against_prebuilt_library_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _cc_library_static_linking_object_ordering_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(env, 1, len(actions), "Expected actions, got %s" % actions)
+
+ outputs = actions[0].outputs.to_list()
+ argv = actions[0].argv
+ asserts.equals(env, 4 + len(ctx.attr.expected_objects_in_order), len(argv))
+ asserts.equals(env, "crsPD", argv[1])
+ asserts.equals(env, outputs[0].path, argv[2])
+
+ for i in range(len(ctx.attr.expected_objects_in_order)):
+ obj = ctx.attr.expected_objects_in_order[i]
+ asserts.equals(env, obj, paths.basename(argv[3 + i]))
+
+ asserts.equals(env, "--format=gnu", argv[-1])
+
+ return analysistest.end(env)
+
+_cc_library_static_linking_object_ordering_test = analysistest.make(
+ _cc_library_static_linking_object_ordering_test_impl,
+ attrs = {
+ "expected_objects_in_order": attr.string_list(),
+ },
+)
+
+def _cc_library_static_whole_archive_deps_objects_precede_target_objects():
+ name = "_cc_library_static_whole_archive_deps_objects_precede_target_objects"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ srcs = ["first.c"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ srcs = ["second.c"],
+ whole_archive_deps = [dep_name],
+ tags = ["manual"],
+ )
+
+ _cc_library_static_linking_object_ordering_test(
+ name = test_name,
+ target_under_test = name,
+ expected_objects_in_order = [
+ "first.o",
+ "second.o",
+ ],
+ )
+
+ return test_name
+
+def _cc_library_static_provides_androidmk_info():
+ name = "cc_library_static_provides_androidmk_info"
+ dep_name = name + "_static_dep"
+ whole_archive_dep_name = name + "_whole_archive_dep"
+ dynamic_dep_name = name + "_dynamic_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = whole_archive_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = dynamic_dep_name,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = name,
+ srcs = ["foo.cc"],
+ deps = [dep_name],
+ whole_archive_deps = [whole_archive_dep_name],
+ dynamic_deps = [dynamic_dep_name],
+ tags = ["manual"],
+ )
+ android_test_name = test_name + "_android"
+ linux_test_name = test_name + "_linux"
+ target_provides_androidmk_info_test(
+ name = android_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libc++_static", "libc++demangle"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc", "libdl", "libm"],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+ target_provides_androidmk_info_test(
+ name = linux_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libc++_static"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return [
+ android_test_name,
+ linux_test_name,
+ ]
+
+def _cc_library_static_link_action_should_not_have_arch_cflags():
+ name = "cc_library_static_link_action_should_not_have_cflags"
+ cpp_compile_test_name = name + "_CppCompile_test"
+ cpp_link_test_name = name + "_CppLink_test"
+
+ # https://cs.android.com/android/platform/build/soong/+/master:cc/config/arm_device.go;l=57-59;drc=de7c7847e7e028d46fdff8268689f30163c4c231
+ arm_armv7_a_cflags = ["-march=armv7-a", "-mfloat-abi=softfp"]
+
+ cc_library_static(
+ name = name,
+ srcs = ["foo.cpp"],
+ tags = ["manual"],
+ )
+
+ action_flags_present_only_for_mnemonic_test(
+ name = cpp_compile_test_name,
+ target_under_test = name + "_cpp",
+ mnemonics = ["CppCompile"],
+ expected_flags = arm_armv7_a_cflags,
+ target_compatible_with = [
+ "//build/bazel/platforms/os:android",
+ "//build/bazel/platforms/arch/variants:armv7-a-neon",
+ ],
+ )
+
+ action_flags_absent_for_mnemonic_test(
+ name = cpp_link_test_name,
+ target_under_test = name,
+ mnemonics = ["CppLink"],
+ expected_absent_flags = arm_armv7_a_cflags,
+ target_compatible_with = [
+ "//build/bazel/platforms/os:android",
+ "//build/bazel/platforms/arch/variants:armv7-a-neon",
+ ],
+ )
+
+ return [
+ cpp_compile_test_name,
+ cpp_link_test_name,
+ ]
+
+def _cc_library_static_defines_do_not_check_manual_binder_interfaces():
+ name = "_cc_library_static_defines_do_not_check_manual_binder_interfaces"
+ cpp_lib_name = name + "_cpp"
+ cpp_test_name = cpp_lib_name + "_test"
+ c_lib_name = name + "_c"
+ c_test_name = c_lib_name + "_test"
+
+ cc_library_static(
+ name = name,
+ srcs = ["a.cpp"],
+ srcs_c = ["b.c"],
+ tags = ["manual"],
+ )
+ action_flags_present_only_for_mnemonic_test(
+ name = cpp_test_name,
+ target_under_test = cpp_lib_name,
+ mnemonics = ["CppCompile"],
+ expected_flags = [
+ "-DDO_NOT_CHECK_MANUAL_BINDER_INTERFACES",
+ ],
+ )
+ action_flags_present_only_for_mnemonic_test(
+ name = c_test_name,
+ target_under_test = c_lib_name,
+ mnemonics = ["CppCompile"],
+ expected_flags = [
+ "-DDO_NOT_CHECK_MANUAL_BINDER_INTERFACES",
+ ],
+ )
+
+ non_allowlisted_package_cpp_name = name + "_non_allowlisted_package_cpp"
+ action_flags_absent_for_mnemonic_test(
+ name = non_allowlisted_package_cpp_name,
+ target_under_test = "//build/bazel/examples/cc:foo_static_cpp",
+ mnemonics = ["CppCompile"],
+ expected_absent_flags = [
+ "-DDO_NOT_CHECK_MANUAL_BINDER_INTERFACES",
+ ],
+ )
+
+ return [
+ cpp_test_name,
+ c_test_name,
+ non_allowlisted_package_cpp_name,
+ ]
+
+def cc_library_static_test_suite(name):
+ native.genrule(name = "hdr", cmd = "null", outs = ["f.h"], tags = ["manual"])
+
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_library_static_propagates_deps(),
+ _cc_library_static_propagates_whole_archive_deps(),
+ _cc_library_static_propagates_dynamic_deps(),
+ _cc_library_static_does_not_propagate_implementation_deps(),
+ _cc_library_static_does_not_propagate_implementation_whole_archive_deps(),
+ _cc_library_static_does_not_propagate_implementation_dynamic_deps(),
+ _cc_library_static_links_against_prebuilt_library(),
+ _cc_library_static_whole_archive_deps_objects_precede_target_objects(),
+ ] + (
+ _cc_rules_do_not_allow_absolute_includes() +
+ _cc_library_static_provides_androidmk_info() +
+ _cc_library_static_link_action_should_not_have_arch_cflags() +
+ _cc_library_static_defines_do_not_check_manual_binder_interfaces()
+ ),
+ )
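
These suite macros only create targets when called from a BUILD file. The BUILD file that loads them is not shown in this diff, so the wiring below is an assumption about the usual pattern rather than the actual package contents:

    load("//build/bazel/rules/cc:cc_library_static_test.bzl", "cc_library_static_test_suite")

    cc_library_static_test_suite(name = "cc_library_static_tests")

With that in place, bazel test //build/bazel/rules/cc:cc_library_static_tests (illustrative label) runs every test returned by the helper functions above.
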
diff --git a/rules/cc/cc_object.bzl b/rules/cc/cc_object.bzl
index a9b32551..f5cdb8d9 100644
--- a/rules/cc/cc_object.bzl
+++ b/rules/cc/cc_object.bzl
@@ -1,28 +1,28 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain")
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load(":cc_constants.bzl", "constants")
load(
":cc_library_common.bzl",
"get_includes_paths",
"is_external_directory",
+ "parse_sdk_version",
"system_dynamic_deps_defaults",
- "parse_sdk_version")
-load(":cc_constants.bzl", "constants")
-load(":stl.bzl", "static_stl_deps")
+)
+load(":lto_transitions.bzl", "lto_deps_transition")
+load(":stl.bzl", "stl_info_from_attr")
# "cc_object" module copts, taken from build/soong/cc/object.go
_CC_OBJECT_COPTS = ["-fno-addrsig"]
@@ -48,11 +48,11 @@ def split_srcs_hdrs(files):
non_headers_c = []
for f in files:
if f.extension in constants.hdr_exts:
- headers += [f]
+ headers.append(f)
elif f.extension in constants.as_src_exts:
- non_headers_as += [f]
+ non_headers_as.append(f)
else:
- non_headers_c += [f]
+ non_headers_c.append(f)
return non_headers_c, non_headers_as, headers
def _cc_object_impl(ctx):
@@ -74,10 +74,24 @@ def _cc_object_impl(ctx):
if is_external_directory(ctx.label.package):
extra_disabled_features.append("non_external_compiler_flags")
extra_features.append("external_compiler_flags")
+ else:
+ extra_features.append("non_external_compiler_flags")
+ extra_disabled_features.append("external_compiler_flags")
- if ctx.attr.min_sdk_version:
+ apex_min_sdk_version = ctx.attr._apex_min_sdk_version[BuildSettingInfo].value
+ if ctx.attr.crt and apex_min_sdk_version:
+ extra_disabled_features.append("sdk_version_default")
+ extra_features += parse_sdk_version(apex_min_sdk_version)
+ elif ctx.attr.min_sdk_version:
extra_disabled_features.append("sdk_version_default")
- extra_features.append("sdk_version_" + parse_sdk_version(ctx.attr.min_sdk_version))
+ extra_features += parse_sdk_version(ctx.attr.min_sdk_version)
+
+ # Disable coverage for cc_object targets: the objects are linked below, and when
+ # coverage is enabled clang links in an extra profiling library behind the scenes,
+ # so that library's symbols end up in the generated object file. Linking a shared
+ # library that depends on more than one such cc_object would then fail with
+ # duplicate symbol errors.
+ extra_disabled_features.append("coverage")
feature_configuration = cc_common.configure_features(
ctx = ctx,
@@ -88,7 +102,7 @@ def _cc_object_impl(ctx):
compilation_contexts = []
deps_objects = []
- for obj in ctx.attr.deps:
+ for obj in ctx.attr.objs:
compilation_contexts.append(obj[CcInfo].compilation_context)
deps_objects.append(obj[CcObjectInfo].objects)
for includes_dep in ctx.attr.includes_deps:
@@ -97,7 +111,7 @@ def _cc_object_impl(ctx):
product_variables = ctx.attr._android_product_variables[platform_common.TemplateVariableInfo]
asflags = [ctx.expand_make_variables("asflags", flag, product_variables.variables) for flag in ctx.attr.asflags]
- srcs_c, srcs_as, private_hdrs = split_srcs_hdrs(ctx.files.srcs)
+ srcs_c, srcs_as, private_hdrs = split_srcs_hdrs(ctx.files.srcs + ctx.files.srcs_as)
(compilation_context, compilation_outputs_c) = cc_common.compile(
name = ctx.label.name,
@@ -134,7 +148,7 @@ def _cc_object_impl(ctx):
objects_to_link = cc_common.merge_compilation_outputs(compilation_outputs = deps_objects + [compilation_outputs_c, compilation_outputs_as])
- user_link_flags = []
+ user_link_flags = [] + ctx.attr.linkopts
user_link_flags.extend(_CC_OBJECT_LINKOPTS)
additional_inputs = []
@@ -168,20 +182,32 @@ _cc_object = rule(
implementation = _cc_object_impl,
attrs = {
"srcs": attr.label_list(allow_files = constants.all_dot_exts),
+ "srcs_as": attr.label_list(allow_files = constants.all_dot_exts),
"hdrs": attr.label_list(allow_files = constants.hdr_dot_exts),
"absolute_includes": attr.string_list(),
"local_includes": attr.string_list(),
"copts": attr.string_list(),
"asflags": attr.string_list(),
- "deps": attr.label_list(providers = [CcInfo, CcObjectInfo]),
+ "linkopts": attr.string_list(),
+ "objs": attr.label_list(
+ providers = [CcInfo, CcObjectInfo],
+ cfg = lto_deps_transition,
+ ),
"includes_deps": attr.label_list(providers = [CcInfo]),
"linker_script": attr.label(allow_single_file = True),
"sdk_version": attr.string(),
"min_sdk_version": attr.string(),
+ "crt": attr.bool(default = False),
"_android_product_variables": attr.label(
- default = Label("//build/bazel/platforms:android_target_product_vars"),
+ default = Label("//build/bazel/product_config:product_vars"),
providers = [platform_common.TemplateVariableInfo],
),
+ "_apex_min_sdk_version": attr.label(
+ default = "//build/bazel/rules/apex:min_sdk_version",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
},
toolchains = ["//prebuilts/clang/host/linux-x86:nocrt_toolchain"],
fragments = ["cpp"],
@@ -192,10 +218,12 @@ def cc_object(
copts = [],
hdrs = [],
asflags = [],
+ linkopts = [],
srcs = [],
srcs_as = [],
+ objs = [],
deps = [],
- native_bridge_supported = False, # TODO: not supported yet.
+ native_bridge_supported = False, # TODO: not supported yet. @unused
stl = "",
system_dynamic_deps = None,
sdk_version = "",
@@ -206,14 +234,21 @@ def cc_object(
if system_dynamic_deps == None:
system_dynamic_deps = system_dynamic_deps_defaults
+ stl_info = stl_info_from_attr(stl, False)
+ linkopts = linkopts + stl_info.linkopts
+ copts = copts + stl_info.cppflags
+
_cc_object(
name = name,
hdrs = hdrs,
asflags = asflags,
copts = _CC_OBJECT_COPTS + copts,
- srcs = srcs + srcs_as,
- deps = deps,
- includes_deps = static_stl_deps(stl) + system_dynamic_deps,
+ linkopts = linkopts,
+ # TODO(b/261996812): we shouldn't need to have both srcs and srcs_as as inputs here
+ srcs = srcs,
+ srcs_as = srcs_as,
+ objs = objs,
+ includes_deps = stl_info.static_deps + stl_info.shared_deps + system_dynamic_deps + deps,
sdk_version = sdk_version,
min_sdk_version = min_sdk_version,
**kwargs
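
As a usage sketch of the updated cc_object macro (illustrative names, mirroring how the new cc_object_test.bzl in the next file instantiates its fixtures): object-to-object dependencies now go through objs instead of deps, assembly sources have their own srcs_as attribute, and crt objects follow the apex min_sdk_version build setting when it is set.

    load("//build/bazel/rules/cc:cc_object.bzl", "cc_object")

    cc_object(
        name = "crtbegin_example",
        crt = True,                     # crt objects pick up the apex min_sdk_version when set
        srcs = ["crtbegin.c"],
        srcs_as = ["crtbegin.S"],       # assembly sources now have their own attribute
        objs = [":crt_common"],         # cc_object deps moved from `deps` to `objs`
        min_sdk_version = "16",
        tags = ["manual"],
    )
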
diff --git a/rules/cc/cc_object_test.bzl b/rules/cc/cc_object_test.bzl
new file mode 100644
index 00000000..59cacb63
--- /dev/null
+++ b/rules/cc/cc_object_test.bzl
@@ -0,0 +1,128 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":cc_object.bzl", "cc_object")
+
+def _min_sdk_version_target_flag_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ cpp_link_actions = [a for a in actions if a.mnemonic == "CppLink"]
+
+ found = False
+ for action in cpp_link_actions:
+ for arg in action.argv:
+ if arg.startswith("--target="):
+ found = True
+ asserts.true(
+ env,
+ arg.endswith(ctx.attr.expected_min_sdk_version),
+ "Incorrect --target flag %s. Expected sdk_version %s" % (arg, ctx.attr.expected_min_sdk_version),
+ )
+ asserts.true(
+ env,
+ found,
+ "No --target flag found in CppLink actions: %s" % (
+ [a.argv for a in cpp_link_actions],
+ ),
+ )
+
+ return analysistest.end(env)
+
+def _create_min_sdk_version_target_flag_test(config_settings = {}):
+ return analysistest.make(
+ _min_sdk_version_target_flag_test_impl,
+ attrs = {
+ "expected_min_sdk_version": attr.string(mandatory = True),
+ },
+ config_settings = config_settings,
+ )
+
+_min_sdk_version_target_flag_test = _create_min_sdk_version_target_flag_test()
+
+_apex_min_sdk_version = "25"
+
+_min_sdk_version_target_flag_with_apex_test = _create_min_sdk_version_target_flag_test({
+ "@//build/bazel/rules/apex:min_sdk_version": _apex_min_sdk_version,
+})
+
+def _crt_cc_object_min_sdk_version_overriden_by_apex_min_sdk_version():
+ name = "crt_cc_object_min_sdk_version_overriden_by_apex_min_sdk_version"
+ test_name = name + "_test"
+ crt_apex_test_name = test_name + "_crt_apex"
+ not_crt_apex_test_name = test_name + "_not_crt_apex"
+ crt_not_apex_test_name = test_name + "_crt_not_apex"
+ not_crt_not_apex_test_name = test_name + "_not_crt_not_apex"
+ crt_obj_name = name + "_crt"
+ not_crt_obj_name = name + "_not_crt"
+ obj_dep_name = name + "_dep"
+ obj_min_sdk_version = "16"
+
+ cc_object(
+ name = obj_dep_name,
+ srcs = ["a.cc"],
+ tags = ["manual"],
+ )
+ cc_object(
+ name = crt_obj_name,
+ crt = True,
+ objs = [obj_dep_name],
+ srcs = ["a.cc"],
+ min_sdk_version = obj_min_sdk_version,
+ tags = ["manual"],
+ )
+ cc_object(
+ name = not_crt_obj_name,
+ objs = [obj_dep_name],
+ srcs = ["a.cc"],
+ min_sdk_version = obj_min_sdk_version,
+ tags = ["manual"],
+ )
+ _min_sdk_version_target_flag_with_apex_test(
+ name = crt_apex_test_name,
+ target_under_test = crt_obj_name,
+ expected_min_sdk_version = _apex_min_sdk_version,
+ target_compatible_with = ["@//build/bazel/platforms/os:android"],
+ )
+ _min_sdk_version_target_flag_with_apex_test(
+ name = not_crt_apex_test_name,
+ target_under_test = not_crt_obj_name,
+ expected_min_sdk_version = obj_min_sdk_version,
+ target_compatible_with = ["@//build/bazel/platforms/os:android"],
+ )
+ _min_sdk_version_target_flag_test(
+ name = crt_not_apex_test_name,
+ target_under_test = crt_obj_name,
+ expected_min_sdk_version = obj_min_sdk_version,
+ target_compatible_with = ["@//build/bazel/platforms/os:android"],
+ )
+ _min_sdk_version_target_flag_test(
+ name = not_crt_not_apex_test_name,
+ target_under_test = not_crt_obj_name,
+ expected_min_sdk_version = obj_min_sdk_version,
+ target_compatible_with = ["@//build/bazel/platforms/os:android"],
+ )
+
+ return [
+ crt_apex_test_name,
+ not_crt_apex_test_name,
+ crt_not_apex_test_name,
+ not_crt_not_apex_test_name,
+ ]
+
+def cc_object_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = _crt_cc_object_min_sdk_version_overriden_by_apex_min_sdk_version(),
+ )
diff --git a/rules/cc/cc_prebuilt_binary.bzl b/rules/cc/cc_prebuilt_binary.bzl
new file mode 100644
index 00000000..15435042
--- /dev/null
+++ b/rules/cc/cc_prebuilt_binary.bzl
@@ -0,0 +1,51 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+load(":stripped_cc_common.bzl", "common_strip_attrs", "stripped_impl")
+
+def is_target_host(ctx):
+ return not platforms.is_target_android(ctx.attr._platform_utils)
+
+def _cc_prebuilt_binary_impl(ctx):
+ # If the target is host, Soong just manually does a symlink
+ if is_target_host(ctx):
+ exec = ctx.actions.declare_file(ctx.attr.name)
+ ctx.actions.symlink(
+ output = exec,
+ target_file = ctx.files.src[0],
+ )
+ else:
+ exec = stripped_impl(ctx)
+ return [
+ DefaultInfo(
+ files = depset([exec]),
+ executable = exec,
+ ),
+ ]
+
+cc_prebuilt_binary = rule(
+ implementation = _cc_prebuilt_binary_impl,
+ attrs = dict(
+ common_strip_attrs, # HACK: inlining common_strip_attrs
+ src = attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ ),
+ _platform_utils = attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ ),
+ executable = True,
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+)
diff --git a/rules/cc/cc_prebuilt_binary_test.bzl b/rules/cc/cc_prebuilt_binary_test.bzl
new file mode 100644
index 00000000..db29692c
--- /dev/null
+++ b/rules/cc/cc_prebuilt_binary_test.bzl
@@ -0,0 +1,222 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":cc_binary_test.bzl", "strip_test_assert_flags")
+load(":cc_prebuilt_binary.bzl", "cc_prebuilt_binary")
+
+def _cc_prebuilt_binary_basic_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ outs = target[DefaultInfo].files.to_list()
+ asserts.true(
+ env,
+ len(outs) == 1,
+ "expected there to be 1 output but got:\n" + str(outs),
+ )
+ return analysistest.end(env)
+
+_cc_prebuilt_binary_basic_test = analysistest.make(_cc_prebuilt_binary_basic_test_impl)
+
+def _cc_prebuilt_binary_simple_test():
+ name = "cc_prebuilt_binary_simple"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_basic_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _cc_prebuilt_binary_stripping_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ strip_acts = [a for a in actions if a.mnemonic == "CcStrip"]
+ has_strip = len(strip_acts) > 0
+ asserts.true(
+ env,
+ has_strip,
+ "expected to find an action with CcStrip mnemonic in:\n%s" % actions,
+ )
+ if has_strip:
+ strip_test_assert_flags(env, strip_acts[0], ctx.attr.strip_flags)
+ return analysistest.end(env)
+
+__cc_prebuilt_binary_stripping_flags_test = analysistest.make(
+ _cc_prebuilt_binary_stripping_flags_test_impl,
+ attrs = dict(
+ strip_flags = attr.string_list(),
+ ),
+)
+
+def _cc_prebuilt_binary_stripping_flags_test(**kwargs):
+ __cc_prebuilt_binary_stripping_flags_test(
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ **kwargs
+ )
+
+def _cc_prebuilt_binary_strip_keep_symbols_test():
+ name = "cc_prebuilt_binary_strip_keep_symbols"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ keep_symbols = True,
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_stripping_flags_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--keep-symbols",
+ "--add-gnu-debuglink",
+ ],
+ )
+ return test_name
+
+def _cc_prebuilt_binary_strip_keep_symbols_and_debug_frame_test():
+ name = "cc_prebuilt_binary_strip_keep_symbols_and_debug_frame"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ keep_symbols_and_debug_frame = True,
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_stripping_flags_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--keep-symbols-and-debug-frame",
+ "--add-gnu-debuglink",
+ ],
+ )
+ return test_name
+
+def _cc_prebuilt_binary_strip_keep_symbols_list_test():
+ name = "cc_prebuilt_binary_strip_keep_symbols_list"
+ symbols = ["foo", "bar", "baz"]
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ keep_symbols_list = symbols,
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_stripping_flags_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "-k" + ",".join(symbols),
+ "--add-gnu-debuglink",
+ ],
+ )
+ return test_name
+
+def _cc_prebuilt_binary_strip_all_test():
+ name = "cc_prebuilt_binary_strip_all"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ all = True,
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_stripping_flags_test(
+ name = test_name,
+ target_under_test = name,
+ strip_flags = [
+ "--add-gnu-debuglink",
+ ],
+ )
+ return test_name
+
+def _cc_prebuilt_binary_no_stripping_action_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ mnemonics = [a.mnemonic for a in actions]
+ has_strip = "CcStrip" in mnemonics
+ asserts.false(
+ env,
+ has_strip,
+ "expected no action with CcStrip mnemonic in:\n%s" % actions,
+ )
+ return analysistest.end(env)
+
+__cc_prebuilt_binary_no_stripping_action_test = analysistest.make(
+ _cc_prebuilt_binary_no_stripping_action_test_impl,
+)
+
+def _cc_prebuilt_binary_no_stripping_action_test(**kwargs):
+ __cc_prebuilt_binary_no_stripping_action_test(
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ **kwargs
+ )
+
+def _cc_prebuilt_binary_strip_none_test():
+ name = "cc_prebuilt_binary_strip_none"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ none = True,
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_no_stripping_action_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _cc_prebuilt_binary_host_test(**kwargs):
+ __cc_prebuilt_binary_no_stripping_action_test(
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:android": ["@platforms//:incompatible"],
+ "//conditions:default": [],
+ }),
+ **kwargs
+ )
+
+def _cc_prebuilt_binary_no_strip_host_test():
+ name = "cc_prebuilt_binary_no_strip_host"
+ cc_prebuilt_binary(
+ name = name,
+ src = "bin",
+ tags = ["manual"],
+ )
+ test_name = name + "_test"
+ _cc_prebuilt_binary_host_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def cc_prebuilt_binary_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_prebuilt_binary_simple_test(),
+ _cc_prebuilt_binary_strip_none_test(),
+ _cc_prebuilt_binary_strip_keep_symbols_test(),
+ _cc_prebuilt_binary_strip_keep_symbols_and_debug_frame_test(),
+ _cc_prebuilt_binary_strip_keep_symbols_list_test(),
+ _cc_prebuilt_binary_strip_all_test(),
+ _cc_prebuilt_binary_no_strip_host_test(),
+ ],
+ )
diff --git a/rules/cc/cc_prebuilt_library_shared.bzl b/rules/cc/cc_prebuilt_library_shared.bzl
new file mode 100644
index 00000000..25144f86
--- /dev/null
+++ b/rules/cc/cc_prebuilt_library_shared.bzl
@@ -0,0 +1,50 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load(":cc_library_common.bzl", "create_cc_prebuilt_library_info")
+
+def _cc_prebuilt_library_shared_impl(ctx):
+ lib = ctx.file.shared_library
+ files = ctx.attr.shared_library.files if lib != None else None
+ cc_toolchain = find_cpp_toolchain(ctx)
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ )
+ cc_info = create_cc_prebuilt_library_info(
+ ctx,
+ cc_common.create_library_to_link(
+ actions = ctx.actions,
+ dynamic_library = lib,
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ ) if lib != None else None,
+ )
+ return [DefaultInfo(files = files), cc_info]
+
+cc_prebuilt_library_shared = rule(
+ implementation = _cc_prebuilt_library_shared_impl,
+ attrs = dict(
+ shared_library = attr.label(
+ providers = [CcInfo],
+ allow_single_file = True,
+ ),
+ export_includes = attr.string_list(),
+ export_system_includes = attr.string_list(),
+ ),
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+ fragments = ["cpp"],
+ provides = [CcInfo],
+)
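A hypothetical usage of cc_prebuilt_library_shared, assuming a prebuilt .so and an exported include directory, might be:

    load("//build/bazel/rules/cc:cc_prebuilt_library_shared.bzl", "cc_prebuilt_library_shared")

    cc_prebuilt_library_shared(
        name = "libfoo_prebuilt",        # hypothetical target name
        shared_library = "libfoo.so",    # hypothetical prebuilt shared library
        export_includes = ["include"],   # hypothetical exported include directory
    )

When shared_library is left unset, the rule still provides a library-less CcInfo, which the default_library_field test below relies on.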
diff --git a/rules/cc/cc_prebuilt_library_shared_test.bzl b/rules/cc/cc_prebuilt_library_shared_test.bzl
new file mode 100644
index 00000000..8fe63f85
--- /dev/null
+++ b/rules/cc/cc_prebuilt_library_shared_test.bzl
@@ -0,0 +1,140 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_prebuilt_library_shared.bzl", "cc_prebuilt_library_shared")
+load("//build/bazel/rules/test_common:paths.bzl", "get_output_and_package_dir_based_path")
+
+_fake_expected_lib = "{[()]}"
+
+def _cc_prebuilt_library_shared_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ expected_lib = ctx.attr.expected_lib
+ cc_info = target[CcInfo]
+ compilation_context = cc_info.compilation_context
+ linker_inputs = cc_info.linking_context.linker_inputs.to_list()
+ libs_to_link = []
+ for lib in linker_inputs:
+ libs_to_link += lib.libraries
+
+ if expected_lib == _fake_expected_lib:
+ asserts.true(
+ env,
+ len(libs_to_link) == 0,
+ "\nExpected the shared library to be empty, but instead got:\n\t%s\n" % str(libs_to_link),
+ )
+ else:
+ asserts.true(
+ env,
+ expected_lib in [lib.dynamic_library.basename for lib in libs_to_link],
+ "\nExpected the target to include the shared library %s; but instead got:\n\t%s\n" % (expected_lib, libs_to_link),
+ )
+
+ # Checking for the expected {,system_}includes
+ assert_template = "\nExpected the %s for " + expected_lib + " to be:\n\t%s\n, but instead got:\n\t%s\n"
+ expand_paths = lambda paths: [get_output_and_package_dir_based_path(env, p) for p in paths]
+ expected_includes = expand_paths(ctx.attr.expected_includes)
+ expected_system_includes = expand_paths(ctx.attr.expected_system_includes)
+
+ includes = compilation_context.includes.to_list()
+ for include in expected_includes:
+ asserts.true(env, include in includes, assert_template % ("includes", expected_includes, includes))
+
+ system_includes = compilation_context.system_includes.to_list()
+ for include in expected_system_includes:
+ asserts.true(env, include in system_includes, assert_template % ("system_includes", expected_system_includes, system_includes))
+
+ return analysistest.end(env)
+
+_cc_prebuilt_library_shared_test = analysistest.make(
+ _cc_prebuilt_library_shared_test_impl,
+ attrs = dict(
+ expected_lib = attr.string(default = _fake_expected_lib),
+ expected_includes = attr.string_list(),
+ expected_system_includes = attr.string_list(),
+ ),
+)
+
+def _cc_prebuilt_library_shared_simple():
+ name = "_cc_prebuilt_library_shared_simple"
+ test_name = name + "_test"
+ lib = "libfoo.so"
+
+ cc_prebuilt_library_shared(
+ name = name,
+ shared_library = lib,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_shared_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib,
+ )
+
+ return test_name
+
+def _cc_prebuilt_library_shared_default_library_field():
+ name = "_cc_prebuilt_library_shared_default_library_field"
+ test_name = name + "_test"
+ lib = None
+
+ cc_prebuilt_library_shared(
+ name = name,
+ shared_library = lib,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_shared_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib, # We expect the default of _fake_expected_lib
+ )
+
+ return test_name
+
+def _cc_prebuilt_library_shared_has_all_includes():
+ name = "_cc_prebuilt_library_shared_has_all_includes"
+ test_name = name + "_test"
+ lib = "libfoo.so"
+ includes = ["includes"]
+ system_includes = ["system_includes"]
+
+ cc_prebuilt_library_shared(
+ name = name,
+ shared_library = lib,
+ export_includes = includes,
+ export_system_includes = system_includes,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_shared_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib,
+ expected_includes = includes,
+ expected_system_includes = system_includes,
+ )
+
+ return test_name
+
+def cc_prebuilt_library_shared_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_prebuilt_library_shared_simple(),
+ _cc_prebuilt_library_shared_default_library_field(),
+ _cc_prebuilt_library_shared_has_all_includes(),
+ ],
+ )
diff --git a/rules/cc/cc_prebuilt_library_static.bzl b/rules/cc/cc_prebuilt_library_static.bzl
new file mode 100644
index 00000000..1d84adac
--- /dev/null
+++ b/rules/cc/cc_prebuilt_library_static.bzl
@@ -0,0 +1,52 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load(":cc_library_common.bzl", "create_cc_prebuilt_library_info")
+
+def _cc_prebuilt_library_static_impl(ctx):
+ lib = ctx.file.static_library
+ files = ctx.attr.static_library.files if lib != None else None
+ cc_toolchain = find_cpp_toolchain(ctx)
+ feature_configuration = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = cc_toolchain,
+ )
+ cc_info = create_cc_prebuilt_library_info(
+ ctx,
+ cc_common.create_library_to_link(
+ actions = ctx.actions,
+ static_library = lib,
+ alwayslink = ctx.attr.alwayslink,
+ feature_configuration = feature_configuration,
+ cc_toolchain = cc_toolchain,
+ ) if lib != None else None,
+ )
+ return [DefaultInfo(files = files), cc_info]
+
+cc_prebuilt_library_static = rule(
+ implementation = _cc_prebuilt_library_static_impl,
+ attrs = dict(
+ static_library = attr.label(
+ providers = [CcInfo],
+ allow_single_file = True,
+ ),
+ alwayslink = attr.bool(default = False),
+ export_includes = attr.string_list(),
+ export_system_includes = attr.string_list(),
+ ),
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+ fragments = ["cpp"],
+ provides = [CcInfo],
+)
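Similarly, a hypothetical cc_prebuilt_library_static usage with alwayslink enabled could be sketched as:

    load("//build/bazel/rules/cc:cc_prebuilt_library_static.bzl", "cc_prebuilt_library_static")

    cc_prebuilt_library_static(
        name = "libbar_prebuilt",              # hypothetical target name
        static_library = "libbar.a",           # hypothetical prebuilt archive
        alwayslink = True,                     # link every object in the archive unconditionally
        export_system_includes = ["include"],  # hypothetical exported system include directory
    )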
diff --git a/rules/cc/cc_prebuilt_library_static_test.bzl b/rules/cc/cc_prebuilt_library_static_test.bzl
new file mode 100644
index 00000000..326e1953
--- /dev/null
+++ b/rules/cc/cc_prebuilt_library_static_test.bzl
@@ -0,0 +1,208 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_prebuilt_library_static.bzl", "cc_prebuilt_library_static")
+load("//build/bazel/rules/test_common:paths.bzl", "get_output_and_package_dir_based_path")
+
+_fake_expected_lib = "{[()]}"
+
+def _cc_prebuilt_library_static_alwayslink_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ expected_lib = ctx.attr.expected_lib
+ alwayslink = ctx.attr.alwayslink
+ cc_info = target[CcInfo]
+ linker_inputs = cc_info.linking_context.linker_inputs.to_list()
+ libs_to_link = []
+ for l in linker_inputs:
+ libs_to_link += l.libraries
+
+ has_lib = False
+ has_alwayslink = False
+ libs = {}
+ for lib_to_link in libs_to_link:
+ lib = lib_to_link.static_library.basename
+ libs[lib_to_link.static_library] = lib_to_link.alwayslink
+ if lib == expected_lib:
+ has_lib = True
+ has_alwayslink = lib_to_link.alwayslink
+ if has_alwayslink:
+ break
+ asserts.true(
+ env,
+ has_lib,
+ "\nExpected to find the static library `%s` in the linker_input:\n\t%s" % (expected_lib, str(libs)),
+ )
+ asserts.equals(
+ env,
+ has_alwayslink,
+ alwayslink,
+ "\nExpected to find the static library `%s` unconditionally in the linker_input, with alwayslink set to %s:\n\t%s" % (expected_lib, alwayslink, str(libs)),
+ )
+
+ return analysistest.end(env)
+
+_cc_prebuilt_library_static_alwayslink_test = analysistest.make(
+ _cc_prebuilt_library_static_alwayslink_test_impl,
+ attrs = {
+ "expected_lib": attr.string(),
+ "alwayslink": attr.bool(),
+ },
+)
+
+def _cc_prebuilt_library_static_alwayslink_lib(alwayslink):
+ name = "_cc_prebuilt_library_static_alwayslink_lib_" + str(alwayslink)
+ test_name = name + "_test"
+ lib = "libfoo.a"
+
+ cc_prebuilt_library_static(
+ name = name,
+ static_library = lib,
+ alwayslink = alwayslink,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_static_alwayslink_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib,
+ alwayslink = alwayslink,
+ )
+
+ return test_name
+
+def _cc_prebuilt_library_static_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ expected_lib = ctx.attr.expected_lib
+ cc_info = target[CcInfo]
+ compilation_context = cc_info.compilation_context
+ linker_inputs = cc_info.linking_context.linker_inputs.to_list()
+ libs_to_link = []
+ for lib in linker_inputs:
+ libs_to_link += lib.libraries
+
+ if expected_lib == _fake_expected_lib:
+ asserts.true(
+ env,
+ len(libs_to_link) == 0,
+ "\nExpected the static library to be empty, but instead got:\n\t%s\n" % str(libs_to_link),
+ )
+ else:
+ asserts.true(
+ env,
+ expected_lib in [lib.static_library.basename for lib in libs_to_link],
+ "\nExpected the target to include the static library %s; but instead got:\n\t%s\n" % (expected_lib, libs_to_link),
+ )
+
+ # Checking for the expected {,system_}includes
+ assert_template = "\nExpected the %s for " + expected_lib + " to be:\n\t%s\n, but instead got:\n\t%s\n"
+ expand_paths = lambda paths: [get_output_and_package_dir_based_path(env, p) for p in paths]
+ expected_includes = expand_paths(ctx.attr.expected_includes)
+ expected_system_includes = expand_paths(ctx.attr.expected_system_includes)
+
+ includes = compilation_context.includes.to_list()
+ for include in expected_includes:
+ asserts.true(env, include in includes, assert_template % ("includes", expected_includes, includes))
+
+ system_includes = compilation_context.system_includes.to_list()
+ for include in expected_system_includes:
+ asserts.true(env, include in system_includes, assert_template % ("system_includes", expected_system_includes, system_includes))
+
+ return analysistest.end(env)
+
+_cc_prebuilt_library_static_test = analysistest.make(
+ _cc_prebuilt_library_static_test_impl,
+ attrs = dict(
+ expected_lib = attr.string(default = _fake_expected_lib),
+ expected_includes = attr.string_list(),
+ expected_system_includes = attr.string_list(),
+ ),
+)
+
+def _cc_prebuilt_library_static_simple():
+ name = "_cc_prebuilt_library_static_simple"
+ test_name = name + "_test"
+ lib = "libfoo.a"
+
+ cc_prebuilt_library_static(
+ name = name,
+ static_library = lib,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_static_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib,
+ )
+
+ return test_name
+
+def _cc_prebuilt_library_static_None():
+ name = "_cc_prebuilt_library_static_None"
+ test_name = name + "_test"
+ lib = None
+
+ cc_prebuilt_library_static(
+ name = name,
+ static_library = lib,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_static_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib, # We expect the default of _fake_expected_lib
+ )
+
+ return test_name
+
+def _cc_prebuilt_library_static_has_all_includes():
+ name = "_cc_prebuilt_library_static_has_all_includes"
+ test_name = name + "_test"
+ lib = "libfoo.a"
+ includes = ["includes"]
+ system_includes = ["system_includes"]
+
+ cc_prebuilt_library_static(
+ name = name,
+ static_library = lib,
+ export_includes = includes,
+ export_system_includes = system_includes,
+ tags = ["manual"],
+ )
+ _cc_prebuilt_library_static_test(
+ name = test_name,
+ target_under_test = name,
+ expected_lib = lib,
+ expected_includes = includes,
+ expected_system_includes = system_includes,
+ )
+
+ return test_name
+
+# TODO: Add a test for alwayslink = False
+
+def cc_prebuilt_library_static_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _cc_prebuilt_library_static_simple(),
+ _cc_prebuilt_library_static_None(),
+ _cc_prebuilt_library_static_alwayslink_lib(True),
+ _cc_prebuilt_library_static_alwayslink_lib(False),
+ _cc_prebuilt_library_static_has_all_includes(),
+ ],
+ )
diff --git a/rules/cc/cc_prebuilt_object.bzl b/rules/cc/cc_prebuilt_object.bzl
new file mode 100644
index 00000000..f20b2c30
--- /dev/null
+++ b/rules/cc/cc_prebuilt_object.bzl
@@ -0,0 +1,35 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":cc_object.bzl", "CcObjectInfo")
+
+def _cc_prebuilt_object(ctx):
+ objects_to_link = cc_common.create_compilation_outputs(objects = ctx.attr.src.files)
+ return [
+ DefaultInfo(files = ctx.attr.src.files),
+ CcInfo(),
+ CcObjectInfo(objects = objects_to_link),
+ ]
+
+cc_prebuilt_object = rule(
+ implementation = _cc_prebuilt_object,
+ attrs = dict(
+ src = attr.label(
+ mandatory = True,
+ allow_single_file = [".o"],
+ ),
+ _platform_utils = attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ ),
+ toolchains = ["//prebuilts/clang/host/linux-x86:nocrt_toolchain"],
+)
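A hypothetical cc_prebuilt_object usage, wrapping a single checked-in .o file so it can be consumed wherever a CcObjectInfo provider is expected, might be:

    load("//build/bazel/rules/cc:cc_prebuilt_object.bzl", "cc_prebuilt_object")

    cc_prebuilt_object(
        name = "crtbegin_prebuilt",  # hypothetical target name
        src = "crtbegin.o",          # must be a single .o file
    )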
diff --git a/rules/cc/cc_proto.bzl b/rules/cc/cc_proto.bzl
index b72379d9..80a5b446 100644
--- a/rules/cc/cc_proto.bzl
+++ b/rules/cc/cc_proto.bzl
@@ -1,28 +1,25 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:paths.bzl", "paths")
load("//build/bazel/rules:proto_file_utils.bzl", "proto_file_utils")
load(":cc_library_common.bzl", "create_ccinfo_for_includes")
load(":cc_library_static.bzl", "cc_library_static")
-load("@bazel_skylib//lib:paths.bzl", "paths")
-
-CcProtoGenInfo = provider(fields = ["headers", "sources"])
_SOURCES_KEY = "sources"
_HEADERS_KEY = "headers"
+PROTO_GEN_NAME_SUFFIX = "_proto_gen"
def _cc_proto_sources_gen_rule_impl(ctx):
out_flags = []
@@ -35,34 +32,33 @@ def _cc_proto_sources_gen_rule_impl(ctx):
if ctx.attr.out_format:
out_flags.append(ctx.attr.out_format)
-
srcs = []
hdrs = []
includes = []
+ proto_infos = []
+
for dep in ctx.attr.deps:
proto_info = dep[ProtoInfo]
+ proto_infos.append(proto_info)
if proto_info.proto_source_root == ".":
includes.append(paths.join(ctx.label.name, ctx.label.package))
includes.append(ctx.label.name)
- outs = _generate_cc_proto_action(
- proto_info = proto_info,
- protoc = ctx.executable._protoc,
- ctx = ctx,
- is_cc = True,
- out_flags = out_flags,
- plugin_executable = plugin_executable,
- out_arg = out_arg,
- )
- srcs.extend(outs[_SOURCES_KEY])
- hdrs.extend(outs[_HEADERS_KEY])
+
+ outs = _generate_cc_proto_action(
+ proto_infos = proto_infos,
+ protoc = ctx.executable._protoc,
+ ctx = ctx,
+ is_cc = True,
+ out_flags = out_flags,
+ plugin_executable = plugin_executable,
+ out_arg = out_arg,
+ )
+ srcs.extend(outs[_SOURCES_KEY])
+ hdrs.extend(outs[_HEADERS_KEY])
return [
DefaultInfo(files = depset(direct = srcs + hdrs)),
- create_ccinfo_for_includes(ctx, includes = includes),
- CcProtoGenInfo(
- headers = hdrs,
- sources = srcs,
- ),
+ create_ccinfo_for_includes(ctx, hdrs = hdrs, includes = includes),
]
_cc_proto_sources_gen = rule(
@@ -92,7 +88,7 @@ If not provided, defaults to full protos.
),
},
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
- provides = [CcInfo, CcProtoGenInfo],
+ provides = [CcInfo],
)
def _src_extension(is_cc):
@@ -101,7 +97,7 @@ def _src_extension(is_cc):
return "c"
def _generate_cc_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
plugin_executable,
@@ -113,7 +109,7 @@ def _generate_cc_proto_action(
_HEADERS_KEY: ".pb.h",
}
return proto_file_utils.generate_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
type_dictionary,
@@ -123,99 +119,69 @@ def _generate_cc_proto_action(
mnemonic = "CcProtoGen",
)
-def _cc_proto_sources_impl(ctx):
- srcs = ctx.attr.src[CcProtoGenInfo].sources
- return [
- DefaultInfo(files = depset(direct = srcs)),
- ]
-
-_cc_proto_sources = rule(
- implementation = _cc_proto_sources_impl,
- attrs = {
- "src": attr.label(
- providers = [CcProtoGenInfo],
- ),
- },
-)
-
-def _cc_proto_headers_impl(ctx):
- hdrs = ctx.attr.src[CcProtoGenInfo].headers
- return [
- DefaultInfo(files = depset(direct = hdrs)),
- ]
-
-_cc_proto_headers = rule(
- implementation = _cc_proto_headers_impl,
- attrs = {
- "src": attr.label(
- providers = [CcProtoGenInfo],
- ),
- },
-)
-
def _cc_proto_library(
name,
deps = [],
plugin = None,
+ tags = [],
target_compatible_with = [],
out_format = None,
- proto_dep = None):
- proto_lib_name = name + "_proto_gen"
- srcs_name = name + "_proto_sources"
- hdrs_name = name + "_proto_headers"
+ proto_dep = None,
+ **kwargs):
+ proto_lib_name = name + PROTO_GEN_NAME_SUFFIX
_cc_proto_sources_gen(
name = proto_lib_name,
deps = deps,
plugin = plugin,
out_format = out_format,
- )
-
- _cc_proto_sources(
- name = srcs_name,
- src = proto_lib_name,
- )
-
- _cc_proto_headers(
- name = hdrs_name,
- src = proto_lib_name,
+ tags = ["manual"],
)
cc_library_static(
name = name,
- srcs = [":" + srcs_name],
- hdrs = [":" + hdrs_name],
+ srcs = [":" + proto_lib_name],
deps = [
proto_lib_name,
proto_dep,
],
local_includes = ["."],
+ tags = tags,
target_compatible_with = target_compatible_with,
+ **kwargs
)
def cc_lite_proto_library(
name,
deps = [],
plugin = None,
- target_compatible_with = []):
+ tags = [],
+ target_compatible_with = [],
+ **kwargs):
_cc_proto_library(
name,
deps = deps,
plugin = plugin,
+ tags = tags,
target_compatible_with = target_compatible_with,
out_format = "lite",
proto_dep = "//external/protobuf:libprotobuf-cpp-lite",
+ **kwargs
)
def cc_proto_library(
name,
deps = [],
plugin = None,
- target_compatible_with = []):
+ tags = [],
+ target_compatible_with = [],
+ **kwargs):
_cc_proto_library(
name,
deps = deps,
plugin = plugin,
+ tags = tags,
target_compatible_with = target_compatible_with,
proto_dep = "//external/protobuf:libprotobuf-cpp-full",
+ **kwargs
)
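With the CcProtoGenInfo indirection removed, _cc_proto_library now creates only the <name>_proto_gen target plus the wrapping cc_library_static. A hypothetical BUILD.bazel usage could look like the sketch below (names are illustrative):

    load("//build/bazel/rules/cc:cc_proto.bzl", "cc_lite_proto_library")

    proto_library(
        name = "foo_proto",      # hypothetical proto_library
        srcs = ["foo.proto"],    # hypothetical .proto source
    )

    cc_lite_proto_library(
        name = "foo_cc_proto",   # also creates foo_cc_proto_proto_gen internally
        deps = [":foo_proto"],
    )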
diff --git a/rules/cc/cc_proto_test.bzl b/rules/cc/cc_proto_test.bzl
new file mode 100644
index 00000000..490a1fbb
--- /dev/null
+++ b/rules/cc/cc_proto_test.bzl
@@ -0,0 +1,449 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:sets.bzl", "sets")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":cc_proto.bzl", "PROTO_GEN_NAME_SUFFIX", "cc_proto_library")
+
+PROTO_GEN = "external/protobuf/aprotoc"
+VIRTUAL_IMPORT = "_virtual_imports"
+RUNFILES = "_middlemen/external_Sprotobuf_Saprotoc-runfiles"
+
+GEN_SUFFIX = [
+ ".pb.h",
+ ".pb.cc",
+]
+
+def _get_search_paths(action):
+ cmd = action.argv
+ search_paths = sets.make()
+ cmd_len = len(cmd)
+ for i in range(cmd_len):
+ if cmd[i].startswith("-I"):
+ sets.insert(search_paths, cmd[i].lstrip("- I"))
+
+ return search_paths
+
+def _proto_code_gen_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actions = analysistest.target_actions(env)
+ package_root = ctx.label.package
+ local_file_output_path = paths.join(
+ package_root,
+ target_under_test.label.name,
+ package_root,
+ )
+
+ input_files = [
+ ctx.attr.local_file_path,
+ ctx.attr.external_file_path,
+ ctx.attr.deps_file_path,
+ ]
+
+ output_files = [
+ ctx.attr.local_file_path,
+ ctx.attr.external_file_path,
+ ]
+
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "Proto gen action not found: %s" % actions,
+ )
+
+ action = actions[0]
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [paths.join(package_root, file) for file in input_files] + [
+ PROTO_GEN,
+ RUNFILES,
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.inputs.to_list()
+ ]),
+ )
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [
+ paths.join(
+ local_file_output_path,
+ paths.replace_extension(file, ext),
+ )
+ for ext in GEN_SUFFIX
+ for file in output_files
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.outputs.to_list()
+ ]),
+ )
+
+ search_paths = _get_search_paths(action)
+
+ asserts.equals(
+ env,
+ expected = sets.make(
+ ["."] +
+ [paths.join(package_root, f) + "=" + paths.join(package_root, f) for f in input_files],
+ ),
+ actual = search_paths,
+ )
+
+ return analysistest.end(env)
+
+proto_code_gen_test = analysistest.make(
+ _proto_code_gen_test_impl,
+ attrs = {
+ "local_file_path": attr.string(),
+ "deps_file_path": attr.string(),
+ "external_file_path": attr.string(),
+ },
+)
+
+def _test_proto_code_gen():
+ test_name = "proto_code_gen_test"
+ local_file_path = "local/proto_local.proto"
+ external_file_path = "external/proto_external.proto"
+ deps_file_path = "deps/proto_deps.proto"
+ external_proto_name = test_name + "_external_proto"
+ deps_proto_name = test_name + "_deps_proto"
+ local_proto_name = test_name + "_proto"
+ cc_name = test_name + "_cc_proto"
+
+ native.proto_library(
+ name = external_proto_name,
+ srcs = [external_file_path],
+ tags = ["manual"],
+ )
+
+ native.proto_library(
+ name = deps_proto_name,
+ srcs = [deps_file_path],
+ tags = ["manual"],
+ )
+
+ native.proto_library(
+ name = local_proto_name,
+ srcs = [local_file_path],
+ deps = [":" + deps_proto_name],
+ tags = ["manual"],
+ )
+
+ cc_proto_library(
+ name = cc_name,
+ deps = [
+ ":" + local_proto_name,
+ ":" + external_proto_name,
+ ],
+ tags = ["manual"],
+ )
+
+ proto_code_gen_test(
+ name = test_name,
+ target_under_test = cc_name + PROTO_GEN_NAME_SUFFIX,
+ local_file_path = local_file_path,
+ deps_file_path = deps_file_path,
+ external_file_path = external_file_path,
+ )
+
+ return test_name
+
+def _proto_strip_import_prefix_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actions = analysistest.target_actions(env)
+ package_root = ctx.label.package
+
+ # strip the proto file path, src/stripped/stripped.proto -> stripped/stripped.proto
+ stripped_file_name = paths.relativize(ctx.attr.stripped_file_name, ctx.attr.strip_import_prefix)
+ stripped_file_input_path = paths.join(
+ package_root,
+ VIRTUAL_IMPORT,
+ ctx.attr.stripped_proto_name,
+ )
+ stripped_file_input_full_path = paths.join(
+ stripped_file_input_path,
+ stripped_file_name,
+ )
+ unstripped_file_output_path = paths.join(
+ package_root,
+ target_under_test.label.name,
+ package_root,
+ )
+ stripped_file_output_path = paths.join(
+ unstripped_file_output_path,
+ VIRTUAL_IMPORT,
+ ctx.attr.stripped_proto_name,
+ )
+
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "Proto gen action not found: %s" % actions,
+ )
+
+ action = actions[0]
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [
+ paths.join(package_root, ctx.attr.unstripped_file_name),
+ stripped_file_input_full_path,
+ PROTO_GEN,
+ RUNFILES,
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.inputs.to_list()
+ ]),
+ )
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [
+ paths.join(
+ unstripped_file_output_path,
+ paths.replace_extension(ctx.attr.unstripped_file_name, ext),
+ )
+ for ext in GEN_SUFFIX
+ ] +
+ [
+ paths.join(
+ stripped_file_output_path,
+ paths.replace_extension(stripped_file_name, ext),
+ )
+ for ext in GEN_SUFFIX
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.outputs.to_list()
+ ]),
+ )
+
+ search_paths = _get_search_paths(action)
+
+ asserts.equals(
+ env,
+ expected = sets.make([
+ ".",
+ paths.join(package_root, ctx.attr.unstripped_file_name) + "=" + paths.join(package_root, ctx.attr.unstripped_file_name),
+ stripped_file_input_full_path + "=" +
+ paths.join(
+ ctx.genfiles_dir.path,
+ stripped_file_input_full_path,
+ ),
+ paths.join(
+ ctx.genfiles_dir.path,
+ stripped_file_input_path,
+ ),
+ ]),
+ actual = search_paths,
+ )
+
+ return analysistest.end(env)
+
+proto_strip_import_prefix_test = analysistest.make(
+ _proto_strip_import_prefix_test_impl,
+ attrs = {
+ "stripped_proto_name": attr.string(),
+ "stripped_file_name": attr.string(),
+ "unstripped_file_name": attr.string(),
+ "strip_import_prefix": attr.string(),
+ },
+)
+
+def _test_proto_strip_import_prefix():
+ test_name = "proto_strip_import_prefix_test"
+ unstripped_proto_name = test_name + "_unstripped_proto"
+ stripped_proto_name = test_name + "_stripped_proto"
+ unstripped_file_name = "unstripped/unstripped.proto"
+ stripped_file_name = "src/stripped/stripped.proto"
+ cc_name = test_name + "_cc_proto"
+ strip_import_prefix = "src"
+
+ native.proto_library(
+ name = unstripped_proto_name,
+ srcs = [unstripped_file_name],
+ tags = ["manual"],
+ )
+
+ native.proto_library(
+ name = stripped_proto_name,
+ srcs = [stripped_file_name],
+ strip_import_prefix = strip_import_prefix,
+ tags = ["manual"],
+ )
+
+ cc_proto_library(
+ name = cc_name,
+ deps = [
+ ":" + stripped_proto_name,
+ ":" + unstripped_proto_name,
+ ],
+ tags = ["manual"],
+ )
+
+ proto_strip_import_prefix_test(
+ name = test_name,
+ target_under_test = cc_name + PROTO_GEN_NAME_SUFFIX,
+ stripped_proto_name = stripped_proto_name,
+ stripped_file_name = stripped_file_name,
+ unstripped_file_name = unstripped_file_name,
+ strip_import_prefix = strip_import_prefix,
+ )
+
+ return test_name
+
+def _proto_with_external_packages_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actions = analysistest.target_actions(env)
+ package_root = ctx.label.package
+ deps_file_path = ctx.attr.deps_file_path
+ external_file_path = ctx.attr.external_file_path
+ local_file_path = ctx.attr.local_file_path
+
+ asserts.true(
+ env,
+ len(actions) == 1,
+ "Proto gen action not found: %s" % actions,
+ )
+
+ action = actions[0]
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [
+ paths.join(package_root, local_file_path),
+ deps_file_path,
+ external_file_path,
+ PROTO_GEN,
+ RUNFILES,
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.inputs.to_list()
+ ]),
+ )
+
+ asserts.set_equals(
+ env,
+ expected = sets.make(
+ [
+ paths.join(
+ package_root,
+ target_under_test.label.name,
+ package_root,
+ paths.replace_extension(local_file_path, ext),
+ )
+ for ext in GEN_SUFFIX
+ ] +
+ [
+ paths.join(
+ package_root,
+ target_under_test.label.name,
+ paths.replace_extension(external_file_path, ext),
+ )
+ for ext in GEN_SUFFIX
+ ],
+ ),
+ actual = sets.make([
+ file.short_path
+ for file in action.outputs.to_list()
+ ]),
+ )
+
+ search_paths = _get_search_paths(action)
+
+ asserts.equals(
+ env,
+ expected = sets.make([
+ ".",
+ paths.join(package_root, local_file_path) + "=" + paths.join(package_root, local_file_path),
+ deps_file_path + "=" + deps_file_path,
+ external_file_path + "=" + external_file_path,
+ ]),
+ actual = search_paths,
+ )
+
+ return analysistest.end(env)
+
+proto_with_external_packages_test = analysistest.make(
+ _proto_with_external_packages_test_impl,
+ attrs = {
+ "local_file_path": attr.string(),
+ "deps_file_path": attr.string(),
+ "external_file_path": attr.string(),
+ },
+)
+
+def _test_proto_with_external_packages():
+ test_name = "proto_with_external_packages_test"
+ proto_name = test_name + "_proto"
+ cc_name = test_name + "_cc_proto"
+ local_file_path = "local/proto_local.proto"
+ deps_file_path = "build/bazel/examples/cc/proto/deps/src/enums/proto_deps.proto"
+ external_file_path = "build/bazel/examples/cc/proto/external/src/enums/proto_external.proto"
+
+ native.proto_library(
+ name = proto_name,
+ srcs = [local_file_path],
+ deps = ["//build/bazel/examples/cc/proto/deps:deps_proto"],
+ tags = ["manual"],
+ )
+
+ cc_proto_library(
+ name = cc_name,
+ deps = [
+ ":" + proto_name,
+ "//build/bazel/examples/cc/proto/external:external_proto",
+ ],
+ tags = ["manual"],
+ )
+
+ proto_with_external_packages_test(
+ name = test_name,
+ target_under_test = cc_name + PROTO_GEN_NAME_SUFFIX,
+ local_file_path = local_file_path,
+ deps_file_path = deps_file_path,
+ external_file_path = external_file_path,
+ )
+
+ return test_name
+
+def cc_proto_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_proto_code_gen(),
+ _test_proto_strip_import_prefix(),
+ _test_proto_with_external_packages(),
+ ],
+ )
diff --git a/rules/cc/cc_stub_library.bzl b/rules/cc/cc_stub_library.bzl
index be18de13..f9c90623 100644
--- a/rules/cc/cc_stub_library.bzl
+++ b/rules/cc/cc_stub_library.bzl
@@ -12,8 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-load("@bazel_skylib//lib:dicts.bzl", "dicts")
-load("//build/bazel/platforms:rule_utilities.bzl", "ARCH_CONSTRAINT_ATTRS", "get_arch")
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+load("//build/bazel/rules/apis:api_surface.bzl", "MODULE_LIB_API")
+load("//build/bazel/rules/common:api.bzl", "api")
+load(":cc_library_headers.bzl", "cc_library_headers")
+load(":cc_library_shared.bzl", "CcStubLibrariesInfo")
+load(":cc_library_static.bzl", "cc_library_static")
+load(":fdo_profile_transitions.bzl", "drop_fdo_profile_transition")
+load(":generate_toc.bzl", "CcTocInfo", "generate_toc")
# This file contains the implementation for the cc_stub_library rule.
#
@@ -24,8 +30,8 @@ CcStubInfo = provider(
fields = {
"stub_map": "The .map file containing library symbols for the specific API version.",
"version": "The API version of this library.",
- "abi_symbol_list": "A plain-text list of all symbols of this library for the specific API version."
- }
+ "abi_symbol_list": "A plain-text list of all symbols of this library for the specific API version.",
+ },
)
def _cc_stub_gen_impl(ctx):
@@ -39,15 +45,14 @@ def _cc_stub_gen_impl(ctx):
outputs = [out_stub_c, out_stub_map, out_abi_symbol_list]
- arch = get_arch(ctx)
-
ndkstubgen_args = ctx.actions.args()
- ndkstubgen_args.add_all(["--arch", arch])
+ ndkstubgen_args.add_all(["--arch", platforms.get_target_arch(ctx.attr._platform_utils)])
ndkstubgen_args.add_all(["--api", ctx.attr.version])
ndkstubgen_args.add_all(["--api-map", ctx.file._api_levels_file])
+
# TODO(b/207812332): This always parses and builds the stub library as a dependency of an APEX. Parameterize this
# for non-APEX use cases.
- ndkstubgen_args.add_all(["--apex", ctx.file.symbol_file])
+ ndkstubgen_args.add_all(["--systemapi", "--apex", ctx.file.symbol_file])
ndkstubgen_args.add_all(outputs)
ctx.actions.run(
executable = ctx.executable._ndkstubgen,
@@ -68,19 +73,227 @@ def _cc_stub_gen_impl(ctx):
abi_symbol_list = out_abi_symbol_list,
version = ctx.attr.version,
),
+ OutputGroupInfo(
+ stub_map = [out_stub_map],
+ ),
]
cc_stub_gen = rule(
implementation = _cc_stub_gen_impl,
- attrs = dicts.add({
+ attrs = {
# Public attributes
"symbol_file": attr.label(mandatory = True, allow_single_file = [".map.txt"]),
"version": attr.string(mandatory = True, default = "current"),
# Private attributes
"_api_levels_file": attr.label(default = "@soong_injection//api_levels:api_levels.json", allow_single_file = True),
- # TODO(b/199038020): Use //build/soong/cc/ndkstubgen when py_runtime is set up on CI for hermetic python usage.
- # "_ndkstubgen": attr.label(default = "@make_injection//:host/linux-x86/bin/ndkstubgen", executable = True, cfg = "host", allow_single_file = True),
- "_ndkstubgen": attr.label(default = "//build/soong/cc/ndkstubgen", executable = True, cfg = "host"),
- }, ARCH_CONSTRAINT_ATTRS),
+ "_ndkstubgen": attr.label(default = "//build/soong/cc/ndkstubgen", executable = True, cfg = "exec"),
+ "_platform_utils": attr.label(default = Label("//build/bazel/platforms:platform_utils")),
+ },
+)
+
+CcStubLibrarySharedInfo = provider(
+ fields = {
+ "source_library_label": "The source library label of the cc_stub_library_shared",
+ },
+)
+
+# cc_stub_library_shared creates a cc_library_shared target, but using stub C source files generated
+# from a library's .map.txt files and ndkstubgen. The top level target returns the same
+# providers as a cc_library_shared, with the addition of a CcStubInfo
+# containing metadata files and versions of the stub library.
+def cc_stub_library_shared(name, stubs_symbol_file, version, export_includes, soname, source_library_label, deps, target_compatible_with, features, tags):
+ # Call ndkstubgen to generate the stub.c source file from a .map.txt file. These
+ # are accessible in the CcStubInfo provider of this target.
+ cc_stub_gen(
+ name = name + "_files",
+ symbol_file = stubs_symbol_file,
+ version = version,
+ target_compatible_with = target_compatible_with,
+ tags = ["manual"],
+ )
+
+ # Disable coverage and C runtime linking for stub libraries.
+ features = features + ["-coverage", "-link_crt"]
+
+ # The static library at the root of the stub shared library.
+ cc_library_static(
+ name = name + "_root",
+ srcs_c = [name + "_files"], # compile the stub.c file
+ copts = ["-fno-builtin"], # ignore conflicts with builtin function signatures
+ features = [
+ # Don't link the C runtime
+ "-link_crt",
+ # Enable the stub library compile flags
+ "stub_library",
+ # Disable all include-related features to avoid including any headers
+ # that may cause conflicting type errors with the symbols in the
+ # generated stubs source code.
+ # e.g.
+ # double acos(double); // in header
+ # void acos() {} // in the generated source code
+ # See https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/library.go;l=942-946;drc=d8a72d7dc91b2122b7b10b47b80cf2f7c65f9049
+ "-toolchain_include_directories",
+ "-includes",
+ "-include_paths",
+ ],
+ target_compatible_with = target_compatible_with,
+ stl = "none",
+ system_dynamic_deps = [],
+ tags = ["manual"],
+ export_includes = export_includes,
+ # deps is used to export includes that are specified using "header_libs" in Android.bp, e.g. "libc_headers".
+ deps = deps,
+ )
+
+ # Create a .so for the stub library. This library is self-contained, has
+ # no deps, and doesn't link against crt.
+ if len(soname) == 0:
+ fail("For stub libraries 'soname' is mandatory and must be same as the soname of its source library.")
+ soname_flag = "-Wl,-soname," + soname
+ stub_map = name + "_stub_map"
+ native.filegroup(
+ name = stub_map,
+ srcs = [name + "_files"],
+ output_group = "stub_map",
+ tags = ["manual"],
+ )
+ version_script_flag = "-Wl,--version-script,$(location %s)" % stub_map
+ native.cc_shared_library(
+ name = name + "_so",
+ additional_linker_inputs = [stub_map],
+ user_link_flags = [soname_flag, version_script_flag],
+ roots = [name + "_root"],
+ features = features + ["-link_crt"],
+ target_compatible_with = target_compatible_with,
+ tags = ["manual"],
+ )
+
+ # Create a target with CcSharedLibraryInfo and CcStubInfo providers.
+ _cc_stub_library_shared(
+ name = name,
+ stub_target = name + "_files",
+ library_target = name + "_so",
+ root = name + "_root",
+ source_library_label = source_library_label,
+ version = version,
+ tags = tags,
+ )
+
+def _cc_stub_library_shared_impl(ctx):
+ source_library_label = Label(ctx.attr.source_library_label)
+ api_level = str(api.parse_api_level_from_version(ctx.attr.version))
+ version_macro_name = "__" + source_library_label.name.upper() + "_API__=" + api_level
+ compilation_context = cc_common.create_compilation_context(
+ defines = depset([version_macro_name]),
+ )
+
+ cc_info = cc_common.merge_cc_infos(cc_infos = [
+ ctx.attr.root[CcInfo],
+ CcInfo(compilation_context = compilation_context),
+ ])
+ toc_info = generate_toc(ctx, ctx.attr.name, ctx.attr.library_target.files.to_list()[0])
+
+ return [
+ ctx.attr.library_target[DefaultInfo],
+ ctx.attr.library_target[CcSharedLibraryInfo],
+ ctx.attr.stub_target[CcStubInfo],
+ toc_info,
+ cc_info,
+ CcStubLibrariesInfo(has_stubs = True),
+ OutputGroupInfo(rule_impl_debug_files = depset()),
+ CcStubLibrarySharedInfo(source_library_label = source_library_label),
+ ]
+
+_cc_stub_library_shared = rule(
+ implementation = _cc_stub_library_shared_impl,
+ doc = "Top level rule to merge CcStubInfo and CcSharedLibraryInfo into a single target",
+ # Incoming transition to reset //command_line_option:fdo_profile to None
+ # to converge the configurations of the stub targets
+ cfg = drop_fdo_profile_transition,
+ attrs = {
+ "stub_target": attr.label(
+ providers = [CcStubInfo],
+ mandatory = True,
+ ),
+ "library_target": attr.label(
+ providers = [CcSharedLibraryInfo],
+ mandatory = True,
+ ),
+ "root": attr.label(
+ providers = [CcInfo],
+ mandatory = True,
+ ),
+ "source_library_label": attr.string(mandatory = True),
+ "version": attr.string(mandatory = True),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ "_toc_script": attr.label(
+ cfg = "exec",
+ executable = True,
+ allow_single_file = True,
+ default = "//build/soong/scripts:toc.sh",
+ ),
+ "_readelf": attr.label(
+ cfg = "exec",
+ executable = True,
+ allow_single_file = True,
+ default = "//prebuilts/clang/host/linux-x86:llvm-readelf",
+ ),
+ },
+ provides = [
+ CcSharedLibraryInfo,
+ CcTocInfo,
+ CcInfo,
+ CcStubInfo,
+ CcStubLibrariesInfo,
+ CcStubLibrarySharedInfo,
+ ],
)
+def cc_stub_suite(
+ name,
+ source_library_label,
+ versions,
+ symbol_file,
+ export_includes = [],
+ soname = "",
+ deps = [],
+ data = [], # @unused
+ target_compatible_with = [],
+ features = [],
+ tags = ["manual"]):
+ # Implicitly add "current" to versions. This copies the behavior from Soong (aosp/1641782)
+ if "current" not in versions:
+ versions.append("current")
+
+ for version in versions:
+ cc_stub_library_shared(
+ # Use - as the separator of name and version; "current" may be an actual version of some libraries.
+ name = name + "-" + version,
+ version = version,
+ stubs_symbol_file = symbol_file,
+ export_includes = export_includes,
+ soname = soname,
+ source_library_label = str(native.package_relative_label(source_library_label)),
+ deps = deps,
+ target_compatible_with = target_compatible_with,
+ features = features,
+ tags = tags,
+ )
+
+ # Create a header library target for this API surface (ModuleLibApi)
+ # The external @api_surfaces repository will contain an alias to this header library.
+ cc_library_headers(
+ name = "%s_%s_headers" % (name, MODULE_LIB_API),
+ export_includes = export_includes,
+ deps = deps, # Necessary for exporting headers that might exist in a different directory (e.g. libEGL)
+ )
+
+ native.alias(
+ # Use _ as the separator of name and version in the alias, so there is no
+ # duplicate name if "current" is one of the versions of a library.
+ name = name + "_current",
+ actual = name + "-" + "current",
+ tags = tags,
+ )
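Putting the pieces together, a hypothetical cc_stub_suite invocation for a shared library with a .map.txt symbol file might look like the sketch below; names and versions are illustrative:

    load("//build/bazel/rules/cc:cc_stub_library.bzl", "cc_stub_suite")

    cc_stub_suite(
        name = "libfoo_stub_libs",          # hypothetical
        source_library_label = ":libfoo",   # hypothetical source shared library
        soname = "libfoo.so",               # must match the source library's soname
        symbol_file = "libfoo.map.txt",     # hypothetical .map.txt symbol file
        versions = ["29", "30"],            # "current" is appended automatically
    )

This would create one stub library per version (libfoo_stub_libs-29, -30, -current), a libfoo_stub_libs_current alias, and a ModuleLibApi headers target for the external @api_surfaces repository to alias.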
diff --git a/rules/cc/cc_sysprop_library.bzl b/rules/cc/cc_sysprop_library.bzl
new file mode 100644
index 00000000..b400127d
--- /dev/null
+++ b/rules/cc/cc_sysprop_library.bzl
@@ -0,0 +1,162 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ "//build/bazel/rules/sysprop:sysprop_library.bzl",
+ "SyspropGenInfo",
+)
+load(
+ ":cc_library_common.bzl",
+ "create_ccinfo_for_includes",
+)
+load(":cc_library_shared.bzl", "cc_library_shared")
+load(":cc_library_static.bzl", "cc_library_static")
+
+# TODO(b/240466571): Implement determination of exported includes
+def _cc_gen_sysprop_impl(ctx):
+ outputs = []
+ output_headers = []
+ all_srcs = []
+ [
+ all_srcs.extend(src.files.to_list())
+ for src in ctx.attr.dep[SyspropGenInfo].srcs
+ ]
+ for src_file in all_srcs:
+ output_subpath = src_file.short_path.replace(
+ ctx.label.package + "/",
+ "",
+ 1,
+ )
+ action_outputs = []
+ args = ctx.actions.args()
+ output_src_file = ctx.actions.declare_file(
+ "sysprop/%s.cpp" % output_subpath,
+ )
+ action_outputs.append(output_src_file)
+
+ output_header_file = ctx.actions.declare_file(
+ "sysprop/include/%s.h" % output_subpath,
+ )
+ action_outputs.append(output_header_file)
+ output_headers.append(output_header_file)
+
+ # TODO(b/240466571): In some cases this header will be exported as part of
+ # the work tracked in the linked bug
+ output_public_header_file = ctx.actions.declare_file(
+ "sysprop/public/include/%s.h" % output_subpath,
+ )
+ action_outputs.append(output_public_header_file)
+
+ args.add("--header-dir", output_header_file.dirname)
+ args.add("--public-header-dir", output_public_header_file.dirname)
+ args.add("--source-dir", output_src_file.dirname)
+ args.add("--include-name", "%s.h" % output_subpath)
+ args.add(src_file.path)
+ ctx.actions.run(
+ executable = ctx.executable._sysprop_cpp,
+ arguments = [args],
+ inputs = [src_file],
+ outputs = action_outputs,
+ mnemonic = "syspropcc",
+ progress_message = "Generating sources from %s" % (
+ src_file.short_path,
+ ),
+ )
+ outputs.extend(action_outputs)
+ return [
+ DefaultInfo(files = depset(outputs)),
+ create_ccinfo_for_includes(
+ ctx = ctx,
+ hdrs = output_headers,
+ # TODO(b/240466571): This will be determined dynamically as part of
+ # the work tracked in the linked bug
+ includes = ["sysprop/include"],
+ ),
+ ]
+
+# Visible For Testing
+cc_gen_sysprop = rule(
+ implementation = _cc_gen_sysprop_impl,
+ doc = """compilation of sysprop sources into cpp sources and headers""",
+ attrs = {
+ "dep": attr.label(
+ providers = [SyspropGenInfo],
+ mandatory = True,
+ ),
+ "_sysprop_cpp": attr.label(
+ default = "//system/tools/sysprop:sysprop_cpp",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+ provides = [CcInfo],
+)
+
+def _cc_gen_sysprop_common(
+ name,
+ dep):
+ sysprop_gen_name = name + "_sysprop_gen"
+ cc_gen_sysprop(
+ name = sysprop_gen_name,
+ dep = dep,
+ tags = ["manual"],
+ )
+
+ return sysprop_gen_name
+
+sysprop_deps = select({
+ "//build/bazel/platforms/os:android": ["//system/libbase:libbase_headers"],
+ "//conditions:default": [
+ "//system/libbase:libbase_bp2build_cc_library_static",
+ "//system/logging/liblog:liblog_bp2build_cc_library_static",
+ ],
+})
+
+sysprop_dynamic_deps = select({
+ "//build/bazel/platforms/os:android": [
+ "//system/logging/liblog",
+ ],
+ "//conditions:default": [],
+})
+
+def cc_sysprop_library_shared(
+ name,
+ dep,
+ min_sdk_version = "",
+ **kwargs):
+ sysprop_gen_name = _cc_gen_sysprop_common(name, dep)
+
+ cc_library_shared(
+ name = name,
+ srcs = [":" + sysprop_gen_name],
+ min_sdk_version = min_sdk_version,
+ deps = sysprop_deps + [sysprop_gen_name],
+ dynamic_deps = sysprop_dynamic_deps,
+ **kwargs
+ )
+
+def cc_sysprop_library_static(
+ name,
+ dep,
+ min_sdk_version = "",
+ **kwargs):
+ sysprop_gen_name = _cc_gen_sysprop_common(name, dep)
+ cc_library_static(
+ name = name,
+ srcs = [":" + sysprop_gen_name],
+ min_sdk_version = min_sdk_version,
+ deps = sysprop_deps + [sysprop_gen_name],
+ dynamic_deps = sysprop_dynamic_deps,
+ **kwargs
+ )
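+
+# A minimal usage sketch (the target names below are hypothetical and only
+# illustrate the intended wiring): a sysprop_library target is wrapped by one
+# of the macros above to produce a C++ library, e.g. in a BUILD file:
+#
+#     sysprop_library(
+#         name = "foo_sysprop",
+#         srcs = ["foo.sysprop"],
+#     )
+#
+#     cc_sysprop_library_static(
+#         name = "libfoo_sysprop",
+#         dep = ":foo_sysprop",
+#         min_sdk_version = "29",
+#     )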
diff --git a/rules/cc/cc_sysprop_library_test.bzl b/rules/cc/cc_sysprop_library_test.bzl
new file mode 100644
index 00000000..546e4777
--- /dev/null
+++ b/rules/cc/cc_sysprop_library_test.bzl
@@ -0,0 +1,267 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/sysprop:sysprop_library.bzl", "sysprop_library")
+load("//build/bazel/rules/test_common:args.bzl", "get_arg_value")
+load(
+ "//build/bazel/rules/test_common:paths.bzl",
+ "get_output_and_package_dir_based_path",
+ "get_package_dir_based_path",
+)
+load(":cc_sysprop_library.bzl", "cc_gen_sysprop")
+
+def _provides_correct_outputs_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ target_under_test = analysistest.target_under_test(env)
+ output_files = target_under_test[DefaultInfo].files.to_list()
+ actual_output_strings = [
+ file.short_path
+ for file in output_files
+ ]
+
+ asserts.equals(
+ env,
+ 6,
+ len(output_files),
+ "List of outputs incorrect length",
+ )
+ for name in ["foo", "bar"]:
+ expected_cpp_path = get_package_dir_based_path(
+ env,
+ "sysprop/path/to/%s.sysprop.cpp" % (name),
+ )
+ asserts.true(
+ env,
+ expected_cpp_path in actual_output_strings,
+ ("Generated cpp source file for %s.sysprop not present in " +
+ "output.\n" +
+ "Expected Value: %s\n" +
+ "Actual output: %s") % (
+ name,
+ expected_cpp_path,
+ actual_output_strings,
+ ),
+ )
+ expected_header_path = get_package_dir_based_path(
+ env,
+ "sysprop/include/path/to/%s.sysprop.h" % (name),
+ )
+ asserts.true(
+ env,
+ expected_header_path in actual_output_strings,
+ ("Generated header source file for %s.sysprop not present in " +
+ "output.\n" +
+ "Expected Value: %s\n" +
+ "Actual output: %s") % (
+ name,
+ expected_header_path,
+ actual_output_strings,
+ ),
+ )
+ expected_public_header_path = get_package_dir_based_path(
+ env,
+ "sysprop/public/include/path/to/%s.sysprop.h" % (name),
+ )
+ asserts.true(
+ env,
+ expected_public_header_path in actual_output_strings,
+ ("Generated public header source file for %s.sysprop not present " +
+ "in output.\n" +
+ "Expected Value: %s\n" +
+ "Actual output: %s") % (
+ name,
+ expected_public_header_path,
+ actual_output_strings,
+ ),
+ )
+
+ return analysistest.end(env)
+
+provides_correct_outputs_test = analysistest.make(
+ _provides_correct_outputs_test_impl,
+)
+
+# TODO(b/240466571): This test will be notably different after implementing
+# exported include and header selection
+def _provides_correct_ccinfo_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ target_under_test = analysistest.target_under_test(env)
+ target_ccinfo = target_under_test[CcInfo]
+ actual_includes = target_ccinfo.compilation_context.includes.to_list()
+ actual_headers = target_ccinfo.compilation_context.headers.to_list()
+ expected_package_relative_include = get_package_dir_based_path(
+ env,
+ "sysprop/include",
+ )
+ asserts.true(
+ env,
+ expected_package_relative_include in actual_includes,
+ ("Package relative include incorrect or not found in CcInfo.\n" +
+ "Expected value: %s\n" +
+ "Actual output: %s") % (
+ expected_package_relative_include,
+ actual_includes,
+ ),
+ )
+ expected_root_relative_include = get_output_and_package_dir_based_path(
+ env,
+ "sysprop/include",
+ )
+ asserts.true(
+ env,
+ expected_root_relative_include in actual_includes,
+ ("Root relative include incorrect or not found in CcInfo.\n" +
+ "Expected value: %s\n" +
+ "Actual output: %s") % (
+ expected_root_relative_include,
+ actual_includes,
+ ),
+ )
+ asserts.true(
+ env,
+ len(actual_includes) == 2,
+ ("CcInfo includes should contain a package relative and a " +
+ "root-relative path and nothing else. Actual output: %s" % (
+ actual_includes,
+ )),
+ )
+ actual_header_strings = [
+ header.path
+ for header in actual_headers
+ ]
+ for name in ["foo", "bar"]:
+ asserts.true(
+ env,
+ get_output_and_package_dir_based_path(
+ env,
+ "sysprop/include/path/to/%s.sysprop.h" % (name),
+ ) in actual_header_strings,
+ ("Generated header file for %s.sysprop not present in CcInfo " +
+ "headers. Actual output: %s") % (name, actual_header_strings),
+ )
+ asserts.true(
+ env,
+ len(actual_headers) == 2,
+ ("List of generated headers in CcInfo was incorrect length. Should " +
+ "be exactly two. Actual output: %s" % actual_headers),
+ )
+
+ return analysistest.end(env)
+
+provides_correct_ccinfo_test = analysistest.make(
+ _provides_correct_ccinfo_test_impl,
+)
+
+def _correct_args_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(
+ env,
+ 2,
+ len(actions),
+ "Incorrect number of actions",
+ )
+ names = ["foo", "bar"]
+ for i in range(2):
+ name = names[i]
+ actual_args = actions[i].argv
+
+ asserts.equals(
+ env,
+ get_output_and_package_dir_based_path(env, "sysprop/include/path/to"),
+ get_arg_value(actual_args, "--header-dir"),
+ "--header-dir argument incorrect or not found.\n",
+ )
+ asserts.equals(
+ env,
+ get_output_and_package_dir_based_path(env, "sysprop/public/include/path/to"),
+ get_arg_value(actual_args, "--public-header-dir"),
+ "--public-header-dir argument incorrect or not found.\n",
+ )
+ asserts.equals(
+ env,
+ get_output_and_package_dir_based_path(env, "sysprop/path/to"),
+ get_arg_value(actual_args, "--source-dir"),
+ "--source-dir argument incorrect or not found.\n",
+ )
+ asserts.equals(
+ env,
+ "path/to/%s.sysprop.h" % name,
+ get_arg_value(actual_args, "--include-name"),
+ "--include-name argument incorrect or not found.\n",
+ )
+ expected_input = get_package_dir_based_path(
+ env,
+ "path/to/%s.sysprop" % name,
+ )
+ actual_cli_string = " ".join(actual_args)
+ asserts.true(
+ env,
+ expected_input in actual_cli_string,
+ ("Input argument not found.\n" +
+ "Expected Value: %s\n" +
+ "Command: %s") % (expected_input, actual_cli_string),
+ )
+
+ return analysistest.end(env)
+
+correct_args_test = analysistest.make(
+ _correct_args_test_impl,
+)
+
+def _create_test_targets(name, rule_func):
+ wrapper_name = name + "_wrapper"
+ test_name = name + "_test"
+ sysprop_library(
+ name = wrapper_name,
+ srcs = [
+ "path/to/foo.sysprop",
+ "path/to/bar.sysprop",
+ ],
+ tags = ["manual"],
+ )
+ cc_gen_sysprop(
+ name = name,
+ dep = ":" + wrapper_name,
+ tags = ["manual"],
+ )
+ rule_func(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def cc_gen_sysprop_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _create_test_targets(
+ "provides_correct_outputs",
+ provides_correct_outputs_test,
+ ),
+ _create_test_targets(
+ "provides_correct_ccinfo",
+ provides_correct_ccinfo_test,
+ ),
+ _create_test_targets(
+ "correct_args_test",
+ correct_args_test,
+ ),
+ ],
+ )
diff --git a/rules/cc/cc_test.bzl b/rules/cc/cc_test.bzl
new file mode 100644
index 00000000..7b4320a5
--- /dev/null
+++ b/rules/cc/cc_test.bzl
@@ -0,0 +1,180 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""cc_test macro for building native tests with Bazel."""
+
+load("//build/bazel/rules/cc:cc_library_common.bzl", "CcAndroidMkInfo")
+load("//build/bazel/rules/cc:stripped_cc_common.bzl", "CcUnstrippedInfo", "StrippedCcBinaryInfo")
+load("//build/bazel/rules/tradefed:tradefed.bzl", "tradefed_host_driven_test")
+load(":cc_binary.bzl", "cc_binary")
+
+# TODO(b/244559183): Keep this in sync with cc/test.go#linkerFlags
+_gtest_copts = select({
+ "//build/bazel/platforms/os:linux_glibc": ["-DGTEST_OS_LINUX"],
+ "//build/bazel/platforms/os:darwin": ["-DGTEST_OS_MAC"],
+ "//build/bazel/platforms/os:windows": ["-DGTEST_OS_WINDOWS"],
+ "//conditions:default": ["-DGTEST_OS_LINUX_ANDROID"],
+}) + select({
+ "//build/bazel/platforms/os:android": [],
+ "//conditions:default": ["-O0", "-g"], # here, default == host platform
+}) + [
+ "-DGTEST_HAS_STD_STRING",
+ "-Wno-unused-result", # TODO(b/244433518): Figure out why this is necessary in the bazel compile action.
+]
+
+_gtest_deps = [
+ "//external/googletest/googletest:libgtest_main",
+ "//external/googletest/googletest:libgtest",
+]
+
+_pass_through_providers = [
+ CcInfo,
+ InstrumentedFilesInfo,
+ DebugPackageInfo,
+ OutputGroupInfo,
+ StrippedCcBinaryInfo,
+ CcUnstrippedInfo,
+ CcAndroidMkInfo,
+]
+
+def cc_test(
+ name,
+ copts = [],
+ deps = [],
+ dynamic_deps = [],
+ gtest = True,
+ isolated = True, # TODO(b/244432609): currently no-op. @unused
+ tags = [],
+ tidy = None,
+ tidy_checks = None,
+ tidy_checks_as_errors = None,
+ tidy_flags = None,
+ tidy_disabled_srcs = None,
+ tidy_timeout_srcs = None,
+ test_config = None,
+ template_test_config = None,
+ template_configs = [],
+ template_install_base = None,
+ **kwargs):
+ # NOTE: Keep this in sync with cc/test.go#linkerDeps
+ if gtest:
+ # TODO(b/244433197): handle ctx.useSdk() && ctx.Device() case to link against the ndk variants of the gtest libs.
+ # TODO(b/244432609): handle isolated = True to link against libgtest_isolated_main and liblog (dynamically)
+ deps = deps + _gtest_deps
+ copts = copts + _gtest_copts
+
+ # A cc_test is essentially the same as a cc_binary. Let's reuse the
+ # implementation for now and factor the common bits out as necessary.
+ test_binary_name = name + "__test_binary"
+ cc_binary(
+ name = test_binary_name,
+ copts = copts,
+ deps = deps,
+ dynamic_deps = dynamic_deps,
+ generate_cc_test = True,
+ tidy = tidy,
+ tidy_checks = tidy_checks,
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_flags = tidy_flags,
+ tidy_disabled_srcs = tidy_disabled_srcs,
+ tidy_timeout_srcs = tidy_timeout_srcs,
+ tags = tags + ["manual"],
+ **kwargs
+ )
+
+    # Host-only test with no tradefed.
+    # Compatibility is left out for now so as not to break mixed builds,
+    # which break when modules are skipped with --config=android.
+ without_tradefed_test_name = name + "__without_tradefed_test"
+ cc_runner_test(
+ name = without_tradefed_test_name,
+ binary = test_binary_name,
+ test = test_binary_name,
+ tags = ["manual"],
+ )
+
+ # Tradefed host driven test
+ tradefed_host_driven_test_name = name + "__tradefed_host_driven_test"
+ if not test_config and not template_test_config:
+ template_test_config = select({
+ "//build/bazel/rules/tradefed:android_host_driven_tradefed_test": "//build/make/core:native_test_config_template.xml",
+ "//build/bazel/rules/tradefed:linux_host_driven_tradefed_test": "//build/make/core:native_host_test_config_template.xml",
+ "//conditions:default": "//build/make/core:native_test_config_template.xml",
+ })
+ tradefed_host_driven_test(
+ name = tradefed_host_driven_test_name,
+ test_identifier = name,
+ test = test_binary_name,
+ test_config = test_config,
+ template_test_config = template_test_config,
+ template_configs = template_configs,
+ template_install_base = template_install_base,
+ tags = ["manual"],
+ )
+
+ # TODO(b/264792912) update to use proper config/tags to determine which test to run.
+ cc_runner_test(
+ name = name,
+ binary = test_binary_name,
+ test = select({
+ "//build/bazel/rules/tradefed:android_host_driven_tradefed_test": tradefed_host_driven_test_name,
+ "//build/bazel/rules/tradefed:linux_host_driven_tradefed_test": tradefed_host_driven_test_name,
+ "//conditions:default": without_tradefed_test_name,
+ }),
+ )
+
+def _cc_runner_test_impl(ctx):
+ executable = ctx.actions.declare_file(ctx.attr.name + "__cc_runner_test")
+ ctx.actions.symlink(
+ output = executable,
+ target_file = ctx.attr.test.files_to_run.executable,
+ )
+
+ # Gather runfiles.
+ runfiles = ctx.runfiles()
+ runfiles = runfiles.merge_all([
+ ctx.attr.binary.default_runfiles,
+ ctx.attr.test.default_runfiles,
+ ])
+
+    # Propagate providers of the included binary.
+    # These providers are used to populate attributes of the mixed build.
+ providers = collect_providers(ctx.attr.binary, _pass_through_providers)
+ return [DefaultInfo(
+ executable = executable,
+ runfiles = runfiles,
+ )] + providers
+
+cc_runner_test = rule(
+ doc = "A wrapper rule used to run a test and also propagates providers",
+ attrs = {
+ "binary": attr.label(
+ doc = "Binary that providers should be propagated to next rule // mix build.",
+ ),
+ "test": attr.label(
+ doc = "Test to run.",
+ ),
+ },
+ test = True,
+ implementation = _cc_runner_test_impl,
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+)
+
+def collect_providers(dep, provider_types):
+ """Returns list of providers from dependency that match the provider types"""
+ providers = []
+ for provider in provider_types:
+ if provider in dep:
+ providers.append(dep[provider])
+ return providers
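+
+# A rough usage sketch (names are hypothetical, not part of this change): the
+# cc_test macro above is meant to be called from a BUILD file much like
+# cc_binary, with the gtest deps and copts added automatically:
+#
+#     cc_test(
+#         name = "libfoo_test",
+#         srcs = ["foo_test.cpp"],
+#         deps = [":libfoo"],
+#     )
+#
+# This expands into "libfoo_test__test_binary", the tradefed and non-tradefed
+# runner targets, and a top-level cc_runner_test named "libfoo_test" that is
+# selected based on the tradefed config settings.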
diff --git a/rules/cc/cc_test_test.bzl b/rules/cc/cc_test_test.bzl
new file mode 100644
index 00000000..7ee17233
--- /dev/null
+++ b/rules/cc/cc_test_test.bzl
@@ -0,0 +1,80 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":cc_library_common_test.bzl", "target_provides_androidmk_info_test")
+load(":cc_library_shared.bzl", "cc_library_shared")
+load(":cc_library_static.bzl", "cc_library_static")
+load(":cc_test.bzl", "cc_test")
+
+def _cc_test_provides_androidmk_info():
+ name = "cc_test_provides_androidmk_info"
+ dep_name = name + "_static_dep"
+ whole_archive_dep_name = name + "_whole_archive_dep"
+ dynamic_dep_name = name + "_dynamic_dep"
+
+ srcs = ["//build/bazel/rules/cc/testing:test_srcs"]
+ gunit_test_srcs = ["//build/bazel/rules/cc/testing:gunit_test_srcs"]
+
+ cc_library_static(
+ name = dep_name,
+ srcs = srcs,
+ tags = ["manual"],
+ )
+ cc_library_static(
+ name = whole_archive_dep_name,
+ srcs = srcs,
+ tags = ["manual"],
+ )
+ cc_library_shared(
+ name = dynamic_dep_name,
+ srcs = srcs,
+ tags = ["manual"],
+ )
+ cc_test(
+ name = name,
+ srcs = gunit_test_srcs,
+ deps = [dep_name],
+ whole_archive_deps = [whole_archive_dep_name],
+ dynamic_deps = [dynamic_dep_name],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ tags = ["manual"],
+ )
+ android_test_name = name + "_android"
+ linux_test_name = name + "_linux"
+ target_provides_androidmk_info_test(
+ name = android_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libgtest_main", "libgtest", "libc++demangle", "libunwind"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++", "libc", "libdl", "libm"],
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
+ target_provides_androidmk_info_test(
+ name = linux_test_name,
+ target_under_test = name,
+ expected_static_libs = [dep_name, "libgtest_main", "libgtest"],
+ expected_whole_static_libs = [whole_archive_dep_name],
+ expected_shared_libs = [dynamic_dep_name, "libc++"],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return [
+ android_test_name,
+ linux_test_name,
+ ]
+
+def cc_test_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = _cc_test_provides_androidmk_info(),
+ )
diff --git a/rules/cc/clang_tidy.bzl b/rules/cc/clang_tidy.bzl
new file mode 100644
index 00000000..7386a566
--- /dev/null
+++ b/rules/cc/clang_tidy.bzl
@@ -0,0 +1,354 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load(
+ "@bazel_tools//tools/build_defs/cc:action_names.bzl",
+ "CPP_COMPILE_ACTION_NAME",
+ "C_COMPILE_ACTION_NAME",
+)
+load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
+load("@soong_injection//cc_toolchain:config_constants.bzl", "constants")
+load("//build/bazel/product_config:product_variables_providing_rule.bzl", "ProductVariablesInfo")
+load("//build/bazel/rules:common.bzl", "get_dep_targets")
+load(":cc_library_common.bzl", "get_compilation_args")
+
+ClangTidyInfo = provider(
+ "Info provided from clang-tidy actions",
+ fields = {
+ "tidy_files": "Outputs from the clang-tidy tool",
+ "transitive_tidy_files": "Outputs from the clang-tidy tool for all transitive dependencies." +
+ " Currently, these are needed so that mixed-build targets can also run clang-tidy for their dependencies.",
+ },
+)
+
+_TIDY_GLOBAL_NO_CHECKS = constants.TidyGlobalNoChecks.split(",")
+_TIDY_GLOBAL_NO_ERROR_CHECKS = constants.TidyGlobalNoErrorChecks.split(",")
+_TIDY_DEFAULT_GLOBAL_CHECKS = constants.TidyDefaultGlobalChecks.split(",")
+_TIDY_EXTERNAL_VENDOR_CHECKS = constants.TidyExternalVendorChecks.split(",")
+_TIDY_DEFAULT_GLOBAL_CHECKS_NO_ANALYZER = constants.TidyDefaultGlobalChecks.split(",") + ["-clang-analyzer-*"]
+_TIDY_EXTRA_ARG_FLAGS = constants.TidyExtraArgFlags
+
+def _check_bad_tidy_flags(tidy_flags):
+ """should be kept up to date with
+ https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/check.go;l=128;drc=b45a2ea782074944f79fc388df20b06e01f265f7
+ """
+ for flag in tidy_flags:
+ flag = flag.strip()
+ if not flag.startswith("-"):
+ fail("Flag `%s` must start with `-`" % flag)
+ if flag.startswith("-fix"):
+ fail("Flag `%s` is not allowed, since it could cause multiple writes to the same source file" % flag)
+ if flag.startswith("-checks="):
+ fail("Flag `%s` is not allowed, use `tidy_checks` property instead" % flag)
+ if "-warnings-as-errors=" in flag:
+ fail("Flag `%s` is not allowed, use `tidy_checks_as_errors` property instead" % flag)
+ if " " in flag:
+ fail("Bad flag: `%s` is not an allowed multi-word flag. Should it be split into multiple flags?" % flag)
+
+def _check_bad_tidy_checks(tidy_checks):
+ """should be kept up to date with
+ https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/check.go;l=145;drc=b45a2ea782074944f79fc388df20b06e01f265f7
+ """
+ for check in tidy_checks:
+ if " " in check:
+ fail("Check `%s` invalid, cannot contain spaces" % check)
+ if "," in check:
+ fail("Check `%s` invalid, cannot contain commas. Split each entry into its own string instead" % check)
+
+def _add_with_tidy_flags(ctx, tidy_flags):
+ with_tidy_flags = ctx.attr._with_tidy_flags[BuildSettingInfo].value
+ if with_tidy_flags:
+ return tidy_flags + with_tidy_flags
+ return tidy_flags
+
+def _add_header_filter(ctx, tidy_flags):
+ """If TidyFlags does not contain -header-filter, add default header filter.
+ """
+ for flag in tidy_flags:
+ # Find the substring because the flag could also appear as --header-filter=...
+ # and with or without single or double quotes.
+ if "-header-filter=" in flag:
+ return tidy_flags
+
+ # Default header filter should include only the module directory,
+ # not the out/soong/.../ModuleDir/...
+ # Otherwise, there will be too many warnings from generated files in out/...
+ # If a module wants to see warnings in the generated source files,
+ # it should specify its own -header-filter flag.
+ default_dirs = ctx.attr._default_tidy_header_dirs[BuildSettingInfo].value
+ if default_dirs == "":
+ header_filter = "-header-filter=^" + ctx.label.package + "/"
+ else:
+ header_filter = "-header-filter=\"(^%s/|%s)\"" % (ctx.label.package, default_dirs)
+ return tidy_flags + [header_filter]
+
+def _add_extra_arg_flags(tidy_flags):
+ return tidy_flags + ["-extra-arg-before=" + f for f in _TIDY_EXTRA_ARG_FLAGS]
+
+def _add_quiet_if_not_global_tidy(ctx, tidy_flags):
+ if not ctx.attr._product_variables[ProductVariablesInfo].TidyChecks:
+ return tidy_flags + [
+ "-quiet",
+ "-extra-arg-before=-fno-caret-diagnostics",
+ ]
+ return tidy_flags
+
+def _clang_rewrite_tidy_checks(tidy_checks):
+ # List of tidy checks that should be disabled globally. When the compiler is
+ # updated, some checks enabled by this module may be disabled if they have
+ # become more strict, or if they are a new match for a wildcard group like
+ # `modernize-*`.
+ clang_tidy_disable_checks = [
+ "misc-no-recursion",
+ "readability-function-cognitive-complexity", # http://b/175055536
+ ]
+
+ tidy_checks = tidy_checks + ["-" + c for c in clang_tidy_disable_checks]
+
+ # clang-tidy does not allow later arguments to override earlier arguments,
+ # so if we just disabled an argument that was explicitly enabled we must
+ # remove the enabling argument from the list.
+ return [t for t in tidy_checks if t not in clang_tidy_disable_checks]
+
+def _add_checks_for_dir(directory):
+ """should be kept up to date with
+ https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/config/tidy.go;l=170;drc=b45a2ea782074944f79fc388df20b06e01f265f7
+ """
+
+ # This is a map of local path prefixes to the set of default clang-tidy checks
+ # to be used. This is like android.IsThirdPartyPath, but with more patterns.
+ # The last matched local_path_prefix should be the most specific to be used.
+ directory_checks = [
+ ("external/", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("frameworks/compile/mclinker/", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("hardware/", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("hardware/google/", _TIDY_DEFAULT_GLOBAL_CHECKS),
+ ("hardware/interfaces/", _TIDY_DEFAULT_GLOBAL_CHECKS),
+ ("hardware/ril/", _TIDY_DEFAULT_GLOBAL_CHECKS),
+ ("hardware/libhardware", _TIDY_DEFAULT_GLOBAL_CHECKS), # all 'hardware/libhardware*'
+ ("vendor/", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("vendor/google", _TIDY_DEFAULT_GLOBAL_CHECKS), # all 'vendor/google*'
+ ("vendor/google/external/", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("vendor/google_arc/libs/org.chromium.arc.mojom", _TIDY_EXTERNAL_VENDOR_CHECKS),
+ ("vendor/google_devices/", _TIDY_EXTERNAL_VENDOR_CHECKS), # many have vendor code
+ ]
+
+ for d, checks in reversed(directory_checks):
+ if directory.startswith(d):
+ return checks
+
+ return _TIDY_DEFAULT_GLOBAL_CHECKS
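+
+# For example (paths are illustrative): "external/libfoo/" matches the
+# "external/" prefix and gets _TIDY_EXTERNAL_VENDOR_CHECKS, while
+# "hardware/google/foo/" matches both "hardware/" and "hardware/google/";
+# because the list is scanned in reverse order, the more specific
+# "hardware/google/" entry wins and _TIDY_DEFAULT_GLOBAL_CHECKS is used.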
+
+def _add_global_tidy_checks(ctx, local_checks, input_file):
+ tidy_checks = ctx.attr._product_variables[ProductVariablesInfo].TidyChecks
+ global_tidy_checks = []
+ if tidy_checks:
+ global_tidy_checks = tidy_checks
+ elif not input_file.is_source:
+ # don't run clang-tidy for generated files
+ global_tidy_checks = _TIDY_DEFAULT_GLOBAL_CHECKS_NO_ANALYZER
+ else:
+ global_tidy_checks = _add_checks_for_dir(ctx.label.package)
+
+    # If the local checks contain "-*", drop the global checks and ignore all local checks before "-*".
+ for i, check in enumerate(local_checks):
+ if check == "-*":
+ global_tidy_checks = []
+ local_checks = local_checks[i:]
+
+ tidy_checks = global_tidy_checks + _clang_rewrite_tidy_checks(local_checks)
+ tidy_checks.extend(_TIDY_GLOBAL_NO_CHECKS)
+
+    # TODO(b/255747672): disable cert check on Windows only
+ return tidy_checks
+
+def _add_global_tidy_checks_as_errors(tidy_checks_as_errors):
+ return tidy_checks_as_errors + _TIDY_GLOBAL_NO_ERROR_CHECKS
+
+def _create_clang_tidy_action(
+ ctx,
+ clang_tool,
+ input_file,
+ tidy_checks,
+ tidy_checks_as_errors,
+ tidy_flags,
+ clang_flags,
+ headers,
+ tidy_timeout):
+ tidy_flags = _add_with_tidy_flags(ctx, tidy_flags)
+ tidy_flags = _add_header_filter(ctx, tidy_flags)
+ tidy_flags = _add_extra_arg_flags(tidy_flags)
+ tidy_flags = _add_quiet_if_not_global_tidy(ctx, tidy_flags)
+ tidy_checks = _add_global_tidy_checks(ctx, tidy_checks, input_file)
+ tidy_checks_as_errors = _add_global_tidy_checks_as_errors(tidy_checks_as_errors)
+
+ _check_bad_tidy_checks(tidy_checks)
+ _check_bad_tidy_flags(tidy_flags)
+
+ args = ctx.actions.args()
+ args.add(input_file)
+ if tidy_checks:
+ args.add("-checks=" + ",".join(tidy_checks))
+ if tidy_checks_as_errors:
+ args.add("-warnings-as-errors=" + ",".join(tidy_checks_as_errors))
+ if tidy_flags:
+ args.add_all(tidy_flags)
+ args.add("--")
+ args.add_all(clang_flags)
+
+ tidy_file = ctx.actions.declare_file(paths.join(ctx.label.name, input_file.short_path + ".tidy"))
+ env = {
+ "CLANG_CMD": clang_tool,
+ "TIDY_FILE": tidy_file.path,
+ }
+ if tidy_timeout:
+ env["TIDY_TIMEOUT"] = tidy_timeout
+
+ ctx.actions.run(
+ inputs = [input_file] + headers,
+ outputs = [tidy_file],
+ arguments = [args],
+ env = env,
+ progress_message = "Running clang-tidy on {}".format(input_file.short_path),
+ tools = [
+ ctx.executable._clang_tidy,
+ ctx.executable._clang_tidy_real,
+ ],
+ executable = ctx.executable._clang_tidy_sh,
+ execution_requirements = {
+ "no-sandbox": "1",
+ },
+ mnemonic = "ClangTidy",
+ )
+
+ return tidy_file
+
+def generate_clang_tidy_actions(
+ ctx,
+ flags,
+ deps,
+ srcs,
+ hdrs,
+ language,
+ tidy_flags,
+ tidy_checks,
+ tidy_checks_as_errors,
+ tidy_timeout):
+ """Generates actions for clang tidy
+
+ Args:
+ ctx (Context): rule context that is expected to contain
+ - ctx.executable._clang_tidy
+ - ctx.executable._clang_tidy_sh
+ - ctx.executable._clang_tidy_real
+            - ctx.attr._with_tidy_flags
+ flags (list[str]): list of target-specific (non-toolchain) flags passed
+ to clang compile action
+ deps (list[Target]): list of Targets which provide headers to
+ compilation context
+ srcs (list[File]): list of srcs to which clang-tidy will be applied
+ hdrs (list[File]): list of headers used by srcs. This is used to provide
+ explicit inputs to the action
+ language (str): must be one of ["c++", "c"]. This is used to decide what
+ toolchain arguments are passed to the clang compile action
+ tidy_flags (list[str]): additional flags to pass to the clang-tidy tool
+ tidy_checks (list[str]): list of checks for clang-tidy to perform
+ tidy_checks_as_errors (list[str]): list of checks to pass as
+ "-warnings-as-errors" to clang-tidy
+ tidy_timeout (str): timeout in seconds after which to stop a clang-tidy
+ invocation
+ Returns:
+        tidy_file_outputs (list[File]): list of .tidy files output by the
+ clang-tidy.sh tool
+ """
+ toolchain = find_cpp_toolchain(ctx)
+ feature_config = cc_common.configure_features(
+ ctx = ctx,
+ cc_toolchain = toolchain,
+ language = "c++",
+ requested_features = ctx.features,
+ unsupported_features = ctx.disabled_features,
+ )
+
+ action_name = ""
+ if language == "c++":
+ action_name = CPP_COMPILE_ACTION_NAME
+ elif language == "c":
+ action_name = C_COMPILE_ACTION_NAME
+ else:
+ fail("invalid language:", language)
+
+ dep_info = cc_common.merge_cc_infos(direct_cc_infos = [d[CcInfo] for d in deps])
+ compilation_ctx = dep_info.compilation_context
+ args = get_compilation_args(
+ toolchain = toolchain,
+ feature_config = feature_config,
+ flags = flags,
+ compilation_ctx = compilation_ctx,
+ action_name = action_name,
+ )
+
+ clang_tool = cc_common.get_tool_for_action(
+ feature_configuration = feature_config,
+ action_name = action_name,
+ )
+
+ header_inputs = (
+ hdrs +
+ compilation_ctx.headers.to_list() +
+ compilation_ctx.direct_headers +
+ compilation_ctx.direct_private_headers +
+ compilation_ctx.direct_public_headers +
+ compilation_ctx.direct_textual_headers
+ )
+
+ tidy_file_outputs = []
+ for src in srcs:
+ tidy_file = _create_clang_tidy_action(
+ ctx = ctx,
+ input_file = src,
+ headers = header_inputs,
+ clang_tool = paths.basename(clang_tool),
+ tidy_checks = tidy_checks,
+ tidy_checks_as_errors = tidy_checks_as_errors,
+ tidy_flags = tidy_flags,
+ clang_flags = args,
+ tidy_timeout = tidy_timeout,
+ )
+ tidy_file_outputs.append(tidy_file)
+
+ return tidy_file_outputs
+
+def collect_deps_clang_tidy_info(ctx):
+ transitive_clang_tidy_files = []
+ for attr_deps in get_dep_targets(ctx.attr, predicate = lambda target: ClangTidyInfo in target).values():
+ for dep in attr_deps:
+ transitive_clang_tidy_files.append(dep[ClangTidyInfo].transitive_tidy_files)
+ return ClangTidyInfo(
+ tidy_files = depset(),
+ transitive_tidy_files = depset(transitive = transitive_clang_tidy_files),
+ )
+
+def _never_tidy_for_dir(directory):
+ # should stay up to date with https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/config/tidy.go;l=227;drc=f5864ba3633fdbadfb434483848887438fc11f59
+ return directory.startswith("external/grpc-grpc")
+
+def clang_tidy_for_dir(allow_external_vendor, directory):
+ return not _never_tidy_for_dir(directory) and (
+ allow_external_vendor or _add_checks_for_dir(directory) != _TIDY_EXTERNAL_VENDOR_CHECKS
+ )
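+
+# A rough sketch of how a consuming rule implementation might call into this
+# file (the rule and attribute names below are hypothetical; the real cc
+# library rules wire up additional attributes and build settings):
+#
+#     def _my_rule_impl(ctx):
+#         tidy_files = generate_clang_tidy_actions(
+#             ctx,
+#             flags = ctx.attr.copts,
+#             deps = ctx.attr.deps,
+#             srcs = ctx.files.srcs,
+#             hdrs = ctx.files.hdrs,
+#             language = "c++",
+#             tidy_flags = [],
+#             tidy_checks = [],
+#             tidy_checks_as_errors = [],
+#             tidy_timeout = "",
+#         )
+#         return [DefaultInfo(files = depset(tidy_files))]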
diff --git a/rules/cc/clang_tidy_test.bzl b/rules/cc/clang_tidy_test.bzl
new file mode 100644
index 00000000..ad9163f9
--- /dev/null
+++ b/rules/cc/clang_tidy_test.bzl
@@ -0,0 +1,770 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(
+ "//build/bazel/rules/test_common:args.bzl",
+ "get_all_args_with_prefix",
+ "get_single_arg_with_prefix",
+)
+load("//build/bazel/rules/test_common:rules.bzl", "expect_failure_test")
+load(":cc_library_static.bzl", "cc_library_static")
+load(":clang_tidy.bzl", "generate_clang_tidy_actions")
+
+_PACKAGE_HEADER_FILTER = "^build/bazel/rules/cc/"
+_DEFAULT_GLOBAL_CHECKS = [
+ "android-*",
+ "bugprone-*",
+ "cert-*",
+ "clang-diagnostic-unused-command-line-argument",
+ "google-build-explicit-make-pair",
+ "google-build-namespaces",
+ "google-runtime-operator",
+ "google-upgrade-*",
+ "misc-*",
+ "performance-*",
+ "portability-*",
+ "-bugprone-assignment-in-if-condition",
+ "-bugprone-easily-swappable-parameters",
+ "-bugprone-narrowing-conversions",
+ "-misc-const-correctness",
+ "-misc-no-recursion",
+ "-misc-non-private-member-variables-in-classes",
+ "-misc-unused-parameters",
+ "-performance-no-int-to-ptr",
+ "-clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling",
+]
+_DEFAULT_CHECKS = [
+ "-misc-no-recursion",
+ "-readability-function-cognitive-complexity",
+ "-bugprone-unchecked-optional-access",
+ "-bugprone-reserved-identifier*",
+ "-cert-dcl51-cpp",
+ "-cert-dcl37-c",
+ "-readability-qualified-auto",
+ "-bugprone-implicit-widening-of-multiplication-result",
+ "-bugprone-easily-swappable-parameters",
+ "-cert-err33-c",
+ "-bugprone-unchecked-optional-access",
+ "-misc-use-anonymous-namespace",
+]
+_DEFAULT_CHECKS_AS_ERRORS = [
+ "-bugprone-assignment-in-if-condition",
+ "-bugprone-branch-clone",
+ "-bugprone-signed-char-misuse",
+ "-misc-const-correctness",
+]
+_EXTRA_ARGS_BEFORE = [
+ "-D__clang_analyzer__",
+ "-Xclang",
+ "-analyzer-config",
+ "-Xclang",
+ "c++-temp-dtor-inlining=false",
+]
+
+def _clang_tidy_impl(ctx):
+ tidy_outs = generate_clang_tidy_actions(
+ ctx,
+ ctx.attr.copts,
+ ctx.attr.deps,
+ ctx.files.srcs,
+ ctx.files.hdrs,
+ ctx.attr.language,
+ ctx.attr.tidy_flags,
+ ctx.attr.tidy_checks,
+ ctx.attr.tidy_checks_as_errors,
+ ctx.attr.tidy_timeout_srcs,
+ )
+ return [
+ DefaultInfo(files = depset(tidy_outs)),
+ ]
+
+_clang_tidy = rule(
+ implementation = _clang_tidy_impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = True),
+ "deps": attr.label_list(),
+ "copts": attr.string_list(),
+ "hdrs": attr.label_list(allow_files = True),
+ "language": attr.string(values = ["c++", "c"], default = "c++"),
+ "tidy_checks": attr.string_list(),
+ "tidy_checks_as_errors": attr.string_list(),
+ "tidy_flags": attr.string_list(),
+ "tidy_timeout_srcs": attr.label_list(allow_files = True),
+ "_clang_tidy_sh": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy.sh"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ doc = "The clang tidy shell wrapper",
+ ),
+ "_clang_tidy": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ doc = "The clang tidy executable",
+ ),
+ "_clang_tidy_real": attr.label(
+ default = Label("@//prebuilts/clang/host/linux-x86:clang-tidy.real"),
+ allow_single_file = True,
+ executable = True,
+ cfg = "exec",
+ ),
+ "_with_tidy": attr.label(
+ default = "//build/bazel/flags/cc/tidy:with_tidy",
+ ),
+ "_allow_local_tidy_true": attr.label(
+ default = "//build/bazel/flags/cc/tidy:allow_local_tidy_true",
+ ),
+ "_with_tidy_flags": attr.label(
+ default = "//build/bazel/flags/cc/tidy:with_tidy_flags",
+ ),
+ "_default_tidy_header_dirs": attr.label(
+ default = "//build/bazel/flags/cc/tidy:default_tidy_header_dirs",
+ ),
+ "_tidy_timeout": attr.label(
+ default = "//build/bazel/flags/cc/tidy:tidy_timeout",
+ ),
+ "_product_variables": attr.label(
+ default = "//build/bazel/product_config:product_vars",
+ ),
+ },
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+ fragments = ["cpp"],
+)
+
+def _get_all_arg(env, actions, argname):
+ args = get_all_args_with_prefix(actions[0].argv, argname)
+    asserts.false(env, args == [], "could not find arguments that start with `{}`".format(argname))
+ return args
+
+def _get_single_arg(actions, argname):
+ return get_single_arg_with_prefix(actions[0].argv, argname)
+
+def _checks_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ checks = _get_single_arg(actions, "-checks=").split(",")
+ asserts.set_equals(env, sets.make(ctx.attr.expected_checks), sets.make(checks))
+ if len(ctx.attr.unexpected_checks) > 0:
+ for c in ctx.attr.unexpected_checks:
+ asserts.false(env, c in checks, "found unexpected check in -checks flag: %s" % c)
+
+ checks_as_errors = _get_single_arg(actions, "-warnings-as-errors=").split(",")
+ asserts.set_equals(env, sets.make(ctx.attr.expected_checks_as_errors), sets.make(checks_as_errors))
+
+ return analysistest.end(env)
+
+_checks_test = analysistest.make(
+ _checks_test_impl,
+ attrs = {
+ "expected_checks": attr.string_list(mandatory = True),
+ "expected_checks_as_errors": attr.string_list(mandatory = True),
+ "unexpected_checks": attr.string_list(),
+ },
+)
+
+def _copts_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ args = actions[0].argv
+ clang_flags = []
+ for i, a in enumerate(args):
+ if a == "--" and len(args) > i + 1:
+ clang_flags = args[i + 1:]
+ break
+ asserts.true(
+ env,
+ len(clang_flags) > 0,
+ "no flags passed to clang; all arguments: %s" % args,
+ )
+
+ for expected_arg in ctx.attr.expected_copts:
+ asserts.true(
+ env,
+ expected_arg in clang_flags,
+ "expected `%s` not present in clang flags" % expected_arg,
+ )
+
+ return analysistest.end(env)
+
+_copts_test = analysistest.make(
+ _copts_test_impl,
+ attrs = {
+ "expected_copts": attr.string_list(mandatory = True),
+ },
+)
+
+def _tidy_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ args = actions[0].argv
+ tidy_flags = []
+    for i, a in enumerate(args):
+        if a == "--" and len(args) > i + 1:
+            tidy_flags = args[:i]
+            break
+ asserts.true(
+ env,
+ len(tidy_flags) > 0,
+ "no tidy flags passed to clang-tidy; all arguments: %s" % args,
+ )
+
+ for expected_arg in ctx.attr.expected_tidy_flags:
+ asserts.true(
+ env,
+ expected_arg in tidy_flags,
+ "expected `%s` not present in flags to clang-tidy" % expected_arg,
+ )
+
+ header_filter = _get_single_arg(actions, "-header-filter=")
+ asserts.true(
+ env,
+ header_filter == ctx.attr.expected_header_filter,
+ (
+ "expected header-filter to have value `%s`; got `%s`" %
+ (ctx.attr.expected_header_filter, header_filter)
+ ),
+ )
+
+ extra_arg_before = _get_all_arg(env, actions, "-extra-arg-before=")
+ for expected_arg in ctx.attr.expected_extra_arg_before:
+ asserts.true(
+ env,
+ expected_arg in extra_arg_before,
+ "did not find expected flag `%s` in args to clang-tidy" % expected_arg,
+ )
+
+ return analysistest.end(env)
+
+_tidy_flags_test = analysistest.make(
+ _tidy_flags_test_impl,
+ attrs = {
+ "expected_tidy_flags": attr.string_list(),
+ "expected_header_filter": attr.string(mandatory = True),
+ "expected_extra_arg_before": attr.string_list(),
+ },
+)
+
+def _test_clang_tidy():
+ name = "checks"
+ test_name = name + "_test"
+ checks_test_name = test_name + "_checks"
+ copts_test_name = test_name + "_copts"
+ tidy_flags_test_name = test_name + "_tidy_flags"
+
+ _clang_tidy(
+ name = name,
+        # clang-tidy operates differently on generated and non-generated files;
+        # use test_srcs so that the tidy rule doesn't think these are generated
+        # files.
+ srcs = ["//build/bazel/rules/cc/testing:test_srcs"],
+ copts = ["-asdf1", "-asdf2"],
+ tidy_flags = ["-tidy-flag1", "-tidy-flag2"],
+ tags = ["manual"],
+ )
+
+ _checks_test(
+ name = checks_test_name,
+ target_under_test = name,
+ expected_checks = _DEFAULT_CHECKS + _DEFAULT_GLOBAL_CHECKS,
+ expected_checks_as_errors = _DEFAULT_CHECKS_AS_ERRORS,
+ )
+
+ _copts_test(
+ name = copts_test_name,
+ target_under_test = name,
+ expected_copts = ["-asdf1", "-asdf2"],
+ )
+
+ _tidy_flags_test(
+ name = tidy_flags_test_name,
+ target_under_test = name,
+ expected_tidy_flags = ["-tidy-flag1", "-tidy-flag2"],
+ expected_header_filter = _PACKAGE_HEADER_FILTER,
+ expected_extra_arg_before = _EXTRA_ARGS_BEFORE,
+ )
+
+ return [
+ checks_test_name,
+ copts_test_name,
+ tidy_flags_test_name,
+ ]
+
+def _test_custom_header_dir():
+ name = "custom_header_dir"
+ test_name = name + "_test"
+
+ _clang_tidy(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy_flags = ["-header-filter=dir1/"],
+ tags = ["manual"],
+ )
+
+ _tidy_flags_test(
+ name = test_name,
+ target_under_test = name,
+ expected_header_filter = "dir1/",
+ )
+
+ return [
+ test_name,
+ ]
+
+def _test_disabled_checks_are_removed():
+ name = "disabled_checks_are_removed"
+ test_name = name + "_test"
+
+ _clang_tidy(
+ name = name,
+        # clang-tidy operates differently on generated and non-generated files;
+        # use test_srcs so that the tidy rule doesn't think these are generated
+        # files.
+ srcs = ["//build/bazel/rules/cc/testing:test_srcs"],
+ tidy_checks = ["misc-no-recursion", "readability-function-cognitive-complexity"],
+ tags = ["manual"],
+ )
+
+ _checks_test(
+ name = test_name,
+ target_under_test = name,
+ expected_checks = _DEFAULT_CHECKS + _DEFAULT_GLOBAL_CHECKS,
+ expected_checks_as_errors = _DEFAULT_CHECKS_AS_ERRORS,
+ unexpected_checks = ["misc-no-recursion", "readability-function-cognitive-complexity"],
+ )
+
+ return [
+ test_name,
+ ]
+
+def _create_bad_tidy_checks_test(name, tidy_checks, failure_message):
+ name = "bad_tidy_checks_fail_" + name
+ test_name = name + "_test"
+
+ _clang_tidy(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy_checks = tidy_checks,
+ tags = ["manual"],
+ )
+
+ expect_failure_test(
+ name = test_name,
+ target_under_test = name,
+ failure_message = failure_message,
+ )
+
+ return [
+ test_name,
+ ]
+
+def _test_bad_tidy_checks_fail():
+ return (
+ _create_bad_tidy_checks_test(
+ name = "with_spaces",
+ tidy_checks = ["check with spaces"],
+ failure_message = "Check `check with spaces` invalid, cannot contain spaces",
+ ) +
+ _create_bad_tidy_checks_test(
+ name = "with_commas",
+ tidy_checks = ["check,with,commas"],
+ failure_message = "Check `check,with,commas` invalid, cannot contain commas. Split each entry into its own string instead",
+ )
+ )
+
+def _create_bad_tidy_flags_test(name, tidy_flags, failure_message):
+ name = "bad_tidy_flags_fail_" + name
+ test_name = name + "_test"
+
+ _clang_tidy(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy_flags = tidy_flags,
+ tags = ["manual"],
+ )
+
+ expect_failure_test(
+ name = test_name,
+ target_under_test = name,
+ failure_message = failure_message,
+ )
+
+ return [
+ test_name,
+ ]
+
+def _test_bad_tidy_flags_fail():
+ return (
+ _create_bad_tidy_flags_test(
+ name = "without_leading_dash",
+ tidy_flags = ["flag1"],
+ failure_message = "Flag `flag1` must start with `-`",
+ ) +
+ _create_bad_tidy_flags_test(
+ name = "fix_flags",
+ tidy_flags = ["-fix"],
+ failure_message = "Flag `%s` is not allowed, since it could cause multiple writes to the same source file",
+ ) +
+ _create_bad_tidy_flags_test(
+ name = "checks_in_flags",
+ tidy_flags = ["-checks=asdf"],
+ failure_message = "Flag `-checks=asdf` is not allowed, use `tidy_checks` property instead",
+ ) +
+ _create_bad_tidy_flags_test(
+ name = "warnings_as_errors_in_flags",
+ tidy_flags = ["-warnings-as-errors=asdf"],
+ failure_message = "Flag `-warnings-as-errors=asdf` is not allowed, use `tidy_checks_as_errors` property instead",
+ ) +
+ _create_bad_tidy_flags_test(
+ name = "space_in_flags",
+ tidy_flags = ["-flag with spaces"],
+ failure_message = "Bad flag: `-flag with spaces` is not an allowed multi-word flag. Should it be split into multiple flags",
+ )
+ )
+
+def _test_disable_global_checks():
+ name = "disable_global_checks"
+ test_name = name + "_test"
+
+ _clang_tidy(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy_checks = ["-*"],
+ tags = ["manual"],
+ )
+
+ _checks_test(
+ name = test_name,
+ target_under_test = name,
+ expected_checks = ["-*"] + _DEFAULT_CHECKS,
+ expected_checks_as_errors = _DEFAULT_CHECKS_AS_ERRORS,
+ )
+
+ return [
+ test_name,
+ ]
+
+def _cc_library_static_generates_clang_tidy_actions_for_srcs_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ clang_tidy_actions = [a for a in actions if a.mnemonic == "ClangTidy"]
+ asserts.equals(
+ env,
+ ctx.attr.expected_num_actions,
+ len(clang_tidy_actions),
+ "expected to have %s clang-tidy actions, but got %s; actions: %s" % (
+ ctx.attr.expected_num_actions,
+ len(clang_tidy_actions),
+ clang_tidy_actions,
+ ),
+ )
+
+ for a in clang_tidy_actions:
+ for input in a.inputs.to_list():
+ input_is_expected_header = input.short_path in [f.short_path for f in ctx.files.expected_headers]
+ if input in ctx.files._clang_tidy_tools or input_is_expected_header:
+ continue
+ asserts.true(
+ env,
+ input in ctx.files.srcs,
+ "clang-tidy operated on a file not in srcs: %s; all inputs: %s" % (input, a.inputs.to_list()),
+ )
+ asserts.true(
+ env,
+ input not in ctx.files.disabled_srcs,
+ "clang-tidy operated on a file in disabled_srcs: %s; all inputs: %s" % (input, a.inputs.to_list()),
+ )
+
+ return analysistest.end(env)
+
+_cc_library_static_generates_clang_tidy_actions_for_srcs_test = analysistest.make(
+ impl = _cc_library_static_generates_clang_tidy_actions_for_srcs_test_impl,
+ attrs = {
+ "expected_num_actions": attr.int(mandatory = True),
+ "srcs": attr.label_list(allow_files = True),
+ "disabled_srcs": attr.label_list(allow_files = True),
+ "expected_headers": attr.label_list(allow_files = True),
+ "_clang_tidy_tools": attr.label_list(
+ default = [
+ "@//prebuilts/clang/host/linux-x86:clang-tidy",
+ "@//prebuilts/clang/host/linux-x86:clang-tidy.real",
+ "@//prebuilts/clang/host/linux-x86:clang-tidy.sh",
+ ],
+ allow_files = True,
+ ),
+ },
+ config_settings = {
+ "@//build/bazel/flags/cc/tidy:allow_local_tidy_true": True,
+ },
+)
+
+def _create_cc_library_static_generates_clang_tidy_actions_for_srcs(
+ name,
+ srcs,
+ expected_num_actions,
+ disabled_srcs = None,
+ expected_headers = []):
+ name = "cc_library_static_generates_clang_tidy_actions_for_srcs_" + name
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = name,
+ srcs = srcs,
+ tidy_disabled_srcs = disabled_srcs,
+ tidy = "local",
+ tags = ["manual"],
+ )
+
+ _cc_library_static_generates_clang_tidy_actions_for_srcs_test(
+ name = test_name,
+ target_under_test = name,
+ expected_num_actions = expected_num_actions,
+ srcs = srcs,
+ disabled_srcs = disabled_srcs,
+ expected_headers = expected_headers + select({
+ "//build/bazel/platforms/os:android": ["@//bionic/libc:generated_android_ids"],
+ "//conditions:default": [],
+ }),
+ )
+
+ return test_name
+
+def _test_cc_library_static_generates_clang_tidy_actions_for_srcs():
+ return [
+ _create_cc_library_static_generates_clang_tidy_actions_for_srcs(
+ name = "with_srcs",
+ srcs = ["a.cpp", "b.cpp"],
+ expected_num_actions = 2,
+ ),
+ _create_cc_library_static_generates_clang_tidy_actions_for_srcs(
+ name = "with_disabled_srcs",
+ srcs = ["a.cpp", "b.cpp"],
+ disabled_srcs = ["b.cpp", "c.cpp"],
+ expected_num_actions = 1,
+ ),
+ ]
+
+def _no_clang_analyzer_on_generated_files_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ clang_tidy_actions = [a for a in actions if a.mnemonic == "ClangTidy"]
+ for a in clang_tidy_actions:
+ found_clang_analyzer = False
+ for arg in a.argv:
+ if "-clang-analyzer-*" in arg:
+ found_clang_analyzer = True
+ asserts.true(env, found_clang_analyzer)
+
+ return analysistest.end(env)
+
+_no_clang_analyzer_on_generated_files_test = analysistest.make(
+ impl = _no_clang_analyzer_on_generated_files_test_impl,
+ config_settings = {
+ "@//build/bazel/flags/cc/tidy:allow_local_tidy_true": True,
+ },
+)
+
+def _test_no_clang_analyzer_on_generated_files():
+ name = "no_clang_analyzer_on_generated_files"
+ gen_name = name + "_generated_files"
+ test_name = name + "_test"
+
+ native.genrule(
+ name = gen_name,
+ outs = ["aout.cpp", "bout.cpp"],
+ cmd = "touch $(OUTS)",
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = name,
+ srcs = [":" + gen_name],
+ tidy = "local",
+ tags = ["manual"],
+ )
+
+ _no_clang_analyzer_on_generated_files_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return [
+ test_name,
+ ]
+
+def _clang_tidy_actions_count_no_tidy_env_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ clang_tidy_actions = [a for a in actions if a.mnemonic == "ClangTidy"]
+ asserts.equals(
+ env,
+ ctx.attr.expected_num_tidy_actions,
+ len(clang_tidy_actions),
+ "expected to find %d tidy actions, but found %d" % (
+ ctx.attr.expected_num_tidy_actions,
+ len(clang_tidy_actions),
+ ),
+ )
+
+ return analysistest.end(env)
+
+_clang_tidy_actions_count_no_tidy_env_test = analysistest.make(
+ impl = _clang_tidy_actions_count_no_tidy_env_test_impl,
+ attrs = {
+ "expected_num_tidy_actions": attr.int(),
+ },
+)
+
+_clang_tidy_actions_count_with_tidy_true_test = analysistest.make(
+ impl = _clang_tidy_actions_count_no_tidy_env_test_impl,
+ attrs = {
+ "expected_num_tidy_actions": attr.int(),
+ },
+ config_settings = {
+ "@//build/bazel/flags/cc/tidy:with_tidy": True,
+ },
+)
+
+_clang_tidy_actions_count_with_allow_local_tidy_true_test = analysistest.make(
+ impl = _clang_tidy_actions_count_no_tidy_env_test_impl,
+ attrs = {
+ "expected_num_tidy_actions": attr.int(),
+ },
+ config_settings = {
+ "@//build/bazel/flags/cc/tidy:allow_local_tidy_true": True,
+ },
+)
+
+def _test_clang_tidy_runs_if_tidy_true():
+ name = "clang_tidy_runs_if_tidy_true"
+ test_name = name + "_test"
+ with_tidy_test_name = test_name + "_with_tidy_true"
+ allow_local_tidy_true_test_name = test_name + "_allow_local_tidy_true"
+
+ cc_library_static(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy = "local",
+ tags = ["manual"],
+ )
+ _clang_tidy_actions_count_no_tidy_env_test(
+ name = test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ _clang_tidy_actions_count_with_tidy_true_test(
+ name = with_tidy_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 1,
+ )
+ _clang_tidy_actions_count_with_allow_local_tidy_true_test(
+ name = allow_local_tidy_true_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 1,
+ )
+ return [
+ test_name,
+ with_tidy_test_name,
+ allow_local_tidy_true_test_name,
+ ]
+
+def _test_clang_tidy_runs_if_attribute_unset():
+ name = "clang_tidy_runs_if_attribute_unset"
+ test_name = name + "_test"
+ with_tidy_test_name = test_name + "_with_tidy_true"
+ allow_local_tidy_true_test_name = test_name + "_allow_local_tidy_true"
+
+ cc_library_static(
+ name = name,
+ srcs = ["a.cpp"],
+ tags = ["manual"],
+ )
+ _clang_tidy_actions_count_no_tidy_env_test(
+ name = test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ _clang_tidy_actions_count_with_tidy_true_test(
+ name = with_tidy_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 1,
+ )
+ _clang_tidy_actions_count_with_allow_local_tidy_true_test(
+ name = allow_local_tidy_true_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ return [
+ test_name,
+ with_tidy_test_name,
+ allow_local_tidy_true_test_name,
+ ]
+
+def _test_no_clang_tidy_if_tidy_false():
+ name = "no_clang_tidy_if_tidy_false"
+ test_name = name + "_test"
+ with_tidy_test_name = test_name + "_with_tidy_true"
+ allow_local_tidy_true_test_name = test_name + "_allow_local_tidy_true"
+
+ cc_library_static(
+ name = name,
+ srcs = ["a.cpp"],
+ tidy = "never",
+ tags = ["manual"],
+ )
+ _clang_tidy_actions_count_no_tidy_env_test(
+ name = test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ _clang_tidy_actions_count_with_tidy_true_test(
+ name = with_tidy_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ _clang_tidy_actions_count_with_allow_local_tidy_true_test(
+ name = allow_local_tidy_true_test_name,
+ target_under_test = name,
+ expected_num_tidy_actions = 0,
+ )
+ return [
+ test_name,
+ with_tidy_test_name,
+ allow_local_tidy_true_test_name,
+ ]
+
+def clang_tidy_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests =
+ _test_clang_tidy() +
+ _test_custom_header_dir() +
+ _test_disabled_checks_are_removed() +
+ _test_bad_tidy_checks_fail() +
+ _test_bad_tidy_flags_fail() +
+ _test_disable_global_checks() +
+ _test_cc_library_static_generates_clang_tidy_actions_for_srcs() +
+ _test_no_clang_analyzer_on_generated_files() +
+ _test_no_clang_tidy_if_tidy_false() +
+ _test_clang_tidy_runs_if_tidy_true() +
+ _test_clang_tidy_runs_if_attribute_unset(),
+ )
diff --git a/rules/cc/composed_transitions.bzl b/rules/cc/composed_transitions.bzl
new file mode 100644
index 00000000..b40af152
--- /dev/null
+++ b/rules/cc/composed_transitions.bzl
@@ -0,0 +1,49 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ ":fdo_profile_transitions.bzl",
+ "CLI_CODECOV_KEY",
+ "CLI_FDO_KEY",
+ "FDO_PROFILE_ATTR_KEY",
+ "apply_fdo_profile",
+)
+load(":lto_transitions.bzl", "CLI_FEATURES_KEY", "apply_drop_lto")
+
+# Both LTO and FDO require an incoming transition on cc_library_shared
+def _lto_and_fdo_profile_incoming_transition_impl(settings, attr):
+ new_fdo_settings = apply_fdo_profile(
+ settings[CLI_CODECOV_KEY],
+ getattr(attr, FDO_PROFILE_ATTR_KEY),
+ )
+
+ new_lto_settings = apply_drop_lto(settings[CLI_FEATURES_KEY])
+
+ if new_fdo_settings == None:
+ new_fdo_settings = {}
+ if new_lto_settings == None:
+ new_lto_settings = {}
+ return new_fdo_settings | new_lto_settings
+
+lto_and_fdo_profile_incoming_transition = transition(
+ implementation = _lto_and_fdo_profile_incoming_transition_impl,
+ inputs = [
+ CLI_CODECOV_KEY,
+ CLI_FEATURES_KEY,
+ ],
+ outputs = [
+ CLI_FDO_KEY,
+ CLI_FEATURES_KEY,
+ ],
+)
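+
+# A minimal attachment sketch (the rule name is hypothetical): the composed
+# transition is meant to be used as an incoming rule transition on a target
+# that carries an fdo_profile attribute, together with the standard
+# function-transition allowlist attribute:
+#
+#     my_cc_library_shared = rule(
+#         implementation = _impl,
+#         cfg = lto_and_fdo_profile_incoming_transition,
+#         attrs = {
+#             "fdo_profile": attr.label(),
+#             "_allowlist_function_transition": attr.label(
+#                 default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+#             ),
+#         },
+#     )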
diff --git a/rules/cc/fdo_profile_transitions.bzl b/rules/cc/fdo_profile_transitions.bzl
new file mode 100644
index 00000000..0fdacd61
--- /dev/null
+++ b/rules/cc/fdo_profile_transitions.bzl
@@ -0,0 +1,62 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FDO_PROFILE_ATTR_KEY = "fdo_profile"
+CLI_FDO_KEY = "//command_line_option:fdo_profile"
+CLI_CODECOV_KEY = "//command_line_option:collect_code_coverage"
+
+# https://github.com/bazelbuild/bazel/blob/8a53b0e51506d825d276ea7c9480190bd2287009/src/main/java/com/google/devtools/build/lib/rules/cpp/FdoHelper.java#L170
+# Coverage mode is not compatible with FDO optimization in Bazel cc rules.
+# If collect_code_coverage is set, disable FDO optimization.
+def apply_fdo_profile(codecov_setting, fdo_profile_attr):
+ if codecov_setting:
+ return {
+ CLI_FDO_KEY: None,
+ }
+ else:
+ return {
+ CLI_FDO_KEY: fdo_profile_attr,
+ }
+
+def fdo_profile_transition_impl(setting, attr):
+ return apply_fdo_profile(
+ setting[CLI_CODECOV_KEY],
+ getattr(attr, FDO_PROFILE_ATTR_KEY),
+ )
+
+# This transition reads the fdo_profile attribute of a rule and sets the value
+# of //command_line_option:fdo_profile.
+fdo_profile_transition = transition(
+ implementation = fdo_profile_transition_impl,
+ inputs = [
+ CLI_CODECOV_KEY,
+ ],
+ outputs = [
+ CLI_FDO_KEY,
+ ],
+)
+
+def _drop_fdo_profile_transition_impl(_, __):
+ return {
+ CLI_FDO_KEY: None,
+ }
+
+# This transition always resets //command_line_option:fdo_profile to None.
+drop_fdo_profile_transition = transition(
+ implementation = _drop_fdo_profile_transition_impl,
+ inputs = [],
+ outputs = [
+ CLI_FDO_KEY,
+ ],
+)
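+
+# Behavior sketch for apply_fdo_profile (the profile label is illustrative):
+#
+#     apply_fdo_profile(True, "//toolchain:some_afdo_profile")
+#       => {"//command_line_option:fdo_profile": None}
+#     apply_fdo_profile(False, "//toolchain:some_afdo_profile")
+#       => {"//command_line_option:fdo_profile": "//toolchain:some_afdo_profile"}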
diff --git a/rules/cc/flex.bzl b/rules/cc/flex.bzl
new file mode 100644
index 00000000..636188bc
--- /dev/null
+++ b/rules/cc/flex.bzl
@@ -0,0 +1,121 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+"""Build rule for converting `.l` or `.ll` to C or C++ sources with Flex.
+
+Uses flex (and m4 under the hood) to convert .l and .ll source files into
+.c and .cc files. Does not support .lex or .lpp extensions.
+
+Examples
+--------
+
+This is a simple example.
+```
+genlex(
+ name = "html_lex",
+ srcs = ["html.l"],
+)
+```
+
+This example uses some options for flex.
+```
+genlex(
+ name = "rules_l",
+ srcs = ["rules.l"],
+ lexopts = ["-d", "-v"],
+)
+```
+"""
+
+def _genlex_impl(ctx):
+ """Implementation for genlex rule."""
+
+ # TODO(b/190006308): When fixed, l and ll sources can coexist. Remove this.
+ exts = [f.extension for f in ctx.files.srcs]
+ contains_l = False
+ contains_ll = False
+ for ext in exts:
+ if ext == "l":
+ contains_l = True
+ if ext == "ll":
+ contains_ll = True
+ if contains_l and contains_ll:
+ fail(
+ "srcs contains both .l and .ll files. Please use separate targets.",
+ )
+
+ outputs = []
+ for src_file in ctx.files.srcs:
+ args = ctx.actions.args()
+ output_filename = ""
+
+ src_ext = src_file.extension
+ split_filename = src_file.basename.partition(".")
+ filename_without_ext = split_filename[0]
+
+ if src_ext == "l":
+ output_filename = paths.replace_extension(filename_without_ext, ".c")
+ elif src_ext == "ll":
+ output_filename = paths.replace_extension(filename_without_ext, ".cc")
+ output_file = ctx.actions.declare_file(output_filename)
+ outputs.append(output_file)
+ args.add("-o", output_file.path)
+
+ args.add_all(ctx.attr.lexopts)
+ args.add(src_file)
+
+ ctx.actions.run(
+ executable = ctx.executable._flex,
+ env = {
+ "M4": ctx.executable._m4.path,
+ },
+ arguments = [args],
+ inputs = [src_file],
+ tools = [ctx.executable._m4],
+ outputs = [output_file],
+ mnemonic = "Flex",
+ progress_message = "Generating %s from %s" % (
+ output_filename,
+ src_file.short_path,
+ ),
+ )
+ return [DefaultInfo(files = depset(outputs))]
+
+genlex = rule(
+ implementation = _genlex_impl,
+ doc = "Generate C/C++-language sources from a lex file using Flex.",
+ attrs = {
+ "srcs": attr.label_list(
+ mandatory = True,
+ allow_files = [".l", ".ll"],
+ doc = "The lex source file for this rule",
+ ),
+ "lexopts": attr.string_list(
+ doc = "A list of options to be added to the flex command line.",
+ ),
+ "_flex": attr.label(
+ default = "//prebuilts/build-tools:flex",
+ executable = True,
+ cfg = "exec",
+ ),
+ "_m4": attr.label(
+ default = "//prebuilts/build-tools:m4",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+)
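A hedged usage sketch for genlex in a BUILD file; the consuming cc_library_static target is an illustrative assumption and presumes that rule accepts generated C sources in srcs.
```
load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
load("//build/bazel/rules/cc:flex.bzl", "genlex")

genlex(
    name = "html_lex",
    srcs = ["html.l"],
)

cc_library_static(
    name = "html_parser",
    # Assumption: the library rule accepts the generated .c file via srcs.
    srcs = [":html_lex"],
)
```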
diff --git a/rules/cc/flex_test.bzl b/rules/cc/flex_test.bzl
new file mode 100644
index 00000000..03fb644c
--- /dev/null
+++ b/rules/cc/flex_test.bzl
@@ -0,0 +1,346 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/test_common:args.bzl", "get_arg_value")
+load(
+ "//build/bazel/rules/test_common:paths.bzl",
+ "get_output_and_package_dir_based_path",
+ "get_package_dir_based_path",
+)
+load(":flex.bzl", "genlex")
+
+def _single_l_file_to_c_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(env, 1, len(actions))
+
+ actual_list_foo = [input.path for input in actions[0].inputs.to_list()]
+ expected_path_foo = get_package_dir_based_path(env, "foo.l")
+ asserts.true(
+ env,
+ expected_path_foo in actual_list_foo,
+ ("Input file %s not present or incorrect in Bazel action for " +
+ "target foo. Actual list of inputs: %s") % (
+ expected_path_foo,
+ actual_list_foo,
+ ),
+ )
+ expected_output = get_output_and_package_dir_based_path(env, "foo.c")
+ actual_outputs = [output.path for output in actions[0].outputs.to_list()]
+ asserts.true(
+ env,
+ expected_output in actual_outputs,
+ ("Expected output %s not present or incorrect in Bazel action\n" +
+ "Actual list of outputs: %s") % (
+ expected_output,
+ actual_outputs,
+ ),
+ )
+
+ return analysistest.end(env)
+
+single_l_file_to_c_test = analysistest.make(_single_l_file_to_c_test_impl)
+
+def _test_single_l_file_to_c():
+ name = "single_l_file_to_c"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo.l"],
+ tags = ["manual"],
+ )
+ single_l_file_to_c_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _single_ll_file_to_cc_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(env, 1, len(actions))
+
+ actual_list_foo = [input.path for input in actions[0].inputs.to_list()]
+ expected_path_foo = get_package_dir_based_path(env, "foo.ll")
+ asserts.true(
+ env,
+ expected_path_foo in actual_list_foo,
+ ("Input file %s not present or incorrect in Bazel action for " +
+ "target foo. Actual list of inputs: %s") % (
+ expected_path_foo,
+ actual_list_foo,
+ ),
+ )
+ expected_output = get_output_and_package_dir_based_path(env, "foo.cc")
+ actual_outputs = [output.path for output in actions[0].outputs.to_list()]
+ asserts.true(
+ env,
+ expected_output in actual_outputs,
+ ("Expected output %s not present or incorrect in Bazel action\n" +
+ "Actual list of outputs: %s") % (
+ expected_output,
+ actual_outputs,
+ ),
+ )
+
+ return analysistest.end(env)
+
+single_ll_file_to_cc_test = analysistest.make(_single_ll_file_to_cc_test_impl)
+
+def _test_single_ll_file_to_cc():
+ name = "single_ll_file_to_cc"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo.ll"],
+ tags = ["manual"],
+ )
+ single_ll_file_to_cc_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _multiple_files_correct_type_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+
+ asserts.equals(env, 2, len(actions))
+
+ actual_list_foo = [input.path for input in actions[0].inputs.to_list()]
+ expected_path_foo = get_package_dir_based_path(env, "foo.l")
+ asserts.true(
+ env,
+ expected_path_foo in actual_list_foo,
+ ("Input file %s not present or incorrect in Bazel action for " +
+ "target foo. Actual list of inputs: %s") % (
+ expected_path_foo,
+ actual_list_foo,
+ ),
+ )
+ actual_list_bar = [input.path for input in actions[1].inputs.to_list()]
+ expected_path_bar = get_package_dir_based_path(env, "bar.l")
+ asserts.true(
+ env,
+ expected_path_bar in actual_list_bar,
+ ("Input file %s not present or incorrect in Bazel action for " +
+ "target bar. Actual list of inputs: %s") % (
+ expected_path_bar,
+ actual_list_bar,
+ ),
+ )
+
+ expected_output = get_output_and_package_dir_based_path(env, "foo.c")
+ actual_outputs = [output.path for output in actions[0].outputs.to_list()]
+ asserts.true(
+ env,
+ expected_output in actual_outputs,
+ ("Expected output %s not present or incorrect in Bazel action" +
+ "for source file foo.l\n" +
+ "Actual list of outputs: %s") % (
+ expected_output,
+ actual_outputs,
+ ),
+ )
+ expected_output = get_output_and_package_dir_based_path(env, "bar.c")
+ actual_outputs = [output.path for output in actions[1].outputs.to_list()]
+ asserts.true(
+ env,
+ expected_output in actual_outputs,
+ ("Expected output %s not present or incorrect in Bazel action " +
+ "for source file bar.l\n" +
+ "Actual list of outputs: %s") % (
+ expected_output,
+ actual_outputs,
+ ),
+ )
+
+ return analysistest.end(env)
+
+multiple_files_correct_type_test = analysistest.make(
+ _multiple_files_correct_type_test_impl,
+)
+
+def _test_multiple_files_correct_type():
+ name = "multiple_files_correct_type"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo.l", "bar.l"],
+ tags = ["manual"],
+ )
+ multiple_files_correct_type_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _output_arg_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ actual_list = actions[0].argv
+ cli_string = " ".join(actions[0].argv)
+ expected_value = get_output_and_package_dir_based_path(env, "foo.c")
+
+ asserts.equals(
+ env,
+ expected_value,
+ get_arg_value(actual_list, "-o"),
+ ("Argument -o not found or had unexpected value.\n" +
+ "Expected value: %s\n" +
+ "Command: %s") % (
+ expected_value,
+ cli_string,
+ ),
+ )
+
+ return analysistest.end(env)
+
+output_arg_test = analysistest.make(_output_arg_test_impl)
+
+def _test_output_arg():
+ name = "output_arg"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo.l"],
+ tags = ["manual"],
+ )
+ output_arg_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _input_arg_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ actual_argv = actions[0].argv
+ expected_value = get_package_dir_based_path(env, "foo.l")
+
+ asserts.true(
+ env,
+ expected_value in actual_argv,
+ "Input file %s not present or incorrect in flex command args" %
+ expected_value,
+ )
+
+ return analysistest.end(env)
+
+input_arg_test = analysistest.make(_input_arg_test_impl)
+
+def _test_input_arg():
+ name = "input_arg"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo.l"],
+ tags = ["manual"],
+ )
+ input_arg_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _lexopts_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ actual_argv = actions[0].argv
+ asserts.true(
+ env,
+ "foo_opt" in actual_argv,
+ ("Did not find expected lexopt foo_opt %s for target foo in test " +
+ "lexopts_test") % actual_argv,
+ )
+ asserts.true(
+ env,
+ "bar_opt" in actual_argv,
+ ("Did not find expected lexopt bar_opt %s for target bars in test " +
+ "lexopts_test") % actual_argv,
+ )
+
+ return analysistest.end(env)
+
+lexopts_test = analysistest.make(_lexopts_test_impl)
+
+def _test_lexopts():
+ name = "lexopts"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo_lexopts.ll"],
+ lexopts = ["foo_opt", "bar_opt"],
+ tags = ["manual"],
+ )
+
+ lexopts_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+# TODO(b/190006308): When fixed, l and ll sources can coexist. Remove this test.
+def _l_and_ll_files_fails_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ asserts.expect_failure(
+ env,
+ "srcs contains both .l and .ll files. Please use separate targets.",
+ )
+
+ return analysistest.end(env)
+
+l_and_ll_files_fails_test = analysistest.make(
+ _l_and_ll_files_fails_test_impl,
+ expect_failure = True,
+)
+
+def _test_l_and_ll_files_fails():
+ name = "l_and_ll_files_fails"
+ test_name = name + "_test"
+ genlex(
+ name = name,
+ srcs = ["foo_fails.l", "bar_fails.ll"],
+ tags = ["manual"],
+ )
+ l_and_ll_files_fails_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def flex_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_single_l_file_to_c(),
+ _test_single_ll_file_to_cc(),
+ _test_multiple_files_correct_type(),
+ _test_output_arg(),
+ _test_input_arg(),
+ _test_lexopts(),
+ _test_l_and_ll_files_fails(),
+ ],
+ )
diff --git a/rules/cc/generate_toc.bzl b/rules/cc/generate_toc.bzl
index dc3d6ad3..ee3bdd20 100644
--- a/rules/cc/generate_toc.bzl
+++ b/rules/cc/generate_toc.bzl
@@ -1,20 +1,18 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-"""A macro to generate table of contents files of symbols from a shared library."""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A function to generate table of contents files of symbols from a shared library."""
CcTocInfo = provider(
"Information about the table of contents of a shared library",
@@ -23,8 +21,8 @@ CcTocInfo = provider(
},
)
-def _shared_library_toc_impl(ctx):
- so_name = "lib" + ctx.attr.name + ".so"
+def generate_toc(ctx, name, input_file):
+ so_name = "lib" + name + ".so"
toc_name = so_name + ".toc"
out_file = ctx.actions.declare_file(toc_name)
d_file = ctx.actions.declare_file(toc_name + ".d")
@@ -32,7 +30,7 @@ def _shared_library_toc_impl(ctx):
env = {
"CLANG_BIN": ctx.executable._readelf.dirname,
},
- inputs = ctx.files.src,
+ inputs = [input_file],
tools = [
ctx.executable._readelf,
],
@@ -42,39 +40,11 @@ def _shared_library_toc_impl(ctx):
# Only Linux shared libraries for now.
"--elf",
"-i",
- ctx.files.src[0].path,
+ input_file.path,
"-o",
out_file.path,
"-d",
d_file.path,
],
)
-
- return [
- CcTocInfo(toc = out_file),
- DefaultInfo(files = depset([out_file])),
- ]
-
-shared_library_toc = rule(
- implementation = _shared_library_toc_impl,
- attrs = {
- "src": attr.label(
- # TODO(b/217908237): reenable allow_single_file
- # allow_single_file = True,
- mandatory = True,
- ),
- "_toc_script": attr.label(
- cfg = "host",
- executable = True,
- allow_single_file = True,
- default = "//build/soong/scripts:toc.sh",
- ),
- "_readelf": attr.label(
- cfg = "host",
- executable = True,
- allow_single_file = True,
- default = "//prebuilts/clang/host/linux-x86:llvm-readelf",
- ),
- },
- toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
-)
+ return CcTocInfo(toc = out_file)
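With the rule removed, callers now invoke generate_toc directly from their own implementations. A hedged sketch of such a caller; the rule shape, its attributes, and the implicit tool attributes it would carry (e.g. _readelf, as used by the action above) are illustrative assumptions.
```
# Hypothetical sketch only; real callers define the tool attributes
# (e.g. _readelf) that the TOC action requires.
def _my_shared_library_impl(ctx):
    shared_lib = ctx.file.shared_library  # assumed single-file attribute
    toc_info = generate_toc(ctx, ctx.attr.name, shared_lib)
    return [
        toc_info,
        DefaultInfo(files = depset([toc_info.toc])),
    ]
```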
diff --git a/rules/cc/lto_transitions.bzl b/rules/cc/lto_transitions.bzl
new file mode 100644
index 00000000..c0bb79f2
--- /dev/null
+++ b/rules/cc/lto_transitions.bzl
@@ -0,0 +1,62 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FEATURES_ATTR_KEY = "features"
+LTO_FEATURE = "android_thin_lto"
+CLI_FEATURES_KEY = "//command_line_option:features"
+
+# This propagates LTO enablement down the dependency tree for modules that
+# enable it explicitly
+# TODO(b/270418352): Move this logic to the incoming transition when incoming
+# transitions support select statements
+def lto_deps_transition_impl(settings, attr):
+ features = getattr(attr, FEATURES_ATTR_KEY)
+ new_cli_features = list(settings[CLI_FEATURES_KEY])
+ if LTO_FEATURE in features and LTO_FEATURE not in new_cli_features:
+ new_cli_features.append(LTO_FEATURE)
+
+ return {
+ CLI_FEATURES_KEY: new_cli_features,
+ }
+
+lto_deps_transition = transition(
+ implementation = lto_deps_transition_impl,
+ inputs = [
+ CLI_FEATURES_KEY,
+ ],
+ outputs = [
+ CLI_FEATURES_KEY,
+ ],
+)
+
+# This un-propagates LTO enablement for shared deps, as LTO should only
+# propagate down static deps. This approach avoids an error where we end up with
+# two config variants of the same dependency
+def apply_drop_lto(old_cli_features):
+ new_cli_features = list(old_cli_features)
+ if LTO_FEATURE in old_cli_features:
+ new_cli_features.remove(LTO_FEATURE)
+
+ return {
+ CLI_FEATURES_KEY: new_cli_features,
+ }
+
+def drop_lto_transition_impl(settings, _):
+ return apply_drop_lto(settings[CLI_FEATURES_KEY])
+
+drop_lto_transition = transition(
+ implementation = drop_lto_transition_impl,
+ inputs = [CLI_FEATURES_KEY],
+ outputs = [CLI_FEATURES_KEY],
+)
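A hedged sketch of how the two transitions could be wired onto dependency edges so LTO flows down static deps but not shared ones; the rule and attribute names are illustrative assumptions.
```
# Hypothetical sketch only; rule and attribute names are assumptions.
_my_cc_library = rule(
    implementation = _my_cc_library_impl,
    attrs = {
        # Static deps inherit android_thin_lto from this target's features.
        "deps": attr.label_list(cfg = lto_deps_transition),
        # Shared deps have the feature stripped again to avoid duplicate
        # config variants of the same dependency.
        "dynamic_deps": attr.label_list(cfg = drop_lto_transition),
        # Required by Bazel for rules that use Starlark transitions.
        "_allowlist_function_transition": attr.label(
            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
        ),
    },
)
```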
diff --git a/rules/cc/lto_transitions_test.bzl b/rules/cc/lto_transitions_test.bzl
new file mode 100644
index 00000000..e6916b91
--- /dev/null
+++ b/rules/cc/lto_transitions_test.bzl
@@ -0,0 +1,247 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_binary.bzl", "cc_binary")
+load("//build/bazel/rules/cc:cc_library_shared.bzl", "cc_library_shared")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load(
+ "//build/bazel/rules/cc/testing:transitions.bzl",
+ "ActionArgsInfo",
+ "compile_action_argv_aspect_generator",
+)
+
+lto_flag = "-flto=thin"
+static_cpp_suffix = "_cpp"
+shared_cpp_suffix = "__internal_root_cpp"
+binary_suffix = "__internal_root"
+
+def _lto_deps_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ argv_map = target_under_test[ActionArgsInfo].argv_map
+
+ for target in ctx.attr.targets_with_lto:
+ asserts.true(
+ env,
+ target in argv_map,
+ "can't find {} in argv map".format(target),
+ )
+ if target in argv_map:
+ argv = argv_map[target]
+ asserts.true(
+ env,
+ lto_flag in argv,
+ "Compile action of {} didn't have LTO but it was expected".format(
+ target,
+ ),
+ )
+ for target in ctx.attr.targets_without_lto:
+ asserts.true(
+ env,
+ target in argv_map,
+ "can't find {} in argv map".format(target),
+ )
+ if target in argv_map:
+ argv = argv_map[target]
+ asserts.true(
+ env,
+ lto_flag not in argv,
+ "Compile action of {} had LTO but it wasn't expected".format(
+ target,
+ ),
+ )
+ return analysistest.end(env)
+
+_compile_action_argv_aspect = compile_action_argv_aspect_generator({
+ "_cc_library_combiner": ["deps", "roots", "includes"],
+ "_cc_includes": ["deps"],
+ "_cc_library_shared_proxy": ["deps"],
+ "stripped_binary": ["androidmk_deps"],
+})
+
+lto_deps_test = analysistest.make(
+ _lto_deps_test_impl,
+ attrs = {
+ "targets_with_lto": attr.string_list(),
+ "targets_without_lto": attr.string_list(),
+ },
+ # We need to use an aspect to examine the actions of the target under test's
+ # dependencies after the transition; checking the dependencies directly by
+ # name would give the info from before the transition takes effect.
+ extra_target_under_test_aspects = [_compile_action_argv_aspect],
+)
+
+def _test_static_deps_have_lto():
+ name = "static_deps_have_lto"
+ requested_target_name = name + "_requested_target"
+ static_dep_name = name + "_static_dep"
+ static_dep_of_static_dep_name = "_static_dep_of_static_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = requested_target_name,
+ srcs = ["foo.cpp"],
+ deps = [static_dep_name],
+ features = ["android_thin_lto"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = static_dep_name,
+ srcs = ["bar.cpp"],
+ deps = [static_dep_of_static_dep_name],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = static_dep_of_static_dep_name,
+ srcs = ["baz.cpp"],
+ tags = ["manual"],
+ )
+
+ lto_deps_test(
+ name = test_name,
+ target_under_test = requested_target_name,
+ targets_with_lto = [
+ requested_target_name + static_cpp_suffix,
+ static_dep_name + static_cpp_suffix,
+ static_dep_of_static_dep_name + static_cpp_suffix,
+ ],
+ targets_without_lto = [],
+ )
+
+ return test_name
+
+def _test_deps_of_shared_have_lto_if_enabled():
+ name = "deps_of_shared_have_lto_if_enabled"
+ requested_target_name = name + "_requested_target"
+ shared_dep_name = name + "_shared_dep"
+ static_dep_of_shared_dep_name = name + "_static_dep_of_shared_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = requested_target_name,
+ srcs = ["foo.cpp"],
+ dynamic_deps = [shared_dep_name],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = shared_dep_name,
+ srcs = ["bar.cpp"],
+ deps = [static_dep_of_shared_dep_name],
+ features = ["android_thin_lto"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = static_dep_of_shared_dep_name,
+ srcs = ["baz.cpp"],
+ tags = ["manual"],
+ )
+
+ lto_deps_test(
+ name = test_name,
+ target_under_test = requested_target_name,
+ targets_with_lto = [
+ shared_dep_name + "__internal_root_cpp",
+ static_dep_of_shared_dep_name + static_cpp_suffix,
+ ],
+ targets_without_lto = [requested_target_name + static_cpp_suffix],
+ )
+
+ return test_name
+
+def _test_deps_of_shared_deps_no_lto_if_disabled():
+ name = "deps_of_shared_deps_no_lto_if_disabled"
+ requested_target_name = name + "_requested_target"
+ shared_dep_name = name + "_shared_dep"
+ static_dep_of_shared_dep_name = name + "_static_dep_of_shared_dep"
+ test_name = name + "_test"
+
+ cc_library_static(
+ name = requested_target_name,
+ srcs = ["foo.cpp"],
+ dynamic_deps = [shared_dep_name],
+ features = ["android_thin_lto"],
+ tags = ["manual"],
+ )
+
+ cc_library_shared(
+ name = shared_dep_name,
+ srcs = ["bar.cpp"],
+ deps = [static_dep_of_shared_dep_name],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = static_dep_of_shared_dep_name,
+ srcs = ["baz.cpp"],
+ tags = ["manual"],
+ )
+
+ lto_deps_test(
+ name = test_name,
+ target_under_test = requested_target_name,
+ targets_with_lto = [requested_target_name + static_cpp_suffix],
+ targets_without_lto = [
+ shared_dep_name + shared_cpp_suffix,
+ static_dep_of_shared_dep_name + static_cpp_suffix,
+ ],
+ )
+
+ return test_name
+
+def _test_binary_propagates_to_static_deps():
+ name = "binary_propagates_to_static_deps"
+ requested_target_name = name + "_requested_target"
+ dep_name = name + "_dep"
+ test_name = name + "_test"
+
+ cc_binary(
+ name = requested_target_name,
+ srcs = ["foo.cpp"],
+ deps = [dep_name],
+ features = ["android_thin_lto"],
+ tags = ["manual"],
+ )
+
+ cc_library_static(
+ name = dep_name,
+ srcs = ["bar.cpp"],
+ tags = ["manual"],
+ )
+
+ lto_deps_test(
+ name = test_name,
+ target_under_test = requested_target_name,
+ targets_with_lto = [
+ requested_target_name + binary_suffix + static_cpp_suffix,
+ dep_name + static_cpp_suffix,
+ ],
+ )
+
+ return test_name
+
+def lto_transition_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_static_deps_have_lto(),
+ _test_deps_of_shared_have_lto_if_enabled(),
+ _test_deps_of_shared_deps_no_lto_if_disabled(),
+ _test_binary_propagates_to_static_deps(),
+ ],
+ )
diff --git a/rules/cc/prebuilt_library_shared.bzl b/rules/cc/prebuilt_library_shared.bzl
deleted file mode 100644
index c25ceff4..00000000
--- a/rules/cc/prebuilt_library_shared.bzl
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-def prebuilt_library_shared(
- name,
- shared_library,
- alwayslink = None,
- **kwargs):
- "Bazel macro to correspond with the *_prebuilt_library_shared Soong module types"
-
- native.cc_import(
- name = name,
- shared_library = shared_library,
- alwayslink = alwayslink,
- **kwargs
- )
-
- native.cc_import(
- name = name + "_alwayslink",
- shared_library = shared_library,
- alwayslink = True,
- **kwargs
- )
diff --git a/rules/cc/prebuilt_library_static.bzl b/rules/cc/prebuilt_library_static.bzl
deleted file mode 100644
index 950ff163..00000000
--- a/rules/cc/prebuilt_library_static.bzl
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-def prebuilt_library_static(
- name,
- static_library,
- alwayslink = None,
- export_includes = [],
- export_system_includes = [],
- **kwargs):
- "Bazel macro to correspond with the *_prebuilt_library_static Soong module types"
-
- # TODO: Handle includes similarly to cc_library_static
- # e.g. includes = ["clang-r416183b/prebuilt_include/llvm/lib/Fuzzer"],
- native.cc_import(
- name = name,
- static_library = static_library,
- alwayslink = alwayslink,
- **kwargs
- )
-
- native.cc_import(
- name = name + "_alwayslink",
- static_library = static_library,
- alwayslink = True,
- **kwargs
- )
diff --git a/rules/cc/static_libc.bzl b/rules/cc/static_libc.bzl
index 935125c8..62277e22 100644
--- a/rules/cc/static_libc.bzl
+++ b/rules/cc/static_libc.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
# Rules and macros to define a cc toolchain with a static libc.
# Used to bootstrap cc development using the bionic lib build by Soong.
diff --git a/rules/cc/stl.bzl b/rules/cc/stl.bzl
index 0c9e03db..d91aa00f 100644
--- a/rules/cc/stl.bzl
+++ b/rules/cc/stl.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
# Helpers for stl property resolution.
# These mappings taken from build/soong/cc/stl.go
@@ -22,8 +20,6 @@ load("//build/bazel/product_variables:constants.bzl", "constants")
_libcpp_stl_names = {
"libc++": True,
"libc++_static": True,
- "c++_shared": True,
- "c++_static": True,
"": True,
"system": True,
}
@@ -45,27 +41,111 @@ _static_binary_deps = select({
"//conditions:default": [],
})
-def static_stl_deps(stl_name):
- # TODO(b/201079053): Handle useSdk, windows, fuschia, preferably with selects.
- if stl_name in _libcpp_stl_names:
- return ["//external/libcxx:libc++_static"] + _common_static_deps
- elif stl_name == "none":
- return []
- else:
+def _stl_name_resolver(stl_name, is_shared):
+ if stl_name == "none":
+ return stl_name
+
+ if stl_name not in _libcpp_stl_names:
fail("Unhandled stl %s" % stl_name)
-def static_binary_stl_deps(stl_name):
- base = static_stl_deps(stl_name)
+ if stl_name in ("", "system"):
+ if is_shared:
+ stl_name = "libc++"
+ else:
+ stl_name = "libc++_static"
+ return stl_name
+
+def stl_info_from_attr(stl_name, is_shared, is_binary = False):
+ deps = _stl_deps(stl_name, is_shared, is_binary)
+ flags = _stl_flags(stl_name, is_shared)
+ return struct(
+ static_deps = deps.static,
+ shared_deps = deps.shared,
+ cppflags = flags.cppflags,
+ linkopts = flags.linkopts,
+ )
+
+def _stl_deps(stl_name, is_shared, is_binary = False):
+ stl_name = _stl_name_resolver(stl_name, is_shared)
if stl_name == "none":
- return base
- else:
- return base + _static_binary_deps
-
-def shared_stl_deps(stl_name):
- # TODO(b/201079053): Handle useSdk, windows, fuschia, preferably with selects.
- if stl_name in _libcpp_stl_names:
- return (_common_static_deps, ["//external/libcxx:libc++"])
- elif stl_name == "none":
- return ([], [])
+ return struct(static = [], shared = [])
+
+ shared, static = [], []
+ if stl_name == "libc++":
+ static, shared = _shared_stl_deps()
+ elif stl_name == "libc++_static":
+ static = _static_stl_deps()
+ if is_binary:
+ static += _static_binary_deps
+ return struct(
+ static = static,
+ shared = shared,
+ )
+
+def _static_stl_deps():
+ # TODO(b/201079053): Handle useSdk, windows, preferably with selects.
+ return ["//external/libcxx:libc++_static"] + _common_static_deps
+
+def _shared_stl_deps():
+ return (_common_static_deps, ["//external/libcxx:libc++"])
+
+def _stl_flags(stl_name, is_shared):
+ """returns flags that control STL inclusion
+
+ Should be kept up to date with
+ https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/stl.go;l=197;drc=8722ca5486fa62c07520e09db54b1b330b48da17
+
+ Args:
+ stl_name: string, name of STL library to use
+ is_shared: bool, if true, the STL should be linked dynamically
+ Returns:
+ struct containing flags for CC compilation
+ """
+ stl_name = _stl_name_resolver(stl_name, is_shared)
+
+ cppflags_darwin = []
+ cppflags_windows_not_bionic = []
+ cppflags_not_bionic = []
+ linkopts_not_bionic = []
+ if stl_name in ("libc++", "libc++_static"):
+ cppflags_not_bionic.append("-nostdinc++")
+ linkopts_not_bionic.append("-nostdlib++")
+
+ # libc++'s headers are annotated with availability macros that
+ # indicate which version of Mac OS was the first to ship with a
+ # libc++ feature available in its *system's* libc++.dylib. We do
+ # not use the system's library, but rather ship our own. As such,
+ # these availability attributes are meaningless for us but cause
+ # build breaks when we try to use code that would not be available
+ # in the system's dylib.
+ cppflags_darwin.append("-D_LIBCPP_DISABLE_AVAILABILITY")
+
+ # Disable visibility annotations since we're using static libc++.
+ cppflags_windows_not_bionic.append("-D_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS")
+ cppflags_windows_not_bionic.append("-D_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS")
+
+ # Use Win32 threads in libc++.
+ cppflags_windows_not_bionic.append("-D_LIBCPP_HAS_THREAD_API_WIN32")
+ elif stl_name == "none":
+ cppflags_not_bionic.append("-nostdinc++")
+ linkopts_not_bionic.append("-nostdlib++")
else:
- fail("Unhandled stl %s" % stl_name)
+ # TODO(b/201079053): Handle NDK STL flags.
+ pass
+
+ return struct(
+ cppflags = select({
+ "//build/bazel/platforms/os:bionic": [],
+ "//conditions:default": cppflags_not_bionic,
+ }) + select({
+ "//build/bazel/platforms/os:darwin": cppflags_darwin,
+ "//build/bazel/platforms/os:windows": (
+ cppflags_windows_not_bionic
+ ),
+ "//conditions:default": [],
+ }),
+ linkopts = select({
+ "//build/bazel/platforms/os:bionic": [],
+ "//conditions:default": linkopts_not_bionic,
+ }),
+ )
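A hedged sketch of how a library macro might consume the struct returned by stl_info_from_attr; the macro, the wrapped rule, and its attribute names are illustrative assumptions.
```
# Hypothetical sketch only; the wrapped rule and attribute names are assumptions.
def my_cc_library_shared(name, stl = "", **kwargs):
    stl_info = stl_info_from_attr(stl, is_shared = True)
    _my_cc_library_shared(
        name = name,
        implementation_deps = stl_info.static_deps,
        implementation_dynamic_deps = stl_info.shared_deps,
        copts = stl_info.cppflags,
        linkopts = stl_info.linkopts,
        **kwargs
    )
```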
diff --git a/rules/cc/stl_test.bzl b/rules/cc/stl_test.bzl
new file mode 100644
index 00000000..6b0e2fb7
--- /dev/null
+++ b/rules/cc/stl_test.bzl
@@ -0,0 +1,994 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":stl.bzl", "stl_info_from_attr")
+
+_ANDROID_STATIC_DEPS = ["//external/libcxxabi:libc++demangle"]
+_STATIC_DEP = ["//external/libcxx:libc++_static"]
+_ANDROID_BINARY_STATIC_DEP = ["//prebuilts/clang/host/linux-x86:libunwind"]
+_SHARED_DEP = ["//external/libcxx:libc++"]
+
+_ANDROID_CPPFLAGS = []
+_ANDROID_LINKOPTS = []
+_LINUX_CPPFLAGS = ["-nostdinc++"]
+_LINUX_LINKOPTS = ["-nostdlib++"]
+_LINUX_BIONIC_CPPFLAGS = []
+_LINUX_BIONIC_LINKOPTS = []
+_DARWIN_CPPFLAGS = [
+ "-nostdinc++",
+ "-D_LIBCPP_DISABLE_AVAILABILITY",
+]
+_DARWIN_CPPFLAGS_STL_NONE = ["-nostdinc++"]
+_DARWIN_LINKOPTS = ["-nostdlib++"]
+_WINDOWS_CPPFLAGS = [
+ "-nostdinc++",
+ "-D_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS",
+ "-D_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS",
+ "-D_LIBCPP_HAS_THREAD_API_WIN32",
+]
+_WINDOWS_CPPFLAGS_STL_NONE = ["-nostdinc++"]
+_WINDOWS_LINKOPTS = ["-nostdlib++"]
+
+_StlInfo = provider(fields = ["static", "shared"])
+
+def _stl_impl(ctx):
+ return [
+ _StlInfo(
+ static = ctx.attr.static,
+ shared = ctx.attr.shared,
+ ),
+ ]
+
+_stl = rule(
+ implementation = _stl_impl,
+ attrs = {
+ "shared": attr.string_list(),
+ "static": attr.string_list(),
+ },
+)
+
+_StlFlagsInfo = provider(fields = ["cppflags", "linkopts"])
+
+def _stl_flags_impl(ctx):
+ return [
+ _StlFlagsInfo(
+ cppflags = ctx.attr.cppflags,
+ linkopts = ctx.attr.linkopts,
+ ),
+ ]
+
+_stl_flags = rule(
+ implementation = _stl_flags_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+)
+
+def _test_stl(
+ stl,
+ is_shared,
+ is_binary,
+ android_deps,
+ non_android_deps,
+ android_flags,
+ linux_flags,
+ linux_bionic_flags,
+ darwin_flags,
+ windows_flags):
+ target_name = _stl_deps_target(stl, is_shared, is_binary)
+ flags_target_name = _stl_flags_target(stl, is_shared, is_binary)
+ android_test_name = target_name + "_android_test"
+ non_android_test_name = target_name + "_non_android_test"
+ android_flags_test_name = target_name + "_android_flags_test"
+ linux_flags_test_name = target_name + "_linux_flags_test"
+ linux_bionic_flags_test_name = target_name + "_linux_bionic_flags_test"
+ darwin_flags_test_name = target_name + "_darwin_flags_test"
+ windows_flags_test_name = target_name + "_windows_flags_test"
+
+ _stl_deps_android_test(
+ name = android_test_name,
+ static = android_deps.static,
+ shared = android_deps.shared,
+ target_under_test = target_name,
+ )
+
+ _stl_deps_non_android_test(
+ name = non_android_test_name,
+ static = non_android_deps.static,
+ shared = non_android_deps.shared,
+ target_under_test = target_name,
+ )
+
+ _stl_flags_android_test(
+ name = android_flags_test_name,
+ cppflags = android_flags.cppflags,
+ linkopts = android_flags.linkopts,
+ target_under_test = flags_target_name,
+ )
+
+ _stl_flags_linux_test(
+ name = linux_flags_test_name,
+ cppflags = linux_flags.cppflags,
+ linkopts = linux_flags.linkopts,
+ target_under_test = flags_target_name,
+ )
+
+ _stl_flags_linux_bionic_test(
+ name = linux_bionic_flags_test_name,
+ cppflags = linux_bionic_flags.cppflags,
+ linkopts = linux_bionic_flags.linkopts,
+ target_under_test = flags_target_name,
+ )
+
+ _stl_flags_darwin_test(
+ name = darwin_flags_test_name,
+ cppflags = darwin_flags.cppflags,
+ linkopts = darwin_flags.linkopts,
+ target_under_test = flags_target_name,
+ )
+
+ _stl_flags_windows_test(
+ name = windows_flags_test_name,
+ cppflags = windows_flags.cppflags,
+ linkopts = windows_flags.linkopts,
+ target_under_test = flags_target_name,
+ )
+
+ return [
+ android_test_name,
+ non_android_test_name,
+ android_flags_test_name,
+ linux_flags_test_name,
+ linux_bionic_flags_test_name,
+ darwin_flags_test_name,
+ windows_flags_test_name,
+ ]
+
+def _stl_deps_target(name, is_shared, is_binary):
+ target_name = name if name else "empty"
+ target_name += "_shared" if is_shared else "_static"
+ target_name += "_bin" if is_binary else "_lib"
+ info = stl_info_from_attr(name, is_shared, is_binary)
+
+ _stl(
+ name = target_name,
+ shared = info.shared_deps,
+ static = info.static_deps,
+ tags = ["manual"],
+ )
+
+ return target_name
+
+def _stl_deps_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ stl_info = analysistest.target_under_test(env)[_StlInfo]
+
+ expected_static = sets.make(ctx.attr.static)
+ actual_static = sets.make(stl_info.static)
+ asserts.set_equals(
+ env,
+ expected = expected_static,
+ actual = actual_static,
+ )
+
+ expected_shared = sets.make(ctx.attr.shared)
+ actual_shared = sets.make(stl_info.shared)
+ asserts.set_equals(
+ env,
+ expected = expected_shared,
+ actual = actual_shared,
+ )
+
+ return analysistest.end(env)
+
+def _stl_flags_target(name, is_shared, is_binary):
+ target_name = name if name else "empty"
+ target_name += "_shared" if is_shared else "_static"
+ target_name += "_bin" if is_binary else "_lib"
+ target_name += "_flags"
+ info = stl_info_from_attr(name, is_shared)
+
+ _stl_flags(
+ name = target_name,
+ cppflags = info.cppflags,
+ linkopts = info.linkopts,
+ tags = ["manual"],
+ )
+
+ return target_name
+
+def _stl_flags_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ stl_info = analysistest.target_under_test(env)[_StlFlagsInfo]
+
+ expected_cppflags = sets.make(ctx.attr.cppflags)
+ actual_cppflags = sets.make(stl_info.cppflags)
+ asserts.set_equals(
+ env,
+ expected = expected_cppflags,
+ actual = actual_cppflags,
+ )
+
+ expected_linkopts = sets.make(ctx.attr.linkopts)
+ actual_linkopts = sets.make(stl_info.linkopts)
+ asserts.set_equals(
+ env,
+ expected = expected_linkopts,
+ actual = actual_linkopts,
+ )
+
+ return analysistest.end(env)
+
+__stl_flags_android_test = analysistest.make(
+ impl = _stl_flags_test_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+)
+
+def _stl_flags_android_test(**kwargs):
+ __stl_flags_android_test(
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ **kwargs
+ )
+
+__stl_flags_linux_test = analysistest.make(
+ impl = _stl_flags_test_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+)
+
+def _stl_flags_linux_test(**kwargs):
+ __stl_flags_linux_test(
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ **kwargs
+ )
+
+__stl_flags_linux_bionic_test = analysistest.make(
+ impl = _stl_flags_test_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+)
+
+def _stl_flags_linux_bionic_test(**kwargs):
+ __stl_flags_linux_bionic_test(
+ target_compatible_with = ["//build/bazel/platforms/os:linux_bionic"],
+ **kwargs
+ )
+
+_stl_flags_windows_test = analysistest.make(
+ impl = _stl_flags_test_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/rules/cc:windows_for_testing",
+ },
+)
+
+_stl_flags_darwin_test = analysistest.make(
+ impl = _stl_flags_test_impl,
+ attrs = {
+ "cppflags": attr.string_list(),
+ "linkopts": attr.string_list(),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/rules/cc:darwin_for_testing",
+ },
+)
+
+__stl_deps_android_test = analysistest.make(
+ impl = _stl_deps_test_impl,
+ attrs = {
+ "static": attr.string_list(),
+ "shared": attr.string_list(),
+ },
+)
+
+def _stl_deps_android_test(**kwargs):
+ __stl_deps_android_test(
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ **kwargs
+ )
+
+__stl_deps_non_android_test = analysistest.make(
+ impl = _stl_deps_test_impl,
+ attrs = {
+ "static": attr.string_list(),
+ "shared": attr.string_list(),
+ },
+)
+
+def _stl_deps_non_android_test(**kwargs):
+ __stl_deps_non_android_test(
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ **kwargs
+ )
+
+def stl_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests =
+ _test_stl(
+ stl = "",
+ is_shared = True,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "system",
+ is_shared = True,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++",
+ is_shared = True,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++_static",
+ is_shared = True,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "none",
+ is_shared = True,
+ is_binary = False,
+ android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS_STL_NONE,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS_STL_NONE,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "",
+ is_shared = False,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "system",
+ is_shared = False,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++",
+ is_shared = False,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++_static",
+ is_shared = False,
+ is_binary = False,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "none",
+ is_shared = False,
+ is_binary = False,
+ android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS_STL_NONE,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS_STL_NONE,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "",
+ is_shared = True,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _ANDROID_BINARY_STATIC_DEP,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "system",
+ is_shared = True,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _ANDROID_BINARY_STATIC_DEP,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++",
+ is_shared = True,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _ANDROID_BINARY_STATIC_DEP,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++_static",
+ is_shared = True,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP + _ANDROID_BINARY_STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "none",
+ is_shared = True,
+ is_binary = True,
+ android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS_STL_NONE,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS_STL_NONE,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "",
+ is_shared = False,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP + _ANDROID_BINARY_STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "system",
+ is_shared = False,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP + _ANDROID_BINARY_STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++",
+ is_shared = False,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _ANDROID_BINARY_STATIC_DEP,
+ shared = _SHARED_DEP,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = _SHARED_DEP,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "libc++_static",
+ is_shared = False,
+ is_binary = True,
+ android_deps = struct(
+ static = _ANDROID_STATIC_DEPS + _STATIC_DEP + _ANDROID_BINARY_STATIC_DEP,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = _STATIC_DEP,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ) +
+ _test_stl(
+ stl = "none",
+ is_shared = False,
+ is_binary = True,
+ android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ non_android_deps = struct(
+ static = None,
+ shared = None,
+ ),
+ android_flags = struct(
+ cppflags = _ANDROID_CPPFLAGS,
+ linkopts = _ANDROID_LINKOPTS,
+ ),
+ linux_flags = struct(
+ cppflags = _LINUX_CPPFLAGS,
+ linkopts = _LINUX_LINKOPTS,
+ ),
+ linux_bionic_flags = struct(
+ cppflags = _LINUX_BIONIC_CPPFLAGS,
+ linkopts = _LINUX_BIONIC_LINKOPTS,
+ ),
+ darwin_flags = struct(
+ cppflags = _DARWIN_CPPFLAGS_STL_NONE,
+ linkopts = _DARWIN_LINKOPTS,
+ ),
+ windows_flags = struct(
+ cppflags = _WINDOWS_CPPFLAGS_STL_NONE,
+ linkopts = _WINDOWS_LINKOPTS,
+ ),
+ ),
+ )
diff --git a/rules/cc/stripped_cc_common.bzl b/rules/cc/stripped_cc_common.bzl
index ad4f4b1f..cc0633eb 100644
--- a/rules/cc/stripped_cc_common.bzl
+++ b/rules/cc/stripped_cc_common.bzl
@@ -1,60 +1,71 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
"""A macro to handle shared library stripping."""
-load("@rules_cc//examples:experimental_cc_shared_library.bzl", "CcSharedLibraryInfo", "cc_shared_library")
-load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cpp_toolchain")
+load(":cc_library_common.bzl", "CcAndroidMkInfo")
+load(":clang_tidy.bzl", "collect_deps_clang_tidy_info")
+load(
+ ":lto_transitions.bzl",
+ "drop_lto_transition",
+ "lto_deps_transition",
+)
+
+CcUnstrippedInfo = provider(
+ "Provides unstripped binary/shared library",
+ fields = {
+ "unstripped": "unstripped target",
+ },
+)
# Keep this consistent with soong/cc/strip.go#NeedsStrip.
-def needs_strip(attrs):
- force_disable = attrs.none
- force_enable = attrs.all or attrs.keep_symbols or attrs.keep_symbols_and_debug_frame
- return force_enable and not force_disable
+def _needs_strip(ctx):
+ if ctx.attr.none:
+ return False
+ if ctx.target_platform_has_constraint(ctx.attr._android_constraint[platform_common.ConstraintValueInfo]):
+ return True
+ return (ctx.attr.all or ctx.attr.keep_symbols or
+ ctx.attr.keep_symbols_and_debug_frame or ctx.attr.keep_symbols_list)
# Keep this consistent with soong/cc/strip.go#strip and soong/cc/builder.go#transformStrip.
-def get_strip_args(attrs):
+def _get_strip_args(attrs):
strip_args = []
- keep_symbols_list = attrs.keep_symbols_list
keep_mini_debug_info = False
if attrs.keep_symbols:
- strip_args += ["--keep-symbols"]
+ strip_args.append("--keep-symbols")
elif attrs.keep_symbols_and_debug_frame:
- strip_args += ["--keep-symbols-and-debug-frame"]
- elif len(keep_symbols_list) > 0:
- strip_args += ["-k" + ",".join(keep_symbols_list)]
+ strip_args.append("--keep-symbols-and-debug-frame")
+ elif attrs.keep_symbols_list:
+ strip_args.append("-k" + ",".join(attrs.keep_symbols_list))
elif not attrs.all:
- strip_args += ["--keep-mini-debug-info"]
+ strip_args.append("--keep-mini-debug-info")
keep_mini_debug_info = True
if not keep_mini_debug_info:
- strip_args += ["--add-gnu-debuglink"]
+ strip_args.append("--add-gnu-debuglink")
return strip_args
# https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/builder.go;l=131-146;drc=master
-def _stripped_impl(ctx, prefix = "", extension = ""):
- out_file = ctx.actions.declare_file(prefix + ctx.attr.name + extension)
- if not needs_strip(ctx.attr):
- ctx.actions.symlink(
- output = out_file,
- target_file = ctx.files.src[0],
- )
- return out_file
- cc_toolchain = find_cpp_toolchain(ctx)
+def stripped_impl(ctx, prefix = "", suffix = "", extension = ""):
+ out_file = ctx.actions.declare_file(prefix + ctx.attr.name + suffix + extension)
+ if not _needs_strip(ctx):
+ ctx.actions.symlink(
+ output = out_file,
+ target_file = ctx.files.src[0],
+ )
+ return out_file
d_file = ctx.actions.declare_file(ctx.attr.name + ".d")
ctx.actions.run(
env = {
@@ -74,7 +85,7 @@ def _stripped_impl(ctx, prefix = "", extension = ""):
],
outputs = [out_file, d_file],
executable = ctx.executable._strip_script,
- arguments = get_strip_args(ctx.attr) + [
+ arguments = _get_strip_args(ctx.attr) + [
"-i",
ctx.files.src[0].path,
"-o",
@@ -82,75 +93,83 @@ def _stripped_impl(ctx, prefix = "", extension = ""):
"-d",
d_file.path,
],
+ mnemonic = "CcStrip",
)
return out_file
-common_attrs = {
- "keep_symbols": attr.bool(default = False),
- "keep_symbols_and_debug_frame": attr.bool(default = False),
- "all": attr.bool(default = False),
- "none": attr.bool(default = False),
- "keep_symbols_list": attr.string_list(default = []),
- "_xz": attr.label(
- cfg = "host",
+strip_attrs = dict(
+ keep_symbols = attr.bool(default = False),
+ keep_symbols_and_debug_frame = attr.bool(default = False),
+ all = attr.bool(default = False),
+ none = attr.bool(default = False),
+ keep_symbols_list = attr.string_list(default = []),
+)
+common_strip_attrs = dict(
+ strip_attrs,
+ _xz = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/build-tools:linux-x86/bin/xz",
),
- "_create_minidebuginfo": attr.label(
- cfg = "host",
+ _create_minidebuginfo = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/build-tools:linux-x86/bin/create_minidebuginfo",
),
- "_strip_script": attr.label(
- cfg = "host",
+ _strip_script = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//build/soong/scripts:strip.sh",
),
- "_ar": attr.label(
- cfg = "host",
+ _ar = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/clang/host/linux-x86:llvm-ar",
),
- "_strip": attr.label(
- cfg = "host",
+ _strip = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/clang/host/linux-x86:llvm-strip",
),
- "_readelf": attr.label(
- cfg = "host",
+ _readelf = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/clang/host/linux-x86:llvm-readelf",
),
- "_objcopy": attr.label(
- cfg = "host",
+ _objcopy = attr.label(
+ cfg = "exec",
executable = True,
allow_single_file = True,
default = "//prebuilts/clang/host/linux-x86:llvm-objcopy",
),
- "_cc_toolchain": attr.label(
+ _cc_toolchain = attr.label(
default = Label("@local_config_cc//:toolchain"),
providers = [cc_common.CcToolchainInfo],
),
-}
+ _android_constraint = attr.label(
+ default = Label("//build/bazel/platforms/os:android"),
+ ),
+)
def _stripped_shared_library_impl(ctx):
- out_file = _stripped_impl(ctx, "lib", ".so")
+ out_file = stripped_impl(ctx, prefix = "lib", extension = ".so")
return [
DefaultInfo(files = depset([out_file])),
ctx.attr.src[CcSharedLibraryInfo],
+ ctx.attr.src[OutputGroupInfo],
]
stripped_shared_library = rule(
implementation = _stripped_shared_library_impl,
attrs = dict(
- common_attrs,
+ common_strip_attrs,
src = attr.label(
mandatory = True,
# TODO(b/217908237): reenable allow_single_file
@@ -167,14 +186,21 @@ StrippedCcBinaryInfo = provider()
def _stripped_binary_impl(ctx):
common_providers = [
- ctx.attr.src[CcInfo],
- ctx.attr.src[InstrumentedFilesInfo],
- ctx.attr.src[DebugPackageInfo],
- ctx.attr.src[OutputGroupInfo],
- StrippedCcBinaryInfo(), # a marker for dependents
+ ctx.attr.src[0][CcInfo],
+ ctx.attr.src[0][InstrumentedFilesInfo],
+ ctx.attr.src[0][DebugPackageInfo],
+ ctx.attr.src[0][OutputGroupInfo],
+ StrippedCcBinaryInfo(), # a marker for dependents
+ CcUnstrippedInfo(
+ unstripped = ctx.attr.unstripped,
+ ),
+ collect_deps_clang_tidy_info(ctx),
+ ] + [
+ d[CcAndroidMkInfo]
+ for d in ctx.attr.androidmk_deps
]
- out_file = _stripped_impl(ctx)
+ out_file = stripped_impl(ctx, suffix = ctx.attr.suffix)
return [
DefaultInfo(
@@ -183,11 +209,45 @@ def _stripped_binary_impl(ctx):
),
] + common_providers
+_rule_attrs = dict(
+ common_strip_attrs,
+ src = attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ providers = [CcInfo],
+ cfg = lto_deps_transition,
+ ),
+ runtime_deps = attr.label_list(
+ providers = [CcInfo],
+ doc = "Deps that should be installed along with this target. Read by the apex cc aspect.",
+ ),
+ androidmk_deps = attr.label_list(
+ providers = [CcAndroidMkInfo],
+ ),
+ suffix = attr.string(),
+ unstripped = attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ cfg = lto_deps_transition,
+        doc = "Unstripped binary to be returned in the CcUnstrippedInfo provider.",
+ ),
+ _allowlist_function_transition = attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+)
+
stripped_binary = rule(
implementation = _stripped_binary_impl,
- attrs = dict(
- common_attrs,
- src = attr.label(mandatory = True, allow_single_file = True, providers = [CcInfo]),
- ),
+ cfg = drop_lto_transition,
+ attrs = _rule_attrs,
+ executable = True,
+ toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
+)
+
+stripped_test = rule(
+ implementation = _stripped_binary_impl,
+ cfg = drop_lto_transition,
+ attrs = _rule_attrs,
+ test = True,
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)
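
For orientation, a minimal usage sketch of the stripped_binary rule defined above (the target names and the cc_binary producing the src/unstripped outputs are hypothetical; the real call sites live in the cc_binary macro):

    load("//build/bazel/rules/cc:stripped_cc_common.bzl", "stripped_binary")

    # With the default strip attrs (all/none/keep_symbols unset and an empty
    # keep_symbols_list), strip.sh is invoked with --keep-mini-debug-info;
    # keep_symbols = True would instead pass --keep-symbols plus
    # --add-gnu-debuglink.
    stripped_binary(
        name = "foo",
        src = ":foo_unstripped",         # hypothetical unstripped cc_binary
        unstripped = ":foo_unstripped",  # surfaced via CcUnstrippedInfo
    )
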
diff --git a/rules/cc/testing/BUILD.bazel b/rules/cc/testing/BUILD.bazel
new file mode 100644
index 00000000..8a3659e1
--- /dev/null
+++ b/rules/cc/testing/BUILD.bazel
@@ -0,0 +1,33 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+package(default_visibility = ["//build/bazel/rules/cc:__subpackages__"])
+
+# for use by tests which need non-generated files
+# i.e. files for which File.is_source == True
+filegroup(
+ name = "test_srcs",
+ srcs = [
+ "test.cpp",
+ ],
+)
+
+# for use by tests that need a non-blank gunit test
+filegroup(
+ name = "gunit_test_srcs",
+ srcs = [
+ "gunit_test.cpp",
+ ],
+)
diff --git a/rules/cc/testing/gunit_test.cpp b/rules/cc/testing/gunit_test.cpp
new file mode 100644
index 00000000..4c85c6ff
--- /dev/null
+++ b/rules/cc/testing/gunit_test.cpp
@@ -0,0 +1,5 @@
+#include "gtest/gtest.h"
+
+TEST(GUnitTestExample, success) {
+ EXPECT_EQ(0, 1);
+}
diff --git a/rules/cc/testing/test.cpp b/rules/cc/testing/test.cpp
new file mode 100644
index 00000000..eeb769fb
--- /dev/null
+++ b/rules/cc/testing/test.cpp
@@ -0,0 +1,5 @@
+#include <stdio.h>
+int main() {
+ return 0;
+}
+
diff --git a/rules/cc/testing/transitions.bzl b/rules/cc/testing/transitions.bzl
new file mode 100644
index 00000000..0dc62bb4
--- /dev/null
+++ b/rules/cc/testing/transitions.bzl
@@ -0,0 +1,58 @@
+load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@bazel_skylib//lib:sets.bzl", "sets")
+
+ActionArgsInfo = provider(
+ fields = {
+ "argv_map": "A dict with compile action arguments keyed by the target label",
+ },
+)
+
+def _compile_action_argv_aspect_impl(target, ctx):
+ argv_map = {}
+ if ctx.rule.kind == "cc_library":
+ cpp_compile_commands_args = []
+ for action in target.actions:
+ if action.mnemonic == "CppCompile":
+ cpp_compile_commands_args.extend(action.argv)
+
+ if len(cpp_compile_commands_args):
+ argv_map = dicts.add(
+ argv_map,
+ {
+ target.label.name: cpp_compile_commands_args,
+ },
+ )
+ elif ctx.rule.kind in ctx.attr._attr_aspect_dict.keys():
+ attrs = ctx.attr._attr_aspect_dict.get(ctx.rule.kind, [])
+ for attr_name in attrs:
+ for value in getattr(ctx.rule.attr, attr_name):
+ argv_map = dicts.add(
+ argv_map,
+ value[ActionArgsInfo].argv_map,
+ )
+ return ActionArgsInfo(
+ argv_map = argv_map,
+ )
+
+def _get_attr_aspects_list(attr_aspects_dict):
+ return sets.to_list(
+ sets.make(
+ [attr for rule in attr_aspects_dict.values() for attr in rule],
+ ),
+ )
+
+# The aspects generated by this function are used to examine compile actions
+# from cc_library targets generated by our macros for the purpose of assessing
+# the results of transitions. Checking the targets directly using their names
+# gives info from before the transition is applied.
+# attr_aspects should be a dict where the keys are the names of rules and the
+# values are lists of attrs that should be traversed by the aspect looking for
+# cc_library targets.
+def compile_action_argv_aspect_generator(attr_aspects):
+ return aspect(
+ implementation = _compile_action_argv_aspect_impl,
+ attr_aspects = _get_attr_aspects_list(attr_aspects),
+ attrs = {
+ "_attr_aspect_dict": attr.string_list_dict(default = attr_aspects),
+ },
+ )
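
As a usage sketch (the rule names and traversed attributes below are illustrative), a test file could generate an aspect that follows cc_binary deps and cc_shared_library dynamic deps, collecting post-transition CppCompile argv from every cc_library reached:

    load(
        "//build/bazel/rules/cc/testing:transitions.bzl",
        "ActionArgsInfo",
        "compile_action_argv_aspect_generator",
    )

    # Keys are rule kinds to traverse, values are the attrs to walk; any
    # cc_library found along the way contributes its compile argv to
    # ActionArgsInfo.argv_map keyed by target name.
    _compile_argv_aspect = compile_action_argv_aspect_generator({
        "cc_binary": ["deps"],
        "cc_shared_library": ["dynamic_deps"],
    })
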
diff --git a/rules/cc/versioned_cc_common.bzl b/rules/cc/versioned_cc_common.bzl
index f91d151d..55f3d8d2 100644
--- a/rules/cc/versioned_cc_common.bzl
+++ b/rules/cc/versioned_cc_common.bzl
@@ -1,24 +1,19 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
"""A macro to handle build number stamping."""
-load(":stripped_cc_common.bzl", "StrippedCcBinaryInfo")
-load("@rules_cc//examples:experimental_cc_shared_library.bzl", "CcSharedLibraryInfo")
-
def stamp_build_number(ctx, prefix = "", extension = ""):
if len(ctx.files.src) != 1:
fail("Expected only one input file for build number stamping")
@@ -100,6 +95,7 @@ def _versioned_shared_library_impl(ctx):
return [
DefaultInfo(files = depset([out_file])),
ctx.attr.src[CcSharedLibraryInfo],
+ ctx.attr.src[OutputGroupInfo],
]
versioned_shared_library = rule(
diff --git a/rules/cc/yasm.bzl b/rules/cc/yasm.bzl
new file mode 100644
index 00000000..5820784c
--- /dev/null
+++ b/rules/cc/yasm.bzl
@@ -0,0 +1,102 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//build/bazel/platforms:platform_utils.bzl", "platforms")
+
+"""Build rule for converting `.asm` files to `.o` files with yasm."""
+
+def globalFlags(ctx):
+ arch = platforms.get_target_arch(ctx.attr._platform_utils)
+ linux = platforms.is_target_linux_or_android(ctx.attr._platform_utils)
+ darwin = platforms.is_target_darwin(ctx.attr._platform_utils)
+
+ if linux and arch == "x86_64":
+ return ["-f", "elf64", "-m", "amd64"]
+ if linux and arch == "x86":
+ return ["-f", "elf32", "-m", "x86"]
+ if linux and arch == "arm64":
+ return ["-f", "elf64", "-m", "aarch64"]
+ if linux and arch == "arm":
+ return ["-f", "elf32", "-m", "arm"]
+ if darwin:
+ return ["-f", "macho", "-m", "amd64"]
+
+ fail("Unable to detect target platform for compiling .asm files")
+
+def _yasm_impl(ctx):
+ common_args = (globalFlags(ctx) + ctx.attr.flags +
+ ["-I" + paths.join(ctx.label.package, d) for d in ctx.attr.include_dirs])
+
+ outputs = [ctx.actions.declare_file(paths.replace_extension(src.path, ".o")) for src in ctx.files.srcs]
+ for src, out in zip(ctx.files.srcs, outputs):
+ ctx.actions.run(
+ inputs = ctx.files.include_srcs, # include_srcs will contain src
+ outputs = [out],
+ executable = ctx.executable._yasm,
+ arguments = common_args + ["-o", out.path, src.path],
+ mnemonic = "yasm",
+ )
+
+ return [DefaultInfo(files = depset(outputs))]
+
+_yasm = rule(
+ implementation = _yasm_impl,
+ doc = "Generate object files from a .asm file using yasm.",
+ attrs = {
+ "srcs": attr.label_list(
+ mandatory = True,
+ allow_files = [".asm"],
+ doc = "The asm source files for this rule",
+ ),
+ "include_srcs": attr.label_list(
+ allow_files = [".inc", ".asm"],
+ doc = "All files that could possibly be included from source files. " +
+                  "This is necessary because Starlark doesn't allow adding dependencies " +
+ "via .d files.",
+ ),
+ "include_dirs": attr.string_list(
+ doc = "Include directories",
+ ),
+ "flags": attr.string_list(
+ doc = "A list of options to be added to the yasm command line.",
+ ),
+ "_yasm": attr.label(
+ default = "//prebuilts/misc:yasm",
+ executable = True,
+ cfg = "exec",
+ ),
+ "_platform_utils": attr.label(
+ default = Label("//build/bazel/platforms:platform_utils"),
+ ),
+ },
+)
+
+def yasm(
+ name,
+ srcs,
+ include_dirs = [],
+ flags = [],
+ target_compatible_with = [],
+ tags = []):
+ _yasm(
+ name = name,
+ srcs = srcs,
+ flags = flags,
+ include_dirs = include_dirs,
+ include_srcs = native.glob(["**/*.inc", "**/*.asm"]),
+ target_compatible_with = target_compatible_with,
+ tags = tags,
+ )
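
A minimal BUILD-file sketch of the yasm macro (the package layout and file names are hypothetical):

    load("//build/bazel/rules/cc:yasm.bzl", "yasm")

    # Produces one .o per .asm source; the object format and machine flags
    # (elf32/elf64/macho) are chosen from the target platform at analysis time,
    # and all *.inc/*.asm files in the package are wired in as include_srcs.
    yasm(
        name = "codec_asm",
        srcs = ["x86/idct.asm"],
        include_dirs = ["x86"],
        flags = ["-DPIC"],
    )
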
diff --git a/rules/cc/yasm_test.bzl b/rules/cc/yasm_test.bzl
new file mode 100644
index 00000000..58f48075
--- /dev/null
+++ b/rules/cc/yasm_test.bzl
@@ -0,0 +1,126 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":yasm.bzl", "yasm")
+
+def _basic_yasm_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ for action in actions:
+ asserts.equals(
+ env,
+ action.mnemonic,
+ "yasm",
+ )
+ src = action.argv[-1]
+ asserts.equals(
+ env,
+ action.argv[-3],
+ "-o",
+ )
+ asserts.true(
+ env,
+ action.argv[-2].endswith(paths.replace_extension(src, ".o")),
+            "the -o argument is expected to end with the src file path with a .o extension",
+ )
+ asserts.true(
+ env,
+ " ".join(ctx.attr.expected_flags) in " ".join(action.argv),
+ "Expected flags (%s) were not in actual flags (%s)" % (ctx.attr.expected_flags, action.argv),
+ )
+
+ return analysistest.end(env)
+
+basic_yasm_test = analysistest.make(
+ _basic_yasm_test_impl,
+ attrs = {
+ "expected_flags": attr.string_list(
+ doc = "Flags expected to be on the command line.",
+ ),
+ },
+)
+
+def test_single_file():
+ name = "test_single_file"
+ yasm(
+ name = name + "_target",
+ srcs = [name + "_file.asm"],
+ tags = ["manual"],
+ )
+ basic_yasm_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+ return name
+
+def test_multiple_files():
+ name = "test_multiple_files"
+ yasm(
+ name = name + "_target",
+ srcs = [
+ name + "_file1.asm",
+ name + "_file2.asm",
+ ],
+ tags = ["manual"],
+ )
+ basic_yasm_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+ return name
+
+def test_custom_flags():
+ name = "test_custom_flags"
+ yasm(
+ name = name + "_target",
+ srcs = [name + "_file.asm"],
+ flags = ["-DNEON_INTRINSICS", "-mfpu=neon"],
+ tags = ["manual"],
+ )
+ basic_yasm_test(
+ name = name,
+ target_under_test = name + "_target",
+ expected_flags = ["-DNEON_INTRINSICS", "-mfpu=neon"],
+ )
+ return name
+
+def test_include_dirs():
+ name = "test_include_dirs"
+ yasm(
+ name = name + "_target",
+ srcs = [name + "_file.asm"],
+ include_dirs = ["foo/bar"],
+ tags = ["manual"],
+ )
+ basic_yasm_test(
+ name = name,
+ target_under_test = name + "_target",
+ expected_flags = ["-Ibuild/bazel/rules/cc/foo/bar"],
+ )
+ return name
+
+def yasm_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_single_file(),
+ test_multiple_files(),
+ test_custom_flags(),
+ test_include_dirs(),
+ ],
+ )
diff --git a/rules/common.bzl b/rules/common.bzl
new file mode 100644
index 00000000..ecd409dc
--- /dev/null
+++ b/rules/common.bzl
@@ -0,0 +1,53 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def get_dep_targets(attrs, *, predicate = lambda _: True):
+ """get_dep_targets returns all targets listed in the current rule's attributes
+
+ Args:
+ attrs (dict[str, attr]): dictionary containing the rule's attributes.
+ This may come from `ctx.attr` if called from a rule, or
+ `ctx.rule.attr` if called from an aspect.
+ predicate (function(Target) -> bool): a function used to filter out
+ unwanted targets; if predicate(target) == False, then do not include
+ target
+ Returns:
+ targets (dict[str, list[Target]]): map of attr to list of Targets for which
+ predicate returns True
+ """
+ targets = {}
+ for a in dir(attrs):
+ if a.startswith("_"):
+ # Ignore private attributes
+ continue
+ targets[a] = []
+ value = getattr(attrs, a)
+ vlist = value if type(value) == type([]) else [value]
+ for item in vlist:
+ if type(item) == "Target" and predicate(item):
+ targets[a].append(item)
+ return targets
+
+_BP2BUILD_LABEL_SUFFIXES = [
+ # cc rules
+ "_bp2build_cc_library_static",
+ "_cc_proto_lite",
+ "_aidl_code_gen",
+ "_cc_aidl_library",
+]
+
+def strip_bp2build_label_suffix(name):
+ for suffix in _BP2BUILD_LABEL_SUFFIXES:
+ name = name.removesuffix(suffix)
+ return name
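
A short sketch of how an aspect might use these helpers (the aspect, its provider, and the predicate are hypothetical):

    load("//build/bazel/rules:common.bzl", "get_dep_targets", "strip_bp2build_label_suffix")

    _CcDepNamesInfo = provider(fields = ["names"])

    def _collect_cc_deps_aspect_impl(target, ctx):
        # Keep only dependency targets that carry CcInfo, keyed by the
        # attribute ("deps", "whole_archive_deps", ...) they were listed under.
        cc_deps = get_dep_targets(
            ctx.rule.attr,
            predicate = lambda t: CcInfo in t,
        )

        # e.g. "libfoo_bp2build_cc_library_static" -> "libfoo"
        names = [
            strip_bp2build_label_suffix(t.label.name)
            for targets in cc_deps.values()
            for t in targets
        ]
        return [_CcDepNamesInfo(names = names)]

    _collect_cc_deps_aspect = aspect(
        implementation = _collect_cc_deps_aspect_impl,
        attr_aspects = ["deps"],
    )
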
diff --git a/rules/common/BUILD.bazel b/rules/common/BUILD.bazel
new file mode 100644
index 00000000..e4988862
--- /dev/null
+++ b/rules/common/BUILD.bazel
@@ -0,0 +1,6 @@
+load(":api_test.bzl", "api_levels_test_suite")
+load(":sdk_version_test.bzl", "sdk_version_test_suite")
+
+api_levels_test_suite(name = "api_test")
+
+sdk_version_test_suite(name = "sdk_version_test_suite")
diff --git a/rules/common/api.bzl b/rules/common/api.bzl
new file mode 100644
index 00000000..4d8e6db2
--- /dev/null
+++ b/rules/common/api.bzl
@@ -0,0 +1,129 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# An API level can be a finalized (numbered) API, a preview (codenamed) API, or
+# the future API level (10000). It can be parsed from a string with
+# parse_api_level_from_version.
+
+load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@soong_injection//api_levels:api_levels.bzl", "api_levels_released_versions")
+load("@soong_injection//api_levels:platform_versions.bzl", "platform_versions")
+
+_NONE_API_LEVEL_INT = -1
+_PREVIEW_API_LEVEL_BASE = 9000 # Base constant for preview API levels.
+_FUTURE_API_LEVEL_INT = 10000 # API Level associated with an arbitrary future release
+
+# TODO(b/271280342): access these variables in a transition friendly way.
+_PLATFORM_SDK_FINAL = platform_versions.platform_sdk_final
+_PLATFORM_SDK_VERSION = platform_versions.platform_sdk_version
+_PLATFORM_SDK_CODENAME = platform_versions.platform_sdk_codename
+_PLATFORM_VERSION_ACTIVE_CODENAMES = platform_versions.platform_version_active_codenames
+
+# Dict of unfinalized codenames to a placeholder preview API int.
+_preview_codenames_to_ints = {
+ codename: _PREVIEW_API_LEVEL_BASE + i
+ for i, codename in enumerate(_PLATFORM_VERSION_ACTIVE_CODENAMES)
+}
+
+# Returns true if a string or int version is in preview (not finalized).
+def _is_preview(version):
+ if type(version) == "string" and version.isdigit():
+ # normalize int types internally
+ version = int(version)
+
+ # Future / current is considered as a preview.
+ if version == "current" or version == _FUTURE_API_LEVEL_INT:
+ return True
+
+ # api can be either the codename or the int level (9000+)
+ return version in _preview_codenames_to_ints or version in _preview_codenames_to_ints.values()
+
+# Return 10000 for unfinalized versions, otherwise return unchanged.
+def _final_or_future(version):
+ if _is_preview(version):
+ return _FUTURE_API_LEVEL_INT
+ else:
+ return version
+
+_final_codename = {
+ "current": _final_or_future(_PLATFORM_SDK_VERSION),
+} if _PLATFORM_SDK_FINAL and _PLATFORM_SDK_VERSION else {}
+
+_api_levels_with_previews = dicts.add(api_levels_released_versions, _preview_codenames_to_ints)
+_api_levels_with_final_codenames = dicts.add(api_levels_released_versions, _final_codename) # @unused
+
+# parse_api_level_from_version is a Starlark implementation of ApiLevelFromUser
+# at https://cs.android.com/android/platform/superproject/+/master:build/soong/android/api_levels.go;l=221-250;drc=5095a6c4b484f34d5c4f55a855d6174e00fb7f5e
+def _parse_api_level_from_version(version):
+ """converts the given string `version` to an api level
+
+ Args:
+ version: must be non-empty. Inputs that are not "current", known
+ previews, finalized codenames, or convertible to an integer will return
+ an error.
+
+ Returns: The api level as an int.
+ """
+ if version == "":
+ fail("API level string must be non-empty")
+
+ if version == "current":
+ return _FUTURE_API_LEVEL_INT
+
+ if _is_preview(version):
+ return _preview_codenames_to_ints.get(version) or int(version)
+
+ # Not preview nor current.
+ #
+ # If the level is the codename of an API level that has been finalized, this
+ # function returns the API level number associated with that API level. If
+ # the input is *not* a finalized codename, the input is returned unmodified.
+ canonical_level = api_levels_released_versions.get(version)
+ if not canonical_level:
+ if not version.isdigit():
+ fail("version %s could not be parsed as integer and is not a recognized codename" % version)
+ return int(version)
+ return canonical_level
+
+# Starlark implementation of DefaultAppTargetSDK from build/soong/android/config.go
+# https://cs.android.com/android/platform/superproject/+/master:build/soong/android/config.go;l=875-889;drc=b0dc477ef740ec959548fe5517bd92ac4ea0325c
+# check what you want returned for codename == "" case before using
+def _default_app_target_sdk():
+ """default_app_target_sdk returns the API level that platform apps are targeting.
+ This converts a codename to the exact ApiLevel it represents.
+ """
+ if _PLATFORM_SDK_FINAL:
+ return _PLATFORM_SDK_VERSION
+
+ codename = _PLATFORM_SDK_CODENAME
+ if not codename:
+        # Soong returns NoneApiLevel here (value: "(no version)", number: -1, isPreview: true);
+ #
+ # fail fast instead of returning an arbitrary value.
+ fail("Platform_sdk_codename must be set.")
+
+ if codename == "REL":
+ fail("Platform_sdk_codename should not be REL when Platform_sdk_final is false")
+
+ return _parse_api_level_from_version(codename)
+
+api = struct(
+ NONE_API_LEVEL = _NONE_API_LEVEL_INT,
+ FUTURE_API_LEVEL = _FUTURE_API_LEVEL_INT,
+ is_preview = _is_preview,
+ final_or_future = _final_or_future,
+ default_app_target_sdk = _default_app_target_sdk,
+ parse_api_level_from_version = _parse_api_level_from_version,
+ api_levels = _api_levels_with_previews,
+)
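
A few illustrative conversions, assuming the released and preview data injected by @soong_injection matches the values exercised in api_test.bzl below:

    load("//build/bazel/rules/common:api.bzl", "api")

    _FINAL = api.parse_api_level_from_version("33")        # 33
    _LETTER = api.parse_api_level_from_version("S-V2")     # 32 (finalized codename)
    _FUTURE = api.parse_api_level_from_version("current")  # 10000, i.e. api.FUTURE_API_LEVEL
    _PREVIEW = api.is_preview("10000")                      # True
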
diff --git a/rules/common/api_test.bzl b/rules/common/api_test.bzl
new file mode 100644
index 00000000..0e601af6
--- /dev/null
+++ b/rules/common/api_test.bzl
@@ -0,0 +1,77 @@
+load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
+load("//build/bazel/rules/common:api.bzl", "api")
+
+def _api_levels_test_impl(ctx):
+ env = unittest.begin(ctx)
+
+ # schema: version string to parse: (expected api int, is preview api)
+ _LEVELS_UNDER_TEST = {
+ # numbers
+ "9": (9, False), # earliest released number
+ "21": (21, False),
+ "30": (30, False),
+ "33": (33, False),
+ # unchecked non final api level (not finalized, not preview, not current)
+ "1234": (1234, False),
+ "8999": (8999, False),
+ "9999": (9999, False),
+ "10001": (10001, False),
+ # letters
+ "G": (9, False), # earliest released letter
+ "J-MR1": (17, False),
+ "R": (30, False),
+ "S": (31, False),
+ "S-V2": (32, False),
+ # codenames
+ "Tiramisu": (33, False),
+ "UpsideDownCake": (9000, True), # preview
+ "current": (10000, True), # future (considered as preview)
+ # preview numbers
+ "9000": (9000, True), # preview
+ "10000": (10000, True), # future (considered as preview)
+ }
+
+ for level, expected in _LEVELS_UNDER_TEST.items():
+ asserts.equals(env, expected[0], api.parse_api_level_from_version(level), "unexpected api level parsed for %s" % level)
+ asserts.equals(env, expected[1], api.is_preview(level), "unexpected is_preview value for %s" % level)
+
+ return unittest.end(env)
+
+api_levels_test = unittest.make(_api_levels_test_impl)
+
+def _final_or_future_test_impl(ctx):
+ env = unittest.begin(ctx)
+
+ # schema: version string to parse: expected api int
+ _LEVELS_UNDER_TEST = {
+ # finalized
+ "30": 30,
+ "33": 33,
+ "S": 31,
+ "S-V2": 32,
+ "Tiramisu": 33,
+ # not finalized
+ "UpsideDownCake": 10000,
+ "current": 10000,
+ "9000": 10000,
+ "10000": 10000,
+ }
+
+ for level, expected in _LEVELS_UNDER_TEST.items():
+ asserts.equals(
+ env,
+ expected,
+ api.final_or_future(api.parse_api_level_from_version(level)),
+ "unexpected final or future api for %s" % level,
+ )
+
+ return unittest.end(env)
+
+final_or_future_test = unittest.make(_final_or_future_test_impl)
+
+def api_levels_test_suite(name):
+ unittest.suite(
+ name,
+ api_levels_test,
+ final_or_future_test,
+ )
diff --git a/rules/common/sdk_version.bzl b/rules/common/sdk_version.bzl
new file mode 100644
index 00000000..33f2f61c
--- /dev/null
+++ b/rules/common/sdk_version.bzl
@@ -0,0 +1,84 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/common:api.bzl", "api")
+
+# Only scopes that are available in prebuilts (and "none") are listed
+# here for now, but the list should eventually match Soong's SdkKind
+# enum.
+_KIND_PUBLIC = "public"
+_KIND_SYSTEM = "system"
+_KIND_TEST = "test"
+_KIND_SYSTEM_SERVER = "system_server"
+_KIND_MODULE = "module"
+_KIND_CORE = "core"
+_KIND_NONE = "none"
+_ALL_KINDS = [
+ _KIND_PUBLIC,
+ _KIND_SYSTEM,
+ _KIND_TEST,
+ _KIND_SYSTEM_SERVER,
+ _KIND_MODULE,
+ _KIND_CORE,
+ _KIND_NONE,
+]
+
+# Starlark implementation of SdkSpecFrom at https://cs.android.com/android/platform/build/soong/+/master:android/sdk_version.go;l=248-299;drc=69f4218c4feaeca953237cd9e76a9a8cc423d3e3.
+def _sdk_spec_from(sdk_version):
+ """Parses an sdk_version string into kind and api_level.
+
+ Args:
+ sdk_version: a string to specify which SDK version to depend on.
+ - The empty string maps to the full set of private APIs and is currently unsupported.
+ - "core_platform" maps to the module scope of the core system modules.
+ - "none" maps to no SDK (used for bootstrapping the core).
+ - Otherwise, the format is "{kind}_{api_level}", where kind must be one of the strings
+          in ALL_KINDS, and api_level is either an integer, an Android codename, or "current".
+ The default kind is "public", and can be omitted by simply providing "{api_level}".
+
+ Returns:
+        A struct with a kind attribute set to one of the strings in ALL_KINDS, and an api_level
+ attribute as returned by api.bzl's parse_api_level_from_version.
+ """
+ if not sdk_version:
+ fail("Only prebuilt SDK versions are available, sdk_version must be specified and non-empty.")
+ if sdk_version == "core_platform":
+ fail("Only prebuilt SDK versions are available, sdk_version core_platform is not yet supported.")
+ if sdk_version == "none":
+ return struct(kind = _KIND_NONE, api_level = api.NONE_API_LEVEL)
+ if type(sdk_version) != type(""):
+ fail("sdk_version must be a string")
+ sep_index = sdk_version.rfind("_")
+ api_level_string = sdk_version if sep_index < 0 else sdk_version[sep_index + 1:]
+ api_level = api.parse_api_level_from_version(api_level_string)
+ kind = _KIND_PUBLIC if sep_index == -1 else sdk_version[:sep_index]
+ if kind not in _ALL_KINDS:
+ fail("kind %s parsed from sdk_version %s must be one of %s" % (
+ kind,
+ sdk_version,
+ ",".join(_ALL_KINDS),
+ ))
+ return struct(kind = kind, api_level = api_level)
+
+sdk_version = struct(
+ KIND_PUBLIC = _KIND_PUBLIC,
+ KIND_SYSTEM = _KIND_SYSTEM,
+ KIND_TEST = _KIND_TEST,
+ KIND_SYSTEM_SERVER = _KIND_SYSTEM_SERVER,
+ KIND_MODULE = _KIND_MODULE,
+ KIND_CORE = _KIND_CORE,
+ KIND_NONE = _KIND_NONE,
+ ALL_KINDS = _ALL_KINDS,
+ sdk_spec_from = _sdk_spec_from,
+)
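
Illustrative parses, matching the cases exercised in sdk_version_test.bzl below:

    load("//build/bazel/rules/common:sdk_version.bzl", "sdk_version")

    _SYSTEM_32 = sdk_version.sdk_spec_from("system_32")  # kind = "system", api_level = 32
    _PUBLIC_T = sdk_version.sdk_spec_from("Tiramisu")    # kind = "public", api_level = 33
    _CURRENT = sdk_version.sdk_spec_from("current")      # kind = "public", api_level = 10000
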
diff --git a/rules/common/sdk_version_test.bzl b/rules/common/sdk_version_test.bzl
new file mode 100644
index 00000000..b22c63d4
--- /dev/null
+++ b/rules/common/sdk_version_test.bzl
@@ -0,0 +1,150 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/common:sdk_version.bzl", "sdk_version")
+load("//build/bazel/rules/common:api.bzl", "api")
+
+# Warning: this is a *lot* of boilerplate to test just one function.
+# Scroll down to sdk_version_test_suite for the actual test cases.
+
+SdkSpec = provider()
+
+def _sdk_spec_from_tester_impl(ctx):
+ sdk_spec = sdk_version.sdk_spec_from(ctx.attr.sdk_version)
+ return [SdkSpec(kind = sdk_spec.kind, api_level = sdk_spec.api_level)]
+
+sdk_spec_from_tester = rule(
+ implementation = _sdk_spec_from_tester_impl,
+ attrs = {
+ "sdk_version": attr.string(),
+ },
+)
+
+def _sdk_spec_from_failure_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ asserts.expect_failure(env, ctx.attr.expected_failure_message)
+ return analysistest.end(env)
+
+sdk_spec_from_failure_test = analysistest.make(
+ impl = _sdk_spec_from_failure_test_impl,
+ expect_failure = True,
+ attrs = {"expected_failure_message": attr.string()},
+)
+
+def test_sdk_spec_from_failure(name, sdk_version, expected_failure_message = ""):
+ sdk_spec_from_tester(
+ name = name + "_target",
+ sdk_version = sdk_version,
+ tags = ["manual"],
+ )
+ sdk_spec_from_failure_test(
+ name = name,
+ target_under_test = name + "_target",
+ expected_failure_message = expected_failure_message,
+ )
+ return name
+
+def _sdk_spec_from_output_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actual_sdk_spec = analysistest.target_under_test(env)[SdkSpec]
+ actual_kind = actual_sdk_spec.kind
+ asserts.equals(
+ env,
+ ctx.attr.expected_kind,
+ actual_kind,
+ "Expected kind %s, but got %s for sdk version %s" % (
+ ctx.attr.expected_kind,
+ actual_kind,
+ ctx.attr.actual_sdk_version,
+ ),
+ )
+
+ actual_api_level = actual_sdk_spec.api_level
+ asserts.equals(
+ env,
+ ctx.attr.expected_api_level,
+ actual_api_level,
+ "Expected api_level %s, but got %s for sdk version %s" % (
+ ctx.attr.expected_api_level,
+ actual_api_level,
+ ctx.attr.actual_sdk_version,
+ ),
+ )
+ return analysistest.end(env)
+
+sdk_spec_from_output_test = analysistest.make(
+ impl = _sdk_spec_from_output_test_impl,
+ attrs = {
+ "actual_sdk_version": attr.string(),
+ "expected_kind": attr.string(),
+ "expected_api_level": attr.int(),
+ },
+)
+
+def test_sdk_spec_from_success(name, sdk_version, expected_kind, expected_api_level):
+ sdk_spec_from_tester(
+ name = name + "_target",
+ sdk_version = sdk_version,
+ tags = ["manual"],
+ )
+ sdk_spec_from_output_test(
+ name = name,
+ target_under_test = name + "_target",
+ actual_sdk_version = sdk_version,
+ expected_kind = expected_kind,
+ expected_api_level = expected_api_level,
+ )
+ return name
+
+def sdk_version_test_suite(name):
+    # sdk versions expected to fail to parse.
+ failing_sdk_versions = [
+ "malformed_malformed",
+ "malformed",
+ "",
+ "core_platform",
+ ]
+ failure_tests = [
+ test_sdk_spec_from_failure(
+ name = sdk_version + "_failure_test",
+ sdk_version = sdk_version,
+ )
+ for sdk_version in failing_sdk_versions
+ ]
+
+ # Map of sdk_version to expected kind and api_level
+ sdk_version_to_kind_and_api_level = {
+ "current": ("public", api.FUTURE_API_LEVEL),
+ "core_current": ("core", api.FUTURE_API_LEVEL),
+ "Tiramisu": ("public", 33),
+ "33": ("public", 33),
+ "public_33": ("public", 33),
+ "none": ("none", api.NONE_API_LEVEL),
+ "system_Tiramisu": ("system", 33),
+ "system_32": ("system", 32),
+ }
+ success_tests = [
+ test_sdk_spec_from_success(
+ name = sdk_version + "_success_test",
+ sdk_version = sdk_version,
+ expected_kind = sdk_version_to_kind_and_api_level[sdk_version][0],
+ expected_api_level = sdk_version_to_kind_and_api_level[sdk_version][1],
+ )
+ for sdk_version in sdk_version_to_kind_and_api_level.keys()
+ ]
+ native.test_suite(
+ name = name,
+ tests = failure_tests + success_tests,
+ )
diff --git a/rules/coverage/remote_coverage_tools/BUILD b/rules/coverage/remote_coverage_tools/BUILD
deleted file mode 100644
index 4f3d211a..00000000
--- a/rules/coverage/remote_coverage_tools/BUILD
+++ /dev/null
@@ -1,9 +0,0 @@
-# This is a stub BUILD to override remote_coverage_tools.
-# See b/201242197 for more information.
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
- name = "coverage_report_generator",
- srcs = ["coverage_report_generator.sh"],
-)
diff --git a/rules/coverage/remote_coverage_tools/WORKSPACE b/rules/coverage/remote_coverage_tools/WORKSPACE
deleted file mode 100644
index bd9e9137..00000000
--- a/rules/coverage/remote_coverage_tools/WORKSPACE
+++ /dev/null
@@ -1,2 +0,0 @@
-# This is a stub WORKSPACE to override remote_coverage_tools.
-# See b/201242197 for more information.
diff --git a/rules/env.bzl b/rules/env.bzl
new file mode 100644
index 00000000..08255f35
--- /dev/null
+++ b/rules/env.bzl
@@ -0,0 +1,70 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@soong_injection//allowlists:env.bzl", _CAPTURED_ENV_VARS = "env")
+
+_ALLOWED_SPECIAL_CHARACTERS = [
+ "/",
+ "_",
+ "-",
+ "'",
+ ".",
+ " ",
+]
+
+# Since we write the env var value literally into a .bzl file, ensure that the string
+# does not contain special characters like '"', '\n' and '\'. Use an allowlist approach
+# and check that the remaining string is alphanumeric.
+def _validate_env_value(env_var, env_value):
+ if env_value == "":
+ return
+ sanitized_env_value = env_value
+ for allowed_char in _ALLOWED_SPECIAL_CHARACTERS:
+ sanitized_env_value = sanitized_env_value.replace(allowed_char, "")
+ if not sanitized_env_value.isalnum():
+ fail("The value of " +
+ env_var +
+ " can only consist of alphanumeric and " +
+ str(_ALLOWED_SPECIAL_CHARACTERS) +
+ " characters: " +
+ str(env_value))
+
+def _env_impl(rctx):
+ captured_env = {}
+ for var in _CAPTURED_ENV_VARS:
+ value = rctx.os.environ.get(var)
+ if value != None:
+ _validate_env_value(var, value)
+ captured_env[var] = value
+
+ rctx.file("BUILD.bazel", """
+exports_files(["env.bzl"])
+""")
+
+ rctx.file("env.bzl", """
+env = {
+ %s
+}
+""" % "\n ".join([
+ '"%s": "%s",' % (var, value)
+ for var, value in captured_env.items()
+ ]))
+
+env_repository = repository_rule(
+ implementation = _env_impl,
+ configure = True,
+ local = True,
+ environ = _CAPTURED_ENV_VARS,
+ doc = "A repository rule to capture environment variables.",
+)
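
A WORKSPACE-level sketch of how this repository rule might be instantiated (the repository name "env" is illustrative):

    load("//build/bazel/rules:env.bzl", "env_repository")

    # Snapshots the allowlisted environment variables into @env//:env.bzl,
    # where analysis code can load them as the `env` dict.
    env_repository(name = "env")
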
diff --git a/rules/filegroup.bzl b/rules/filegroup.bzl
index 63ecd232..f1ab826e 100644
--- a/rules/filegroup.bzl
+++ b/rules/filegroup.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load("//build/bazel/rules/cc:cc_constants.bzl", "constants")
@@ -28,11 +26,11 @@ def group_files_by_ext(files):
# configurable selects.
for f in files:
if extension(f) in constants.c_src_exts:
- c += [f]
+ c.append(f)
elif extension(f) in constants.cpp_src_exts:
- cpp += [f]
+ cpp.append(f)
elif extension(f) in constants.as_src_exts:
- asm += [f]
+ asm.append(f)
else:
# not C based
continue
@@ -55,16 +53,19 @@ def filegroup(name, srcs = [], **kwargs):
native.genrule(
name = name + "_null_cc",
outs = [name + "_null.cc"],
+ tags = ["manual"],
cmd = "touch $@",
)
native.genrule(
name = name + "_null_c",
outs = [name + "_null.c"],
+ tags = ["manual"],
cmd = "touch $@",
)
native.genrule(
name = name + "_null_s",
outs = [name + "_null.S"],
+ tags = ["manual"],
cmd = "touch $@",
)
@@ -72,12 +73,15 @@ def filegroup(name, srcs = [], **kwargs):
native.filegroup(
name = name + "_cpp_srcs",
srcs = [name + "_null.cc"] + cpp_srcs,
+ tags = ["manual"],
)
native.filegroup(
name = name + "_c_srcs",
srcs = [name + "_null.c"] + c_srcs,
+ tags = ["manual"],
)
native.filegroup(
name = name + "_as_srcs",
srcs = [name + "_null.S"] + as_srcs,
+ tags = ["manual"],
)
diff --git a/rules/gensrcs.bzl b/rules/gensrcs.bzl
new file mode 100644
index 00000000..20017c8b
--- /dev/null
+++ b/rules/gensrcs.bzl
@@ -0,0 +1,100 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+# A rule to generate files based on provided srcs and tools
+def _gensrcs_impl(ctx):
+ # The next two assignments can be created by using ctx.resolve_command
+ # TODO: Switch to using ctx.resolve_command when it is out of experimental
+ command = ctx.expand_location(ctx.attr.cmd)
+ tools = [
+ tool[DefaultInfo].files_to_run
+ for tool in ctx.attr.tools
+ ]
+
+ command = command.replace(
+ "$(RULEDIR)",
+ paths.join(
+ ctx.var["GENDIR"],
+ ctx.label.package,
+ ),
+ )
+
+ in_files = ctx.files.srcs
+ out_files = []
+ for in_file in in_files:
+ # <path-to-in_file>/out_file
+ # where path-to-in_file is relative to the workspace
+ out_file_path = paths.join(
+ paths.dirname(in_file.short_path),
+ paths.replace_extension(
+ in_file.basename,
+ "." + ctx.attr.output_extension,
+ ),
+ )
+
+ # out_file is at output_file_path that is relative to <GENDIR>/<package-dir>
+ # Hence, the fullpath to out_file is
+ # <GENDIR>/<package-dir>/<path-to-in_file>/out_file
+ out_file = ctx.actions.declare_file(out_file_path)
+ shell_command = command \
+ .replace("$(SRC)", in_file.path) \
+ .replace("$(OUT)", out_file.path)
+ ctx.actions.run_shell(
+ tools = tools,
+ outputs = [out_file],
+ inputs = in_files,
+ command = shell_command,
+ progress_message = "Generating %s from %s" % (
+ out_file.path,
+ in_file.path,
+ ),
+ )
+ out_files.append(out_file)
+
+ return [DefaultInfo(
+ files = depset(out_files),
+ )]
+
+gensrcs = rule(
+ implementation = _gensrcs_impl,
+ doc = "This rule generates files, where each of the `srcs` files is " +
+          "passed into the custom shell command.",
+ attrs = {
+ "srcs": attr.label_list(
+ allow_files = True,
+ mandatory = True,
+ doc = "A list of inputs such as source files to process",
+ ),
+ "output_extension": attr.string(
+ doc = "The extension that will be substituted for output files",
+ ),
+ "cmd": attr.string(
+ mandatory = True,
+ doc = "The command to run. Subject to $(location) expansion. " +
+                  "$(SRC) represents each input file provided in `srcs`, " +
+                  "while $(OUT) represents the corresponding output file " +
+                  "generated by the rule. $(RULEDIR) is interpreted the same " +
+ "as it is in genrule.",
+ ),
+ "tools": attr.label_list(
+ allow_files = True,
+ doc = "A list of tool dependencies for this rule. " +
+ "The path of an individual `tools` target //x:y can be " +
+ "obtained using `$(location //x:y)`",
+ cfg = "exec",
+ ),
+ },
+)
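
A minimal BUILD-file sketch of gensrcs (the tool label and file names are hypothetical):

    load("//build/bazel/rules:gensrcs.bzl", "gensrcs")

    # Runs the command once per source, writing protos/foo.pb.h next to
    # protos/foo.proto under the genfiles tree for this package.
    gensrcs(
        name = "example_headers",
        srcs = ["protos/foo.proto"],
        output_extension = "pb.h",
        cmd = "$(location //tools:header_gen) --in $(SRC) --out $(OUT)",
        tools = ["//tools:header_gen"],
    )
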
diff --git a/rules/gensrcs_test.bzl b/rules/gensrcs_test.bzl
new file mode 100644
index 00000000..bdce0a6b
--- /dev/null
+++ b/rules/gensrcs_test.bzl
@@ -0,0 +1,209 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:new_sets.bzl", "sets")
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:gensrcs.bzl", "gensrcs")
+
+SRCS = [
+ "texts/src1.txt",
+ "texts/src2.txt",
+ "src3.txt",
+]
+
+OUTPUT_EXTENSION = "out"
+
+EXPECTED_OUTS = [
+ "texts/src1.out",
+ "texts/src2.out",
+ "src3.out",
+]
+
+# ==== Check the actions created by gensrcs ====
+
+def _test_actions_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+ actions = analysistest.target_actions(env)
+
+ # Expect an action for each pair of input/output file
+ asserts.equals(env, expected = len(SRCS), actual = len(actions))
+
+ asserts.set_equals(
+ env,
+ sets.make([
+ # given an input file build/bazel/rules/texts/src1.txt
+ # the corresponding output file is
+ # <GENDIR>/build/bazel/rules/build/bazel/rules/texts/src1.out
+            # the second "build/bazel/rules" is to accommodate the srcs from
+            # an external package
+ paths.join(
+ ctx.genfiles_dir.path,
+ "build/bazel/rules",
+ "build/bazel/rules",
+ out,
+ )
+ for out in EXPECTED_OUTS
+ ]),
+ sets.make([file.path for file in target.files.to_list()]),
+ )
+
+ return analysistest.end(env)
+
+actions_test = analysistest.make(_test_actions_impl)
+
+def _test_actions():
+ name = "gensrcs_output_paths"
+ test_name = name + "_test"
+
+ # Rule under test
+ gensrcs(
+ name = name,
+ cmd = "cat $(SRC) > $(OUT)",
+ srcs = SRCS,
+ output_extension = OUTPUT_EXTENSION,
+ tags = ["manual"], # make sure it's not built using `:all`
+ )
+
+ actions_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+# ==== Check the output file when out_extension is unset ====
+
+def _test_unset_output_extension_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ asserts.equals(env, expected = 1, actual = len(actions))
+ action = actions[0]
+ asserts.equals(
+ env,
+ expected = "input.",
+ actual = action.outputs.to_list()[0].basename,
+ )
+
+ return analysistest.end(env)
+
+unset_output_extension_test = analysistest.make(_test_unset_output_extension_impl)
+
+def _test_unset_output_extension():
+ name = "unset_output_extension"
+ test_name = name + "_test"
+
+ # Rule under test
+ gensrcs(
+ name = "TSTSS",
+ cmd = "cat $(SRC) > $(OUT)",
+ srcs = ["input.txt"],
+ tags = ["manual"], # make sure it's not built using `:all`
+ )
+
+ unset_output_extension_test(
+ name = test_name,
+ target_under_test = "TSTSS",
+ )
+ return test_name
+
+TOOL_FILE_NAME = "out.sh"
+
+def _test_gensrcs_tool_builds_for_host_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.equals(env, expected = 1, actual = len(actions), msg = "expected actions")
+
+ action = actions[0]
+ inputs = action.inputs.to_list()
+ asserts.equals(env, expected = 2, actual = len(inputs), msg = "expected inputs")
+
+ input_map = {}
+ for i in inputs:
+ input_map[i.basename] = i
+ tool = input_map[TOOL_FILE_NAME]
+ asserts.true(
+ env,
+ # because we set --experimental_platform_in_output_dir, we expect the
+ # platform to be in the output path of a generated file
+ "linux" in tool.path, # host platform
+ "expected 'linux' in tool path, got '%s'" % tool.path,
+ )
+
+ outputs = action.outputs.to_list()
+ asserts.equals(env, expected = 1, actual = len(outputs), msg = "expected outputs %s" % outputs)
+ output = outputs[0]
+ asserts.true(
+ env,
+ # because we set --experimental_platform_in_output_dir, we expect the
+ # platform to be in the output path of a generated file. However, the platform
+ # will be the android product name, like aosp_arm, so we can't check if anything
+ # in particular is in the path. Check that linux is not in the path instead.
+ "linux" not in output.path, # target platform
+ "expected 'linux' to not be in output path, got '%s'" % output.path,
+ )
+
+ return analysistest.end(env)
+
+__gensrcs_tool_builds_for_host_test = analysistest.make(
+ _test_gensrcs_tool_builds_for_host_impl,
+)
+
+def _gensrcs_tool_builds_for_host_test(**kwargs):
+ __gensrcs_tool_builds_for_host_test(
+ target_compatible_with = ["//build/bazel/platforms/os:android"], # ensure target != host so there is a transition
+ **kwargs
+ )
+
+def _test_gensrcs_tool_builds_for_host():
+ native.genrule(
+ name = "gensrcs_test_bin",
+ outs = [TOOL_FILE_NAME],
+ executable = True,
+ cmd = "touch $@",
+ target_compatible_with = select({
+ # only supported OS is that specified as host_platform
+ "//build/bazel/platforms/os:linux": [],
+ "//conditions:default": ["@platforms//:incompatible"],
+ }),
+ tags = ["manual"],
+ )
+
+ gensrcs(
+ name = "gensrcs_test_tool_builds_for_host",
+ tools = [":gensrcs_test_bin"],
+ srcs = ["input.txt"],
+ output_extension = OUTPUT_EXTENSION,
+ cmd = "",
+ tags = ["manual"],
+ )
+
+ test_name = "gensrcs_tools_build_for_host_test"
+ _gensrcs_tool_builds_for_host_test(
+ name = test_name,
+ target_under_test = ":gensrcs_test_tool_builds_for_host",
+ )
+ return test_name
+
+def gensrcs_tests_suite(name):
+ """Creates test targets for gensrcs.bzl"""
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_actions(),
+ _test_unset_output_extension(),
+ _test_gensrcs_tool_builds_for_host(),
+ ],
+ )
diff --git a/rules/hidl/BUILD b/rules/hidl/BUILD
new file mode 100644
index 00000000..81213bcc
--- /dev/null
+++ b/rules/hidl/BUILD
@@ -0,0 +1,18 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":hidl_library_test.bzl", "hidl_library_test_suite")
+
+hidl_library_test_suite(name = "hidl_library_tests")
diff --git a/rules/hidl/hidl_interface.bzl b/rules/hidl/hidl_interface.bzl
new file mode 100644
index 00000000..acc28fd2
--- /dev/null
+++ b/rules/hidl/hidl_interface.bzl
@@ -0,0 +1,48 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/cc:cc_hidl_library.bzl", "cc_hidl_library")
+load("//build/bazel/rules/hidl:hidl_library.bzl", "hidl_library")
+
+INTERFACE_SUFFIX = "_interface"
+
+def hidl_interface(
+ name,
+ srcs = [],
+ deps = [],
+ root = "",
+ root_interface_file = "",
+ min_sdk_version = "",
+ tags = []):
+ "Bazel macro to correspond with the hidl_interface Soong module."
+
+ interface_name = name + INTERFACE_SUFFIX
+ interface_deps = [dep + INTERFACE_SUFFIX for dep in deps]
+
+ hidl_library(
+ name = interface_name,
+ srcs = srcs,
+ deps = interface_deps,
+ fq_name = name,
+ root = root,
+ root_interface_file = root_interface_file,
+ )
+
+ cc_hidl_library(
+ name = name,
+ interface = interface_name,
+ dynamic_deps = deps,
+ min_sdk_version = min_sdk_version,
+ tags = tags,
+ )
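+
+# A minimal usage sketch, assuming hypothetical .hal sources and interface name,
+# to show how the macro is invoked from a BUILD file; the target name doubles as
+# the fully-qualified interface name (fq_name) handed to the hidl_library.
+#
+#     load("//build/bazel/rules/hidl:hidl_interface.bzl", "hidl_interface")
+#
+#     hidl_interface(
+#         name = "android.hardware.foo@1.0",
+#         srcs = [
+#             "IFoo.hal",
+#             "types.hal",
+#         ],
+#         root = "android.hardware",
+#         root_interface_file = "//hardware/interfaces:current.txt",
+#     )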
diff --git a/rules/hidl/hidl_library.bzl b/rules/hidl/hidl_library.bzl
new file mode 100644
index 00000000..18d802b8
--- /dev/null
+++ b/rules/hidl/hidl_library.bzl
@@ -0,0 +1,77 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+HidlInfo = provider(fields = [
+ "srcs",
+ "transitive_srcs",
+ "transitive_roots",
+ "transitive_root_interface_files",
+ "fq_name",
+ "root",
+ "root_interface_file",
+])
+
+def _hidl_library_rule_impl(ctx):
+ transitive_srcs = []
+ transitive_root_interface_files = []
+ transitive_roots = []
+
+ for dep in ctx.attr.deps:
+ transitive_srcs.append(dep[HidlInfo].transitive_srcs)
+ transitive_root_interface_files.append(dep[HidlInfo].transitive_root_interface_files)
+ transitive_roots.append(dep[HidlInfo].transitive_roots)
+
+ root_interface_path = ctx.file.root_interface_file.path
+ return [
+ DefaultInfo(files = depset(ctx.files.srcs)),
+ HidlInfo(
+ srcs = depset(ctx.files.srcs),
+ transitive_srcs = depset(
+ direct = ctx.files.srcs,
+ transitive = transitive_srcs,
+ ),
+ # These transitive roots will be used as -r arguments later when calling
+ # hidl-gen, for example, -r android.hardware:hardware/interfaces
+ transitive_roots = depset(
+ direct = [ctx.attr.root + ":" + paths.dirname(root_interface_path)],
+ transitive = transitive_roots,
+ ),
+ transitive_root_interface_files = depset(
+ direct = [ctx.file.root_interface_file],
+ transitive = transitive_root_interface_files,
+ ),
+ fq_name = ctx.attr.fq_name,
+ root = ctx.attr.root,
+ root_interface_file = ctx.attr.root_interface_file,
+ ),
+ ]
+
+hidl_library = rule(
+ implementation = _hidl_library_rule_impl,
+ attrs = {
+ "srcs": attr.label_list(
+ allow_files = [".hal"],
+ ),
+ "deps": attr.label_list(
+ providers = [HidlInfo],
+ doc = "hidl_interface targets that this one depends on",
+ ),
+ "fq_name": attr.string(),
+ "root": attr.string(),
+ "root_interface_file": attr.label(allow_single_file = ["current.txt"]),
+ },
+ provides = [HidlInfo],
+)
diff --git a/rules/hidl/hidl_library_test.bzl b/rules/hidl/hidl_library_test.bzl
new file mode 100644
index 00000000..5d2db53d
--- /dev/null
+++ b/rules/hidl/hidl_library_test.bzl
@@ -0,0 +1,163 @@
+"""Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":hidl_library.bzl", "HidlInfo", "hidl_library")
+
+SRC_NAME = "src.hal"
+DEP1_NAME = "dep1.hal"
+DEP2_NAME = "dep2.hal"
+DEP3_NAME = "dep3.hal"
+ROOT = "android.hardware"
+ROOT_INTERFACE_FILE_LABEL = "//hardware/interfaces:current.txt"
+ROOT_INTERFACE_FILE = "hardware/interfaces/current.txt"
+ROOT_ARGUMENT = "android.hardware:hardware/interfaces"
+ROOT1 = "android.system"
+ROOT1_INTERFACE_FILE_LABEL = "//system/hardware/interfaces:current.txt"
+ROOT1_INTERFACE_FILE = "system/hardware/interfaces/current.txt"
+ROOT1_ARGUMENT = "android.system:system/hardware/interfaces"
+ROOT2 = "android.hidl"
+ROOT2_INTERFACE_FILE_LABEL = "//system/libhidl/transport:current.txt"
+ROOT2_INTERFACE_FILE = "system/libhidl/transport/current.txt"
+ROOT2_ARGUMENT = "android.hidl:system/libhidl/transport"
+
+def _hidl_info_simple_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ package_root = paths.dirname(ctx.build_file_path)
+
+ asserts.equals(
+ env,
+ expected = [
+ paths.join(package_root, "src.hal"),
+ ],
+ actual = [
+ file.short_path
+ for file in target_under_test[HidlInfo].srcs.to_list()
+ ],
+ )
+
+ asserts.equals(
+ env,
+ expected = sorted([
+ paths.join(package_root, DEP1_NAME),
+ paths.join(package_root, DEP3_NAME),
+ paths.join(package_root, DEP2_NAME),
+ paths.join(package_root, SRC_NAME),
+ ]),
+ actual = sorted([
+ file.short_path
+ for file in target_under_test[HidlInfo].transitive_srcs.to_list()
+ ]),
+ )
+
+ asserts.equals(
+ env,
+ expected = [
+ ROOT,
+ Label(ROOT_INTERFACE_FILE_LABEL),
+ ],
+ actual = [
+ target_under_test[HidlInfo].root,
+ target_under_test[HidlInfo].root_interface_file.label,
+ ],
+ )
+
+ asserts.equals(
+ env,
+ expected = sorted([
+ ROOT1_ARGUMENT,
+ ROOT2_ARGUMENT,
+ ROOT_ARGUMENT,
+ ]),
+ actual = sorted(target_under_test[HidlInfo].transitive_roots.to_list()),
+ )
+
+ asserts.equals(
+ env,
+ expected = sorted([
+ ROOT1_INTERFACE_FILE,
+ ROOT2_INTERFACE_FILE,
+ ROOT_INTERFACE_FILE,
+ ]),
+ actual = sorted([
+ file.short_path
+ for file in target_under_test[HidlInfo].transitive_root_interface_files.to_list()
+ ]),
+ )
+
+ return analysistest.end(env)
+
+hidl_info_simple_test = analysistest.make(
+ _hidl_info_simple_test_impl,
+)
+
+def _test_hidl_info_simple():
+ test_base_name = "hidl_info_simple"
+ test_name = test_base_name + "_test"
+ dep1 = test_base_name + "_dep1"
+ dep2 = test_base_name + "_dep2"
+ dep3 = test_base_name + "_dep3"
+
+ hidl_library(
+ name = test_base_name,
+ srcs = [SRC_NAME],
+ deps = [
+ ":" + dep1,
+ ":" + dep2,
+ ],
+ root = ROOT,
+ root_interface_file = ROOT_INTERFACE_FILE_LABEL,
+ tags = ["manual"],
+ )
+ hidl_library(
+ name = dep1,
+ srcs = [DEP1_NAME],
+ root = ROOT1,
+ root_interface_file = ROOT1_INTERFACE_FILE_LABEL,
+ tags = ["manual"],
+ )
+ hidl_library(
+ name = dep2,
+ srcs = [DEP2_NAME],
+ deps = [
+ ":" + dep3,
+ ],
+ root = ROOT2,
+ root_interface_file = ROOT2_INTERFACE_FILE_LABEL,
+ tags = ["manual"],
+ )
+ hidl_library(
+ name = dep3,
+ srcs = [DEP3_NAME],
+ root = ROOT2,
+ root_interface_file = ROOT2_INTERFACE_FILE_LABEL,
+ tags = ["manual"],
+ )
+ hidl_info_simple_test(
+ name = test_name,
+ target_under_test = test_base_name,
+ )
+
+ return test_name
+
+def hidl_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_hidl_info_simple(),
+ ],
+ )
diff --git a/rules/hidl_file_utils.bzl b/rules/hidl_file_utils.bzl
new file mode 100644
index 00000000..1f042e47
--- /dev/null
+++ b/rules/hidl_file_utils.bzl
@@ -0,0 +1,119 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+LANGUAGE_CC_HEADERS = "c++-headers"
+LANGUAGE_CC_SOURCES = "c++-sources"
+INTERFACE_HEADER_PREFIXES = ["I", "Bs", "BnHw", "BpHw", "IHw"]
+TYPE_HEADER_PREFIXES = ["", "hw"]
+
+def _generate_hidl_action(
+ hidl_info,
+ language,
+ ctx):
+    """Utility function that generates code in the given language from a HIDL interface."""
+
+ output_dir = paths.join(ctx.bin_dir.path, ctx.label.package)
+
+ args = ctx.actions.args()
+
+ args.add("-R")
+ args.add_all(["-p", "."])
+ args.add_all(["-o", output_dir])
+ args.add_all(["-L", language])
+ for root in hidl_info.transitive_roots.to_list():
+ args.add_all(["-r", root])
+
+ args.add(hidl_info.fq_name)
+
+ hidl_srcs = hidl_info.srcs.to_list()
+ inputs = depset(
+ direct = hidl_srcs,
+ # These are needed for hidl-gen to correctly generate the code.
+ transitive = [hidl_info.transitive_srcs, hidl_info.transitive_root_interface_files],
+ )
+
+ outputs = _generate_and_declare_output_files(
+ ctx,
+ hidl_info.fq_name,
+ language,
+ hidl_srcs,
+ )
+
+ ctx.actions.run(
+ inputs = inputs,
+ executable = ctx.executable._hidl_gen,
+ outputs = outputs,
+ arguments = [args],
+ mnemonic = "HidlGen" + _get_language_string(language),
+ )
+
+ return outputs
+
+def _get_language_string(language):
+ if language == LANGUAGE_CC_HEADERS:
+ return "CcHeader"
+ elif language == LANGUAGE_CC_SOURCES:
+ return "Cc"
+
+def _generate_and_declare_output_files(
+ ctx,
+ fq_name,
+ language,
+ hidl_srcs):
+ files = []
+
+    # Break an FQ name such as android.hardware.neuralnetworks@1.3 into
+    # android/hardware/neuralnetworks/1.3, which is the directory structure
+    # that hidl-gen uses when generating files.
+ parts = fq_name.split("@")
+ dirname = paths.join(parts[0].replace(".", "/"), parts[1])
+
+ for src in hidl_srcs:
+ filename = src.basename
+
+        # An "I" prefix indicates that this is an interface file; the rest are
+        # files that define types. Interface files and type files are treated
+        # differently when generating code with hidl-gen.
+ basename = filename.removeprefix("I").removesuffix(".hal")
+ interface = _is_interface(filename)
+ if language == LANGUAGE_CC_HEADERS:
+ if interface:
+ prefixes = INTERFACE_HEADER_PREFIXES
+ else:
+ prefixes = TYPE_HEADER_PREFIXES
+ for prefix in prefixes:
+ out_name = paths.join(dirname, prefix + basename + ".h")
+ declared = ctx.actions.declare_file(out_name)
+ files.append(declared)
+ elif language == LANGUAGE_CC_SOURCES:
+ if interface:
+ out_name = paths.join(dirname, basename + "All.cpp")
+ else:
+ out_name = paths.join(dirname, basename + ".cpp")
+ declared = ctx.actions.declare_file(out_name)
+ files.append(declared)
+
+ return files
+
+def _is_interface(filename):
+ if not filename.endswith(".hal"):
+ fail("HIDL source file must be a .hal file: %s" % filename)
+
+ return filename.startswith("I")
+
+hidl_file_utils = struct(
+ generate_hidl_action = _generate_hidl_action,
+)
diff --git a/rules/java/BUILD b/rules/java/BUILD
index e69de29b..763713c9 100644
--- a/rules/java/BUILD
+++ b/rules/java/BUILD
@@ -0,0 +1,112 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":merged_txts_test.bzl", "merged_txts_test_suite")
+load(":sdk_library_test.bzl", "java_sdk_library_test_suite")
+load(":java_system_modules_test.bzl", "java_system_modules_test_suite")
+load(":bootclasspath_test.bzl", "bootclasspath_test_suite")
+load(":versions_test.bzl", "versions_test_suite")
+load(":versions.bzl", "java_versions")
+load(":sdk_transition_test.bzl", "sdk_transition_test_suite")
+load(":host_for_device_test.bzl", "host_for_device_test_suite")
+load("@bazel_skylib//rules:common_settings.bzl", "string_setting")
+load("@bazel_tools//tools/jdk:default_java_toolchain.bzl", "DEFAULT_JAVACOPTS", "default_java_toolchain")
+load("@soong_injection//java_toolchain:constants.bzl", "constants")
+
+package(
+ default_visibility = ["//visibility:public"],
+)
+
+java_sdk_library_test_suite(name = "java_sdk_library_tests")
+
+merged_txts_test_suite(name = "merged_txts_tests")
+
+java_system_modules_test_suite(name = "java_system_modules_tests")
+
+bootclasspath_test_suite(name = "bootclasspath_tests")
+
+versions_test_suite(name = "versions_tests")
+
+sdk_transition_test_suite(name = "sdk_transition_tests")
+
+host_for_device_test_suite(name = "host_for_device_test_suite")
+
+string_setting(
+ name = "version",
+ build_setting_default = str(java_versions.get_version()),
+ values = [str(v) for v in java_versions.ALL_VERSIONS],
+)
+
+[
+ config_setting(
+ name = setting,
+ flag_values = {
+ "//build/bazel/rules/java:version": str(java_version),
+ },
+ )
+ for java_version, setting in java_versions.VERSION_TO_CONFIG_SETTING.items()
+]
+
+# There is no need for both host and device java version build settings in a
+# world where every java_*/android_*/kt_* target uses the AOSP-specific
+# wrappers. However, there are targets defined by BUILD.tools files within the
+# Bazel binary that do not use the wrapper. These would inherit their java
+# version from their reverse dependency, which can cause build failures (e.g. an
+# android_library_import with java_version=7 has a tools dependency on a
+# non-wrapped Bazel java_library that uses lambdas). By using a separate host
+# version, we can reset it to its default when in the device configuration, so
+# that a subsequent exec transition will use the default java version.
+string_setting(
+ name = "host_version",
+ build_setting_default = str(java_versions.get_version()),
+ values = [str(v) for v in java_versions.ALL_VERSIONS],
+)
+
+[
+ config_setting(
+ name = "host_" + setting,
+ flag_values = {
+ "//build/bazel/rules/java:host_version": str(java_version),
+ },
+ )
+ for java_version, setting in java_versions.VERSION_TO_CONFIG_SETTING.items()
+]
+
+java_version_select_dict = {
+ "host_" + setting: str(version)
+ for version, setting in java_versions.VERSION_TO_CONFIG_SETTING.items()
+} | {
+ "//conditions:default": str(java_versions.get_version()),
+}
+
+default_java_toolchain(
+ name = "jdk17_host_toolchain_java",
+ # TODO(b/218720643): Support switching between multiple JDKs.
+ java_runtime = "//prebuilts/jdk/jdk17:jdk17_runtime",
+ misc = DEFAULT_JAVACOPTS + constants.CommonJdkFlags,
+ source_version = select(java_version_select_dict),
+ target_version = select(java_version_select_dict),
+ toolchain_definition = False,
+)
+
+toolchain(
+ name = "jdk17_host_toolchain_java_definition",
+ exec_compatible_with = ["//build/bazel/platforms/os:linux"],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ target_settings = [],
+ toolchain = ":jdk17_host_toolchain_java",
+ toolchain_type = "@bazel_tools//tools/jdk:toolchain_type",
+)
diff --git a/rules/java/bootclasspath.bzl b/rules/java/bootclasspath.bzl
new file mode 100644
index 00000000..78fddcfb
--- /dev/null
+++ b/rules/java/bootclasspath.bzl
@@ -0,0 +1,49 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load(":java_system_modules.bzl", "SystemInfo")
+
+def _bootclasspath_impl(ctx):
+ compile_jars = lambda b: b[JavaInfo].compile_jars.to_list()
+ return java_common.BootClassPathInfo(
+ bootclasspath = [jar for b in ctx.attr.bootclasspath for jar in compile_jars(b)],
+ system = ctx.attr.system[SystemInfo].system if ctx.attr.system else None,
+ auxiliary = [jar for b in ctx.attr.auxiliary for jar in compile_jars(b)],
+ )
+
+bootclasspath = rule(
+ implementation = _bootclasspath_impl,
+ attrs = {
+ "bootclasspath": attr.label_list(
+ providers = [JavaInfo],
+ doc = "The list of libraries to use as javac's --bootclasspath argument.",
+ ),
+ "system": attr.label(
+ providers = [SystemInfo],
+ doc = "The java_system_modules target to use as javac's --system argument.",
+ ),
+ "auxiliary": attr.label_list(
+ providers = [JavaInfo],
+ doc = "The list of libraries to include first in javac's --classpath.",
+ ),
+ },
+ provides = [java_common.BootClassPathInfo],
+ doc = """Provides BootClassPathInfo to a Java toolchain.
+
+The java_common.BootClassPathInfo provider is used by a Java toolchain to
+set javac's --bootclasspath and --system arguments. It can also optionally add
+to the classpath before anything else gets added to it. This rule generates this
+provider from a list of JavaInfo-providing targets for --bootclasspath and
+--classpath, and from a single SystemInfo-providing target for --system.
+""",
+)
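+
+# A minimal usage sketch with hypothetical labels: the bootclasspath and
+# auxiliary deps must provide JavaInfo, and the system dep must provide
+# SystemInfo, as required by the attribute definitions above.
+#
+#     load("//build/bazel/rules/java:bootclasspath.bzl", "bootclasspath")
+#
+#     bootclasspath(
+#         name = "device_bootclasspath",
+#         bootclasspath = [":core_libs"],
+#         system = ":core_system_modules",
+#         auxiliary = [":framework_annotations"],
+#     )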
diff --git a/rules/java/bootclasspath_test.bzl b/rules/java/bootclasspath_test.bzl
new file mode 100644
index 00000000..652acf2a
--- /dev/null
+++ b/rules/java/bootclasspath_test.bzl
@@ -0,0 +1,67 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":bootclasspath.bzl", "bootclasspath")
+load(":rules.bzl", "java_import")
+load(":java_system_modules.bzl", "java_system_modules")
+
+def _bootclasspath_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ bootclasspath_target = analysistest.target_under_test(env)
+
+ asserts.true(
+ env,
+ java_common.BootClassPathInfo in bootclasspath_target,
+ "Expected BootClassPathInfo in bootclasspath providers.",
+ )
+ return analysistest.end(env)
+
+bootclasspath_test = analysistest.make(
+ _bootclasspath_test_impl,
+)
+
+def test_bootclasspath_provider():
+ name = "test_bootclasspath_provider"
+ import_target = ":" + name + "_import"
+ system_target = ":" + name + "_jsm"
+ bootclasspath(
+ name = name + "_target",
+ bootclasspath = [import_target],
+ system = system_target,
+ auxiliary = [import_target],
+ tags = ["manual"],
+ )
+ bootclasspath_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+ java_system_modules(
+ name = name + "_jsm",
+ deps = [import_target],
+ tags = ["manual"],
+ )
+ java_import(
+ name = import_target[1:],
+ jars = ["some_jar.jar"],
+ tags = ["manual"],
+ )
+ return name
+
+def bootclasspath_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_bootclasspath_provider(),
+ ],
+ )
diff --git a/rules/java/event_log_tags.bzl b/rules/java/event_log_tags.bzl
new file mode 100644
index 00000000..b802258b
--- /dev/null
+++ b/rules/java/event_log_tags.bzl
@@ -0,0 +1,49 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Event log tags generation rule"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+def _event_log_tags_impl(ctx):
+ out_files = []
+ for logtag_file in ctx.files.srcs:
+ out_filename = paths.replace_extension(logtag_file.basename, ".java")
+ out_file = ctx.actions.declare_file(out_filename)
+ out_files.append(out_file)
+ ctx.actions.run(
+ inputs = [logtag_file],
+ outputs = [out_file],
+ arguments = [
+ "-o",
+ out_file.path,
+ logtag_file.path,
+ ],
+ progress_message = "Generating Java logtag file from %s" % logtag_file.short_path,
+ executable = ctx.executable._logtag_to_java_tool,
+ )
+ return [DefaultInfo(files = depset(out_files))]
+
+event_log_tags = rule(
+ implementation = _event_log_tags_impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = [".logtags"], mandatory = True),
+ "_logtag_to_java_tool": attr.label(
+ executable = True,
+ cfg = "exec",
+ allow_files = True,
+ default = Label("//build/make/tools:java-event-log-tags"),
+ ),
+ },
+)
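+
+# A usage sketch with a hypothetical .logtags file: each entry in srcs yields a
+# generated .java file with the same basename, produced by the
+# java-event-log-tags tool.
+#
+#     load("//build/bazel/rules/java:event_log_tags.bzl", "event_log_tags")
+#
+#     event_log_tags(
+#         name = "foo-logtags-java",
+#         srcs = ["foo.logtags"],
+#     )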
diff --git a/rules/java/host_for_device.bzl b/rules/java/host_for_device.bzl
new file mode 100644
index 00000000..9d7990a1
--- /dev/null
+++ b/rules/java/host_for_device.bzl
@@ -0,0 +1,40 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+visibility([
+ "//external/guava/...",
+ "//external/kotlinx.coroutines/...",
+ "//external/robolectric-shadows/...",
+ "//external/robolectric/...",
+])
+
+def _host_for_device_impl(ctx):
+ return [java_common.merge([d[JavaInfo] for d in ctx.attr.exports])]
+
+java_host_for_device = rule(
+ doc = """Rule to provide java libraries built with a host classpath in a device configuration.
+This is rarely necessary and restricted to a few allowed projects.
+""",
+ implementation = _host_for_device_impl,
+ attrs = {
+ # This attribute must have a specific name to let the DexArchiveAspect propagate
+ # through it.
+ "exports": attr.label_list(
+ cfg = "exec",
+ providers = [JavaInfo],
+ doc = "List of targets whose contents will be visible to targets that depend on this target.",
+ ),
+ },
+ provides = [JavaInfo],
+)
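+
+# A usage sketch with hypothetical targets: the exported dep is any
+# JavaInfo-providing library that should be built in the exec (host)
+# configuration even when this target is analyzed for a device.
+#
+#     load("//build/bazel/rules/java:host_for_device.bzl", "java_host_for_device")
+#
+#     java_host_for_device(
+#         name = "guava_for_device",
+#         exports = [":guava_host"],
+#     )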
diff --git a/rules/java/host_for_device_test.bzl b/rules/java/host_for_device_test.bzl
new file mode 100644
index 00000000..5585bad9
--- /dev/null
+++ b/rules/java/host_for_device_test.bzl
@@ -0,0 +1,78 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":host_for_device.bzl", "java_host_for_device")
+load(":rules.bzl", "java_import")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+
+Platform = provider(
+ "Platform of the leaf dependency in a linear dependency chain",
+ fields = {
+ "platform": "the target platform",
+ "host_platform": "the host platform",
+ },
+)
+
+def _host_for_device_tester_aspect_impl(target, ctx):
+ if ctx.rule.attr.exports and len(ctx.rule.attr.exports) > 0 and Platform in ctx.rule.attr.exports[0]:
+ return ctx.rule.attr.exports[0][Platform]
+ return Platform(
+ platform = ctx.fragments.platform.platform,
+ host_platform = ctx.fragments.platform.host_platform,
+ )
+
+host_for_device_tester_aspect = aspect(
+ implementation = _host_for_device_tester_aspect_impl,
+ attr_aspects = ["exports"],
+ fragments = ["platform"],
+ provides = [Platform],
+)
+
+def _host_for_device_dep_runs_in_exec_config_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ actual_platform = target_under_test[Platform].platform
+ expected_platform = target_under_test[Platform].host_platform
+ asserts.equals(env, expected_platform, actual_platform)
+ asserts.true(env, JavaInfo in target_under_test, "Expected host_for_device to provide JavaInfo")
+ return analysistest.end(env)
+
+host_for_device_dep_runs_in_exec_config_test = analysistest.make(
+ _host_for_device_dep_runs_in_exec_config_test_impl,
+ extra_target_under_test_aspects = [host_for_device_tester_aspect],
+)
+
+def test_host_for_device(name):
+ java_host_for_device(
+ name = name + "_parent",
+ exports = [name + "_child"],
+ tags = ["manual"],
+ )
+ java_import(
+ name = name + "_child",
+ jars = ["blah.jar"],
+ tags = ["manual"],
+ )
+ host_for_device_dep_runs_in_exec_config_test(
+ name = name,
+ target_under_test = name + "_parent",
+ )
+ return name
+
+def host_for_device_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [test_host_for_device("test_host_for_device")],
+ )
diff --git a/rules/java/import.bzl b/rules/java/import.bzl
new file mode 100644
index 00000000..6770c5f0
--- /dev/null
+++ b/rules/java/import.bzl
@@ -0,0 +1,67 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Macro wrapping the java_import for bp2build. """
+
+load("@rules_java//java:defs.bzl", _java_import = "java_import")
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+
+# TODO(b/277801336): document these attributes.
+def java_import(
+ name = "",
+ jars = [],
+ deps = [],
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
+ **kwargs):
+ lib_name = name + "_private"
+ _java_import(
+ name = lib_name,
+ jars = jars,
+ deps = deps,
+ tags = tags + ["manual"],
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:private"],
+ **kwargs
+ )
+
+ java_import_sdk_transition(
+ name = name,
+ sdk_version = "none",
+ java_version = None,
+ exports = lib_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = visibility,
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _java_import_sdk_transition_impl(ctx):
+ return [
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][ProguardSpecProvider],
+ ctx.attr.exports[0][OutputGroupInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ]
+
+java_import_sdk_transition = rule(
+ implementation = _java_import_sdk_transition_impl,
+ attrs = sdk_transition_attrs,
+ provides = [JavaInfo],
+)
diff --git a/rules/java/java_aidl_library.bzl b/rules/java/java_aidl_library.bzl
new file mode 100644
index 00000000..94fd8c5c
--- /dev/null
+++ b/rules/java/java_aidl_library.bzl
@@ -0,0 +1,91 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/aidl:aidl_library.bzl", "AidlGenInfo", "aidl_file_utils")
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition")
+
+JavaAidlAspectInfo = provider("JavaAidlAspectInfo", fields = ["jars"])
+
+def _java_aidl_gen_aspect_impl(target, ctx):
+ aidl_gen_java_files = aidl_file_utils.generate_aidl_bindings(ctx, "java", target[AidlGenInfo])
+ java_deps = [
+ d[JavaInfo]
+ for d in ctx.rule.attr.deps
+ ]
+ out_jar = ctx.actions.declare_file(target.label.name + "-aidl-gen.jar")
+ java_info = java_common.compile(
+ ctx,
+ source_files = aidl_gen_java_files,
+ deps = java_deps,
+ output = out_jar,
+ java_toolchain = ctx.toolchains["@bazel_tools//tools/jdk:toolchain_type"].java,
+ )
+
+ return [
+ java_info,
+ JavaAidlAspectInfo(
+ jars = depset([out_jar]),
+ ),
+ ]
+
+_java_aidl_gen_aspect = aspect(
+ implementation = _java_aidl_gen_aspect_impl,
+ attr_aspects = ["deps"],
+ attrs = {
+ "_aidl_tool": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = Label("//prebuilts/build-tools:linux-x86/bin/aidl"),
+ ),
+ },
+ toolchains = ["@bazel_tools//tools/jdk:toolchain_type"],
+ fragments = ["java"],
+ provides = [JavaInfo, JavaAidlAspectInfo],
+)
+
+def _java_aidl_library_rule_impl(ctx):
+ java_info = java_common.merge([d[JavaInfo] for d in ctx.attr.deps])
+ runtime_jars = depset(transitive = [dep[JavaAidlAspectInfo].jars for dep in ctx.attr.deps])
+ transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])
+
+ return [
+ java_info,
+ DefaultInfo(
+ files = runtime_jars,
+ runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
+ ),
+ OutputGroupInfo(default = depset()),
+ ]
+
+java_aidl_library = rule(
+ implementation = _java_aidl_library_rule_impl,
+ attrs = {
+ # This attribute's name lets the DexArchiveAspect propagate
+ # through it. It should be changed carefully.
+ "deps": attr.label_list(
+ providers = [AidlGenInfo],
+ aspects = [_java_aidl_gen_aspect],
+ cfg = sdk_transition,
+ ),
+ "java_version": attr.string(),
+ "sdk_version": attr.string(
+ default = "system_current",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ },
+ provides = [JavaInfo],
+)
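+
+# A usage sketch with a hypothetical aidl_library dep: the aspect generates Java
+# bindings for each dep and compiles them into a <dep>-aidl-gen.jar that this
+# rule re-exports via JavaInfo.
+#
+#     load("//build/bazel/rules/java:java_aidl_library.bzl", "java_aidl_library")
+#
+#     java_aidl_library(
+#         name = "foo-java-aidl",
+#         deps = [":foo_aidl_library"],
+#     )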
diff --git a/rules/java/java_system_modules.bzl b/rules/java/java_system_modules.bzl
new file mode 100644
index 00000000..41e76a83
--- /dev/null
+++ b/rules/java/java_system_modules.bzl
@@ -0,0 +1,177 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+SystemInfo = provider(fields = ["system"])
+
+def _gen_module_info_java(ctx, jars_to_module_info, jars, module_info):
+ ctx.actions.run_shell(
+ inputs = jars,
+ outputs = [module_info],
+ command = "{} java.base {} > {}".format(
+ jars_to_module_info.path,
+ " ".join([jar.path for jar in jars]),
+ module_info.path,
+ ),
+ tools = [jars_to_module_info],
+ )
+
+def _gen_module_info_class(ctx, java_runtime, module_info, java_base_patch_jars, module_info_class):
+ ctx.actions.run_shell(
+ inputs = depset([module_info], transitive = [java_base_patch_jars]),
+ outputs = [module_info_class],
+ tools = java_runtime.files,
+ command = "{} -d {} --system=none --patch-module=java.base={} {}".format(
+ paths.join(java_runtime.java_home, "bin", "javac"),
+ module_info_class.dirname,
+ ":".join([jar.path for jar in java_base_patch_jars.to_list()]),
+ module_info.path,
+ ),
+ )
+
+def _gen_module_info_jar(ctx, soong_zip, module_info_class, module_info_jar):
+ args = ctx.actions.args()
+ args.add("-jar")
+ args.add("--symlinks=false")
+ args.add("-o", module_info_jar)
+ args.add("-C", module_info_class.dirname)
+ args.add("-f", module_info_class)
+ ctx.actions.run(
+ inputs = [module_info_class],
+ outputs = [module_info_jar],
+ arguments = [args],
+ executable = soong_zip,
+ )
+
+def _gen_merged_module_jar(ctx, merge_zips, module_info_jar, jars, merged_module_jar):
+ args = ctx.actions.args()
+ args.add("-j", merged_module_jar)
+ args.add_all(depset([module_info_jar], transitive = [jars]))
+ ctx.actions.run(
+ inputs = depset([module_info_jar], transitive = [jars]),
+ outputs = [merged_module_jar],
+ arguments = [args],
+ executable = merge_zips,
+ )
+
+def _gen_jmod(ctx, java_runtime, merged_module_jar, jmod):
+ ctx.actions.run_shell(
+ inputs = [merged_module_jar],
+ outputs = [jmod],
+ tools = java_runtime.files,
+ command = (
+ "{} create --module-version $({} --version) " +
+ "--target-platform android --class-path {} {}"
+ ).format(
+ paths.join(java_runtime.java_home, "bin", "jmod"),
+ paths.join(java_runtime.java_home, "bin", "jlink"),
+ merged_module_jar.path,
+ jmod.path,
+ ),
+ )
+
+def _gen_system(ctx, java_runtime, jmod, system):
+ ctx.actions.run_shell(
+ inputs = depset([jmod], transitive = [java_runtime.files]),
+ outputs = [system],
+ tools = java_runtime.files,
+ command = (
+ "rm -rf {} && " +
+ "{} --module-path {} --add-modules java.base --output {} " +
+ "--disable-plugin system-modules && " +
+ "cp {} {}/lib/"
+ ).format(
+ system.path,
+ paths.join(java_runtime.java_home, "bin", "jlink"),
+ jmod.dirname,
+ system.path,
+ paths.join(java_runtime.java_home, "lib", "jrt-fs.jar"),
+ system.path,
+ ),
+ )
+
+def _java_system_modules_impl(ctx):
+ java_info = java_common.merge([d[JavaInfo] for d in ctx.attr.deps])
+ module_info = ctx.actions.declare_file("%s/src/module-info.java" % ctx.label.name)
+ _gen_module_info_java(ctx, ctx.executable._jars_to_module_info, java_info.compile_jars.to_list(), module_info)
+
+ java_runtime = ctx.attr._runtime[java_common.JavaRuntimeInfo]
+ module_info_class = ctx.actions.declare_file("%s/class/module-info.class" % ctx.label.name)
+ _gen_module_info_class(ctx, java_runtime, module_info, java_info.compile_jars, module_info_class)
+
+ module_info_jar = ctx.actions.declare_file("%s/jar/classes.jar" % ctx.label.name)
+ _gen_module_info_jar(ctx, ctx.executable._soong_zip, module_info_class, module_info_jar)
+
+ merged_module_jar = ctx.actions.declare_file("%s/merged/module.jar" % ctx.label.name)
+ _gen_merged_module_jar(
+ ctx,
+ ctx.executable._merge_zips,
+ module_info_jar,
+ java_info.full_compile_jars,
+ merged_module_jar,
+ )
+
+ jmod = ctx.actions.declare_file("%s/jmod/java.base.jmod" % ctx.label.name)
+ _gen_jmod(ctx, java_runtime, merged_module_jar, jmod)
+
+ system = ctx.actions.declare_directory("%s/system" % ctx.label.name)
+ _gen_system(ctx, java_runtime, jmod, system)
+
+ return [
+ SystemInfo(
+ system = system,
+ ),
+ DefaultInfo(files = depset([system])),
+ ]
+
+java_system_modules = rule(
+ implementation = _java_system_modules_impl,
+ attrs = {
+ "_jars_to_module_info": attr.label(
+ allow_files = True,
+ executable = True,
+ cfg = "exec",
+ default = "//build/soong/scripts:jars-to-module-info-java",
+ ),
+ "_soong_zip": attr.label(
+ cfg = "exec",
+ allow_single_file = True,
+ doc = "The tool soong_zip",
+ default = "//build/soong/zip/cmd:soong_zip",
+ executable = True,
+ ),
+ "_merge_zips": attr.label(
+ cfg = "exec",
+ allow_single_file = True,
+ doc = "The tool merge_zips.",
+ default = "//prebuilts/build-tools:linux-x86/bin/merge_zips",
+ executable = True,
+ ),
+ "_runtime": attr.label(
+ default = Label("@bazel_tools//tools/jdk:current_java_runtime"),
+ cfg = "exec",
+ providers = [java_common.JavaRuntimeInfo],
+ ),
+ "deps": attr.label_list(
+ providers = [JavaInfo],
+ doc = "Libraries to be converted into a system module directory structure.",
+ ),
+ },
+ doc = """Generates a system module directory from Java libraries.
+
+Starting with Java 9, a subset of java.* classes must be provided via system
+modules. This rule encapsulates the steps needed to convert a set of jar files
+into the directory structure of system modules.
+""",
+)
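+
+# A usage sketch with a hypothetical core library dep: the resulting system
+# directory is what the bootclasspath rule's "system" attribute consumes.
+#
+#     load("//build/bazel/rules/java:java_system_modules.bzl", "java_system_modules")
+#
+#     java_system_modules(
+#         name = "core_system_modules",
+#         deps = [":core_libs"],
+#     )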
diff --git a/rules/java/java_system_modules_test.bzl b/rules/java/java_system_modules_test.bzl
new file mode 100644
index 00000000..3b76f4c8
--- /dev/null
+++ b/rules/java/java_system_modules_test.bzl
@@ -0,0 +1,59 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":java_system_modules.bzl", "SystemInfo", "java_system_modules")
+load(":rules.bzl", "java_import")
+
+def _java_system_modules_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ java_system_modules_target = analysistest.target_under_test(env)
+
+ asserts.true(
+ env,
+ java_system_modules_target[SystemInfo].system.is_directory,
+ "java_system_modules output should be a directory.",
+ )
+ return analysistest.end(env)
+
+java_system_modules_test = analysistest.make(
+ _java_system_modules_test_impl,
+)
+
+def test_java_system_modules_provider():
+ name = "test_java_system_modules_provider"
+ import_target = ":" + name + "_import"
+ java_system_modules(
+ name = name + "_target",
+ deps = [import_target],
+ tags = ["manual"],
+ )
+ java_system_modules_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+
+ java_import(
+ name = import_target[1:],
+ jars = ["some_jar.jar"],
+ tags = ["manual"],
+ )
+ return name
+
+def java_system_modules_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_java_system_modules_provider(),
+ ],
+ )
diff --git a/rules/java/library.bzl b/rules/java/library.bzl
index f45cd51b..f381b1c2 100644
--- a/rules/java/library.bzl
+++ b/rules/java/library.bzl
@@ -1,8 +1,80 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
"""Macro wrapping the java_library for bp2build. """
-def java_library(name = "", srcs = [], deps = [], javacopts = [], **kwargs):
- # Disable the error prone check of HashtableContains by default. See https://errorprone.info/bugpattern/HashtableContains
- # HashtableContains error is reported when compiling //external/bouncycastle:bouncycastle-bcpkix-unbundled
+load(
+ "@rules_java//java:defs.bzl",
+ _java_library = "java_library",
+)
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+
+# TODO(b/277801336): document these attributes.
+def java_library(
+ name = "",
+ srcs = [],
+ deps = [],
+ javacopts = [],
+ sdk_version = None,
+ java_version = None,
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
+ **kwargs):
+ lib_name = name + "_private"
+
+ # Disable the error prone check of HashtableContains by default. See https://errorprone.info/bugpattern/HashtableContains
+ # HashtableContains error is reported when compiling //external/bouncycastle:bouncycastle-bcpkix-unbundled
opts = ["-Xep:HashtableContains:OFF"] + javacopts
- native.java_library(name, srcs = srcs, deps = deps, javacopts = opts, **kwargs)
+ _java_library(
+ name = lib_name,
+ srcs = srcs,
+ deps = deps,
+ javacopts = opts,
+ tags = tags + ["manual"],
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:private"],
+ **kwargs
+ )
+
+ java_library_sdk_transition(
+ name = name,
+ sdk_version = sdk_version,
+ java_version = java_version,
+ exports = lib_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:public"],
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _java_library_sdk_transition_impl(ctx):
+ return [
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][InstrumentedFilesInfo],
+ ctx.attr.exports[0][ProguardSpecProvider],
+ ctx.attr.exports[0][OutputGroupInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ]
+
+java_library_sdk_transition = rule(
+ implementation = _java_library_sdk_transition_impl,
+ attrs = sdk_transition_attrs,
+ provides = [JavaInfo],
+)
diff --git a/rules/java/merged_txts.bzl b/rules/java/merged_txts.bzl
new file mode 100644
index 00000000..069903f8
--- /dev/null
+++ b/rules/java/merged_txts.bzl
@@ -0,0 +1,84 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("//build/bazel/rules/java:sdk_library.bzl", "JavaSdkLibraryInfo")
+
+METALAVA_ARGS = [
+ "-J--add-opens=java.base/java.util=ALL-UNNAMED",
+ "--quiet",
+ "--no-banner",
+ "--format=v2",
+]
+
+def _get_inputs(ctx):
+ inputs = []
+ inputs.extend(ctx.files.base)
+ from_deps = []
+ if ctx.attr.scope == "public":
+ from_deps = [d[JavaSdkLibraryInfo].public for d in ctx.attr.deps]
+ elif ctx.attr.scope == "system":
+ from_deps = [d[JavaSdkLibraryInfo].system for d in ctx.attr.deps]
+ elif ctx.attr.scope == "module-lib":
+ from_deps = [d[JavaSdkLibraryInfo].module_lib for d in ctx.attr.deps]
+ elif ctx.attr.scope == "system-server":
+ from_deps = [d[JavaSdkLibraryInfo].system_server for d in ctx.attr.deps]
+ inputs.extend(from_deps)
+ return depset(inputs)
+
+def _get_output_name(ctx):
+ output_name = "current.txt"
+ if ctx.attr.scope != "public":
+ output_name = ctx.attr.scope + "-" + output_name
+ return output_name
+
+def _merged_txts_impl(ctx):
+ output = ctx.actions.declare_file(_get_output_name(ctx))
+ inputs = _get_inputs(ctx)
+ args = ctx.actions.args()
+ args.add_all(METALAVA_ARGS)
+ args.add_all(inputs)
+ args.add("--api", output)
+ ctx.actions.run(
+ outputs = [output],
+ inputs = inputs,
+ executable = ctx.executable._metalava,
+ arguments = [args],
+ )
+ return [DefaultInfo(files = depset([output]))]
+
+merged_txts = rule(
+ implementation = _merged_txts_impl,
+ attrs = {
+ "scope": attr.string(
+            doc = "API scope: public, system, module-lib, or system-server",
+ ),
+ "base": attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ doc = "the target used to get the checked-in base current.txt",
+ ),
+ "deps": attr.label_list(
+ mandatory = True,
+ allow_empty = False,
+ providers = [JavaSdkLibraryInfo],
+ ),
+ "_metalava": attr.label(
+ default = "//tools/metalava:metalava",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+)
diff --git a/rules/java/merged_txts_test.bzl b/rules/java/merged_txts_test.bzl
new file mode 100644
index 00000000..6dcf3331
--- /dev/null
+++ b/rules/java/merged_txts_test.bzl
@@ -0,0 +1,156 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":merged_txts.bzl", "merged_txts")
+load(":sdk_library.bzl", "java_sdk_library")
+
+SCOPE_TO_JAVA_SDK_LIBRARY_FILE = {
+ "public": "sdk_public.txt",
+ "system": "sdk_system.txt",
+ "module-lib": "sdk_module_lib.txt",
+ "system-server": "sdk_system_server.txt",
+}
+
+def _basic_merged_txts_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ base_file = paths.join(paths.dirname(ctx.build_file_path), ctx.attr.base)
+ asserts.true(
+ env,
+ base_file in actions[0].argv,
+ "Base file {} of scope {} is not in args list".format(base_file, ctx.attr.scope),
+ )
+
+ java_sdk_library_file = paths.join(
+ paths.dirname(ctx.build_file_path),
+ SCOPE_TO_JAVA_SDK_LIBRARY_FILE[ctx.attr.scope],
+ )
+ asserts.true(
+ env,
+ java_sdk_library_file in actions[0].argv,
+ "java_sdk_library file {} of scope {} is not in args list".format(java_sdk_library_file, ctx.attr.scope),
+ )
+
+ return analysistest.end(env)
+
+basic_merged_txts_test = analysistest.make(
+ _basic_merged_txts_test_impl,
+ attrs = {
+ "scope": attr.string(),
+ "base": attr.string(),
+ },
+)
+
+def test_generated_current_txt():
+ name = "generated_current_txt_test"
+ target_name = name + "_target"
+ scope = "public"
+ base = "non-updatable-current.txt"
+ merged_txts(
+ name = target_name,
+ scope = scope,
+ base = base,
+ deps = ["dep"],
+ tags = ["manual"],
+ )
+ java_sdk_library(
+ name = "dep",
+ public = SCOPE_TO_JAVA_SDK_LIBRARY_FILE["public"],
+ system = SCOPE_TO_JAVA_SDK_LIBRARY_FILE["system"],
+ module_lib = SCOPE_TO_JAVA_SDK_LIBRARY_FILE["module-lib"],
+ system_server = SCOPE_TO_JAVA_SDK_LIBRARY_FILE["system-server"],
+ )
+ basic_merged_txts_test(
+ name = name,
+ target_under_test = target_name,
+ scope = scope,
+ base = base,
+ )
+ return name
+
+def test_generated_system_current_txt():
+ name = "generated_system_current_txt_test"
+ target_name = name + "_target"
+ scope = "system"
+ base = "non-updatable-system-current.txt"
+ merged_txts(
+ name = target_name,
+ scope = scope,
+ base = base,
+ deps = ["dep"],
+ tags = ["manual"],
+ )
+ basic_merged_txts_test(
+ name = name,
+ target_under_test = target_name,
+ scope = scope,
+ base = base,
+ )
+ return name
+
+def test_generated_module_lib_current_txt():
+ name = "generated_module_lib_current_txt_test"
+ target_name = name + "_target"
+ scope = "module-lib"
+ base = "non-updatable-module-lib_current.txt"
+ merged_txts(
+ name = target_name,
+ scope = scope,
+ base = base,
+ deps = ["dep"],
+ tags = ["manual"],
+ )
+ basic_merged_txts_test(
+ name = name,
+ target_under_test = target_name,
+ scope = scope,
+ base = base,
+ )
+ return name
+
+def test_generated_system_server_current_txt():
+ name = "generated_system_server_current_txt_test"
+ target_name = name + "_target"
+ scope = "system-server"
+ base = "non-updatable-system-server-current.txt"
+ merged_txts(
+ name = target_name,
+ scope = scope,
+ base = base,
+ deps = ["dep"],
+ tags = ["manual"],
+ )
+ basic_merged_txts_test(
+ name = name,
+ target_under_test = target_name,
+ scope = scope,
+ base = base,
+ )
+ return name
+
+def merged_txts_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_generated_current_txt(),
+ test_generated_system_current_txt(),
+ test_generated_module_lib_current_txt(),
+ test_generated_system_server_current_txt(),
+ ],
+ )
diff --git a/rules/java/proto.bzl b/rules/java/proto.bzl
index fa2d5675..68533d20 100644
--- a/rules/java/proto.bzl
+++ b/rules/java/proto.bzl
@@ -1,22 +1,19 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load("//build/bazel/rules:proto_file_utils.bzl", "proto_file_utils")
-load("@bazel_skylib//lib:paths.bzl", "paths")
-load(":library.bzl", "java_library")
+load(":rules.bzl", "java_library")
def _java_proto_sources_gen_rule_impl(ctx):
out_flags = []
@@ -30,17 +27,20 @@ def _java_proto_sources_gen_rule_impl(ctx):
out_flags.append(ctx.attr.out_format)
srcs = []
+ proto_infos = []
+
for dep in ctx.attr.deps:
- proto_info = dep[ProtoInfo]
- out_jar = _generate_java_proto_action(
- proto_info = proto_info,
- protoc = ctx.executable._protoc,
- ctx = ctx,
- out_flags = out_flags,
- plugin_executable = plugin_executable,
- out_arg = out_arg,
- )
- srcs.append(out_jar)
+ proto_infos.append(dep[ProtoInfo])
+
+ out_jar = _generate_java_proto_action(
+ proto_infos = proto_infos,
+ protoc = ctx.executable._protoc,
+ ctx = ctx,
+ out_flags = out_flags,
+ plugin_executable = plugin_executable,
+ out_arg = out_arg,
+ )
+ srcs.append(out_jar)
return [
DefaultInfo(files = depset(direct = srcs)),
@@ -76,14 +76,14 @@ If not provided, defaults to full protos.
)
def _generate_java_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
plugin_executable,
out_arg,
out_flags):
return proto_file_utils.generate_jar_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
out_flags,
@@ -96,9 +96,10 @@ def _java_proto_library(
name,
deps = [],
plugin = None,
- target_compatible_with = [],
out_format = None,
- proto_dep = None):
+ proto_dep = None,
+ sdk_version = "core_current",
+ **kwargs):
proto_sources_name = name + "_proto_gen"
_java_proto_sources_gen(
@@ -106,6 +107,7 @@ def _java_proto_library(
deps = deps,
plugin = plugin,
out_format = out_format,
+ tags = ["manual"],
)
if proto_dep:
@@ -117,70 +119,61 @@ def _java_proto_library(
name = name,
srcs = [proto_sources_name],
deps = deps,
- target_compatible_with = target_compatible_with,
+ sdk_version = sdk_version,
+ **kwargs
)
def java_nano_proto_library(
name,
- deps = [],
plugin = "//external/protobuf:protoc-gen-javanano",
- target_compatible_with = []):
+ **kwargs):
_java_proto_library(
name,
- deps = deps,
plugin = plugin,
- target_compatible_with = target_compatible_with,
proto_dep = "//external/protobuf:libprotobuf-java-nano",
+ **kwargs
)
def java_micro_proto_library(
name,
- deps = [],
plugin = "//external/protobuf:protoc-gen-javamicro",
- target_compatible_with = []):
+ **kwargs):
_java_proto_library(
name,
- deps = deps,
plugin = plugin,
- target_compatible_with = target_compatible_with,
proto_dep = "//external/protobuf:libprotobuf-java-micro",
+ **kwargs
)
def java_lite_proto_library(
name,
- deps = [],
plugin = None,
- target_compatible_with = []):
+ **kwargs):
_java_proto_library(
name,
- deps = deps,
plugin = plugin,
- target_compatible_with = target_compatible_with,
out_format = "lite",
proto_dep = "//external/protobuf:libprotobuf-java-lite",
+ **kwargs
)
def java_stream_proto_library(
name,
- deps = [],
plugin = "//frameworks/base/tools/streaming_proto:protoc-gen-javastream",
- target_compatible_with = []):
+ **kwargs):
_java_proto_library(
name,
- deps = deps,
plugin = plugin,
- target_compatible_with = target_compatible_with,
+ **kwargs
)
def java_proto_library(
name,
- deps = [],
plugin = None,
- target_compatible_with = []):
+ **kwargs):
_java_proto_library(
name,
- deps = deps,
plugin = plugin,
- target_compatible_with = target_compatible_with,
proto_dep = "//external/protobuf:libprotobuf-java-full",
+ **kwargs
)
diff --git a/rules/java/rules.bzl b/rules/java/rules.bzl
new file mode 100644
index 00000000..575321b2
--- /dev/null
+++ b/rules/java/rules.bzl
@@ -0,0 +1,30 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ "@rules_java//java:defs.bzl",
+ _java_binary = "java_binary",
+)
+load(
+ "//build/bazel/rules/java:import.bzl",
+ _java_import = "java_import",
+)
+load(
+ "//build/bazel/rules/java:library.bzl",
+ _java_library = "java_library",
+)
+
+java_binary = _java_binary
+java_library = _java_library
+java_import = _java_import
diff --git a/rules/java/sdk/BUILD.bazel b/rules/java/sdk/BUILD.bazel
new file mode 100644
index 00000000..b00ba819
--- /dev/null
+++ b/rules/java/sdk/BUILD.bazel
@@ -0,0 +1,112 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(
+ default_visibility = ["//visibility:public"],
+)
+
+load("@bazel_skylib//rules:common_settings.bzl", "int_setting", "string_setting")
+load("//build/bazel/rules/common:sdk_version.bzl", "sdk_version")
+load("//build/bazel/rules/common:api.bzl", "api")
+load("//build/bazel/rules/java:versions.bzl", "java_versions")
+load(":config_setting_names.bzl", "config_setting_names")
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("//prebuilts/sdk:utils.bzl", "prebuilt_sdk_utils")
+
+string_setting(
+ name = "kind",
+ build_setting_default = sdk_version.KIND_PUBLIC,
+ values = sdk_version.ALL_KINDS,
+)
+
+int_setting(
+ name = "api_level",
+ build_setting_default = api.FUTURE_API_LEVEL,
+)
+
+# The settings below are used to properly define a device java and android toolchain.
+
+# The SDK_NONE config setting maps to sdk_version = "none". In this configuration the java toolchain
+# will provide nothing on the bootclasspath, not even the standard java.* libraries.
+# The android toolchain is undefined in this configuration.
+config_setting(
+ name = config_setting_names.SDK_NONE,
+ flag_values = {
+ "//build/bazel/rules/java/sdk:kind": sdk_version.KIND_NONE,
+ },
+)
+
+# Pre and Post Java 9 configs differ in how the bootclasspath is constructed and what arguments must
+# be passed to javac. Pre Java 9, the SDK is passed as a whole to the --bootclasspath argument of
+# javac. Post Java 9, the SDK is split between core libraries, passed using system modules and the
+# --system javac argument, and the rest, added at the beginning of the classpath.
+selects.config_setting_group(
+ name = config_setting_names.PRE_JAVA_9,
+ match_any = [
+ "//build/bazel/rules/java:" + java_versions.VERSION_TO_CONFIG_SETTING[version]
+ for version in java_versions.VERSION_TO_CONFIG_SETTING.keys()
+ if version < 9
+ ],
+)
+
+selects.config_setting_group(
+ name = config_setting_names.POST_JAVA_9,
+ match_any = [
+ "//build/bazel/rules/java:" + java_versions.VERSION_TO_CONFIG_SETTING[version]
+ for version in java_versions.VERSION_TO_CONFIG_SETTING.keys()
+ if version >= 9
+ ],
+)
+
+# A specific configuration at a given kind and api level will have the java and android toolchains
+# point to the proper android.jar and framework.aidl files.
+[
+ config_setting(
+ name = config_setting_names.kind_api(kind, api_level),
+ flag_values = {
+ "//build/bazel/rules/java/sdk:kind": kind,
+ "//build/bazel/rules/java/sdk:api_level": str(api_level),
+ },
+ )
+ for api_level in prebuilt_sdk_utils.API_LEVELS
+ for kind in prebuilt_sdk_utils.available_kinds_for_api_level(api_level)
+]
+
+# The bootclasspath is a function of sdk kind, api level, and whether building for pre or post java
+# 9 sources.
+[
+ selects.config_setting_group(
+ name = config_setting_names.kind_api_pre_java_9(kind, api_level),
+ match_all = [
+ config_setting_names.kind_api(kind, api_level),
+ config_setting_names.PRE_JAVA_9,
+ ],
+ )
+ for api_level in prebuilt_sdk_utils.API_LEVELS
+ for kind in prebuilt_sdk_utils.available_kinds_for_api_level(api_level)
+ if java_versions.supports_pre_java_9(api_level)
+]
+
+[
+ selects.config_setting_group(
+ name = config_setting_names.kind_api_post_java_9(kind, api_level),
+ match_all = [
+ config_setting_names.kind_api(kind, api_level),
+ config_setting_names.POST_JAVA_9,
+ ],
+ )
+ for api_level in prebuilt_sdk_utils.API_LEVELS
+ for kind in prebuilt_sdk_utils.available_kinds_for_api_level(api_level)
+ if java_versions.supports_post_java_9(api_level)
+]
diff --git a/rules/java/sdk/config_setting_names.bzl b/rules/java/sdk/config_setting_names.bzl
new file mode 100644
index 00000000..2150465f
--- /dev/null
+++ b/rules/java/sdk/config_setting_names.bzl
@@ -0,0 +1,35 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def _kind_api(kind, api_level):
+ return "config_setting_android_%s_%s" % (kind, api_level)
+
+def _kind_api_pre_java_9(kind, api_level):
+ return _kind_api(kind, api_level) + "_pre_java_9"
+
+def _kind_api_post_java_9(kind, api_level):
+ return _kind_api(kind, api_level) + "_post_java_9"
+
+_CONFIG_SETTING_SDK_NONE = "config_setting_sdk_none"
+_CONFIG_SETTING_PRE_JAVA_9 = "config_setting_pre_java_9"
+_CONFIG_SETTING_POST_JAVA_9 = "config_setting_post_java_9"
+
+config_setting_names = struct(
+ SDK_NONE = _CONFIG_SETTING_SDK_NONE,
+ PRE_JAVA_9 = _CONFIG_SETTING_PRE_JAVA_9,
+ POST_JAVA_9 = _CONFIG_SETTING_POST_JAVA_9,
+ kind_api = _kind_api,
+ kind_api_pre_java_9 = _kind_api_pre_java_9,
+ kind_api_post_java_9 = _kind_api_post_java_9,
+)
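# Editor's sketch (not part of this change): the helpers above only compose
# strings, so for a hypothetical kind "public" at api level 33 they produce
# "config_setting_android_public_33" and its _pre_java_9/_post_java_9 variants.
def _example_names():
    # Illustrative only; "public" and 33 are placeholder inputs.
    return [
        config_setting_names.kind_api("public", 33),
        config_setting_names.kind_api_pre_java_9("public", 33),
        config_setting_names.kind_api_post_java_9("public", 33),
    ]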
diff --git a/rules/java/sdk_library.bzl b/rules/java/sdk_library.bzl
new file mode 100644
index 00000000..82f6eb3c
--- /dev/null
+++ b/rules/java/sdk_library.bzl
@@ -0,0 +1,61 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+JavaSdkLibraryInfo = provider(
+ "Checked in current.txt for Public, System, Module_lib and System_server",
+ fields = [
+ "public",
+ "system",
+ "test",
+ "module_lib",
+ "system_server",
+ ],
+)
+
+def _java_sdk_library_impl(ctx):
+ return [
+ JavaSdkLibraryInfo(
+ public = ctx.file.public,
+ system = ctx.file.system,
+ test = ctx.file.test,
+ module_lib = ctx.file.module_lib,
+ system_server = ctx.file.system_server,
+ ),
+ ]
+
+java_sdk_library = rule(
+ implementation = _java_sdk_library_impl,
+ attrs = {
+ "public": attr.label(
+ allow_single_file = True,
+ doc = "public api surface file",
+ ),
+ "system": attr.label(
+ allow_single_file = True,
+ doc = "system api surface file",
+ ),
+ "test": attr.label(
+ allow_single_file = True,
+ doc = "test api surface file",
+ ),
+ "module_lib": attr.label(
+ allow_single_file = True,
+ doc = "module_lib api surface file",
+ ),
+ "system_server": attr.label(
+ allow_single_file = True,
+ doc = "system_server api surface file",
+ ),
+ },
+)
diff --git a/rules/java/sdk_library_test.bzl b/rules/java/sdk_library_test.bzl
new file mode 100644
index 00000000..ca954766
--- /dev/null
+++ b/rules/java/sdk_library_test.bzl
@@ -0,0 +1,115 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":sdk_library.bzl", "JavaSdkLibraryInfo", "java_sdk_library")
+
+def _basic_java_sdk_library_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ java_sdk_library_target = analysistest.target_under_test(env)
+
+ asserts.true(
+ env,
+ java_sdk_library_target[JavaSdkLibraryInfo].public.is_source,
+ "Public api surface file should be source, not generated",
+ )
+
+ asserts.equals(
+ env,
+ expected = "public.txt",
+ actual = java_sdk_library_target[JavaSdkLibraryInfo].public.basename,
+ msg = "Public api surface file not correct",
+ )
+
+ asserts.true(
+ env,
+ java_sdk_library_target[JavaSdkLibraryInfo].system.is_source,
+ "System api surface file should be source, not generated",
+ )
+
+ asserts.equals(
+ env,
+ expected = "system.txt",
+ actual = java_sdk_library_target[JavaSdkLibraryInfo].system.basename,
+ msg = "System api surface file not correct",
+ )
+
+ asserts.true(
+ env,
+ java_sdk_library_target[JavaSdkLibraryInfo].test.is_source,
+ "Test api surface file should be source, not generated",
+ )
+
+ asserts.equals(
+ env,
+ expected = "test.txt",
+ actual = java_sdk_library_target[JavaSdkLibraryInfo].test.basename,
+ msg = "Test api surface file not correct",
+ )
+
+ asserts.true(
+ env,
+ java_sdk_library_target[JavaSdkLibraryInfo].module_lib.is_source,
+ "Module_lib api surface file should be source, not generated",
+ )
+
+ asserts.equals(
+ env,
+ expected = "module_lib.txt",
+ actual = java_sdk_library_target[JavaSdkLibraryInfo].module_lib.basename,
+ msg = "Module_lib api surface file not correct",
+ )
+
+ asserts.true(
+ env,
+ java_sdk_library_target[JavaSdkLibraryInfo].system_server.is_source,
+ "System_server api surface file should be source, not generated",
+ )
+
+ asserts.equals(
+ env,
+ expected = "system_server.txt",
+ actual = java_sdk_library_target[JavaSdkLibraryInfo].system_server.basename,
+ msg = "System_server api surface file not correct",
+ )
+
+ return analysistest.end(env)
+
+basic_java_sdk_library_test = analysistest.make(
+ _basic_java_sdk_library_test_impl,
+)
+
+def test_checked_in_api_surface_files():
+ name = "checked_in_api_surface_files_test"
+ java_sdk_library(
+ name = name + "_target",
+ public = "public.txt",
+ system = "system.txt",
+ test = "test.txt",
+ module_lib = "module_lib.txt",
+ system_server = "system_server.txt",
+ )
+ basic_java_sdk_library_test(
+ name = name,
+ target_under_test = name + "_target",
+ )
+ return name
+
+def java_sdk_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_checked_in_api_surface_files(),
+ ],
+ )
diff --git a/rules/java/sdk_transition.bzl b/rules/java/sdk_transition.bzl
new file mode 100644
index 00000000..7f69d09d
--- /dev/null
+++ b/rules/java/sdk_transition.bzl
@@ -0,0 +1,68 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/java:versions.bzl", "java_versions")
+load("//build/bazel/rules/common:sdk_version.bzl", "sdk_version")
+load("//build/bazel/rules/common:api.bzl", "api")
+
+def _sdk_transition_impl(settings, attr):
+ host_platform = settings["//command_line_option:host_platform"]
+ default_java_version = str(java_versions.get_version())
+
+ # TODO: this condition should really be "platform is not a device".
+ # More details on why we treat the java version for non-device platforms differently are at the
+ # definition of the //build/bazel/rules/java:host_version build setting.
+ if all([host_platform == platform for platform in settings["//command_line_option:platforms"]]):
+ return {
+ "//build/bazel/rules/java:version": default_java_version,
+ "//build/bazel/rules/java:host_version": str(java_versions.get_version(attr.java_version)),
+ "//build/bazel/rules/java/sdk:kind": sdk_version.KIND_NONE,
+ "//build/bazel/rules/java/sdk:api_level": api.NONE_API_LEVEL,
+ }
+ sdk_spec = sdk_version.sdk_spec_from(attr.sdk_version)
+ java_version = str(java_versions.get_version(attr.java_version, sdk_spec.api_level))
+
+ return {
+ "//build/bazel/rules/java:host_version": default_java_version,
+ "//build/bazel/rules/java:version": java_version,
+ "//build/bazel/rules/java/sdk:kind": sdk_spec.kind,
+ "//build/bazel/rules/java/sdk:api_level": sdk_spec.api_level,
+ }
+
+sdk_transition = transition(
+ implementation = _sdk_transition_impl,
+ inputs = [
+ "//command_line_option:host_platform",
+ "//command_line_option:platforms",
+ ],
+ outputs = [
+ "//build/bazel/rules/java:version",
+ "//build/bazel/rules/java:host_version",
+ "//build/bazel/rules/java/sdk:kind",
+ "//build/bazel/rules/java/sdk:api_level",
+ ],
+)
+
+sdk_transition_attrs = {
+ # This attribute must have a specific name to let the DexArchiveAspect propagate
+ # through it.
+ "exports": attr.label(
+ cfg = sdk_transition,
+ ),
+ "java_version": attr.string(),
+ "sdk_version": attr.string(),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+}
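# Editor's sketch (not part of this change): an assumed minimal consumer of
# sdk_transition_attrs. A wrapper rule splices these attrs in, lets the
# "exports" dependency be built under sdk_transition, and forwards its
# providers; the name "my_sdk_wrapper" is hypothetical.
def _my_sdk_wrapper_impl(ctx):
    # "exports" carries a transition, so ctx.attr.exports is a list of targets.
    return [ctx.attr.exports[0][JavaInfo]]

my_sdk_wrapper = rule(
    implementation = _my_sdk_wrapper_impl,
    attrs = sdk_transition_attrs,
    provides = [JavaInfo],
)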
diff --git a/rules/java/sdk_transition_test.bzl b/rules/java/sdk_transition_test.bzl
new file mode 100644
index 00000000..6dd90441
--- /dev/null
+++ b/rules/java/sdk_transition_test.bzl
@@ -0,0 +1,173 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":sdk_transition.bzl", "sdk_transition")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+
+SdkConfig = provider(
+ "Info about the config settings of the leaf dependency (in a linear dependency chain only)",
+ fields = {
+ "java_version": "the value of the //build/bazel/rules/java:version setting.",
+ "host_java_version": "the value of the //build/bazel/rules/java:host_version setting.",
+ "sdk_kind": "the value of the //build/bazel/rules/java/sdk:kind setting.",
+ "api_level": "the value of the //build/bazel/rules/java/sdk:api_level setting.",
+ },
+)
+
+def _sdk_transition_tester_impl(ctx):
+ if ctx.attr.exports and len(ctx.attr.exports) > 0 and SdkConfig in ctx.attr.exports[0]:
+ return ctx.attr.exports[0][SdkConfig]
+ return SdkConfig(
+ java_version = ctx.attr._java_version_config_setting[BuildSettingInfo].value,
+ host_java_version = ctx.attr._host_java_version_config_setting[BuildSettingInfo].value,
+ sdk_kind = ctx.attr._sdk_kind_config_setting[BuildSettingInfo].value,
+ api_level = ctx.attr._api_level_config_setting[BuildSettingInfo].value,
+ )
+
+sdk_transition_tester = rule(
+ implementation = _sdk_transition_tester_impl,
+ attrs = {
+ "exports": attr.label(
+ cfg = sdk_transition,
+ providers = [SdkConfig],
+ ),
+ "java_version": attr.string(),
+ "sdk_version": attr.string(),
+ "_java_version_config_setting": attr.label(
+ default = "//build/bazel/rules/java:version",
+ ),
+ "_host_java_version_config_setting": attr.label(
+ default = "//build/bazel/rules/java:host_version",
+ ),
+ "_sdk_kind_config_setting": attr.label(
+ default = "//build/bazel/rules/java/sdk:kind",
+ ),
+ "_api_level_config_setting": attr.label(
+ default = "//build/bazel/rules/java/sdk:api_level",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ },
+)
+
+def _sdk_transition_host_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actual_config = analysistest.target_under_test(env)[SdkConfig]
+ asserts.equals(
+ env,
+ ctx.attr.expected_host_java_version,
+ actual_config.host_java_version,
+ "mismatching host_java_version",
+ )
+ return analysistest.end(env)
+
+def _sdk_transition_device_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ actual_config = analysistest.target_under_test(env)[SdkConfig]
+ asserts.equals(
+ env,
+ ctx.attr.expected_java_version,
+ actual_config.java_version,
+ "mismatching java_version",
+ )
+ asserts.equals(
+ env,
+ ctx.attr.expected_sdk_kind,
+ actual_config.sdk_kind,
+ "mismatching sdk_kind",
+ )
+ asserts.equals(
+ env,
+ ctx.attr.expected_api_level,
+ actual_config.api_level,
+ "mismatching api_level",
+ )
+ return analysistest.end(env)
+
+sdk_transition_host_test = analysistest.make(
+ impl = _sdk_transition_host_test_impl,
+ attrs = {
+ "expected_host_java_version": attr.string(),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing_linux_x86_64",
+ "//command_line_option:host_platform": "@//build/bazel/tests/products:aosp_arm64_for_testing_linux_x86_64",
+ },
+)
+
+sdk_transition_device_test = analysistest.make(
+ impl = _sdk_transition_device_test_impl,
+ attrs = {
+ "expected_java_version": attr.string(),
+ "expected_sdk_kind": attr.string(),
+ "expected_api_level": attr.int(),
+ },
+ config_settings = {
+ "//command_line_option:platforms": "@//build/bazel/tests/products:aosp_arm64_for_testing",
+ "//command_line_option:host_platform": "@//build/bazel/tests/products:aosp_arm64_for_testing_linux_x86_64",
+ },
+)
+
+def set_up_targets_under_test(name, java_version, sdk_version):
+ sdk_transition_tester(
+ name = name + "_parent",
+ java_version = java_version,
+ sdk_version = sdk_version,
+ exports = name + "_child",
+ tags = ["manual"],
+ )
+ sdk_transition_tester(
+ name = name + "_child",
+ tags = ["manual"],
+ )
+
+def test_host_sdk_transition(
+ name,
+ java_version,
+ expected_host_java_version):
+ set_up_targets_under_test(name, java_version, sdk_version = None)
+ sdk_transition_host_test(
+ name = name,
+ target_under_test = name + "_parent",
+ expected_host_java_version = expected_host_java_version,
+ )
+ return name
+
+def test_device_sdk_transition(
+ name,
+ java_version,
+ sdk_version,
+ expected_java_version,
+ expected_sdk_kind,
+ expected_api_level):
+ set_up_targets_under_test(name, java_version, sdk_version)
+ sdk_transition_device_test(
+ name = name,
+ target_under_test = name + "_parent",
+ expected_java_version = expected_java_version,
+ expected_sdk_kind = expected_sdk_kind,
+ expected_api_level = expected_api_level,
+ )
+ return name
+
+def sdk_transition_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ test_host_sdk_transition("test_host_sdk_transition", java_version = "8", expected_host_java_version = "8"),
+ test_device_sdk_transition("test_device_sdk_transition", java_version = "9", sdk_version = "32", expected_java_version = "9", expected_sdk_kind = "public", expected_api_level = 32),
+ ],
+ )
diff --git a/rules/java/stub_local_jdk/BUILD.bazel b/rules/java/stub_local_jdk/BUILD.bazel
new file mode 100644
index 00000000..0902d775
--- /dev/null
+++ b/rules/java/stub_local_jdk/BUILD.bazel
@@ -0,0 +1,8 @@
+package(default_visibility = ["//visibility:public"])
+
+# The Kotlin rules directly refer to @local_jdk//jar; this alias is defined to
+# keep this reference valid.
+alias(
+ name = "jar",
+ actual = "@//prebuilts/jdk/jdk17:jar",
+)
diff --git a/rules/java/stub_local_jdk/WORKSPACE b/rules/java/stub_local_jdk/WORKSPACE
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/java/stub_local_jdk/WORKSPACE
diff --git a/rules/java/versions.bzl b/rules/java/versions.bzl
new file mode 100644
index 00000000..acee980e
--- /dev/null
+++ b/rules/java/versions.bzl
@@ -0,0 +1,104 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Constants and utility functions relating to Java versions and how they map to SDK versions.
+"""
+
+load("//build/bazel/rules/common:api.bzl", "api")
+
+# The default java version used absent any java_version or sdk_version specification.
+_DEFAULT_VERSION = 17
+
+# All available java versions
+_ALL_VERSIONS = [
+ 7,
+ 8,
+ 9,
+ 11,
+ 17,
+]
+
+_VERSION_TO_CONFIG_SETTING = {
+ java_version: "config_setting_java_%s" % java_version
+ for java_version in _ALL_VERSIONS
+}
+
+def _compatible_versions_for_api_level(api_level):
+ """Returns all possible java versions that can be used at the given api level."""
+ if api_level in (api.FUTURE_API_LEVEL, api.NONE_API_LEVEL):
+ return _ALL_VERSIONS
+ if api_level <= 23:
+ return [7]
+ if api_level <= 29:
+ return [
+ 7,
+ 8,
+ ]
+ if api_level <= 31:
+ return [
+ 7,
+ 8,
+ 9,
+ ]
+ if api_level <= 33:
+ return [
+ 7,
+ 8,
+ 9,
+ 11,
+ ]
+ return _ALL_VERSIONS
+
+def _supports_pre_java_9(api_level):
+ return any([
+ version < 9
+ for version in _compatible_versions_for_api_level(api_level)
+ ])
+
+def _supports_post_java_9(api_level):
+ return any([
+ version >= 9
+ for version in _compatible_versions_for_api_level(api_level)
+ ])
+
+_NORMALIZED_VERSIONS = {
+ "1.7": 7,
+ "7": 7,
+ "1.8": 8,
+ "8": 8,
+ "1.9": 9,
+ "9": 9,
+ "11": 11,
+ "17": 17,
+}
+
+def _default_version(api_level):
+ """Returns the default java version for the input api level."""
+ return max(_compatible_versions_for_api_level(api_level))
+
+def _get_version(java_version = None, api_level = None):
+ """Returns the java version to use for a given target based on the java_version set by this target and the api_level_string extracted from sdk_version."""
+ if java_version:
+ return _NORMALIZED_VERSIONS[java_version]
+ elif api_level:
+ return _default_version(api_level)
+ return _DEFAULT_VERSION
+
+java_versions = struct(
+ ALL_VERSIONS = _ALL_VERSIONS,
+ VERSION_TO_CONFIG_SETTING = _VERSION_TO_CONFIG_SETTING,
+ compatible_versions_for_api_level = _compatible_versions_for_api_level,
+ get_version = _get_version,
+ supports_pre_java_9 = _supports_pre_java_9,
+ supports_post_java_9 = _supports_post_java_9,
+)
diff --git a/rules/java/versions_test.bzl b/rules/java/versions_test.bzl
new file mode 100644
index 00000000..f06488b1
--- /dev/null
+++ b/rules/java/versions_test.bzl
@@ -0,0 +1,44 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
+load("//build/bazel/rules/java:versions.bzl", "java_versions")
+load("//build/bazel/rules/common:api.bzl", "api")
+
+def _get_java_version_test_impl(ctx):
+ env = unittest.begin(ctx)
+
+ _VERSIONS_UNDER_TEST = {
+ (None, api.FUTURE_API_LEVEL): 17,
+ (None, 23): 7,
+ (None, 33): 11,
+ ("1.7", api.FUTURE_API_LEVEL): 7,
+ ("1.7", 23): 7,
+ ("1.8", 33): 8,
+ (None, None): 17,
+ }
+ for java_sdk_version, expected_java_version in _VERSIONS_UNDER_TEST.items():
+ java_version = java_sdk_version[0]
+ sdk_version = java_sdk_version[1]
+ asserts.equals(env, expected_java_version, java_versions.get_version(java_version, sdk_version), "unexpected java version for java_version %s and sdk_version %s" % (java_version, sdk_version))
+
+ return unittest.end(env)
+
+get_java_version_test = unittest.make(_get_java_version_test_impl)
+
+def versions_test_suite(name):
+ unittest.suite(
+ name,
+ get_java_version_test,
+ )
diff --git a/rules/java/wrapper_test.sh b/rules/java/wrapper_test.sh
new file mode 100644
index 00000000..abab4214
--- /dev/null
+++ b/rules/java/wrapper_test.sh
@@ -0,0 +1,38 @@
+#!/bin/bash -eux
+
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [[ ! -d "build/bazel/ci" ]]; then
+ echo "Please run this script from TOP".
+ exit 1
+fi
+
+source "build/bazel/ci/build_with_bazel.sh"
+source "build/bazel/ci/target_lists.sh"
+
+function test_wrapper_providers() {
+ for target in ${EXAMPLE_WRAPPER_TARGETS[@]}; do
+ private_providers="$(build/bazel/bin/bazel ${STARTUP_FLAGS[@]} \
+ cquery ${FLAGS[@]} --config=android "${target}_private" \
+ --starlark:expr="sorted(providers(target).keys())" --output=starlark|uniq)"
+ wrapper_providers="$(build/bazel/bin/bazel ${STARTUP_FLAGS[@]} \
+ cquery ${FLAGS[@]} --config=android "${target}" \
+ --starlark:expr="sorted(providers(target).keys())" --output=starlark|uniq)"
+ if ! cmp -s <(echo "${private_providers}") <(echo "${wrapper_providers}"); then
+ echo "${target} and ${target}_private should have the same providers. Diff:"
+ diff <(echo "${private_providers}") <(echo "${wrapper_providers}")
+ fi
+ done
+}
diff --git a/rules/kotlin/BUILD b/rules/kotlin/BUILD
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/kotlin/BUILD
diff --git a/rules/kotlin/kotlinc.BUILD b/rules/kotlin/kotlinc.BUILD
new file mode 100644
index 00000000..01ce700b
--- /dev/null
+++ b/rules/kotlin/kotlinc.BUILD
@@ -0,0 +1,96 @@
+# Copyright 2022 Google LLC. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License);
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@soong_injection//java_toolchain:constants.bzl", "constants")
+
+package(default_visibility = ["//visibility:public"])
+
+java_import(
+ name = "annotations",
+ jars = ["lib/annotations-13.0.jar"],
+)
+
+java_import(
+ name = "jvm_abi_gen_plugin",
+ jars = ["lib/jvm-abi-gen.jar"],
+)
+
+java_import(
+ name = "kotlin_annotation_processing",
+ jars = ["lib/kotlin-annotation-processing.jar"],
+)
+
+# sh_binary(
+# name = "kotlin_compiler",
+# srcs = ["bin/kotlinc"],
+# data = glob(["lib/**"]),
+# )
+
+java_binary(
+ name = "kotlin_compiler",
+ jvm_flags = ["-Xmx" + constants.JavacHeapSize],
+ main_class = "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
+ runtime_deps = [
+ "lib/kotlin-compiler.jar",
+ "lib/kotlin-stdlib.jar",
+ "lib/trove4j.jar",
+ ],
+)
+
+# java_binary(
+# name = "kotlin_compiler",
+# main_class = "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
+# runtime_deps = [":kotlin_compiler_lib"],
+# )
+
+# java_import(
+# name = "kotlin_compiler_lib",
+# jars = ["lib/kotlin-compiler.jar"]
+# srcjar = "lib/kotlin-compiler-sources.jar",
+# )
+
+java_import(
+ name = "kotlin_reflect",
+ jars = ["lib/kotlin-reflect.jar"],
+ srcjar = "lib/kotlin-reflect-sources.jar",
+)
+
+java_import(
+ name = "kotlin_stdlib",
+ jars = ["lib/kotlin-stdlib.jar"],
+ srcjar = "lib/kotlin-stdlib-sources.jar",
+)
+
+java_import(
+ name = "kotlin_stdlib_jdk7",
+ jars = ["lib/kotlin-stdlib-jdk7.jar"],
+ srcjar = "lib/kotlin-stdlib-jdk7-sources.jar",
+)
+
+java_import(
+ name = "kotlin_stdlib_jdk8",
+ jars = ["lib/kotlin-stdlib-jdk8.jar"],
+ srcjar = "lib/kotlin-stdlib-jdk8-sources.jar",
+)
+
+java_import(
+ name = "kotlin_test",
+ jars = ["lib/kotlin-test.jar"],
+ srcjar = "lib/kotlin-test-sources.jar",
+)
+
+alias(
+ name = "kotlin_test_not_testonly",
+ actual = ":kotlin_test",
+)
diff --git a/rules/kotlin/kt_jvm_library.bzl b/rules/kotlin/kt_jvm_library.bzl
new file mode 100644
index 00000000..c181fa9d
--- /dev/null
+++ b/rules/kotlin/kt_jvm_library.bzl
@@ -0,0 +1,140 @@
+"""
+Copyright (C) 2023 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load("@rules_kotlin//kotlin:compiler_opt.bzl", "kt_compiler_opt")
+load("@rules_kotlin//kotlin:jvm_library.bzl", _kt_jvm_library = "kt_jvm_library")
+load("//build/bazel/rules/java:rules.bzl", "java_import")
+load("//build/bazel/rules/java:sdk_transition.bzl", "sdk_transition", "sdk_transition_attrs")
+
+def _kotlin_resources_impl(ctx):
+ output_file = ctx.actions.declare_file("kt_resources.jar")
+
+ args = ctx.actions.args()
+ args.add("cvf")
+ args.add(output_file.path)
+ args.add("-C")
+ args.add(ctx.attr.resource_strip_prefix)
+ args.add(".")
+
+ ctx.actions.run(
+ outputs = [output_file],
+ inputs = ctx.files.srcs,
+ executable = ctx.executable._jar,
+ arguments = [args],
+ )
+
+ return [DefaultInfo(files = depset([output_file]))]
+
+kotlin_resources = rule(
+ doc = """
+ Package srcs into a jar, with the option of stripping a path prefix
+ """,
+ implementation = _kotlin_resources_impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = True),
+ "resource_strip_prefix": attr.string(
+ doc = """The path prefix to strip from resources.
+ If specified, this path prefix is stripped from every file
+ in the resources attribute. It is an error for a resource
+ file not to be under this directory. If not specified
+ (the default), the path of each resource file is determined
+ according to the same logic as the Java package of source
+ files. For example, a source file at stuff/java/foo/bar/a.txt
+ will be located at foo/bar/a.txt.""",
+ ),
+ "_jar": attr.label(default = "@bazel_tools//tools/jdk:jar", executable = True, cfg = "exec"),
+ },
+)
+
+# TODO(b/277801336): document these attributes.
+def kt_jvm_library(
+ name,
+ deps = None,
+ resources = None,
+ resource_strip_prefix = None,
+ kotlincflags = None,
+ java_version = None,
+ sdk_version = None,
+ tags = [],
+ target_compatible_with = [],
+ visibility = None,
+ **kwargs):
+ "Bazel macro wrapping for kt_jvm_library"
+ if resource_strip_prefix != None:
+ java_import_name = name + "resources"
+ kt_res_jar_name = name + "resources_jar"
+ java_import(
+ name = java_import_name,
+ jars = [":" + kt_res_jar_name],
+ )
+
+ kotlin_resources(
+ name = kt_res_jar_name,
+ srcs = resources,
+ resource_strip_prefix = resource_strip_prefix,
+ )
+
+ deps = deps + [":" + java_import_name]
+
+ custom_kotlincopts = None
+ if kotlincflags != None:
+ ktcopts_name = name + "_kotlincopts"
+ kt_compiler_opt(
+ name = ktcopts_name,
+ opts = kotlincflags,
+ )
+ custom_kotlincopts = [":" + ktcopts_name]
+
+ lib_name = name + "_private"
+ _kt_jvm_library(
+ name = lib_name,
+ deps = deps,
+ custom_kotlincopts = custom_kotlincopts,
+ tags = tags + ["manual"],
+ target_compatible_with = target_compatible_with,
+ visibility = ["//visibility:private"],
+ **kwargs
+ )
+
+ kt_jvm_library_sdk_transition(
+ name = name,
+ sdk_version = sdk_version,
+ java_version = java_version,
+ exports = lib_name,
+ tags = tags,
+ target_compatible_with = target_compatible_with,
+ visibility = visibility,
+ )
+
+# The list of providers to forward was determined using cquery on one
+# of the example targets listed under EXAMPLE_WRAPPER_TARGETS at
+# //build/bazel/ci/target_lists.sh. It may not be exhaustive. A unit
+# test ensures that the wrapper's providers and the wrapped rule's do
+# match.
+def _kt_jvm_library_sdk_transition_impl(ctx):
+ return [
+ ctx.attr.exports[0][JavaInfo],
+ ctx.attr.exports[0][InstrumentedFilesInfo],
+ ctx.attr.exports[0][ProguardSpecProvider],
+ ctx.attr.exports[0][OutputGroupInfo],
+ ctx.attr.exports[0][DefaultInfo],
+ ]
+
+kt_jvm_library_sdk_transition = rule(
+ implementation = _kt_jvm_library_sdk_transition_impl,
+ attrs = sdk_transition_attrs,
+ provides = [JavaInfo],
+)
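# Editor's sketch (not part of this change): a hypothetical invocation of the
# kt_jvm_library macro defined above, wrapped in a helper so it can live in a
# .bzl file; the target name, source, resource layout, flags and versions are
# placeholders, not values taken from this change.
def _example_kt_targets(name = "hello_kt"):
    kt_jvm_library(
        name = name,
        srcs = ["Hello.kt"],
        deps = [],
        resources = ["res/config.txt"],
        resource_strip_prefix = "res",
        kotlincflags = ["-Werror"],
        java_version = "11",
        sdk_version = "current",
    )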
diff --git a/rules/kotlin/maven_interface/BUILD b/rules/kotlin/maven_interface/BUILD
new file mode 100644
index 00000000..8d58a08f
--- /dev/null
+++ b/rules/kotlin/maven_interface/BUILD
@@ -0,0 +1,29 @@
+java_library(
+ name = "com_google_auto_value_auto_value",
+ visibility = ["//visibility:public"],
+ exports = ["@//external/auto/value:libauto_value_plugin"],
+)
+
+java_library(
+ name = "com_google_auto_service_auto_service_annotations",
+ visibility = ["//visibility:public"],
+ exports = ["@//external/auto/service:auto_service_annotations"],
+)
+
+java_library(
+ name = "com_google_auto_service_auto_service",
+ visibility = ["//visibility:public"],
+ exports = ["@//external/auto/service:libauto_service_plugin"],
+)
+
+java_library(
+ name = "org_jacoco_org_jacoco_agent",
+ visibility = ["//visibility:public"],
+ exports = ["@//external/jacoco:jacocoagent"],
+)
+
+java_library(
+ name = "org_jacoco_org_jacoco_cli",
+ visibility = ["//visibility:public"],
+ exports = ["@//external/jacoco:jacoco-cli"],
+)
diff --git a/rules/kotlin/maven_interface/WORKSPACE b/rules/kotlin/maven_interface/WORKSPACE
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/rules/kotlin/maven_interface/WORKSPACE
diff --git a/rules/kotlin/rules.bzl b/rules/kotlin/rules.bzl
new file mode 100644
index 00000000..2a5c7c98
--- /dev/null
+++ b/rules/kotlin/rules.bzl
@@ -0,0 +1,25 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ "@rules_kotlin//kotlin:compiler_opt.bzl",
+ _kt_compiler_opt = "kt_compiler_opt",
+)
+load(
+ ":kt_jvm_library.bzl",
+ _kt_jvm_library = "kt_jvm_library",
+)
+
+kt_jvm_library = _kt_jvm_library
+kt_compiler_opt = _kt_compiler_opt
diff --git a/rules/license/BUILD b/rules/license/BUILD
new file mode 100644
index 00000000..26c69277
--- /dev/null
+++ b/rules/license/BUILD
@@ -0,0 +1,17 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+package(default_visibility = ["//visibility:public"])
diff --git a/rules/license/license.bzl b/rules/license/license.bzl
new file mode 100644
index 00000000..219931e3
--- /dev/null
+++ b/rules/license/license.bzl
@@ -0,0 +1,79 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Rules for declaring Android licenses used by a package.
+# See: go/license-checking-v2
+
+load("@rules_license//rules:license.bzl", "license")
+
+_special_licenses = {
+ "legacy_by_exception_only": 0,
+ "legacy_not_a_contribution": 0,
+ "legacy_not_allowed": 0,
+ "legacy_notice": 0,
+ "legacy_permissive": 0,
+ "legacy_proprietary": 0,
+ "legacy_reciprocal": 0,
+ "legacy_restricted": 0,
+ "legacy_unencumbered": 0,
+ "legacy_unknown": 0,
+}
+_spdx_license_prefix = "SPDX-license-identifier-"
+_spdx_package = "//build/soong/licenses:"
+
+def _remap_license_kind(license_kind):
+ # In bazel license_kind is a label.
+ # First, map legacy license kinds.
+ if license_kind in _special_licenses:
+ return _spdx_package + license_kind
+
+ # Map SPDX licenses to the ones defined in build/soong/licenses.
+ if license_kind.startswith(_spdx_license_prefix):
+ return _spdx_package + license_kind
+
+ # Last resort.
+ return license_kind
+
+# buildifier: disable=function-docstring-args
+def android_license(
+ name,
+ license_text = "__NO_LICENSE__", # needed as `license` expects it
+ visibility = ["//visibility:public"],
+ license_kinds = [],
+ copyright_notice = None,
+ package_name = None,
+ tags = []):
+ """Wrapper for license rule.
+
+ Args:
+ name: str target name.
+ license_text: str Filename of the license file
+ visibility: list(label) visibility spec
+ license_kinds: list(text) list of license_kind targets.
+ copyright_notice: str Copyright notice associated with this package.
+ package_name: str A human-readable name identifying this package. This
+ may be used to produce an index of OSS packages used by
+ an application.
+ tags: list(str) tags applied to the rule
+ """
+
+ license(
+ name = name,
+ license_kinds = [_remap_license_kind(x) for x in license_kinds],
+ license_text = license_text,
+ copyright_notice = copyright_notice,
+ package_name = package_name,
+ visibility = visibility,
+ tags = tags,
+ )
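# Editor's sketch (not part of this change): a hypothetical invocation of the
# android_license wrapper above, wrapped in a helper so it stays loadable from
# a BUILD file; the package, notice file and copyright line are placeholders.
def _example_license(name = "external_examplelib_license"):
    android_license(
        name = name,
        package_name = "examplelib",
        license_kinds = ["SPDX-license-identifier-Apache-2.0"],
        license_text = "LICENSE",
        copyright_notice = "Copyright (C) 2023 Example Authors",
    )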
diff --git a/rules/license/license_aspect.bzl b/rules/license/license_aspect.bzl
new file mode 100644
index 00000000..00a91f5b
--- /dev/null
+++ b/rules/license/license_aspect.bzl
@@ -0,0 +1,156 @@
+load("@rules_license//rules:providers.bzl", "LicenseInfo")
+load("//build/bazel/rules:metadata.bzl", "MetadataFileInfo")
+
+RuleLicensedDependenciesInfo = provider(
+ doc = """Rule's licensed dependencies.""",
+ fields = dict(
+ license_closure = "depset(license) for the rule and its licensed dependencies",
+ ),
+)
+
+def _maybe_expand(rule, transitive_licenses):
+ if not RuleLicensedDependenciesInfo in rule:
+ return
+ dep_info = rule[RuleLicensedDependenciesInfo]
+ if hasattr(dep_info, "license_closure"):
+ transitive_licenses.append(dep_info.license_closure)
+
+def create_metadata_file_info(ctx):
+ if hasattr(ctx.rule.attr, "applicable_licenses"):
+ for lic in ctx.rule.attr.applicable_licenses:
+ files = lic.files.to_list()
+ if len(files) == 1 and files[0].basename == "METADATA":
+ return MetadataFileInfo(metadata_file = files[0])
+
+ return MetadataFileInfo(metadata_file = None)
+
+def _rule_licenses_aspect_impl(_rule, ctx):
+ if ctx.rule.kind == "_license":
+ return [RuleLicensedDependenciesInfo(), MetadataFileInfo()]
+
+ licenses = []
+ transitive_licenses = []
+ if hasattr(ctx.rule.attr, "applicable_licenses"):
+ licenses.extend(ctx.rule.attr.applicable_licenses)
+
+ for a in dir(ctx.rule.attr):
+ # Ignore private attributes
+ if a.startswith("_"):
+ continue
+ value = getattr(ctx.rule.attr, a)
+ vlist = value if type(value) == type([]) else [value]
+ for item in vlist:
+ if type(item) == "Target" and RuleLicensedDependenciesInfo in item:
+ _maybe_expand(item, transitive_licenses)
+
+ return [
+ RuleLicensedDependenciesInfo(license_closure = depset(licenses, transitive = transitive_licenses)),
+ create_metadata_file_info(ctx),
+ ]
+
+license_aspect = aspect(
+ doc = """Collect transitive license closure.""",
+ implementation = _rule_licenses_aspect_impl,
+ attr_aspects = ["*"],
+ apply_to_generating_rules = True,
+ provides = [RuleLicensedDependenciesInfo, MetadataFileInfo],
+)
+
+_license_kind_template = """
+ {{
+ "target": "{kind_path}",
+ "name": "{kind_name}",
+ "conditions": {kind_conditions}
+ }}"""
+
+def _license_kind_to_json(kind):
+ return _license_kind_template.format(kind_name = kind.name, kind_path = kind.label, kind_conditions = kind.conditions)
+
+def _quotes_or_null(s):
+ if not s:
+ return "null"
+ return s
+
+def _license_file(license_rule):
+ file = license_rule[LicenseInfo].license_text
+ return file if file and file.basename != "__NO_LICENSE__" else struct(path = "")
+
+def _divine_package_name(license):
+ if license.package_name:
+ return license.package_name.removeprefix("external").removesuffix("BUILD.bazel").replace("/", " ").strip()
+ return license.rule.name.removeprefix("external_").removesuffix("_license").replace("_", " ")
+
+def license_map(deps):
+ """Collects license to licensees map for the given set of rule targets.
+
+ TODO(asmundak): at the moment licensees lists are all empty because collecting
+ the licensees turned out to be too slow. Restore this later.
+ Args:
+ deps: list of rule targets
+ Returns:
+ dictionary mapping a license to its licensees
+ """
+ transitive_licenses = []
+ for d in deps:
+ _maybe_expand(d, transitive_licenses)
+
+ # Each rule provides the closure of its licenses, let us build the
+ # reverse map. A minor quirk is that for some reason there may be
+ # multiple license instances with the same label. Use the
+ # intermediary dict to map a rule's label to its first instance.
+ license_by_label = dict()
+ licensees = dict()
+ for lic in depset(transitive = transitive_licenses).to_list():
+ if not LicenseInfo in lic:
+ continue
+ label = lic[LicenseInfo].label.name
+ if not label in license_by_label:
+ license_by_label[label] = lic
+ licensees[lic] = []
+ return licensees
+
+_license_template = """ {{
+ "rule": "{rule}",
+ "license_kinds": [{kinds}
+ ],
+ "copyright_notice": "{copyright_notice}",
+ "package_name": "{package_name}",
+ "package_url": {package_url},
+ "package_version": {package_version},
+ "license_text": "{license_text}",
+ "licensees": [
+ "{licensees}"
+ ]
+ \n }}"""
+
+def _used_license_to_json(license_rule, licensed_rules):
+ license = license_rule[LicenseInfo]
+ return _license_template.format(
+ rule = license.label.name,
+ copyright_notice = license.copyright_notice,
+ package_name = _divine_package_name(license),
+ package_url = _quotes_or_null(license.package_url),
+ package_version = _quotes_or_null(license.package_version),
+ license_text = _license_file(license_rule).path,
+ kinds = ",\n".join([_license_kind_to_json(kind) for kind in license.license_kinds]),
+ licensees = "\",\n \"".join([r for r in licensed_rules]),
+ )
+
+def license_map_to_json(licensees):
+ """Returns an array of JSON representations of a license and its licensees. """
+ return [_used_license_to_json(lic, rules) for lic, rules in licensees.items()]
+
+def license_map_notice_files(licensees):
+ """Returns an array of license text files for the given licensee map.
+
+ Args:
+ licensees: dict returned by license_map() call
+ Returns:
+ the list of notice files this licensees map depends on.
+ """
+ notice_files = []
+ for lic in licensees.keys():
+ file = _license_file(lic)
+ if file.path:
+ notice_files.append(file)
+ return notice_files
diff --git a/rules/linker_config.bzl b/rules/linker_config.bzl
new file mode 100644
index 00000000..d9892a0d
--- /dev/null
+++ b/rules/linker_config.bzl
@@ -0,0 +1,56 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo")
+
+def _linker_config_impl(ctx):
+ output_file = ctx.actions.declare_file(paths.replace_extension(ctx.file.src.basename, ".pb"))
+
+ args = ctx.actions.args()
+ args.add("proto")
+ args.add("-s", ctx.file.src.path)
+ args.add("-o", output_file.path)
+
+ ctx.actions.run(
+ outputs = [output_file],
+ inputs = [ctx.file.src],
+ executable = ctx.executable._conv_linker_config,
+ arguments = [args],
+ )
+
+ return [
+ DefaultInfo(
+ files = depset([output_file]),
+ ),
+ PrebuiltFileInfo(
+ src = output_file,
+ dir = "etc",
+ filename = "linker.config.pb",
+ ),
+ ]
+
+linker_config = rule(
+ doc = """
+ linker_config generates a protobuf file from a JSON file. This protobuf file will
+ be used by linkerconfig while generating ld.config.txt. The format of this file
+ is described at
+ https://android.googlesource.com/platform/system/linkerconfig/+/master/README.md
+ """,
+ implementation = _linker_config_impl,
+ attrs = {
+ "src": attr.label(allow_single_file = [".json"], mandatory = True, doc = "source linker configuration property file"),
+ "_conv_linker_config": attr.label(default = "//build/soong/scripts:conv_linker_config", cfg = "exec", executable = True),
+ },
+)
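# Editor's sketch (not part of this change): an assumed minimal use of the
# linker_config rule above; "linker.config.json" is a placeholder source that
# conv_linker_config would turn into a .pb installed as etc/linker.config.pb.
def _example_linker_config(name = "example_linker_config"):
    linker_config(
        name = name,
        src = "linker.config.json",
    )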
diff --git a/rules/linker_config_test.bzl b/rules/linker_config_test.bzl
new file mode 100644
index 00000000..507344bf
--- /dev/null
+++ b/rules/linker_config_test.bzl
@@ -0,0 +1,126 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:linker_config.bzl", "linker_config")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo")
+
+SRC = "foo.json"
+OUT_EXP = "foo.pb"
+
+def _test_linker_config_actions_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.equals(env, 1, len(actions), "expected 1 action got {}".format(actions))
+
+ in_file = actions[0].inputs.to_list()[0]
+ out_files = actions[0].outputs.to_list()
+ asserts.equals(env, 1, len(out_files), "expected 1 out file got {}".format(out_files))
+
+ asserts.equals(
+ env,
+ SRC,
+ in_file.basename,
+ "expected source file {} got {}".format(SRC, in_file.basename),
+ )
+ asserts.equals(
+ env,
+ OUT_EXP,
+ out_files[0].basename,
+ "expected out file {} got {}".format(OUT_EXP, out_files[0].basename),
+ )
+
+ # gets build target we are testing for
+ target_under_test = analysistest.target_under_test(env)
+ prebuilt_file_info = target_under_test[PrebuiltFileInfo]
+ asserts.equals(
+ env,
+ "linker.config.pb",
+ prebuilt_file_info.filename,
+ "expected PrebuiltFileInfo filename to be {} but got {}".format("linkerconfig.pb", prebuilt_file_info.filename),
+ )
+ asserts.equals(
+ env,
+ "etc",
+ prebuilt_file_info.dir,
+ "expected PrebuiltFileInfo dir to be {} but got {}".format("etc", prebuilt_file_info.dir),
+ )
+ asserts.equals(
+ env,
+ out_files[0],
+ prebuilt_file_info.src,
+ "expected PrebuiltFileInfo src to be {} but got {}".format(out_files[0], prebuilt_file_info.src),
+ )
+
+ return analysistest.end(env)
+
+linker_config_actions_test = analysistest.make(_test_linker_config_actions_impl)
+
+def _test_linker_config_actions():
+ name = "linker_config_actions"
+ test_name = name + "_test"
+
+ linker_config(
+ name = name,
+ src = SRC,
+ tags = ["manual"],
+ )
+
+ linker_config_actions_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def _test_linker_config_commands_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ in_files = actions[0].inputs.to_list()
+ asserts.true(env, len(in_files) > 0, "expected at least 1 input file got {}".format(in_files))
+
+ args = actions[0].argv
+ asserts.equals(env, 6, len(args), "expected 6 args got {}".format(args))
+ asserts.equals(env, "proto", args[1])
+ asserts.equals(env, "-s", args[2])
+ asserts.equals(env, "-o", args[4])
+
+ return analysistest.end(env)
+
+linker_config_commands_test = analysistest.make(_test_linker_config_commands_impl)
+
+def _test_linker_config_commands():
+ name = "linker_config_commands"
+ test_name = name + "_test"
+ linker_config(
+ name = name,
+ src = SRC,
+ tags = ["manual"],
+ )
+
+ linker_config_commands_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def linker_config_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_linker_config_actions(),
+ _test_linker_config_commands(),
+ ],
+ )
diff --git a/rules/make_injection.bzl b/rules/make_injection.bzl
index 38bb9986..0571311c 100644
--- a/rules/make_injection.bzl
+++ b/rules/make_injection.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
# A repository rule to run soong_ui --make-mode to provide the Bazel standalone
# build with prebuilts from Make/Soong that Bazel can't build yet.
@@ -44,6 +42,7 @@ def _impl(rctx):
"TOP": str(build_dir.dirname.dirname.dirname),
},
quiet = False, # stream stdout so it shows progress
+ timeout = 3600, # default of 600 seconds is not sufficient
)
if exec_result.return_code != 0:
fail(exec_result.stderr)
diff --git a/rules/metadata.bzl b/rules/metadata.bzl
new file mode 100644
index 00000000..73f6b5b4
--- /dev/null
+++ b/rules/metadata.bzl
@@ -0,0 +1,29 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+MetadataFileInfo = provider(
+ fields = {
+ "metadata_file": "METADATA file of a module",
+ },
+)
+
+# Defines the metadata file of a package; usually the file is METADATA in the root directory of the package.
+# The applicable_licenses attribute is needed on the filegroup so that, when the filegroup is used in
+# package(default_package_metadata=), Bazel will not regard it as a cyclic reference.
+def metadata(name, metadata = "METADATA"):
+ native.filegroup(
+ name = name,
+ srcs = [metadata],
+ applicable_licenses = [],
+ )
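# Editor's sketch (not part of this change): how a package's BUILD file might
# combine the metadata macro with package(default_package_metadata=) as the
# comment above describes; the target name is a placeholder.
#
#   load("//build/bazel/rules:metadata.bzl", "metadata")
#
#   package(default_package_metadata = [":default_metadata_file"])
#
#   metadata(name = "default_metadata_file")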
diff --git a/rules/partitions/BUILD b/rules/partitions/BUILD
new file mode 100644
index 00000000..ecd0fd0f
--- /dev/null
+++ b/rules/partitions/BUILD
@@ -0,0 +1,24 @@
+load(":toolchain.bzl", "partition_toolchain")
+
+toolchain_type(name = "partition_toolchain_type")
+
+partition_toolchain(
+ name = "partition_toolchain",
+ build_image = "@make_injection//:host/linux-x86/bin/build_image",
+ mkuserimg_mke2fs = "@make_injection//:host/linux-x86/bin/mkuserimg_mke2fs",
+ tags = ["manual"],
+)
+
+toolchain(
+ name = "partition_toolchain_def",
+ exec_compatible_with = [
+ "//build/bazel/platforms/arch:x86_64",
+ "//build/bazel/platforms/os:linux",
+ ],
+ tags = ["manual"],
+ target_compatible_with = [
+ "//build/bazel/platforms/os:android",
+ ],
+ toolchain = ":partition_toolchain",
+ toolchain_type = ":partition_toolchain_type",
+)
diff --git a/rules/partitions/installable_info.bzl b/rules/partitions/installable_info.bzl
new file mode 100644
index 00000000..3e88e351
--- /dev/null
+++ b/rules/partitions/installable_info.bzl
@@ -0,0 +1,43 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/apex:apex_info.bzl", "ApexInfo")
+
+InstallableInfo = provider(
+ "If a target provides InstallableInfo, it means it can be installed on a partition image.",
+ fields = {
+ "files": "A dictionary mapping from a path in the partition to the path to the file to install there.",
+ },
+)
+
+def _installable_aspect_impl(target, _ctx):
+ installed_files = {}
+ if ApexInfo in target:
+ apex = target[ApexInfo].signed_output
+ installed_files["/system/apex/" + apex.basename] = apex
+
+ if not installed_files:
+ return []
+
+ return [
+ InstallableInfo(
+ files = installed_files,
+ ),
+ ]
+
+# This aspect is intended to be applied on an apex.native_shared_libs attribute.
+installable_aspect = aspect(
+ implementation = _installable_aspect_impl,
+ attrs = {},
+)
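# Editor's sketch (not part of this change): a consuming rule would attach the
# aspect to the attribute whose dependencies should be installed; the "deps"
# attribute name here is a hypothetical example.
def _example_attrs():
    return {
        "deps": attr.label_list(aspects = [installable_aspect]),
    }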
diff --git a/rules/partitions/partition.bzl b/rules/partitions/partition.bzl
new file mode 100644
index 00000000..9d1e31c7
--- /dev/null
+++ b/rules/partitions/partition.bzl
@@ -0,0 +1,377 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load(":installable_info.bzl", "InstallableInfo", "installable_aspect")
+
+# TODO(b/249685973): Reenable the partition rule
+product_config = {}
+
+_IMAGE_TYPES = [
+ "system",
+ "system_other",
+ "userdata",
+ "cache",
+ "vendor",
+ "product",
+ "system_ext",
+ "odm",
+ "vendor_dlkm",
+ "system_dlkm",
+ "oem",
+]
+
+def _p(varname, default = ""):
+ return product_config.get(varname, default)
+
+def _add_common_flags_to_image_props(image_props, image_type):
+ image_props[image_type + "_selinux_fc"] = _p("SELINUX_FC", "")
+ image_props["building_" + image_type + "_image"] = _p("BUILDING_" + image_type.upper() + "_IMAGE", "")
+
+def _add_common_ro_flags_to_image_props(image_props, image_type):
+ image_type = image_type.lower()
+ IMAGE_TYPE = image_type.upper()
+
+ def add_board_var(varname, finalname = None):
+ if not finalname:
+ finalname = varname
+ if _p("BOARD_" + IMAGE_TYPE + "IMAGE_" + varname.upper()):
+ image_props[image_type + "_" + finalname.lower()] = _p("BOARD_" + IMAGE_TYPE + "IMAGE_" + varname.upper())
+
+ add_board_var("EROFS_COMPRESSOR")
+ add_board_var("EROFS_COMPRESS_HINTS")
+ add_board_var("EROFS_PCLUSTER_SIZE")
+ add_board_var("EXTFS_INODE_COUNT")
+ add_board_var("EXTFS_RSV_PCT")
+ add_board_var("F2FS_SLOAD_COMPRESS_FLAGS", "f2fs_sldc_flags")
+ add_board_var("FILE_SYSTEM_COMPRESS", "f2fs_compress")
+ add_board_var("FILE_SYSTEM_TYPE", "fs_type")
+ add_board_var("JOURNAL_SIZE", "journal_size")
+ add_board_var("PARTITION_RESERVED_SIZE", "reserved_size")
+ add_board_var("PARTITION_SIZE", "size")
+ add_board_var("SQUASHFS_BLOCK_SIZE")
+ add_board_var("SQUASHFS_COMPRESSOR")
+ add_board_var("SQUASHFS_COMPRESSOR_OPT")
+ add_board_var("SQUASHFS_DISABLE_4K_ALIGN")
+ if _p("PRODUCT_" + IMAGE_TYPE + "_BASE_FS_PATH"):
+ image_props[image_type + "_base_fs_file"] = _p("PRODUCT_" + IMAGE_TYPE + "_BASE_FS_PATH")
+
+ if not (_p("BOARD_" + IMAGE_TYPE + "IMAGE_PARTITION_SIZE") or
+ _p("BOARD_" + IMAGE_TYPE + "IMAGE_PARTITION_RESERVED_SIZE") or
+ _p("PRODUCT_" + IMAGE_TYPE + "_HEADROOM")):
+ image_props[image_type + "_disable_sparse"] = "true"
+
+ _add_common_flags_to_image_props(image_props, image_type)
+
+def _generate_image_prop_dictionary(ctx, image_types, extra_props = {}):
+ """Generates the image properties file.
+
+ Args:
+ file: The file that will be written to
+ types: A list of one or more of "system", "system_other",
+ "userdata", "cache", "vendor", "product", "system_ext",
+ "odm", "vendor_dlkm", "system_dlkm", or "oem"
+ extra_props: A dictionary of props to append at the end of the file.
+ """
+ # TODO(b/237106430): This should probably be mostly replaced with attributes on the system_image rule,
+ # and then there can be a separate macro to adapt product config variables to a
+ # correctly-spec'd system_image rule.
+
+ toolchain = ctx.toolchains[":partition_toolchain_type"].toolchain_info
+
+ for image_type in image_types:
+ if image_type not in _IMAGE_TYPES:
+ fail("Image type %s unknown. Valid types are %s", image_type, _IMAGE_TYPES)
+ image_props = {}
+
+ if "system" in image_types:
+ if _p("INTERNAL_SYSTEM_OTHER_PARTITION_SIZE"):
+ image_props["system_other_size"] = _p("INTERNAL_SYSTEM_OTHER_PARTITION_SIZE")
+ if _p("PRODUCT_SYSTEM_HEADROOM"):
+ image_props["system_headroom"] = _p("PRODUCT_SYSTEM_HEADROOM")
+ _add_common_ro_flags_to_image_props(image_props, "system")
+ if "system_other" in image_types:
+ image_props["building_system_other_image"] = _p("BUILDING_SYSTEM_OTHER_IMAGE", "")
+ if _p("INTERNAL_SYSTEM_OTHER_PARTITION_SIZE"):
+ image_props["system_other_disable_sparse"] = "true"
+ if "userdata" in image_types:
+ if _p("PRODUCT_FS_CASEFOLD"):
+ image_props["needs_casefold"] = _p("PRODUCT_FS_CASEFOLD")
+ if _p("PRODUCT_QUOTA_PROJID"):
+ image_props["needs_projid"] = _p("PRODUCT_QUOTA_PROJID")
+ if _p("PRODUCT_FS_COMPRESSION"):
+ image_props["needs_compress"] = _p("PRODUCT_FS_COMPRESSION")
+ _add_common_ro_flags_to_image_props(image_props, "userdata")
+ if "cache" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "cache")
+ if "vendor" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "vendor")
+ if "product" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "product")
+ if "system_ext" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "system_ext")
+ if "odm" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "odm")
+ if "vendor_dlkm" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "vendor_dlkm")
+ if "odm_dlkm" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "odm_dlkm")
+ if "system_dlkm" in image_types:
+ _add_common_ro_flags_to_image_props(image_props, "system_dlkm")
+ if "oem" in image_types:
+ if _p("BOARD_OEMIMAGE_EXTFS_INODE_COUNT"):
+ image_props["oem_extfs_inode_count"] = _p("BOARD_OEMIMAGE_EXTFS_INODE_COUNT")
+ if _p("BOARD_OEMIMAGE_EXTFS_RSV_PCT"):
+ image_props["oem_extfs_rsv_pct"] = _p("BOARD_OEMIMAGE_EXTFS_RSV_PCT")
+ _add_common_ro_flags_to_image_props(image_props, "oem")
+ image_props["ext_mkuserimg"] = toolchain.mkuserimg_mke2fs.path #_p("MKEXTUSRIMG")
+
+ if _p("TARGET_USERIMAGES_USE_EXT2") == "true":
+ image_props["fs_type"] = "ext2"
+ elif _p("TARGET_USERIMAGES_USE_EXT3") == "true":
+ image_props["fs_type"] = "ext3"
+ elif _p("TARGET_USERIMAGES_USE_EXT4") == "true":
+ image_props["fs_type"] = "ext4"
+
+ if _p("TARGET_USERIMAGES_SPARSE_EXT_DISABLED") != "true":
+ image_props["extfs_sparse_flag"] = "-s"
+ if _p("TARGET_USERIMAGES_SPARSE_EROFS_DISABLED") != "true":
+ image_props["erofs_sparse_flag"] = "-s"
+ if _p("TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED") != "true":
+ image_props["squashfs_sparse_flag"] = "-s"
+ if _p("TARGET_USERIMAGES_SPARSE_F2FS_DISABLED") != "true":
+ image_props["f2fs_sparse_flag"] = "-S"
+ if _p("BOARD_EROFS_COMPRESSOR"):
+ image_props["erofs_default_compressor"] = _p("BOARD_EROFS_COMPRESSOR")
+ if _p("BOARD_EROFS_COMPRESS_HINTS"):
+ image_props["erofs_default_compress_hints"] = _p("BOARD_EROFS_COMPRESS_HINTS")
+ if _p("BOARD_EROFS_PCLUSTER_SIZE"):
+ image_props["erofs_pcluster_size"] = _p("BOARD_EROFS_PCLUSTER_SIZE")
+ if _p("BOARD_EROFS_SHARE_DUP_BLOCKS"):
+ image_props["erofs_share_dup_blocks"] = _p("BOARD_EROFS_SHARE_DUP_BLOCKS")
+ if _p("BOARD_EROFS_USE_LEGACY_COMPRESSION"):
+ image_props["erofs_use_legacy_compression"] = _p("BOARD_EROFS_USE_LEGACY_COMPRESSION")
+ if _p("BOARD_EXT4_SHARE_DUP_BLOCKS"):
+ image_props["ext4_share_dup_blocks"] = _p("BOARD_EXT4_SHARE_DUP_BLOCKS")
+ if _p("BOARD_FLASH_LOGICAL_BLOCK_SIZE"):
+ image_props["flash_logical_block_size"] = _p("BOARD_FLASH_LOGICAL_BLOCK_SIZE")
+ if _p("BOARD_FLASH_ERASE_BLOCK_SIZE"):
+ image_props["flash_erase_block_size"] = _p("BOARD_FLASH_ERASE_BLOCK_SIZE")
+ if _p("PRODUCT_SUPPORTS_BOOT_SIGNER"):
+ image_props["boot_signer"] = _p("PRODUCT_SUPPORTS_BOOT_SIGNER")
+ if _p("PRODUCT_SUPPORTS_VERITY"):
+ image_props["verity"] = _p("PRODUCT_SUPPORTS_VERITY")
+ image_props["verity_key"] = _p("PRODUCT_VERITY_SIGNING_KEY")
+ image_props["verity_signer_cmd"] = paths.basename(_p("VERITY_SIGNER"))
+ if _p("PRODUCT_SUPPORTS_VERITY_FEC"):
+ image_props["verity_fec"] = _p("PRODUCT_SUPPORTS_VERITY_FEC")
+ if _p("TARGET_BUILD_VARIANT") == "eng":
+ image_props["verity_disable"] = "true"
+ if _p("PRODUCT_SYSTEM_VERITY_PARTITION"):
+ image_props["system_verity_block_device"] = _p("PRODUCT_SYSTEM_VERITY_PARTITION")
+ if _p("PRODUCT_VENDOR_VERITY_PARTITION"):
+ image_props["vendor_verity_block_device"] = _p("PRODUCT_VENDOR_VERITY_PARTITION")
+ if _p("PRODUCT_PRODUCT_VERITY_PARTITION"):
+ image_props["product_verity_block_device"] = _p("PRODUCT_PRODUCT_VERITY_PARTITION")
+ if _p("PRODUCT_SYSTEM_EXT_VERITY_PARTITION"):
+ image_props["system_ext_verity_block_device"] = _p("PRODUCT_SYSTEM_EXT_VERITY_PARTITION")
+ if _p("PRODUCT_VENDOR_DLKM_VERITY_PARTITION"):
+ image_props["vendor_dlkm_verity_block_device"] = _p("PRODUCT_VENDOR_DLKM_VERITY_PARTITION")
+ if _p("PRODUCT_ODM_DLKM_VERITY_PARTITION"):
+ image_props["odm_dlkm_verity_block_device"] = _p("PRODUCT_ODM_DLKM_VERITY_PARTITION")
+ if _p("PRODUCT_SYSTEM_DLKM_VERITY_PARTITION"):
+ image_props["system_dlkm_verity_block_device"] = _p("PRODUCT_SYSTEM_DLKM_VERITY_PARTITION")
+ if _p("PRODUCT_SUPPORTS_VBOOT"):
+ image_props["vboot"] = _p("PRODUCT_SUPPORTS_VBOOT")
+ image_props["vboot_key"] = _p("PRODUCT_VBOOT_SIGNING_KEY")
+ image_props["vboot_subkey"] = _p("PRODUCT_VBOOT_SIGNING_SUBKEY")
+ image_props["futility"] = paths.basename(_p("FUTILITY"))
+ image_props["vboot_signer_cmd"] = _p("VBOOT_SIGNER")
+
+ # TODO(b/237106430): Avb code is commented out because it's not yet functional
+ # if _p("BOARD_AVB_ENABLE"):
+ # image_props["avb_avbtool"] = paths.basename(_p("AVBTOOL"))
+ # image_props["avb_system_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_system_add_hashtree_footer_args"] = _p("BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_SYSTEM_KEY_PATH"):
+ # image_props["avb_system_key_path"] = _p("BOARD_AVB_SYSTEM_KEY_PATH")
+ # image_props["avb_system_algorithm"] = _p("BOARD_AVB_SYSTEM_ALGORITHM")
+ # image_props["avb_system_rollback_index_location"] = _p("BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_system_other_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_system_other_add_hashtree_footer_args"] = _p("BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_SYSTEM_OTHER_KEY_PATH"):
+ # image_props["avb_system_other_key_path"] = _p("BOARD_AVB_SYSTEM_OTHER_KEY_PATH")
+ # image_props["avb_system_other_algorithm"] = _p("BOARD_AVB_SYSTEM_OTHER_ALGORITHM")
+ # image_props["avb_vendor_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_vendor_add_hashtree_footer_args"] = _p("BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_VENDOR_KEY_PATH"):
+ # image_props["avb_vendor_key_path"] = _p("BOARD_AVB_VENDOR_KEY_PATH")
+ # image_props["avb_vendor_algorithm"] = _p("BOARD_AVB_VENDOR_ALGORITHM")
+ # image_props["avb_vendor_rollback_index_location"] = _p("BOARD_AVB_VENDOR_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_product_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_product_add_hashtree_footer_args"] = _p("BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_PRODUCT_KEY_PATH"):
+ # image_props["avb_product_key_path"] = _p("BOARD_AVB_PRODUCT_KEY_PATH")
+ # image_props["avb_product_algorithm"] = _p("BOARD_AVB_PRODUCT_ALGORITHM")
+ # image_props["avb_product_rollback_index_location"] = _p("BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_system_ext_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_system_ext_add_hashtree_footer_args"] = _p("BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_SYSTEM_EXT_KEY_PATH"):
+ # image_props["avb_system_ext_key_path"] = _p("BOARD_AVB_SYSTEM_EXT_KEY_PATH")
+ # image_props["avb_system_ext_algorithm"] = _p("BOARD_AVB_SYSTEM_EXT_ALGORITHM")
+ # image_props["avb_system_ext_rollback_index_location"] = _p("BOARD_AVB_SYSTEM_EXT_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_odm_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_odm_add_hashtree_footer_args"] = _p("BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_ODM_KEY_PATH"):
+ # image_props["avb_odm_key_path"] = _p("BOARD_AVB_ODM_KEY_PATH")
+ # image_props["avb_odm_algorithm"] = _p("BOARD_AVB_ODM_ALGORITHM")
+ # image_props["avb_odm_rollback_index_location"] = _p("BOARD_AVB_ODM_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_vendor_dlkm_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_vendor_dlkm_add_hashtree_footer_args"] = _p("BOARD_AVB_VENDOR_DLKM_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_VENDOR_DLKM_KEY_PATH"):
+ # image_props["avb_vendor_dlkm_key_path"] = _p("BOARD_AVB_VENDOR_DLKM_KEY_PATH")
+ # image_props["avb_vendor_dlkm_algorithm"] = _p("BOARD_AVB_VENDOR_DLKM_ALGORITHM")
+ # image_props["avb_vendor_dlkm_rollback_index_location"] = _p("BOARD_AVB_VENDOR_DLKM_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_odm_dlkm_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_odm_dlkm_add_hashtree_footer_args"] = _p("BOARD_AVB_ODM_DLKM_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_ODM_DLKM_KEY_PATH"):
+ # image_props["avb_odm_dlkm_key_path"] = _p("BOARD_AVB_ODM_DLKM_KEY_PATH")
+ # image_props["avb_odm_dlkm_algorithm"] = _p("BOARD_AVB_ODM_DLKM_ALGORITHM")
+ # image_props["avb_odm_dlkm_rollback_index_location"] = _p("BOARD_AVB_ODM_DLKM_ROLLBACK_INDEX_LOCATION")
+ # image_props["avb_system_dlkm_hashtree_enable"] = _p("BOARD_AVB_ENABLE")
+ # image_props["avb_system_dlkm_add_hashtree_footer_args"] = _p("BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS")
+ # if _p("BOARD_AVB_SYSTEM_DLKM_KEY_PATH"):
+ # image_props["avb_system_dlkm_key_path"] = _p("BOARD_AVB_SYSTEM_DLKM_KEY_PATH")
+ # image_props["avb_system_dlkm_algorithm"] = _p("BOARD_AVB_SYSTEM_DLKM_ALGORITHM")
+ # image_props["avb_system_dlkm_rollback_index_location"] = _p("BOARD_SYSTEM_SYSTEM_DLKM_ROLLBACK_INDEX_LOCATION")
+ if _p("BOARD_USES_RECOVERY_AS_BOOT") == "true":
+ image_props["recovery_as_boot"] = "true"
+ if _p("BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK") == "true":
+ image_props["gki_boot_image_without_ramdisk"] = "true"
+
+ #image_props["root_dir"] = _p("TARGET_ROOT_OUT") # TODO: replace with actual path
+ if _p("PRODUCT_USE_DYNAMIC_PARTITION_SIZE") == "true":
+ image_props["use_dynamic_partition_size"] = "true"
+ for k, v in extra_props.items():
+ image_props[k] = v
+
+ result = "\n".join([k + "=" + v for k, v in image_props.items()])
+ if result:
+ result += "\n"
+ return result
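+
+# Illustrative sketch only (values are hypothetical and not taken from any real
+# product config): for a "system" image with ext4 userimages enabled, the
+# returned string could look roughly like:
+#
+#     system_selinux_fc=<path to file_contexts.bin>
+#     building_system_image=true
+#     system_fs_type=ext4
+#     system_size=2147483648
+#     ext_mkuserimg=<path to mkuserimg_mke2fs>
+#     fs_type=ext4
+#     extfs_sparse_flag=-s
+#     skip_fsck=true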
+
+def get_python3(ctx):
+ python_interpreter = ctx.toolchains["@bazel_tools//tools/python:toolchain_type"].py3_runtime.interpreter
+ if python_interpreter.basename == "python3":
+ return python_interpreter
+
+ renamed = ctx.actions.declare_file("python3")
+ ctx.actions.symlink(
+ output = renamed,
+ target_file = python_interpreter,
+ is_executable = True,
+ )
+ return renamed
+
+def _partition_impl(ctx):
+ if ctx.attr.type != "system":
+ fail("currently only system images are supported")
+
+ toolchain = ctx.toolchains[":partition_toolchain_type"].toolchain_info
+ python_interpreter = get_python3(ctx)
+
+ # build_image requires that the output file be named specifically <type>.img, so
+ # put all the outputs under a name-qualified folder.
+ image_info = ctx.actions.declare_file(ctx.attr.name + "/image_info.txt")
+ output_image = ctx.actions.declare_file(ctx.attr.name + "/" + ctx.attr.type + ".img")
+ ctx.actions.write(image_info, _generate_image_prop_dictionary(ctx, [ctx.attr.type], {"skip_fsck": "true"}))
+
+ files = {}
+ for dep in ctx.attr.deps:
+ files.update(dep[InstallableInfo].files)
+
+ for v in files.keys():
+ if not v.startswith("/system"):
+ fail("Files outside of /system are not currently supported: %s", v)
+
+ file_mapping_file = ctx.actions.declare_file(ctx.attr.name + "/partition_file_mapping.json")
+
+ # It seems build_image will prepend /system to the paths when building_system_image=true
+ ctx.actions.write(file_mapping_file, json.encode({k.removeprefix("/system"): v.path for k, v in files.items()}))
+
+ staging_dir = ctx.actions.declare_directory(ctx.attr.name + "_staging_dir")
+
+ ctx.actions.run(
+ inputs = [
+ image_info,
+ file_mapping_file,
+ ] + files.keys(),
+ tools = [
+ toolchain.build_image,
+ toolchain.mkuserimg_mke2fs,
+ python_interpreter,
+ ],
+ outputs = [output_image],
+ executable = ctx.executable._staging_dir_builder,
+ arguments = [
+ file_mapping_file.path,
+ staging_dir.path,
+ toolchain.build_image.path,
+ staging_dir.path,
+ image_info.path,
+ output_image.path,
+ staging_dir.path,
+ ],
+ mnemonic = "BuildPartition",
+ # TODO: the /usr/bin addition is because build_image uses the du command
+ # in GetDiskUsage(). This can probably be rewritten to just use python code
+ # instead.
+ env = {"PATH": python_interpreter.dirname + ":/usr/bin"},
+ )
+
+ return DefaultInfo(files = depset([output_image]))
+
+_partition = rule(
+ implementation = _partition_impl,
+ attrs = {
+ "type": attr.string(
+ mandatory = True,
+ values = _IMAGE_TYPES,
+ ),
+ "deps": attr.label_list(
+ providers = [[InstallableInfo]],
+ aspects = [installable_aspect],
+ ),
+ "_staging_dir_builder": attr.label(
+ cfg = "exec",
+ doc = "The tool used to build a staging directory, because if bazel were to build it it would be entirely symlinks.",
+ executable = True,
+ default = "//build/bazel/rules:staging_dir_builder",
+ ),
+ },
+ toolchains = [
+ ":partition_toolchain_type",
+ "@bazel_tools//tools/python:toolchain_type",
+ ],
+)
+
+def partition(target_compatible_with = [], **kwargs):
+ target_compatible_with = select({
+ "//build/bazel/platforms/os:android": [],
+ "//conditions:default": ["@platforms//:incompatible"],
+ }) + target_compatible_with
+ _partition(
+ target_compatible_with = target_compatible_with,
+ **kwargs
+ )
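+
+# Hypothetical usage sketch (target names and the apex dep label are invented
+# for illustration; only type = "system" is currently supported):
+#
+#     load("//build/bazel/rules/partitions:partition.bzl", "partition")
+#
+#     partition(
+#         name = "system_image",
+#         type = "system",
+#         deps = [":com.android.example.apex"],
+#     )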
diff --git a/rules/partitions/toolchain.bzl b/rules/partitions/toolchain.bzl
new file mode 100644
index 00000000..84f26e77
--- /dev/null
+++ b/rules/partitions/toolchain.bzl
@@ -0,0 +1,38 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+PartitionToolchainInfo = provider(
+ doc = "Partitions toolchain",
+ fields = [
+ "build_image",
+ "mkuserimg_mke2fs",
+ ],
+)
+
+def _partition_toolchain_impl(ctx):
+ toolchain_info = platform_common.ToolchainInfo(
+ toolchain_info = PartitionToolchainInfo(
+ build_image = ctx.file.build_image,
+ mkuserimg_mke2fs = ctx.file.mkuserimg_mke2fs,
+ ),
+ )
+ return [toolchain_info]
+
+partition_toolchain = rule(
+ implementation = _partition_toolchain_impl,
+ attrs = {
+ "build_image": attr.label(allow_single_file = True, cfg = "exec", executable = True, mandatory = True),
+ "mkuserimg_mke2fs": attr.label(allow_single_file = True, cfg = "exec", executable = True, mandatory = True),
+ },
+)
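+
+# Hypothetical wiring sketch for a BUILD file (the tool labels below are
+# assumptions for illustration only, not the actual targets):
+#
+#     partition_toolchain(
+#         name = "partition_toolchain_impl",
+#         build_image = "//build/make/tools/releasetools:build_image",
+#         mkuserimg_mke2fs = "//external/e2fsprogs/contrib/android:mkuserimg_mke2fs",
+#     )
+#
+#     toolchain(
+#         name = "partition_toolchain",
+#         toolchain = ":partition_toolchain_impl",
+#         toolchain_type = ":partition_toolchain_type",
+#     )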
diff --git a/rules/prebuilt_file.bzl b/rules/prebuilt_file.bzl
index 12e69824..1e375a5c 100644
--- a/rules/prebuilt_file.bzl
+++ b/rules/prebuilt_file.bzl
@@ -1,18 +1,16 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
PrebuiltFileInfo = provider(
"Info needed for prebuilt_file modules",
@@ -40,11 +38,20 @@ def _prebuilt_file_rule_impl(ctx):
if not acceptable:
fail("dir for", ctx.label.name, "is `", dir, "`, but we only handle these:\n", _handled_dirs)
+ if ctx.attr.filename_from_src and ctx.attr.filename != "":
+ fail("filename is set. filename_from_src cannot be true")
+ elif ctx.attr.filename != "":
+ filename = ctx.attr.filename
+ elif ctx.attr.filename_from_src:
+ filename = srcs[0].basename
+ else:
+ filename = ctx.attr.name
+
return [
PrebuiltFileInfo(
src = srcs[0],
dir = dir,
- filename = ctx.attr.filename,
+ filename = filename,
installable = ctx.attr.installable,
),
DefaultInfo(
@@ -63,6 +70,7 @@ _prebuilt_file = rule(
),
"dir": attr.string(mandatory = True),
"filename": attr.string(),
+ "filename_from_src": attr.bool(default = True),
"installable": attr.bool(default = True),
},
)
@@ -75,7 +83,8 @@ def prebuilt_file(
installable = True,
# TODO(b/207489266): Fully support;
# data is currently dropped to prevent breakages from e.g. prebuilt_etc
- data = [],
+ filename_from_src = False,
+ data = [], # @unused
**kwargs):
"Bazel macro to correspond with the e.g. prebuilt_etc and prebuilt_usr_share Soong modules."
@@ -85,5 +94,6 @@ def prebuilt_file(
dir = dir,
filename = filename,
installable = installable,
+ filename_from_src = filename_from_src,
**kwargs
)
diff --git a/rules/prebuilt_file_test.bzl b/rules/prebuilt_file_test.bzl
new file mode 100644
index 00000000..f7e6e55d
--- /dev/null
+++ b/rules/prebuilt_file_test.bzl
@@ -0,0 +1,80 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo", "prebuilt_file")
+
+def _prebuilt_file_with_filename_from_src_test_impl(ctx):
+ env = analysistest.begin(ctx)
+ target = analysistest.target_under_test(env)
+
+ actual_prebuilt_file_info = target[PrebuiltFileInfo]
+
+ # We can't stub a source file object for testing so we scope it out
+ actual_prebuilt_file_info_without_src = PrebuiltFileInfo(
+ dir = actual_prebuilt_file_info.dir,
+ filename = actual_prebuilt_file_info.filename,
+ installable = actual_prebuilt_file_info.installable,
+ )
+ expected_prebuilt_file_info_without_src = PrebuiltFileInfo(
+ dir = "etc/policy",
+ filename = "file.policy",
+ installable = True,
+ )
+
+ # Check PrebuiltFileInfo provider, excluding src
+ asserts.equals(
+ env,
+ actual_prebuilt_file_info_without_src,
+ expected_prebuilt_file_info_without_src,
+ "PrebuiltFileInfo needs to match with expected result",
+ )
+
+ # Check PrebuiltFileInfo src separately
+ asserts.equals(
+ env,
+ actual_prebuilt_file_info.src.path,
+ target.label.package + "/dir/file.policy",
+ "PrebuiltFileInfo src needs to match with what is given to prebuilt_file rule",
+ )
+
+ return analysistest.end(env)
+
+prebuilt_file_with_filename_from_src_test = analysistest.make(
+ _prebuilt_file_with_filename_from_src_test_impl,
+)
+
+def _prebuilt_file_with_filename_from_src_test():
+ name = "prebuilt_file_with_filename_from_src"
+ test_name = name + "_test"
+ prebuilt_file(
+ name = name,
+ dir = "etc/policy",
+ filename_from_src = True,
+ src = "dir/file.policy",
+ tags = ["manual"],
+ )
+ prebuilt_file_with_filename_from_src_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def prebuilt_file_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _prebuilt_file_with_filename_from_src_test(),
+ ],
+ )
diff --git a/rules/prebuilt_xml.bzl b/rules/prebuilt_xml.bzl
new file mode 100644
index 00000000..bb627f17
--- /dev/null
+++ b/rules/prebuilt_xml.bzl
@@ -0,0 +1,103 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":prebuilt_file.bzl", "PrebuiltFileInfo")
+
+def _prebuilt_xml_impl(ctx):
+ schema = ctx.file.schema
+
+ if len(ctx.files.src) != 1:
+ fail("src for %s is expected to be singular, but is of len %s\n", ctx.label.name, len(ctx.files.src))
+
+ src = ctx.files.src[0]
+
+ args = ctx.actions.args()
+ inputs = [src]
+
+ if schema != None:
+ if schema.extension == "dtd":
+ args.add("--dtdvalid", schema.path)
+ elif schema.extension == "xsd":
+ args.add("--schema", schema.path)
+ inputs.append(schema)
+
+ args.add(src)
+ args.add(">")
+ args.add("/dev/null")
+ args.add("&&")
+ args.add("touch")
+ args.add("-a")
+
+ validation_output = ctx.actions.declare_file(ctx.attr.name + ".validation")
+ args.add(validation_output.path)
+
+ ctx.actions.run(
+ outputs = [validation_output],
+ inputs = inputs,
+ executable = ctx.executable._xml_validation_tool,
+ arguments = [args],
+ mnemonic = "XMLValidation",
+ )
+
+ filename = ""
+
+ if ctx.attr.filename_from_src and ctx.attr.filename != "":
+ fail("filename is set. filename_from_src cannot be true")
+ elif ctx.attr.filename != "":
+ filename = ctx.attr.filename
+ elif ctx.attr.filename_from_src:
+ filename = src.basename
+ else:
+ filename = ctx.attr.name
+
+ return [
+ PrebuiltFileInfo(
+ src = src,
+ dir = "etc/xml",
+ filename = filename,
+ ),
+ DefaultInfo(files = depset([src])),
+ OutputGroupInfo(_validation = depset([validation_output])),
+ ]
+
+prebuilt_xml = rule(
+ doc = """
+ prebuilt_xml (the Bazel analogue of Soong's prebuilt_etc_xml) installs an XML
+ file under <partition>/etc/xml.
+ It also optionally validates the XML file against the provided DTD or XSD schema.
+ """,
+ implementation = _prebuilt_xml_impl,
+ attrs = {
+ "src": attr.label(
+ mandatory = True,
+ allow_files = True,
+ # TODO(b/217908237): reenable allow_single_file
+ # allow_single_file = True,
+ ),
+ "schema": attr.label(
+ allow_single_file = [".dtd", ".xsd"],
+ doc = "Optional DTD or XSD that will be used to validate the xml file",
+ ),
+ "filename": attr.string(doc = "Optional name for the installed file"),
+ "filename_from_src": attr.bool(
+ doc = "Optional. When filename is not provided and" +
+ "filename_from_src is true, name for the installed file" +
+ "will be set from src",
+ ),
+ "_xml_validation_tool": attr.label(
+ default = "//external/libxml2:xmllint",
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+)
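+
+# Hypothetical usage sketch (file and target names are invented for
+# illustration, mirroring the constants used in prebuilt_xml_test.bzl):
+#
+#     load("//build/bazel/rules:prebuilt_xml.bzl", "prebuilt_xml")
+#
+#     prebuilt_xml(
+#         name = "foo_xml",
+#         src = "fooSrc.xml",
+#         schema = "bar.dtd",  # optional; an .xsd schema also works
+#         filename = "fooFilename",
+#     )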
diff --git a/rules/prebuilt_xml_test.bzl b/rules/prebuilt_xml_test.bzl
new file mode 100644
index 00000000..8e74eee4
--- /dev/null
+++ b/rules/prebuilt_xml_test.bzl
@@ -0,0 +1,249 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules:prebuilt_file.bzl", "PrebuiltFileInfo")
+load("//build/bazel/rules:prebuilt_xml.bzl", "prebuilt_xml")
+load("//build/bazel/rules/test_common:args.bzl", "get_arg_value")
+
+SRC = "fooSrc.xml"
+DIR = "etc/xml"
+DTD_SCHEMA = "bar.dtd"
+XSD_SCHEMA = "baz.xsd"
+FILENAME = "fooFilename"
+
+def _test_prebuilt_xml_commands_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+ asserts.equals(env, 1, len(actions), "expected 1 action got {}".format(actions))
+ args = actions[0].argv
+
+ asserts.true(env, len(args) >= 8, "expected at least 8 arguments but got {} {}".format(len(args), args))
+
+ offset = 0
+ schema = ctx.attr.schema
+
+ if schema != "":
+ offset = 2
+ if schema == "--schema":
+ asserts.equals(env, paths.basename(get_arg_value(args, schema)), XSD_SCHEMA)
+ elif schema == "--dtdvalid":
+ asserts.equals(env, paths.basename(get_arg_value(args, schema)), DTD_SCHEMA)
+ else:
+ analysistest.fail(
+ env,
+ "Expected schema attr to be --schema or --dtdvalid but got {}".format(schema),
+ )
+
+ asserts.equals(env, SRC, paths.basename(args[1 + offset]))
+ asserts.equals(env, ">", args[2 + offset])
+ asserts.equals(env, "/dev/null", args[3 + offset])
+ asserts.equals(env, "&&", args[4 + offset])
+ asserts.equals(env, "touch", args[5 + offset])
+ asserts.equals(env, "-a", args[6 + offset])
+
+ return analysistest.end(env)
+
+prebuilt_xml_commands_test = analysistest.make(
+ _test_prebuilt_xml_commands_impl,
+ attrs = {
+ "schema": attr.string(),
+ },
+)
+
+def _test_prebuilt_xml_commands():
+ name = "prebuilt_xml_commands"
+ test_name = name + "_test"
+
+ prebuilt_xml(
+ name = name,
+ src = SRC,
+ filename = FILENAME,
+ tags = ["manual"],
+ )
+ prebuilt_xml_commands_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_commands_dtd():
+ name = "prebuilt_xml_commands_dtd"
+ test_name = name + "_test"
+
+ prebuilt_xml(
+ name = name,
+ src = SRC,
+ schema = DTD_SCHEMA,
+ filename = FILENAME,
+ tags = ["manual"],
+ )
+ prebuilt_xml_commands_test(
+ name = test_name,
+ schema = "--dtdvalid",
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_commands_xsd():
+ name = "prebuilt_xml_commands_xsd"
+ test_name = name + "_test"
+ prebuilt_xml(
+ name = name,
+ schema = XSD_SCHEMA,
+ filename = FILENAME,
+ src = SRC,
+ tags = ["manual"],
+ )
+ prebuilt_xml_commands_test(
+ name = test_name,
+ schema = "--schema",
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_PrebuiltFileInfo_impl(ctx):
+ env = analysistest.begin(ctx)
+ target_under_test = analysistest.target_under_test(env)
+ prebuilt_file_info = target_under_test[PrebuiltFileInfo]
+
+ asserts.equals(
+ env,
+ FILENAME,
+ prebuilt_file_info.filename,
+ "expected PrebuiltFileInfo filename to be {} but got {}".format(FILENAME, prebuilt_file_info.filename),
+ )
+
+ asserts.equals(
+ env,
+ SRC,
+ prebuilt_file_info.src.basename,
+ "expected PrebuiltFileInfo src to be {} but got {}".format(SRC, prebuilt_file_info.src),
+ )
+
+ asserts.equals(
+ env,
+ DIR,
+ prebuilt_file_info.dir,
+ "expected PrebuiltFileInfo dir to be {} but got {}".format(DIR, prebuilt_file_info.dir),
+ )
+
+ return analysistest.end(env)
+
+prebuilt_xml_PrebuiltFileInfo_test = analysistest.make(_test_prebuilt_xml_PrebuiltFileInfo_impl)
+
+def _test_prebuilt_xml_PrebuiltFileInfo():
+ name = "prebuilt_xml_PrebuiltFileInfo"
+ test_name = name + "_test"
+ prebuilt_xml(
+ name = name,
+ src = SRC,
+ filename = FILENAME,
+ tags = ["manual"],
+ )
+ prebuilt_xml_PrebuiltFileInfo_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_schema_validation_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ target = analysistest.target_under_test(env)
+ validation_outputs = target.output_groups._validation.to_list()
+ for action in actions:
+ for validation_output in validation_outputs:
+ if validation_output in action.inputs.to_list():
+ analysistest.fail(
+ env,
+ "%s is a validation action output, but is an input to action %s" % (
+ validation_output,
+ action,
+ ),
+ )
+
+ return analysistest.end(env)
+
+prebuilt_xml_schema_validation_test = analysistest.make(_test_prebuilt_xml_schema_validation_impl)
+
+def _test_prebuilt_xml_dtd_schema_validation():
+ name = "prebuilt_xml_dtd_schema_validation"
+ test_name = name + "_test"
+ prebuilt_xml(
+ name = name,
+ src = SRC,
+ schema = DTD_SCHEMA,
+ filename = FILENAME,
+ tags = ["manual"],
+ )
+ prebuilt_xml_schema_validation_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_xsd_schema_validation():
+ name = "prebuilt_xml_xsd_schema_validation"
+ test_name = name + "_test"
+ prebuilt_xml(
+ name = name,
+ schema = XSD_SCHEMA,
+ filename = FILENAME,
+ src = SRC,
+ tags = ["manual"],
+ )
+ prebuilt_xml_schema_validation_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def _test_prebuilt_xml_minimal_schema_validation():
+ name = "prebuilt_xml_minimal_schema_validation"
+ test_name = name + "_test"
+ prebuilt_xml(
+ name = name,
+ src = SRC,
+ filename = FILENAME,
+ tags = ["manual"],
+ )
+ prebuilt_xml_schema_validation_test(
+ name = test_name,
+ target_under_test = name,
+ )
+
+ return test_name
+
+def prebuilt_xml_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_prebuilt_xml_commands(),
+ _test_prebuilt_xml_commands_dtd(),
+ _test_prebuilt_xml_commands_xsd(),
+ _test_prebuilt_xml_minimal_schema_validation(),
+ _test_prebuilt_xml_dtd_schema_validation(),
+ _test_prebuilt_xml_xsd_schema_validation(),
+ _test_prebuilt_xml_PrebuiltFileInfo(),
+ ],
+ )
diff --git a/rules/proto_file_utils.bzl b/rules/proto_file_utils.bzl
index 2ce3dd58..569f6079 100644
--- a/rules/proto_file_utils.bzl
+++ b/rules/proto_file_utils.bzl
@@ -1,20 +1,19 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:sets.bzl", "sets")
def _generate_and_declare_output_files(
ctx,
@@ -38,20 +37,19 @@ def _generate_and_declare_output_files(
return ret
def _generate_jar_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
out_flags = [],
plugin_executable = None,
out_arg = None,
mnemonic = "ProtoGen"):
-
jar_basename = ctx.label.name + "-proto_gen"
jar_name = jar_basename + "-src.jar"
jar_file = ctx.actions.declare_file(jar_name)
_generate_proto_action(
- proto_info = proto_info,
+ proto_infos = proto_infos,
protoc = protoc,
ctx = ctx,
out_flags = out_flags,
@@ -71,7 +69,7 @@ def _generate_jar_proto_action(
return srcjar_file
def _generate_proto_action(
- proto_info,
+ proto_infos,
protoc,
ctx,
type_dictionary = None,
@@ -83,7 +81,7 @@ def _generate_proto_action(
""" Utility function for creating proto_compiler action.
Args:
- proto_info: ProtoInfo
+ proto_infos: A list of ProtoInfo.
protoc: proto compiler executable.
ctx: context, used for declaring new files only.
type_dictionary: a dictionary of types to output extensions
@@ -96,14 +94,26 @@ def _generate_proto_action(
Returns:
Dictionary with declared files grouped by type from the type_dictionary.
"""
- proto_srcs = proto_info.direct_sources
- transitive_proto_srcs = proto_info.transitive_imports
+
+ # TODO(b/245629074): Don't build external/protobuf if it is already
+ # provided in the toolchain.
+ proto_srcs = []
+ proto_source_root_list = sets.make()
+ transitive_proto_srcs_list = []
+ transitive_proto_path_list = sets.make()
+
+ for proto_info in proto_infos:
+ sets.insert(proto_source_root_list, proto_info.proto_source_root)
+ proto_srcs.extend(proto_info.direct_sources)
+ transitive_proto_srcs_list.append(proto_info.transitive_imports)
+ for p in proto_info.transitive_proto_path.to_list():
+ sets.insert(transitive_proto_path_list, p)
protoc_out_name = paths.join(ctx.bin_dir.path, ctx.label.package)
if output_file:
protoc_out_name = paths.join(protoc_out_name, output_file.basename)
out_files = {
- "out": [output_file]
+ "out": [output_file],
}
else:
protoc_out_name = paths.join(protoc_out_name, ctx.label.name)
@@ -122,13 +132,17 @@ def _generate_proto_action(
else:
args.add("{}={}:{}".format(out_arg, ",".join(out_flags), protoc_out_name))
- args.add_all(["-I", proto_info.proto_source_root])
- args.add_all(["-I{0}={1}".format(f.short_path, f.path) for f in transitive_proto_srcs.to_list()])
+ # The order matters, so add the source roots first.
+ args.add_all(["-I" + p for p in sets.to_list(proto_source_root_list)])
+
+ args.add_all(["-I" + p for p in sets.to_list(transitive_proto_path_list)])
+ args.add_all(["-I{0}={1}".format(f.short_path, f.path) for t in transitive_proto_srcs_list for f in t.to_list()])
+
args.add_all([f.short_path for f in proto_srcs])
inputs = depset(
direct = proto_srcs,
- transitive = [transitive_proto_srcs],
+ transitive = transitive_proto_srcs_list,
)
outputs = []
diff --git a/rules/python/library.bzl b/rules/python/library.bzl
deleted file mode 100644
index 3d2308e6..00000000
--- a/rules/python/library.bzl
+++ /dev/null
@@ -1,15 +0,0 @@
-"""Macro wrapping the py_library rule for Soong/Bazel convergence."""
-
-def py_library(imports = [".."], **kwargs):
- # b/208215661: Always propagate the parent directory of this target so that
- # dependent targets can use `import <modulename>` without using absolute
- # imports, which Bazel uses by default. The eventual effect of this in a
- # py_binary is that all directories contain py_library deps are added to the
- # PYTHONPATH of the py_binary stub script, enabling `import <modulename>`.
- if ".." not in imports:
- imports.append("..")
-
- native.py_library(
- imports = imports,
- **kwargs,
- )
diff --git a/rules/python/py_proto.bzl b/rules/python/py_proto.bzl
new file mode 100644
index 00000000..b453376c
--- /dev/null
+++ b/rules/python/py_proto.bzl
@@ -0,0 +1,103 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+
+def _py_proto_sources_gen_rule_impl(ctx):
+ imports = []
+ all_outputs = []
+ for dep in ctx.attr.deps:
+ proto_info = dep[ProtoInfo]
+
+ outputs = []
+ for name in proto_info.direct_sources:
+ outputs.append(ctx.actions.declare_file(paths.replace_extension(name.basename, "_pb2.py"), sibling = name))
+
+ args = ctx.actions.args()
+ args.add("--python_out=" + proto_info.proto_source_root)
+ args.add_all(["-I", proto_info.proto_source_root])
+ args.add_all(proto_info.direct_sources)
+
+ if proto_info.proto_source_root != ".":
+ imports.append(paths.join("__main__", paths.relativize(proto_info.proto_source_root, ctx.bin_dir.path)))
+
+ # It's not clear what to do with transitive imports/sources
+ if len(proto_info.transitive_imports.to_list()) > len(proto_info.direct_sources) or len(proto_info.transitive_sources.to_list()) > len(proto_info.direct_sources):
+ fail("TODO: Transitive imports/sources of python protos")
+
+ ctx.actions.run(
+ inputs = depset(
+ direct = proto_info.direct_sources,
+ transitive = [proto_info.transitive_imports],
+ ),
+ executable = ctx.executable._protoc,
+ outputs = outputs,
+ arguments = [args],
+ mnemonic = "PyProtoGen",
+ )
+
+ all_outputs.extend(outputs)
+
+ output_depset = depset(direct = all_outputs)
+ return [
+ DefaultInfo(files = output_depset),
+ PyInfo(
+ transitive_sources = output_depset,
+ # If proto_source_root is set to something other than the root of the workspace, import the current package.
+ # It's always the current package because it's the path to where we generated the python sources, not to where
+ # the proto sources are.
+ imports = depset(direct = imports),
+ ),
+ ]
+
+_py_proto_sources_gen = rule(
+ implementation = _py_proto_sources_gen_rule_impl,
+ attrs = {
+ "deps": attr.label_list(
+ providers = [ProtoInfo],
+ doc = "proto_library or any other target exposing ProtoInfo provider with *.proto files",
+ mandatory = True,
+ ),
+ "_protoc": attr.label(
+ default = Label("//external/protobuf:aprotoc"),
+ executable = True,
+ cfg = "exec",
+ ),
+ },
+)
+
+def py_proto_library(
+ name,
+ deps = [],
+ target_compatible_with = [],
+ **kwargs):
+ proto_lib_name = name + "_proto_gen"
+
+ _py_proto_sources_gen(
+ name = proto_lib_name,
+ deps = deps,
+ **kwargs
+ )
+
+ # There may be a better way to do this, but proto_lib_name appears in both srcs
+ # and deps because it must appear in srcs to cause the protobuf files to
+ # actually be compiled, and it must appear in deps for the PyInfo provider to
+ # be respected and the "imports" path to be included in this library.
+ native.py_library(
+ name = name,
+ srcs = [":" + proto_lib_name],
+ deps = [":" + proto_lib_name] + (["//external/protobuf:libprotobuf-python"] if "libprotobuf-python" not in name else []),
+ target_compatible_with = target_compatible_with,
+ **kwargs
+ )
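+
+# Hypothetical usage sketch (target and file names are invented for
+# illustration):
+#
+#     load("//build/bazel/rules/python:py_proto.bzl", "py_proto_library")
+#
+#     proto_library(
+#         name = "foo_proto",
+#         srcs = ["foo.proto"],
+#     )
+#
+#     py_proto_library(
+#         name = "foo_py_proto",
+#         deps = [":foo_proto"],
+#     )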
diff --git a/rules/sh_binary.bzl b/rules/sh_binary.bzl
index 82e1510e..65878205 100644
--- a/rules/sh_binary.bzl
+++ b/rules/sh_binary.bzl
@@ -11,6 +11,7 @@ def sh_binary(
srcs,
sub_dir = None,
filename = None,
+ tags = [],
**kwargs):
"Bazel macro to correspond with the sh_binary Soong module."
@@ -18,6 +19,7 @@ def sh_binary(
native.sh_binary(
name = internal_name,
srcs = srcs,
+ tags = ["manual"],
**kwargs
)
@@ -28,6 +30,7 @@ def sh_binary(
sub_dir = sub_dir,
filename = filename,
dep = internal_name,
+ tags = tags,
)
def _sh_binary_combiner_impl(ctx):
diff --git a/rules/soong_injection.bzl b/rules/soong_injection.bzl
index 9e2090ef..fedffeb6 100644
--- a/rules/soong_injection.bzl
+++ b/rules/soong_injection.bzl
@@ -1,27 +1,29 @@
-"""
-Copyright (C) 2021 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
def _impl(rctx):
rctx.file("WORKSPACE", "")
build_dir = str(rctx.path(Label("//:BUILD")).dirname.dirname)
soong_injection_dir = build_dir + "/soong_injection"
+ rctx.symlink(soong_injection_dir + "/allowlists", "allowlists")
+ rctx.symlink(soong_injection_dir + "/android", "android")
rctx.symlink(soong_injection_dir + "/mixed_builds", "mixed_builds")
rctx.symlink(soong_injection_dir + "/cc_toolchain", "cc_toolchain")
rctx.symlink(soong_injection_dir + "/java_toolchain", "java_toolchain")
+ rctx.symlink(soong_injection_dir + "/apex_toolchain", "apex_toolchain")
rctx.symlink(soong_injection_dir + "/product_config", "product_config")
+ rctx.symlink(soong_injection_dir + "/product_config_platforms", "product_config_platforms")
rctx.symlink(soong_injection_dir + "/api_levels", "api_levels")
rctx.symlink(soong_injection_dir + "/metrics", "metrics")
diff --git a/rules/staging_dir_builder.py b/rules/staging_dir_builder.py
new file mode 100644
index 00000000..dea82bf2
--- /dev/null
+++ b/rules/staging_dir_builder.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import argparse
+
+def build_staging_dir(file_mapping_path, staging_dir_path, command_argv):
+ '''Create a staging dir from the provided file mapping, then run the given command.
+
+ Args:
+ file_mapping_path (str): path to the file mapping json
+ staging_dir_path (str): path to the staging directory
+ command_argv (str list): the command to be executed, with the first arg as the executable
+ '''
+
+ try:
+ with open(file_mapping_path, 'r') as f:
+ file_mapping = json.load(f)
+ except OSError as e:
+ sys.exit(str(e))
+ except json.JSONDecodeError as e:
+ sys.exit(file_mapping_path + ": JSON decode error: " + str(e))
+
+ # Validate and clean the file_mapping. This consists of:
+ # - Making sure it's a dict[str, str]
+ # - Normalizing the paths in the staging dir and stripping leading /s
+ # - Making sure there are no duplicate paths in the staging dir
+ # - Making sure no paths use .. to break out of the staging dir
+ cleaned_file_mapping = {}
+ if not isinstance(file_mapping, dict):
+ sys.exit(file_mapping_path + ": expected a JSON dict[str, str]")
+ for path_in_staging_dir, path_in_bazel in file_mapping.items():
+ if not isinstance(path_in_staging_dir, str) or not isinstance(path_in_bazel, str):
+ sys.exit(file_mapping_path + ": expected a JSON dict[str, str]")
+ path_in_staging_dir = os.path.normpath(path_in_staging_dir).lstrip('/')
+ if path_in_staging_dir in cleaned_file_mapping:
+ sys.exit("Staging dir path repeated twice: " + path_in_staging_dir)
+ if path_in_staging_dir.startswith('../'):
+ sys.exit("Path attempts to break out of staging dir: " + path_in_staging_dir)
+ cleaned_file_mapping[path_in_staging_dir] = path_in_bazel
+ file_mapping = cleaned_file_mapping
+
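+ # Illustrative example (paths invented for illustration): an input mapping entry such as
+ #   {"/system/apex/com.android.foo.apex": "bazel-out/k8-fastbuild/bin/foo/com.android.foo.apex"}
+ # has been normalized above to
+ #   {"system/apex/com.android.foo.apex": "bazel-out/k8-fastbuild/bin/foo/com.android.foo.apex"}
+ # and the loop below copies each mapped file into the staging directory.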
+ for path_in_staging_dir, path_in_bazel in file_mapping.items():
+ path_in_staging_dir = os.path.join(staging_dir_path, path_in_staging_dir)
+
+ # Because the Bazel execution root is a symlink forest, all of the input files are symlinks.
+ # These include the dependency files declared in BUILD files as well as the files declared
+ # and created in .bzl files. For sandboxed runs the former are symlinks of two or more levels
+ # and the latter are one-level symlinks. For non-sandboxed runs, the former are one-level
+ # symlinks and the latter are actual files. Here are some examples:
+ #
+ # Two level symlinks:
+ # system/timezone/output_data/version/tz_version ->
+ # /usr/local/google/home/...out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
+ # execroot/__main__/system/timezone/output_data/version/tz_version ->
+ # /usr/local/google/home/.../system/timezone/output_data/version/tz_version
+ #
+ # Three level symlinks:
+ # bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/libcrypto.so ->
+ # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
+ # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/libcrypto.so ->
+ # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
+ # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/
+ # liblibcrypto_stripped.so ->
+ # /usr/local/google/home/yudiliu/android/aosp/master/out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
+ # execroot/__main__/bazel-out/android_x86_64-fastbuild-ST-4ecd5e98bfdd/bin/external/boringssl/
+ # liblibcrypto_unstripped.so
+ #
+ # One level symlinks:
+ # bazel-out/android_target-fastbuild/bin/system/timezone/apex/apex_manifest.pb ->
+ # /usr/local/google/home/.../out/bazel/output_user_root/b1ed7e1e9af3ebbd1403e9cf794e4884/
+ # execroot/__main__/bazel-out/android_target-fastbuild/bin/system/timezone/apex/
+ # apex_manifest.pb
+ if os.path.islink(path_in_bazel):
+ path_in_bazel = os.readlink(path_in_bazel)
+
+ # For sandboxed runs these are second-level (or deeper) symlinks that still need to be resolved
+ while os.path.islink(path_in_bazel) and 'execroot/__main__' in path_in_bazel:
+ path_in_bazel = os.readlink(path_in_bazel)
+
+ os.makedirs(os.path.dirname(path_in_staging_dir), exist_ok=True)
+ # shutil.copy copies both the file data and the file's permission mode.
+ # Preserving the permission mode matters for tools, such as build/soong/scripts/gen_ndk_usedby_apex.sh,
+ # that rely on the permission mode of the artifacts.
+ shutil.copy(path_in_bazel, path_in_staging_dir, follow_symlinks=False)
+
+ result = subprocess.run(command_argv)
+
+ sys.exit(result.returncode)
+
+def main():
+ '''Build a staging directory, and then call a custom command.
+
+ The first argument to this script must be the path to a file containing a JSON
+ dictionary mapping paths in the staging directory to the files that should be
+ copied there. The second argument is the path of the staging directory itself.
+ The remaining arguments are run as a separate command.
+
+ Example:
+ staging_dir_builder file_mapping.json path/to/staging_dir path/to/apexer --various-apexer-flags path/to/out.apex.unsigned
+ '''
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "file_mapping_path",
+ help="Path to the <staging dir path>:<bazel input path> file mapping JSON.",
+ )
+ parser.add_argument(
+ "staging_dir_path",
+ help="Path to a directory to store the staging directory content.",
+ )
+ args, command_argv = parser.parse_known_args()
+ build_staging_dir(args.file_mapping_path, args.staging_dir_path, command_argv)
+
+if __name__ == '__main__':
+ main()
diff --git a/rules/apex/bazel_apexer_wrapper_test.sh b/rules/staging_dir_builder_test.sh
index 1cf03fa9..578cbd48 100755
--- a/rules/apex/bazel_apexer_wrapper_test.sh
+++ b/rules/staging_dir_builder_test.sh
@@ -17,9 +17,20 @@
set -xeuo pipefail
-apexer_tool_path="${RUNFILES_DIR}/__main__/external/make_injection/host/linux-x86/bin"
+apexer_tool_path="${RUNFILES_DIR}/__main__/system/apex/apexer/apexer"
+conv_apex_manifest_tool_path="${RUNFILES_DIR}/__main__/system/apex/apexer/conv_apex_manifest"
+deapexer_tool_path="${RUNFILES_DIR}/__main__/system/apex/tools/deapexer"
avb_tool_path="${RUNFILES_DIR}/__main__/external/avb"
+e2fsdroid_path="${RUNFILES_DIR}/__main__/external/e2fsprogs/contrib/android"
+mke2fs_path="${RUNFILES_DIR}/__main__/external/e2fsprogs/misc"
+resize2fs_path="${RUNFILES_DIR}/__main__/external/e2fsprogs/resize"
+sefcontext_compile_path="${RUNFILES_DIR}/__main__/external/selinux/libselinux"
+debugfs_path="${RUNFILES_DIR}/__main__/external/e2fsprogs/debugfs"
+soong_zip_path="${RUNFILES_DIR}/__main__/prebuilts/build-tools/linux-x86/bin"
+aapt2_path="${RUNFILES_DIR}/__main__/frameworks/base/tools/aapt2"
android_jar="${RUNFILES_DIR}/__main__/prebuilts/sdk/current/public/android.jar"
+blkid_path="$(readlink -f ${RUNFILES_DIR}/__main__/external/e2fsprogs/misc/blkid)"
+fsckerofs_path="$(readlink -f ${RUNFILES_DIR}/__main__/external/erofs-utils/fsck.erofs)"
input_dir=$(mktemp -d)
output_dir=$(mktemp -d)
@@ -41,7 +52,9 @@ trap cleanup ERR
# 5. a two-level symlink without "execroot/__main__" in the path
# 6. a three-level symlink with "execroot/__main__" in the path
echo "test file1" > "${input_dir}/file1"
+chmod 755 "${input_dir}/file1"
echo "test file2" > "${input_dir}/file2"
+chmod 755 "${input_dir}/file2"
mkdir -p "${input_dir}/execroot/__main__"
ln -s "${input_dir}/file1" "${input_dir}/one_level_sym"
ln -s "${input_dir}/file2" "${input_dir}/execroot/__main__/middle_sym"
@@ -53,7 +66,7 @@ ln -s "${input_dir}/two_level_sym_in_execroot" "${input_dir}/three_level_sym_in_
manifest_dir=$(mktemp -d)
manifest_file="${manifest_dir}/apex_manifest.pb"
echo '{"name": "com.android.example.apex", "version": 1}' > "${manifest_dir}/apex_manifest.json"
-"${apexer_tool_path}/conv_apex_manifest" proto "${manifest_dir}/apex_manifest.json" -o ${manifest_file}
+"${conv_apex_manifest_tool_path}" proto "${manifest_dir}/apex_manifest.json" -o ${manifest_file}
# Create the file_contexts file
file_contexts_file=$(mktemp)
@@ -65,33 +78,59 @@ echo '
output_file="${output_dir}/test.apex"
# Create the wrapper manifest file
-bazel_apexer_wrapper_manifest_file=$(mktemp)
-echo "
-dir1,file1,"${input_dir}/file1"
-dir2/dir3,file2,"${input_dir}/file2"
-dir4,one_level_sym,"${input_dir}/one_level_sym"
-dir5,two_level_sym_in_execroot,"${input_dir}/two_level_sym_in_execroot"
-dir6,two_level_sym_not_in_execroot,"${input_dir}/two_level_sym_not_in_execroot"
-dir7,three_level_sym_in_execroot,"${input_dir}/three_level_sym_in_execroot"
-" > ${bazel_apexer_wrapper_manifest_file}
+staging_dir_builder_manifest_file=$(mktemp)
+echo "{
+\"dir1/file1\": \"${input_dir}/file1\",
+\"dir2/dir3/file2\": \"${input_dir}/file2\",
+\"dir4/one_level_sym\": \"${input_dir}/one_level_sym\",
+\"dir5/two_level_sym_in_execroot\": \"${input_dir}/two_level_sym_in_execroot\",
+\"dir6/two_level_sym_not_in_execroot\": \"${input_dir}/two_level_sym_not_in_execroot\",
+\"dir7/three_level_sym_in_execroot\": \"${input_dir}/three_level_sym_in_execroot\"
+}" > ${staging_dir_builder_manifest_file}
+
+canned_fs_config=$(mktemp)
+echo "/ 0 2000 0755
+/apex_manifest.json 1000 1000 0644
+/apex_manifest.pb 1000 1000 0644
+/dir1 0 2000 0755
+/dir1/file1 1000 1000 0644
+/dir2 0 2000 0755
+/dir2/dir3 0 2000 0755
+/dir2/dir3/file2 1000 1000 0644
+/dir4 0 2000 0755
+/dir4/one_level_sym 1000 1000 0644
+/dir5 0 2000 0755
+/dir5/two_level_sym_in_execroot 1000 1000 0644
+/dir6 0 2000 0755
+/dir6/two_level_sym_not_in_execroot 1000 1000 0644
+/dir7 0 2000 0755
+/dir7/three_level_sym_in_execroot 1000 1000 0644" > ${canned_fs_config}
+
+apexer_tool_paths=${avb_tool_path}:${avb_tool_path}:${e2fsdroid_path}:${mke2fs_path}:${resize2fs_path}:${debugfs_path}:${soong_zip_path}:${aapt2_path}:${sefcontext_compile_path}
+
+staging_dir=$(mktemp -d /tmp/temporary-dir.XXXXXXXX)
+trap 'rm -rf -- "${staging_dir}"' EXIT
#############################################
-# run bazel_apexer_wrapper
+# run staging_dir_builder
#############################################
-"${RUNFILES_DIR}/__main__/build/bazel/rules/apex/bazel_apexer_wrapper" \
+"${RUNFILES_DIR}/__main__/build/bazel/rules/staging_dir_builder" \
+ ${staging_dir_builder_manifest_file} \
+ ${staging_dir} \
+ ${apexer_tool_path} \
--manifest ${manifest_file} \
--file_contexts ${file_contexts_file} \
- --key "${RUNFILES_DIR}/__main__/build/bazel/rules/apex/test.pem" \
- --apexer_path ${apexer_tool_path} \
- --apexer_tool_paths ${apexer_tool_path}:${avb_tool_path} \
- --apex_output_file ${output_file} \
- --bazel_apexer_wrapper_manifest ${bazel_apexer_wrapper_manifest_file} \
- --android_jar_path ${android_jar}
+ --key "${RUNFILES_DIR}/__main__/build/bazel/rules/test.pem" \
+ --apexer_tool_path "${apexer_tool_paths}" \
+ --android_jar_path ${android_jar} \
+ --canned_fs_config ${canned_fs_config} \
+ ${staging_dir} \
+ ${output_file}
#############################################
# check the result
#############################################
-"${apexer_tool_path}/deapexer" --debugfs_path="${apexer_tool_path}/debugfs" extract ${output_file} ${output_dir}
+"${deapexer_tool_path}" --debugfs_path="${debugfs_path}/debugfs" --blkid_path="${blkid_path}" --fsckerofs_path="${fsckerofs_path}" extract ${output_file} ${output_dir}
# The expected mounted tree should be something like this:
# /tmp/tmp.9u7ViPlMr7
@@ -136,6 +175,21 @@ diff ${input_dir}/file2 ${output_dir}/dir5/two_level_sym_in_execroot
[ `readlink ${output_dir}/dir6/two_level_sym_not_in_execroot` = "${input_dir}/file1" ]
diff ${input_dir}/file2 ${output_dir}/dir7/three_level_sym_in_execroot
+input_perms="$(stat -c '%A' ${input_dir}/file1)"
+output_perms="$(stat -c '%A' ${staging_dir}/dir1/file1)"
+if [ ${input_perms} != ${output_perms} ]; then
+ echo "File permissions not matched!"
+ exit 1
+fi
+
+input_perms="$(stat -c '%A' ${input_dir}/file2)"
+output_perms="$(stat -c '%A' ${staging_dir}/dir5/two_level_sym_in_execroot)"
+if [ ${input_perms} != ${output_perms} ]; then
+ echo "File permissions not matched!"
+ exit 1
+fi
+
+
cleanup
echo "Passed for all test cases"
diff --git a/rules/sysprop/BUILD.bazel b/rules/sysprop/BUILD.bazel
new file mode 100644
index 00000000..b8b7eae4
--- /dev/null
+++ b/rules/sysprop/BUILD.bazel
@@ -0,0 +1,19 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+load(":sysprop_library_test.bzl", "sysprop_library_test_suite")
+
+sysprop_library_test_suite(name = "sysprop_library_tests")
diff --git a/rules/sysprop/sysprop_library.bzl b/rules/sysprop/sysprop_library.bzl
new file mode 100644
index 00000000..c9138a84
--- /dev/null
+++ b/rules/sysprop/sysprop_library.bzl
@@ -0,0 +1,36 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO(b/240466571): Introduce property owner
+SyspropGenInfo = provider(fields = ["srcs"])
+
+def _sysprop_library_impl(ctx):
+ return [SyspropGenInfo(srcs = ctx.attr.srcs)]
+
+# TODO(b/240466571): Add Java to the documentation once the rules/macros are created
+# TODO(b/240463568): Implement API checks
+sysprop_library = rule(
+ implementation = _sysprop_library_impl,
+ doc = """Defines a library of sysprop files which may be used across
+ the platform from either c++ or Java code. a `sysprop_library` may be
+ listed in the `dep` clause of `cc_sysprop_library_shared` or
+ `cc_sysprop_library_static` targets. Java is not yet supported""",
+ attrs = {
+ "srcs": attr.label_list(
+ allow_files = [".sysprop"],
+ mandatory = True,
+ ),
+ },
+ provides = [SyspropGenInfo],
+)
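
As a minimal sketch (not part of the change itself), a BUILD.bazel file could use this rule as shown below; the load path, target name, and .sysprop file name are assumptions for illustration:

load("//build/bazel/rules/sysprop:sysprop_library.bzl", "sysprop_library")

# Hypothetical target collecting .sysprop sources; the SyspropGenInfo provider
# it returns is what cc_sysprop_library_* targets would consume via `dep`.
sysprop_library(
    name = "example_sysprops",
    srcs = ["ExampleProperties.sysprop"],
)
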
diff --git a/rules/sysprop/sysprop_library_test.bzl b/rules/sysprop/sysprop_library_test.bzl
new file mode 100644
index 00000000..1fab7809
--- /dev/null
+++ b/rules/sysprop/sysprop_library_test.bzl
@@ -0,0 +1,54 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load(":sysprop_library.bzl", "SyspropGenInfo", "sysprop_library")
+
+def _provides_src_files_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ target_under_test = analysistest.target_under_test(env)
+ asserts.equals(
+ env,
+ ["foo.sysprop", "bar.sysprop"],
+ [src.label.name for src in target_under_test[SyspropGenInfo].srcs],
+ )
+
+ return analysistest.end(env)
+
+provides_src_files_test = analysistest.make(
+ _provides_src_files_test_impl,
+)
+
+def _test_provides_src_files():
+ name = "provides_src_files"
+ test_name = name + "_test"
+ sysprop_library(
+ name = name,
+ srcs = ["foo.sysprop", "bar.sysprop"],
+ tags = ["manual"],
+ )
+ provides_src_files_test(
+ name = test_name,
+ target_under_test = name,
+ )
+ return test_name
+
+def sysprop_library_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ _test_provides_src_files(),
+ ],
+ )
diff --git a/rules/apex/test.pem b/rules/test.pem
index bd567789..bd567789 100644
--- a/rules/apex/test.pem
+++ b/rules/test.pem
diff --git a/rules/test_common/BUILD.bazel b/rules/test_common/BUILD.bazel
new file mode 100644
index 00000000..ff01df73
--- /dev/null
+++ b/rules/test_common/BUILD.bazel
@@ -0,0 +1,15 @@
+"""
+Copyright (C) 2022 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/rules/test_common/args.bzl b/rules/test_common/args.bzl
new file mode 100644
index 00000000..8c020efc
--- /dev/null
+++ b/rules/test_common/args.bzl
@@ -0,0 +1,98 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def get_arg_value(args_list, arg_name):
+ """
+    Fetches the value of a named argument from the list of args provided by a
+    Bazel action. Returns None if the argument is not present and fails if
+    more than one value is found. This function makes the same assumptions
+    as get_arg_values() below.
+
+ Arguments:
+ args_list (string[]): The list of arguments provided by the Bazel action.
+ i.e., bazel_action.argv
+ arg_name (string): The name of the argument to fetch the value of
+ Return:
+ The value corresponding to the specified argument name
+ """
+
+ values = get_arg_values(args_list, arg_name)
+ if len(values) == 0:
+ return None
+
+ if len(values) != 1:
+ fail("More than one args found `%s`" % values)
+
+ return values[0]
+
+def get_arg_values(args_list, arg_name):
+ """
+    Fetches all the values of a named argument from the list of args provided
+    by a Bazel action. The argument and its value may appear multiple times;
+    all of the values are returned.
+
+    This function assumes that only one argument is added per call to
+    args.add() or per string passed to args.add_all(). It still works,
+    however, when two values are passed to args.add() as separate strings.
+
+ The above assumption implies that this function does not handle cases where
+ an argument name is separated from its value by an =, or any character
+ other than a space, in the final command.
+
+ Arguments:
+ args_list (string[]): The list of arguments provided by the Bazel action.
+ i.e., bazel_action.argv
+ arg_name (string): The name of the argument to fetch the value of
+ Return:
+ All the values corresponding to the specified argument name
+ """
+
+ values = []
+ for i in range(1, len(args_list) - 1):
+ if args_list[i] == arg_name:
+ values.append(args_list[i + 1])
+
+ return values
+
+def get_all_args_with_prefix(input_args, arg_prefix):
+ """returns all arguments that start with arg_prefix
+
+ Args:
+ input_args (list[str]): list of arguments
+ arg_prefix (str): prefix of argument to search for
+ Returns:
+ args (list[str]): value in args that start with arg_prefix
+ """
+ args = []
+ for a in input_args:
+ if a.startswith(arg_prefix):
+ args.append(a[len(arg_prefix):])
+ return args
+
+def get_single_arg_with_prefix(input_args, arg_prefix):
+ """returns all arguments that start with arg_prefix
+
+ Fails if more than one argument exists.
+
+ Args:
+ input_args (list[str]): list of arguments
+ arg_prefix (str): prefix of argument to search for
+ Returns:
+ args (str): value in args that starts with arg_prefix
+ """
+ args = get_all_args_with_prefix(input_args, arg_prefix)
+ if len(args) != 1:
+ fail("expected single argument with prefix `%s`, got %d; args = `%s`" % (arg_prefix, len(args), args))
+ return args[0]
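
To illustrate how these helpers compose inside an analysis test, here is a small sketch; the load path, the action argv, and the flag names are assumptions, not taken from the change:

load(
    "//build/bazel/rules/test_common:args.bzl",
    "get_all_args_with_prefix",
    "get_arg_value",
    "get_arg_values",
    "get_single_arg_with_prefix",
)

def _check_compile_argv(action):
    # Hypothetical argv: ["clang", "-o", "out.o", "-Iinc1", "-Iinc2", "--sysroot=/sr"]
    argv = action.argv

    output = get_arg_value(argv, "-o")  # "out.o"; fails if "-o" appears twice
    outputs = get_arg_values(argv, "-o")  # ["out.o"]
    includes = get_all_args_with_prefix(argv, "-I")  # ["inc1", "inc2"]
    sysroot = get_single_arg_with_prefix(argv, "--sysroot=")  # "/sr"
    return output, outputs, includes, sysroot
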
diff --git a/rules/test_common/asserts.bzl b/rules/test_common/asserts.bzl
new file mode 100644
index 00000000..3c5d3d8b
--- /dev/null
+++ b/rules/test_common/asserts.bzl
@@ -0,0 +1,34 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", skylib_asserts = "asserts")
+
+def _list_equals(env, l1, l2, msg = None):
+ skylib_asserts.equals(
+ env,
+ len(l1),
+ len(l2),
+ msg,
+ )
+ for i in range(len(l1)):
+ skylib_asserts.equals(
+ env,
+ l1[i],
+ l2[i],
+ msg,
+ )
+
+asserts = struct(
+ list_equals = _list_equals,
+)
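
A short usage sketch, assuming the standard skylib analysistest setup and a hypothetical load path and output names:

load("@bazel_skylib//lib:unittest.bzl", "analysistest")
load("//build/bazel/rules/test_common:asserts.bzl", "asserts")

def _outputs_match_test_impl(ctx):
    env = analysistest.begin(ctx)
    outputs = [f.basename for f in analysistest.target_under_test(env).files.to_list()]
    # Compares lengths first, then each element pair-wise via skylib asserts.equals.
    asserts.list_equals(env, ["a.out", "b.out"], outputs, "unexpected outputs")
    return analysistest.end(env)

outputs_match_test = analysistest.make(_outputs_match_test_impl)
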
diff --git a/rules/test_common/flags.bzl b/rules/test_common/flags.bzl
new file mode 100644
index 00000000..542a8213
--- /dev/null
+++ b/rules/test_common/flags.bzl
@@ -0,0 +1,159 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+
+def _assert_flags_present_in_action(env, action, expected_flags):
+ if action.argv == None:
+ asserts.true(
+ env,
+ False,
+ "expected %s action to have arguments, but argv was None" % (
+ action.mnemonic,
+ ),
+ )
+ return
+ for flag in expected_flags:
+ asserts.true(
+ env,
+ flag in action.argv,
+ "%s action did not contain flag %s; argv: %s" % (
+ action.mnemonic,
+ flag,
+ action.argv,
+ ),
+ )
+
+# Checks for the presence of a set of given flags in a set of given actions
+# non-exclusively. In other words, it confirms that the specified actions
+# contain the given flags, but does not confirm that other actions do not
+# contain them.
+def _action_flags_present_for_mnemonic_nonexclusive_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ for action in analysistest.target_actions(env):
+ if action.mnemonic in ctx.attr.mnemonics:
+ _assert_flags_present_in_action(
+ env,
+ action,
+ ctx.attr.expected_flags,
+ )
+
+ return analysistest.end(env)
+
+action_flags_present_for_mnemonic_nonexclusive_test = analysistest.make(
+ _action_flags_present_for_mnemonic_nonexclusive_test_impl,
+ attrs = {
+ "mnemonics": attr.string_list(
+ doc = """
+ Actions with these mnemonics will be expected to have the flags
+ specified in expected_flags
+ """,
+ ),
+ "expected_flags": attr.string_list(doc = "The flags to be checked for"),
+ },
+)
+
+# Checks for the presence of a set of given flags in a set of given actions
+# exclusively. In other words, it confirms that *only* the specified actions
+# contain the specified flags.
+def _action_flags_present_only_for_mnemonic_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ found_at_least_one_action = False
+ for action in actions:
+ if action.mnemonic in ctx.attr.mnemonics:
+ found_at_least_one_action = True
+ _assert_flags_present_in_action(
+ env,
+ action,
+ ctx.attr.expected_flags,
+ )
+ elif action.argv != None:
+ for flag in ctx.attr.expected_flags:
+ asserts.false(
+ env,
+ flag in action.argv,
+ "%s action unexpectedly contained flag %s; argv: %s" % (
+ action.mnemonic,
+ flag,
+ action.argv,
+ ),
+ )
+ asserts.true(
+ env,
+ found_at_least_one_action,
+ "did not find any actions with mnemonic %s" % (
+ ctx.attr.mnemonics,
+ ),
+ )
+ return analysistest.end(env)
+
+def action_flags_present_only_for_mnemonic_test_with_config_settings(config_settings = {}):
+ return analysistest.make(
+ _action_flags_present_only_for_mnemonic_test_impl,
+ attrs = {
+ "mnemonics": attr.string_list(
+ doc = """
+ Actions with these mnemonics will be expected to have the flags
+ specified in expected_flags
+ """,
+ ),
+ "expected_flags": attr.string_list(doc = "The flags to be checked for"),
+ },
+ config_settings = config_settings,
+ )
+
+action_flags_present_only_for_mnemonic_test = action_flags_present_only_for_mnemonic_test_with_config_settings()
+
+# Checks that a given set of flags is NOT present in a given set of actions.
+# Unlike the test above, this check is not exclusive: it only confirms that
+# the flags are absent from the specified actions, and makes no claim about
+# whether actions with other mnemonics contain them.
+def _action_flags_absent_for_mnemonic_test_impl(ctx):
+ env = analysistest.begin(ctx)
+
+ actions = analysistest.target_actions(env)
+ for action in actions:
+ if action.mnemonic in ctx.attr.mnemonics and action.argv != None:
+ for flag in ctx.attr.expected_absent_flags:
+ asserts.false(
+ env,
+ flag in action.argv,
+ "%s action unexpectedly contained flag %s; argv: %s" % (
+ action.mnemonic,
+ flag,
+ action.argv,
+ ),
+ )
+
+ return analysistest.end(env)
+
+action_flags_absent_for_mnemonic_test = analysistest.make(
+ _action_flags_absent_for_mnemonic_test_impl,
+ attrs = {
+ "mnemonics": attr.string_list(
+ doc = """
+ Actions with these mnemonics will be expected NOT to have the flags
+            specified in expected_absent_flags
+ """,
+ ),
+ "expected_absent_flags": attr.string_list(
+ doc = """
+ The flags to be confirmed are absent from the actions in mnemonics
+ """,
+ ),
+ },
+)
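
For reference, these checks are typically instantiated from a test macro along the following lines; the target, mnemonic, and flag names below are hypothetical:

load(
    "//build/bazel/rules/test_common:flags.bzl",
    "action_flags_absent_for_mnemonic_test",
    "action_flags_present_only_for_mnemonic_test",
)

def example_flag_tests():
    # ":example_lib" is an assumed cc target whose actions are inspected.
    action_flags_present_only_for_mnemonic_test(
        name = "example_present_test",
        target_under_test = ":example_lib",
        mnemonics = ["CppCompile"],
        expected_flags = ["-fexample-flag"],
    )
    action_flags_absent_for_mnemonic_test(
        name = "example_absent_test",
        target_under_test = ":example_lib",
        mnemonics = ["CppLink"],
        expected_absent_flags = ["-fexample-flag"],
    )
    return ["example_present_test", "example_absent_test"]
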
diff --git a/rules/test_common/paths.bzl b/rules/test_common/paths.bzl
new file mode 100644
index 00000000..393e71f1
--- /dev/null
+++ b/rules/test_common/paths.bzl
@@ -0,0 +1,31 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//lib:unittest.bzl", "analysistest")
+
+def get_package_dir_based_path(env, path):
+ """
+ Returns the given path prefixed with the full package directory path
+ """
+
+ return paths.join(analysistest.target_under_test(env).label.package, path)
+
+def get_output_and_package_dir_based_path(env, path):
+ """
+ Returns the given path prefixed with the full output and package directory
+ paths
+ """
+
+ return paths.join(analysistest.target_bin_dir_path(env), analysistest.target_under_test(env).label.package, path)
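
A sketch of the intended use, with an assumed load path and a hypothetical generated header name:

load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
load("//build/bazel/rules/test_common:paths.bzl", "get_output_and_package_dir_based_path")

def _generates_header_test_impl(ctx):
    env = analysistest.begin(ctx)
    # Resolves to "<bin dir>/<package of the target under test>/gen/foo.h".
    expected = get_output_and_package_dir_based_path(env, "gen/foo.h")
    outputs = []
    for action in analysistest.target_actions(env):
        outputs.extend([f.path for f in action.outputs.to_list()])
    asserts.true(env, expected in outputs, "missing expected output " + expected)
    return analysistest.end(env)
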
diff --git a/rules/test_common/rules.bzl b/rules/test_common/rules.bzl
new file mode 100644
index 00000000..76822884
--- /dev/null
+++ b/rules/test_common/rules.bzl
@@ -0,0 +1,38 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+
+def _rule_failure_impl(ctx):
+ env = analysistest.begin(ctx)
+ asserts.expect_failure(env, ctx.attr.failure_message)
+ return analysistest.end(env)
+
+expect_failure_test = analysistest.make(
+ impl = _rule_failure_impl,
+ expect_failure = True,
+ attrs = {
+ "failure_message": attr.string(),
+ },
+ doc = "This test checks that a rule fails with the expected failure_message",
+)
+
+def _target_under_test_exist_impl(ctx):
+ env = analysistest.begin(ctx)
+ return analysistest.end(env)
+
+target_under_test_exist_test = analysistest.make(
+ impl = _target_under_test_exist_impl,
+ doc = "This test checks that the target under test exists without failure",
+)
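
By way of example (target names and the failure message are hypothetical), the two generic tests can be wired up as follows:

load("//build/bazel/rules/test_common:rules.bzl", "expect_failure_test", "target_under_test_exist_test")

def example_generic_tests():
    # ":broken_lib" is assumed to be a target that fails analysis with the
    # given message; ":ok_lib" is assumed to analyze cleanly.
    expect_failure_test(
        name = "broken_lib_fails_test",
        target_under_test = ":broken_lib",
        failure_message = "srcs must not be empty",
    )
    target_under_test_exist_test(
        name = "ok_lib_exists_test",
        target_under_test = ":ok_lib",
    )
    return ["broken_lib_fails_test", "ok_lib_exists_test"]
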
diff --git a/rules/toolchain_utils.bzl b/rules/toolchain_utils.bzl
new file mode 100644
index 00000000..8d6d658d
--- /dev/null
+++ b/rules/toolchain_utils.bzl
@@ -0,0 +1,23 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Bazel still resolves toolchains even if targets are marked incompatible with
+# target_compatible_with, which can cause failures due to the toolchain not being found.
+# To work around this issue, some rules make the toolchain optional, but then in their
+# impl functions assert that it exists. This helper function can do that assertion.
+def verify_toolchain_exists(ctx, toolchain):
+ if not ctx.toolchains[toolchain]:
+ # Mimic the bazel failure if this toolchain was mandatory
+ fail("While resolving toolchains for target %s: No matching toolchains found for types %s.\nTo debug, rerun with --toolchain_resolution_debug='%s'" %
+ (str(ctx.label), toolchain, toolchain))
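
A rule that marks its toolchain optional might use the helper roughly as follows. The load path, rule name, and toolchain type are illustrative only, and config_common.toolchain_type(..., mandatory = False) is assumed to be available in the Bazel version in use:

load("//build/bazel/rules:toolchain_utils.bzl", "verify_toolchain_exists")

_EXAMPLE_TOOLCHAIN = "//build/bazel/toolchains/example:toolchain_type"  # hypothetical

def _example_rule_impl(ctx):
    # Fails with the same message Bazel would produce for a mandatory toolchain.
    verify_toolchain_exists(ctx, _EXAMPLE_TOOLCHAIN)
    toolchain = ctx.toolchains[_EXAMPLE_TOOLCHAIN]

    # ... use `toolchain` to register actions ...
    return []

example_rule = rule(
    implementation = _example_rule_impl,
    toolchains = [config_common.toolchain_type(_EXAMPLE_TOOLCHAIN, mandatory = False)],
)
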
diff --git a/rules/tradefed/BUILD.bazel b/rules/tradefed/BUILD.bazel
new file mode 100644
index 00000000..41750d8c
--- /dev/null
+++ b/rules/tradefed/BUILD.bazel
@@ -0,0 +1,47 @@
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load(":tradefed_test.bzl", "tradefed_test_suite")
+
+# flags / configs.
+string_flag(
+ name = "runmode",
+ build_setting_default = "",
+)
+
+config_setting(
+ name = "all",
+ flag_values = {
+ ":runmode": "all",
+ },
+)
+
+config_setting(
+ name = "host_driven_test",
+ flag_values = {
+ ":runmode": "host_driven_test",
+ },
+)
+
+selects.config_setting_group(
+ name = "android_host_driven_tradefed_test",
+ match_all = [
+ "//build/bazel/platforms/os:android",
+ ":host_driven_test",
+ ],
+)
+
+selects.config_setting_group(
+ name = "linux_host_driven_tradefed_test",
+ match_all = [
+ "//build/bazel/platforms/os:linux",
+ ":host_driven_test",
+ ],
+)
+
+# exports.
+exports_files(
+ glob(["*.tpl"]),
+)
+
+# tests_suites.
+tradefed_test_suite(name = "tradefed_tests")
diff --git a/rules/tradefed/test/BUILD.bazel b/rules/tradefed/test/BUILD.bazel
new file mode 100644
index 00000000..a8460468
--- /dev/null
+++ b/rules/tradefed/test/BUILD.bazel
@@ -0,0 +1,5 @@
+package(default_visibility = ["//build/bazel/rules/tradefed:__subpackages__"])
+
+exports_files(
+ glob(["*.xml"]),
+)
diff --git a/rules/tradefed/test/example_config.xml b/rules/tradefed/test/example_config.xml
new file mode 100644
index 00000000..bec4259d
--- /dev/null
+++ b/rules/tradefed/test/example_config.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<configuration description="Runs MODULE_NAME.">
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="MODULE_NAME" />
+ </test>
+</configuration>
diff --git a/rules/tradefed/tradefed.bzl b/rules/tradefed/tradefed.bzl
new file mode 100644
index 00000000..64b4469d
--- /dev/null
+++ b/rules/tradefed/tradefed.bzl
@@ -0,0 +1,184 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+TRADEFED_TEST_ATTRIBUTES = {
+ "test": attr.label(
+ providers = [[CcInfo]],
+ doc = "Test target to run in tradefed.",
+ ),
+ "test_identifier": attr.string(),
+ "host_driven": attr.bool(
+ default = True,
+ doc = "Is a host driven test",
+ ),
+ "_tradefed_test_sh_template": attr.label(
+ default = ":tradefed.sh.tpl",
+ allow_single_file = True,
+ doc = "Template script to launch tradefed.",
+ ),
+ "_tradefed_dependencies": attr.label_list(
+ default = [
+ "//prebuilts/runtime:prebuilt-runtime-adb",
+ "//tools/tradefederation/prebuilts/filegroups/tradefed:bp2build_all_srcs",
+ "//tools/tradefederation/prebuilts/filegroups/suite:compatibility-host-util-prebuilt",
+ "//tools/tradefederation/prebuilts/filegroups/suite:compatibility-tradefed-prebuilt",
+ "//tools/asuite/atest:atest-tradefed",
+ "//tools/asuite/atest/bazel/reporter:bazel-result-reporter",
+ ],
+ doc = "Files needed on the PATH to run tradefed",
+ cfg = "exec",
+ ),
+
+ # Test config and if test config generation attributes.
+ "test_config": attr.label(
+ allow_single_file = True,
+ doc = "Test/Tradefed config.",
+ ),
+ "template_test_config": attr.label(
+ allow_single_file = True,
+ doc = "Template to generate test config.",
+ ),
+ "template_configs": attr.string_list(
+ doc = "Extra tradefed config options to extend into generated test config.",
+ ),
+ "template_install_base": attr.string(
+ default = "/data/local/tmp",
+ doc = "Directory to install tests onto the device for generated config",
+ ),
+}
+
+# Get test config if specified or generate test config from template.
+def _get_or_generate_test_config(ctx):
+ # Validate input
+ c = ctx.file.test_config
+ c_template = ctx.file.template_test_config
+    if c and c_template:
+        fail("Both test_config and template_test_config were provided; please use only one of them")
+    if not c and not c_template:
+        fail("Either test_config or template_test_config must be provided")
+
+ # Check for existing tradefed config - and add a symlink with test_identifier.
+ out = ctx.actions.declare_file(ctx.attr.test_identifier + ".config")
+ if c:
+ ctx.actions.symlink(
+ output = out,
+ target_file = c,
+ )
+ return out
+
+ # No test config found, generate config from template.
+ # Join extra configs together and add xml spacing indent.
+ extra_configs = "\n ".join(ctx.attr.template_configs)
+ ctx.actions.expand_template(
+ template = c_template,
+ output = out,
+ substitutions = {
+ "{MODULE}": ctx.attr.test_identifier,
+ "{EXTRA_CONFIGS}": extra_configs,
+ "{TEST_INSTALL_BASE}": ctx.attr.template_install_base,
+ },
+ )
+ return out
+
+# Generate tradefed result reporter config.
+def _create_result_reporter_config(ctx):
+ result_reporters_config_file = ctx.actions.declare_file("result-reporters.xml")
+ config_lines = [
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>",
+ "<configuration>",
+ ]
+
+ result_reporters = [
+ "com.android.tradefed.result.BazelExitCodeResultReporter",
+ "com.android.tradefed.result.BazelXmlResultReporter",
+ ]
+ for result_reporter in result_reporters:
+ config_lines.append(" <result_reporter class=\"%s\" />" % result_reporter)
+ config_lines.append("</configuration>")
+
+ ctx.actions.write(result_reporters_config_file, "\n".join(config_lines))
+ return result_reporters_config_file
+
+# Generate and run tradefed bash script.
+def _tradefed_test_impl(ctx):
+ # Get or generate test config.
+ test_config = _get_or_generate_test_config(ctx)
+
+ # Generate result reporter config file.
+ report_config = _create_result_reporter_config(ctx)
+
+ # Symlink file names if `__test_binary` was appended in a previous rule.
+ targets = []
+ for f in ctx.attr.test.files.to_list():
+ if "__test_binary" not in f.basename:
+ targets.append(f)
+ else:
+ file_name = f.basename.replace("__test_binary", "")
+ out = ctx.actions.declare_file(file_name)
+ ctx.actions.symlink(
+ output = out,
+ target_file = f,
+ )
+ targets.append(out)
+
+ # Symlink tradefed dependencies.
+ for f in ctx.files._tradefed_dependencies:
+ out = ctx.actions.declare_file(f.basename)
+ ctx.actions.symlink(
+ output = out,
+ target_file = f,
+ )
+ targets.append(out)
+
+ # Gather runfiles.
+ runfiles = ctx.runfiles()
+ runfiles = runfiles.merge_all([
+ ctx.attr.test.default_runfiles,
+ ctx.runfiles(files = targets + [test_config, report_config]),
+ ])
+
+ # Generate script to run tradefed.
+ script = ctx.actions.declare_file("tradefed_test_%s.sh" % ctx.label.name)
+ ctx.actions.expand_template(
+ template = ctx.file._tradefed_test_sh_template,
+ output = script,
+ is_executable = True,
+ substitutions = {
+ "{MODULE}": ctx.attr.test_identifier,
+ },
+ )
+
+ return [DefaultInfo(
+ executable = script,
+ runfiles = runfiles,
+ )]
+
+# Generate and run tradefed bash script for deviceless (host) tests.
+_tradefed_test = rule(
+ doc = "A rule used to run tests using Tradefed",
+ attrs = TRADEFED_TEST_ATTRIBUTES,
+ test = True,
+ implementation = _tradefed_test_impl,
+)
+
+def tradefed_host_driven_test(**kwargs):
+ _tradefed_test(
+ **kwargs
+ )
+
+def tradefed_device_test(**kwargs):
+ _tradefed_test(
+ host_driven = False,
+ **kwargs
+ )
diff --git a/rules/tradefed/tradefed.sh.tpl b/rules/tradefed/tradefed.sh.tpl
new file mode 100644
index 00000000..13d4c606
--- /dev/null
+++ b/rules/tradefed/tradefed.sh.tpl
@@ -0,0 +1,55 @@
+#!/bin/bash
+set -e
+
+TEST_PATH="${TEST_SRCDIR}"
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+PATH_ADDITIONS="{PATH_ADDITIONS}"
+
+export PATH="$SCRIPT_DIR:${PATH}"
+# Prepend ${REMOTE_JAVA_HOME}/bin to PATH to ensure that all Java invocations
+# throughout the test execution flow use the same version.
+if [ ! -z "${REMOTE_JAVA_HOME}" ]; then
+ export PATH="${REMOTE_JAVA_HOME}/bin:${PATH}"
+fi
+
+exit_code_file="$(mktemp /tmp/tf-exec-XXXXXXXXXX)"
+
+atest_tradefed.sh template/atest_local_min \
+ --template:map test=atest \
+ --template:map reporters="${SCRIPT_DIR}/result-reporters.xml" \
+ --tests-dir "$TEST_PATH" \
+ --logcat-on-failure \
+ --no-enable-granular-attempts \
+ --no-early-device-release \
+ --skip-host-arch-check \
+ --include-filter "{MODULE}" \
+ --skip-loading-config-jar \
+ "${ADDITIONAL_TRADEFED_OPTIONS[@]}" \
+ --bazel-exit-code-result-reporter:file=${exit_code_file} \
+ --bazel-xml-result-reporter:file=${XML_OUTPUT_FILE} \
+ --proto-output-file="${TEST_UNDECLARED_OUTPUTS_DIR}/proto-results" \
+ --log-file-path="${TEST_UNDECLARED_OUTPUTS_DIR}" \
+ "$@"
+
+# Use the TF exit code if it terminates abnormally.
+tf_exit=$?
+if [ ${tf_exit} -ne 0 ]
+then
+ echo "Tradefed command failed with exit code ${tf_exit}"
+ exit ${tf_exit}
+fi
+
+# Set the exit code based on the exit code in the reporter-generated file.
+exit_code=$(<${exit_code_file})
+if [ $? -ne 0 ]
+then
+ echo "Could not read exit code file: ${exit_code_file}"
+ exit 36
+fi
+
+if [ ${exit_code} -ne 0 ]
+then
+ echo "Test failed with exit code ${exit_code}"
+ exit ${exit_code}
+fi
diff --git a/rules/tradefed/tradefed_test.bzl b/rules/tradefed/tradefed_test.bzl
new file mode 100644
index 00000000..32a5afcc
--- /dev/null
+++ b/rules/tradefed/tradefed_test.bzl
@@ -0,0 +1,159 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
+load("//build/bazel/rules/cc:cc_library_static.bzl", "cc_library_static")
+load(
+ "//build/bazel/rules/test_common:paths.bzl",
+ "get_output_and_package_dir_based_path",
+)
+load(":tradefed.bzl", "tradefed_device_test", "tradefed_host_driven_test")
+
+tradefed_dependencies = [
+ "atest_tradefed.sh",
+ "libatest-tradefed.jar",
+ "libbazel-result-reporter.jar",
+ "tradefed.jar",
+]
+
+def _test_tradefed_config_generation_impl(ctx):
+ env = analysistest.begin(ctx)
+ actions = analysistest.target_actions(env)
+
+ actual_outputs = []
+ for action in actions:
+ for output in action.outputs.to_list():
+ actual_outputs.append(output.path)
+
+ for expected_output in ctx.attr.expected_outputs:
+ expected_output = get_output_and_package_dir_based_path(env, expected_output)
+ asserts.true(
+ env,
+ expected_output in actual_outputs,
+ "Expected: " + expected_output +
+ " in outputs: " + str(actual_outputs),
+ )
+ return analysistest.end(env)
+
+tradefed_config_generation_test = analysistest.make(
+ _test_tradefed_config_generation_impl,
+ attrs = {
+ "expected_outputs": attr.string_list(),
+ },
+)
+
+def tradefed_cc_outputs():
+ name = "cc"
+ target = "cc_target"
+
+ cc_library_static(
+ name = target,
+ srcs = ["foo.c"],
+ tags = ["manual"],
+ )
+ tradefed_device_test(
+ name = name,
+ test_identifier = target,
+ tags = ["manual"],
+ test = target,
+ test_config = "//build/bazel/rules/tradefed/test:example_config.xml",
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+
+ # check for expected output files (.config file and .sh script)
+ tradefed_config_generation_test(
+ name = name + "_test",
+ target_under_test = name,
+ expected_outputs = [
+ "tradefed_test_" + name + ".sh",
+ "result-reporters.xml",
+ target + ".config",
+ ] + tradefed_dependencies,
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return name + "_test"
+
+def tradefed_cc_host_outputs():
+ name = "cc_host"
+ target = "cc_host_target"
+
+ cc_library_static(
+ name = target,
+ tags = ["manual"],
+ )
+ tradefed_host_driven_test(
+ name = name,
+ test_identifier = target,
+ tags = ["manual"],
+ test = target,
+ test_config = "//build/bazel/rules/tradefed/test:example_config.xml",
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+
+ # check for expected output files (.config file and .sh script)
+ tradefed_config_generation_test(
+ name = name + "_test",
+ target_under_test = name,
+ expected_outputs = [
+ "tradefed_test_" + name + ".sh",
+ "result-reporters.xml",
+ target + ".config",
+ ] + tradefed_dependencies,
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return name + "_test"
+
+def tradefed_cc_host_outputs_generate_test_config():
+ name = "cc_host_generate_config"
+ target = "cc_host_target_generate_config"
+
+ cc_library_static(
+ name = target,
+ tags = ["manual"],
+ )
+ tradefed_host_driven_test(
+ name = name,
+ test_identifier = target,
+ tags = ["manual"],
+ test = target,
+ template_test_config = "//build/make/core:native_host_test_config_template.xml",
+ template_configs = [
+ "<option name=\"config-descriptor:metadata\" key=\"parameter\" value=\"not_multi_abi\" />",
+ "<option name=\"config-descriptor:metadata\" key=\"parameter\" value=\"secondary_user\" />",
+ ],
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+
+ # check for expected output files (.config file and .sh script)
+ tradefed_config_generation_test(
+ name = name + "_test",
+ target_under_test = name,
+ expected_outputs = [
+ "tradefed_test_" + name + ".sh",
+ "result-reporters.xml",
+ target + ".config",
+ ] + tradefed_dependencies,
+ target_compatible_with = ["//build/bazel/platforms/os:linux"],
+ )
+ return name + "_test"
+
+def tradefed_test_suite(name):
+ native.test_suite(
+ name = name,
+ tests = [
+ tradefed_cc_outputs(),
+ tradefed_cc_host_outputs(),
+ tradefed_cc_host_outputs_generate_test_config(),
+ ],
+ )
diff --git a/rules_cc/README.md b/rules_cc/README.md
deleted file mode 100644
index 9891f4b5..00000000
--- a/rules_cc/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-This directory and its subdirectories are a partial fork of the
-[`rules_cc`]((https://github.com/bazelbuild/rules_cc/)
-github repository, for experimental use with Bazel builds.
-
-To reduce the number of dependencies of this directory, not all files in `rules_cc`
-are included.
-
-When a file in this directory diverges
-from `rules_cc`, add a comment containing `Divergence from rules_cc` which
-explains the need for this divergence.
-
-This directory must follow HEAD `rules_cc` as closely as possible, with
-necessary changes made upstream ASAP.
diff --git a/rules_cc/WORKSPACE b/rules_cc/WORKSPACE
deleted file mode 100644
index 4a949e87..00000000
--- a/rules_cc/WORKSPACE
+++ /dev/null
@@ -1,76 +0,0 @@
-workspace(name = "rules_cc")
-
-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
-
-http_archive(
- name = "rules_pkg",
- urls = [
- "https://github.com/bazelbuild/rules_pkg/releases/download/0.2.5/rules_pkg-0.2.5.tar.gz",
- "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.2.5/rules_pkg-0.2.5.tar.gz",
- ],
- sha256 = "352c090cc3d3f9a6b4e676cf42a6047c16824959b438895a76c2989c6d7c246a",
-)
-load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
-rules_pkg_dependencies()
-
-http_archive(
- name = "bazel_federation",
- sha256 = "33222ab7bcc430f1ff1db8788c2e0118b749319dd572476c4fd02322d7d15792",
- strip_prefix = "bazel-federation-f0e5eda7f0cbfe67f126ef4dacb18c89039b0506",
- type = "zip",
- url = "https://github.com/bazelbuild/bazel-federation/archive/f0e5eda7f0cbfe67f126ef4dacb18c89039b0506.zip", # 2019-09-30
-)
-
-load("@bazel_federation//:repositories.bzl", "rules_cc_deps")
-
-rules_cc_deps()
-
-load("@bazel_federation//setup:rules_cc.bzl", "rules_cc_setup")
-
-rules_cc_setup()
-
-#
-# Dependencies for development of rules_cc itself.
-#
-load("//:internal_deps.bzl", "rules_cc_internal_deps")
-
-rules_cc_internal_deps()
-
-load("//:internal_setup.bzl", "rules_cc_internal_setup")
-
-rules_cc_internal_setup()
-
-http_archive(
- name = "com_google_googletest",
- sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb",
- strip_prefix = "googletest-release-1.10.0",
- urls = [
- "https://mirror.bazel.build/github.com/google/googletest/archive/release-1.10.0.tar.gz",
- "https://github.com/google/googletest/archive/release-1.10.0.tar.gz",
- ],
-)
-
-http_archive(
- name = "rules_proto",
- sha256 = "602e7161d9195e50246177e7c55b2f39950a9cf7366f74ed5f22fd45750cd208",
- strip_prefix = "rules_proto-97d8af4dc474595af3900dd85cb3a29ad28cc313",
- urls = [
- "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz",
- "https://github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz",
- ],
-)
-
-load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains")
-
-rules_proto_dependencies()
-
-rules_proto_toolchains()
-
-load("//cc:repositories.bzl", "rules_cc_toolchains")
-
-rules_cc_toolchains()
-
-local_repository(
- name = "test_repo",
- path = "examples/test_cc_shared_library2",
-)
diff --git a/rules_cc/cc/BUILD b/rules_cc/cc/BUILD
deleted file mode 100644
index 9a4b067a..00000000
--- a/rules_cc/cc/BUILD
+++ /dev/null
@@ -1,6 +0,0 @@
-# Divergence from rules_cc: Use a stub BUILD file, as there are reduced
-# dependencies in this fork.
-alias(
- name = "toolchain_type",
- actual = "@bazel_tools//tools/cpp:toolchain_type",
-)
diff --git a/rules_cc/cc/defs.bzl b/rules_cc/cc/defs.bzl
deleted file mode 100644
index f16ca7ca..00000000
--- a/rules_cc/cc/defs.bzl
+++ /dev/null
@@ -1,175 +0,0 @@
-# Copyright 2018 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Starlark rules for building C++ projects."""
-
-# load("//cc/private/rules_impl:cc_flags_supplier.bzl", _cc_flags_supplier = "cc_flags_supplier")
-# load("//cc/private/rules_impl:compiler_flag.bzl", _compiler_flag = "compiler_flag")
-
-_MIGRATION_TAG = "__CC_RULES_MIGRATION_DO_NOT_USE_WILL_BREAK__"
-
-def _add_tags(attrs):
- if "tags" in attrs and attrs["tags"] != None:
- attrs["tags"] = attrs["tags"] + [_MIGRATION_TAG]
- else:
- attrs["tags"] = [_MIGRATION_TAG]
- return attrs
-
-def cc_binary(**attrs):
- """Bazel cc_binary rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_binary
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_binary(**_add_tags(attrs))
-
-def cc_test(**attrs):
- """Bazel cc_test rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_test
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_test(**_add_tags(attrs))
-
-def cc_library(**attrs):
- """Bazel cc_library rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_library
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_library(**_add_tags(attrs))
-
-def cc_import(**attrs):
- """Bazel cc_import rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_import
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_import(**_add_tags(attrs))
-
-def cc_proto_library(**attrs):
- """Bazel cc_proto_library rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_proto_library
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_proto_library(**_add_tags(attrs))
-
-def fdo_prefetch_hints(**attrs):
- """Bazel fdo_prefetch_hints rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#fdo_prefetch_hints
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.fdo_prefetch_hints(**_add_tags(attrs))
-
-def fdo_profile(**attrs):
- """Bazel fdo_profile rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#fdo_profile
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.fdo_profile(**_add_tags(attrs))
-
-def cc_toolchain(**attrs):
- """Bazel cc_toolchain rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_toolchain
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_toolchain(**_add_tags(attrs))
-
-def cc_toolchain_suite(**attrs):
- """Bazel cc_toolchain_suite rule.
-
- https://docs.bazel.build/versions/master/be/c-cpp.html#cc_toolchain_suite
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.cc_toolchain_suite(**_add_tags(attrs))
-
-def objc_library(**attrs):
- """Bazel objc_library rule.
-
- https://docs.bazel.build/versions/master/be/objective-c.html#objc_library
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.objc_library(**_add_tags(attrs))
-
-def objc_import(**attrs):
- """Bazel objc_import rule.
-
- https://docs.bazel.build/versions/master/be/objective-c.html#objc_import
-
- Args:
- **attrs: Rule attributes
- """
-
- # buildifier: disable=native-cc
- native.objc_import(**_add_tags(attrs))
-
-# def cc_flags_supplier(**attrs):
-# """Bazel cc_flags_supplier rule.
-
-# Args:
-# **attrs: Rule attributes
-# """
-# _cc_flags_supplier(**_add_tags(attrs))
-
-# def compiler_flag(**attrs):
-# """Bazel compiler_flag rule.
-
-# Args:
-# **attrs: Rule attributes
-# """
-# _compiler_flag(**_add_tags(attrs))
diff --git a/rules_cc/cc/find_cc_toolchain.bzl b/rules_cc/cc/find_cc_toolchain.bzl
deleted file mode 100644
index 85ac72ed..00000000
--- a/rules_cc/cc/find_cc_toolchain.bzl
+++ /dev/null
@@ -1,89 +0,0 @@
-# pylint: disable=g-bad-file-header
-# Copyright 2016 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Returns the current `CcToolchainInfo`.
-
-* When https://github.com/bazelbuild/bazel/issues/7260 is **not** flipped, current
- C++ toolchain is selected using the legacy mechanism (`--crosstool_top`,
- `--cpu`, `--compiler`). For that to work the rule needs to declare an
- `_cc_toolchain` attribute, e.g.
-
- foo = rule(
- implementation = _foo_impl,
- attrs = {
- "_cc_toolchain": attr.label(
- default = Label(
- "@rules_cc//cc:current_cc_toolchain", # copybara-use-repo-external-label
- ),
- ),
- },
- )
-
-* When https://github.com/bazelbuild/bazel/issues/7260 **is** flipped, current
- C++ toolchain is selected using the toolchain resolution mechanism
- (`--platforms`). For that to work the rule needs to declare a dependency on
- C++ toolchain type:
-
- foo = rule(
- implementation = _foo_impl,
- toolchains = [
- "@rules_cc//cc:toolchain_type", # copybara-use-repo-external-label
- ],
- )
-
-We advise to depend on both `_cc_toolchain` attr and on the toolchain type for
-the duration of the migration. After
-https://github.com/bazelbuild/bazel/issues/7260 is flipped (and support for old
-Bazel version is not needed), it's enough to only keep the toolchain type.
-"""
-
-def find_cc_toolchain(ctx):
- """
-Returns the current `CcToolchainInfo`.
-
- Args:
- ctx: The rule context for which to find a toolchain.
-
- Returns:
- A CcToolchainInfo.
- """
-
- # Check the incompatible flag for toolchain resolution.
- if hasattr(cc_common, "is_cc_toolchain_resolution_enabled_do_not_use") and cc_common.is_cc_toolchain_resolution_enabled_do_not_use(ctx = ctx):
- if not "@bazel_tools//tools/cpp:toolchain_type" in ctx.toolchains:
- fail("In order to use find_cc_toolchain, your rule has to depend on C++ toolchain. See find_cc_toolchain.bzl docs for details.")
- toolchain_info = ctx.toolchains["@bazel_tools//tools/cpp:toolchain_type"]
- if hasattr(toolchain_info, "cc_provider_in_toolchain") and hasattr(toolchain_info, "cc"):
- return toolchain_info.cc
- return toolchain_info
-
- # Fall back to the legacy implicit attribute lookup.
- if hasattr(ctx.attr, "_cc_toolchain"):
- return ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
-
- # We didn't find anything.
- fail("In order to use find_cc_toolchain, your rule has to depend on C++ toolchain. See find_cc_toolchain.bzl docs for details.")
-
-def find_cpp_toolchain(ctx):
- """Deprecated, use `find_cc_toolchain` instead.
-
- Args:
- ctx: See `find_cc_toolchain`.
-
- Returns:
- A CcToolchainInfo.
- """
- return find_cc_toolchain(ctx)
diff --git a/rules_cc/examples/BUILD b/rules_cc/examples/BUILD
deleted file mode 100644
index c7da75d1..00000000
--- a/rules_cc/examples/BUILD
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2019 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
-load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
-
-# A collection of examples showing the usage of rules_cc
-licenses(["notice"])
-
-bool_flag(
- name = "incompatible_link_once",
- build_setting_default = False,
- visibility = ["//visibility:public"],
-)
-
-bool_flag(
- name = "enable_permissions_check",
- build_setting_default = False,
- visibility = ["//visibility:public"],
-)
-
-bool_flag(
- name = "experimental_debug",
- build_setting_default = False,
- visibility = ["//visibility:public"],
-)
-
-bzl_library(
- name = "experimental_cc_shared_library_bzl",
- srcs = ["experimental_cc_shared_library.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/rules_cc/examples/experimental_cc_shared_library.bzl b/rules_cc/examples/experimental_cc_shared_library.bzl
deleted file mode 100644
index 45659b90..00000000
--- a/rules_cc/examples/experimental_cc_shared_library.bzl
+++ /dev/null
@@ -1,514 +0,0 @@
-"""This is an experimental implementation of cc_shared_library.
-
-We may change the implementation at any moment or even delete this file. Do not
-rely on this. It requires bazel >1.2 and passing the flag
---experimental_cc_shared_library
-"""
-
-load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
-load("//cc:find_cc_toolchain.bzl", "find_cc_toolchain")
-
-# TODO(#5200): Add export_define to library_to_link and cc_library
-
-# Add this as a tag to any target that can be linked by more than one
-# cc_shared_library because it doesn't have static initializers or anything
-# else that may cause issues when being linked more than once. This should be
-# used sparingly after making sure it's safe to use.
-LINKABLE_MORE_THAN_ONCE = "LINKABLE_MORE_THAN_ONCE"
-
-CcSharedLibraryPermissionsInfo = provider(
- "Permissions for a cc shared library.",
- fields = {
- "targets": "Matches targets that can be exported.",
- },
-)
-GraphNodeInfo = provider(
- "Nodes in the graph of shared libraries.",
- fields = {
- "children": "Other GraphNodeInfo from dependencies of this target",
- "label": "Label of the target visited",
- "linkable_more_than_once": "Linkable into more than a single cc_shared_library",
- },
-)
-CcSharedLibraryInfo = provider(
- "Information about a cc shared library.",
- fields = {
- "dynamic_deps": "All shared libraries depended on transitively",
- "exports": "cc_libraries that are linked statically and exported",
- "link_once_static_libs": "All libraries linked statically into this library that should " +
- "only be linked once, e.g. because they have static " +
- "initializers. If we try to link them more than once, " +
- "we will throw an error",
- "linker_input": "the resulting linker input artifact for the shared library",
- "preloaded_deps": "cc_libraries needed by this cc_shared_library that should" +
- " be linked the binary. If this is set, this cc_shared_library has to " +
- " be a direct dependency of the cc_binary",
- },
-)
-
-def _separate_static_and_dynamic_link_libraries(
- direct_children,
- can_be_linked_dynamically,
- preloaded_deps_direct_labels):
- node = None
- all_children = list(direct_children)
- link_statically_labels = {}
- link_dynamically_labels = {}
-
- seen_labels = {}
-
- # Horrible I know. Perhaps Starlark team gives me a way to prune a tree.
- for i in range(2147483647):
- if i == len(all_children):
- break
-
- node = all_children[i]
- node_label = str(node.label)
-
- if node_label in seen_labels:
- continue
- seen_labels[node_label] = True
-
- if node_label in can_be_linked_dynamically:
- link_dynamically_labels[node_label] = True
- elif node_label not in preloaded_deps_direct_labels:
- link_statically_labels[node_label] = node.linkable_more_than_once
- all_children.extend(node.children)
-
- return (link_statically_labels, link_dynamically_labels)
-
-def _create_linker_context(ctx, linker_inputs):
- return cc_common.create_linking_context(
- linker_inputs = depset(linker_inputs, order = "topological"),
- )
-
-def _merge_cc_shared_library_infos(ctx):
- dynamic_deps = []
- transitive_dynamic_deps = []
- for dep in ctx.attr.dynamic_deps:
- if dep[CcSharedLibraryInfo].preloaded_deps != None:
- fail("{} can only be a direct dependency of a " +
- " cc_binary because it has " +
- "preloaded_deps".format(str(dep.label)))
- dynamic_dep_entry = (
- dep[CcSharedLibraryInfo].exports,
- dep[CcSharedLibraryInfo].linker_input,
- dep[CcSharedLibraryInfo].link_once_static_libs,
- )
- dynamic_deps.append(dynamic_dep_entry)
- transitive_dynamic_deps.append(dep[CcSharedLibraryInfo].dynamic_deps)
-
- return depset(direct = dynamic_deps, transitive = transitive_dynamic_deps)
-
-def _build_exports_map_from_only_dynamic_deps(merged_shared_library_infos):
- exports_map = {}
- for entry in merged_shared_library_infos.to_list():
- exports = entry[0]
- linker_input = entry[1]
- for export in exports:
- if export in exports_map:
- fail("Two shared libraries in dependencies export the same symbols. Both " +
- exports_map[export].libraries[0].dynamic_library.short_path +
- " and " + linker_input.libraries[0].dynamic_library.short_path +
- " export " + export)
- exports_map[export] = linker_input
- return exports_map
-
-def _build_link_once_static_libs_map(merged_shared_library_infos):
- link_once_static_libs_map = {}
- for entry in merged_shared_library_infos.to_list():
- link_once_static_libs = entry[2]
- linker_input = entry[1]
- for static_lib in link_once_static_libs:
- if static_lib in link_once_static_libs_map:
- fail("Two shared libraries in dependencies link the same " +
- " library statically. Both " + link_once_static_libs_map[static_lib] +
- " and " + str(linker_input.owner) +
- " link statically" + static_lib)
- link_once_static_libs_map[static_lib] = str(linker_input.owner)
- return link_once_static_libs_map
-
-def _wrap_static_library_with_alwayslink(ctx, feature_configuration, cc_toolchain, linker_input):
- new_libraries_to_link = []
- for old_library_to_link in linker_input.libraries:
- # TODO(#5200): This will lose the object files from a library to link.
- # Not too bad for the prototype but as soon as the library_to_link
- # constructor has object parameters this should be changed.
- new_library_to_link = cc_common.create_library_to_link(
- actions = ctx.actions,
- feature_configuration = feature_configuration,
- cc_toolchain = cc_toolchain,
- static_library = old_library_to_link.static_library,
- pic_static_library = old_library_to_link.pic_static_library,
- alwayslink = True,
- )
- new_libraries_to_link.append(new_library_to_link)
-
- return cc_common.create_linker_input(
- owner = linker_input.owner,
- libraries = depset(direct = new_libraries_to_link),
- user_link_flags = depset(direct = linker_input.user_link_flags),
- additional_inputs = depset(direct = linker_input.additional_inputs),
- )
-
-def _check_if_target_under_path(value, pattern):
- if pattern.workspace_name != value.workspace_name:
- return False
- if pattern.name == "__pkg__":
- return pattern.package == value.package
- if pattern.name == "__subpackages__":
- return _same_package_or_above(pattern, value)
-
- return pattern.package == value.package and pattern.name == value.name
-
-def _check_if_target_can_be_exported(target, current_label, permissions):
- if permissions == None:
- return True
-
- if (target.workspace_name != current_label.workspace_name or
- _same_package_or_above(current_label, target)):
- return True
-
- matched_by_target = False
- for permission in permissions:
- for permission_target in permission[CcSharedLibraryPermissionsInfo].targets:
- if _check_if_target_under_path(target, permission_target):
- return True
-
- return False
-
-def _check_if_target_should_be_exported_without_filter(target, current_label, permissions):
- return _check_if_target_should_be_exported_with_filter(target, current_label, None, permissions)
-
-def _check_if_target_should_be_exported_with_filter(target, current_label, exports_filter, permissions):
- should_be_exported = False
- if exports_filter == None:
- should_be_exported = True
- else:
- for export_filter in exports_filter:
- export_filter_label = current_label.relative(export_filter)
- if _check_if_target_under_path(target, export_filter_label):
- should_be_exported = True
- break
-
- if should_be_exported:
- if _check_if_target_can_be_exported(target, current_label, permissions):
- return True
- else:
- matched_by_filter_text = ""
- if exports_filter:
- matched_by_filter_text = " (matched by filter) "
- fail(str(target) + matched_by_filter_text +
- " cannot be exported from " + str(current_label) +
- " because it's not in the same package/subpackage and the library " +
- "doesn't have the necessary permissions. Use cc_shared_library_permissions.")
-
- return False
-
-def _filter_inputs(
- ctx,
- feature_configuration,
- cc_toolchain,
- transitive_exports,
- preloaded_deps_direct_labels,
- link_once_static_libs_map):
- linker_inputs = []
- link_once_static_libs = []
-
- graph_structure_aspect_nodes = []
- dependency_linker_inputs = []
- direct_exports = {}
- for export in ctx.attr.roots:
- direct_exports[str(export.label)] = True
- dependency_linker_inputs.extend(export[CcInfo].linking_context.linker_inputs.to_list())
- graph_structure_aspect_nodes.append(export[GraphNodeInfo])
-
- can_be_linked_dynamically = {}
- for linker_input in dependency_linker_inputs:
- owner = str(linker_input.owner)
- if owner in transitive_exports:
- can_be_linked_dynamically[owner] = True
-
- (link_statically_labels, link_dynamically_labels) = _separate_static_and_dynamic_link_libraries(
- graph_structure_aspect_nodes,
- can_be_linked_dynamically,
- preloaded_deps_direct_labels,
- )
-
- exports = {}
- owners_seen = {}
- for linker_input in dependency_linker_inputs:
- owner = str(linker_input.owner)
- if owner in owners_seen:
- continue
- owners_seen[owner] = True
- if owner in link_dynamically_labels:
- dynamic_linker_input = transitive_exports[owner]
- linker_inputs.append(dynamic_linker_input)
- elif owner in link_statically_labels:
- if owner in link_once_static_libs_map:
- fail(owner + " is already linked statically in " +
- link_once_static_libs_map[owner] + " but not exported")
-
- if owner in direct_exports:
- wrapped_library = _wrap_static_library_with_alwayslink(
- ctx,
- feature_configuration,
- cc_toolchain,
- linker_input,
- )
-
- if not link_statically_labels[owner]:
- link_once_static_libs.append(owner)
- linker_inputs.append(wrapped_library)
- else:
- can_be_linked_statically = False
-
- for static_dep_path in ctx.attr.static_deps:
- static_dep_path_label = ctx.label.relative(static_dep_path)
- if _check_if_target_under_path(linker_input.owner, static_dep_path_label):
- can_be_linked_statically = True
- break
-
- if _check_if_target_should_be_exported_with_filter(
- linker_input.owner,
- ctx.label,
- ctx.attr.exports_filter,
- _get_permissions(ctx),
- ):
- exports[owner] = True
- can_be_linked_statically = True
-
- if can_be_linked_statically:
- if not link_statically_labels[owner]:
- link_once_static_libs.append(owner)
- linker_inputs.append(linker_input)
- else:
- fail("We can't link " +
- str(owner) + " either statically or dynamically")
-
- # Divergence from rules_cc: Add all dynamic dependencies as linker inputs
- # even if they do not contain transitive dependencies of the roots.
- # TODO(cparsons): Push this as an option upstream..
- for dynamic_dep_input in transitive_exports.values():
- linker_inputs.append(dynamic_dep_input)
-
- return (exports, linker_inputs, link_once_static_libs)
-
-def _same_package_or_above(label_a, label_b):
- if label_a.workspace_name != label_b.workspace_name:
- return False
- package_a_tokenized = label_a.package.split("/")
- package_b_tokenized = label_b.package.split("/")
- if len(package_b_tokenized) < len(package_a_tokenized):
- return False
-
- if package_a_tokenized[0] != "":
- for i in range(len(package_a_tokenized)):
- if package_a_tokenized[i] != package_b_tokenized[i]:
- return False
-
- return True
-
-def _get_permissions(ctx):
- if ctx.attr._enable_permissions_check[BuildSettingInfo].value:
- return ctx.attr.permissions
- return None
-
-def _process_version_script(ctx):
- if ctx.attr.version_script == None:
- return ([], [])
-
- version_script = ctx.files.version_script[0]
- version_script_arg = "-Wl,--version-script," + version_script.path
- return ([version_script], [version_script_arg])
-
-def _cc_shared_library_impl(ctx):
- cc_common.check_experimental_cc_shared_library()
- cc_toolchain = find_cc_toolchain(ctx)
- feature_configuration = cc_common.configure_features(
- ctx = ctx,
- cc_toolchain = cc_toolchain,
- requested_features = ctx.features,
- unsupported_features = ctx.disabled_features,
- )
-
- merged_cc_shared_library_info = _merge_cc_shared_library_infos(ctx)
- exports_map = _build_exports_map_from_only_dynamic_deps(merged_cc_shared_library_info)
- for export in ctx.attr.roots:
- if str(export.label) in exports_map:
- fail("Trying to export a library already exported by a different shared library: " +
- str(export.label))
-
- _check_if_target_should_be_exported_without_filter(export.label, ctx.label, _get_permissions(ctx))
-
- preloaded_deps_direct_labels = {}
- preloaded_dep_merged_cc_info = None
- if len(ctx.attr.preloaded_deps) != 0:
- preloaded_deps_cc_infos = []
- for preloaded_dep in ctx.attr.preloaded_deps:
- preloaded_deps_direct_labels[str(preloaded_dep.label)] = True
- preloaded_deps_cc_infos.append(preloaded_dep[CcInfo])
-
- preloaded_dep_merged_cc_info = cc_common.merge_cc_infos(cc_infos = preloaded_deps_cc_infos)
-
- link_once_static_libs_map = _build_link_once_static_libs_map(merged_cc_shared_library_info)
-
- (exports, linker_inputs, link_once_static_libs) = _filter_inputs(
- ctx,
- feature_configuration,
- cc_toolchain,
- exports_map,
- preloaded_deps_direct_labels,
- link_once_static_libs_map,
- )
-
- linking_context = _create_linker_context(ctx, linker_inputs)
-
- # Divergence from rules_cc: that version does not support version scripts
- version_script, version_script_arg = _process_version_script(ctx)
-
- user_link_flags = version_script_arg[:]
- for user_link_flag in ctx.attr.user_link_flags:
- user_link_flags.append(ctx.expand_location(user_link_flag, targets = ctx.attr.additional_linker_inputs))
-
- linking_outputs = cc_common.link(
- actions = ctx.actions,
- feature_configuration = feature_configuration,
- cc_toolchain = cc_toolchain,
- linking_contexts = [linking_context],
- user_link_flags = user_link_flags,
- additional_inputs = ctx.files.additional_linker_inputs + version_script,
- name = ctx.label.name,
- output_type = "dynamic_library",
- )
-
- runfiles = ctx.runfiles(
- files = [linking_outputs.library_to_link.resolved_symlink_dynamic_library],
- )
- for dep in ctx.attr.dynamic_deps:
- runfiles = runfiles.merge(dep[DefaultInfo].data_runfiles)
-
- for export in ctx.attr.roots:
- exports[str(export.label)] = True
-
- debug_files = []
- if ctx.attr._experimental_debug[BuildSettingInfo].value:
- exports_debug_file = ctx.actions.declare_file(ctx.label.name + "_exports.txt")
- ctx.actions.write(content = "\n".join(exports.keys()), output = exports_debug_file)
-
- link_once_static_libs_debug_file = ctx.actions.declare_file(ctx.label.name + "_link_once_static_libs.txt")
- ctx.actions.write(content = "\n".join(link_once_static_libs), output = link_once_static_libs_debug_file)
-
- debug_files.append(exports_debug_file)
- debug_files.append(link_once_static_libs_debug_file)
-
- if not ctx.attr._incompatible_link_once[BuildSettingInfo].value:
- link_once_static_libs = []
-
- return [
- DefaultInfo(
- files = depset([linking_outputs.library_to_link.resolved_symlink_dynamic_library] + debug_files),
- runfiles = runfiles,
- ),
- CcSharedLibraryInfo(
- dynamic_deps = merged_cc_shared_library_info,
- exports = exports.keys(),
- link_once_static_libs = link_once_static_libs,
- linker_input = cc_common.create_linker_input(
- owner = ctx.label,
- libraries = depset([linking_outputs.library_to_link]),
- ),
- preloaded_deps = preloaded_dep_merged_cc_info,
- ),
- ]
-
-def _collect_graph_structure_info_from_children(ctx, attr):
- children = []
- deps = getattr(ctx.rule.attr, attr, [])
- if type(deps) == "list":
- for dep in deps:
- if GraphNodeInfo in dep:
- children.append(dep[GraphNodeInfo])
- elif deps != None and GraphNodeInfo in deps:
- # Single dep.
- children.append(deps[GraphNodeInfo])
- return children
-
-
-def _graph_structure_aspect_impl(target, ctx):
- children = []
-
- # This is a deviation from HEAD rules_cc because full_cc_library.bzl uses
- # static/shared (among others) attrs to combine multiple targets into one,
- # and the aspect needs to be able to traverse them to correctly populate
- # linker_inputs in the cc_shared_library impl.
- children += _collect_graph_structure_info_from_children(ctx, "deps")
- children += _collect_graph_structure_info_from_children(ctx, "whole_archive_deps")
- children += _collect_graph_structure_info_from_children(ctx, "dynamic_deps")
- children += _collect_graph_structure_info_from_children(ctx, "implementation_deps")
- children += _collect_graph_structure_info_from_children(ctx, "static")
- children += _collect_graph_structure_info_from_children(ctx, "shared")
-
- # TODO(bazel-team): Add flag to Bazel that can toggle the initialization of
- # linkable_more_than_once.
- linkable_more_than_once = False
- if hasattr(ctx.rule.attr, "tags"):
- for tag in ctx.rule.attr.tags:
- if tag == LINKABLE_MORE_THAN_ONCE:
- linkable_more_than_once = True
-
- return [GraphNodeInfo(
- label = ctx.label,
- children = children,
- linkable_more_than_once = linkable_more_than_once,
- )]
-
-def _cc_shared_library_permissions_impl(ctx):
- targets = []
- for target_filter in ctx.attr.targets:
- target_filter_label = ctx.label.relative(target_filter)
- if not _check_if_target_under_path(target_filter_label, ctx.label.relative(":__subpackages__")):
- fail("A cc_shared_library_permissions rule can only list " +
- "targets that are in the same package or a sub-package")
- targets.append(target_filter_label)
-
- return [CcSharedLibraryPermissionsInfo(
- targets = targets,
- )]
-
-graph_structure_aspect = aspect(
- attr_aspects = ["*"],
- implementation = _graph_structure_aspect_impl,
-)
-
-cc_shared_library_permissions = rule(
- implementation = _cc_shared_library_permissions_impl,
- attrs = {
- "targets": attr.string_list(),
- },
-)
-
-cc_shared_library = rule(
- implementation = _cc_shared_library_impl,
- attrs = {
- "additional_linker_inputs": attr.label_list(allow_files = True),
- "dynamic_deps": attr.label_list(providers = [CcSharedLibraryInfo]),
- "exports_filter": attr.string_list(),
- "permissions": attr.label_list(providers = [CcSharedLibraryPermissionsInfo]),
- "preloaded_deps": attr.label_list(providers = [CcInfo]),
- "roots": attr.label_list(providers = [CcInfo], aspects = [graph_structure_aspect]),
- "static_deps": attr.string_list(),
- "version_script": attr.label(allow_single_file = True),
- "user_link_flags": attr.string_list(),
- "_cc_toolchain": attr.label(default = "@bazel_tools//tools/cpp:current_cc_toolchain"),
- "_enable_permissions_check": attr.label(default = "//examples:enable_permissions_check"),
- "_experimental_debug": attr.label(default = "//examples:experimental_debug"),
- "_incompatible_link_once": attr.label(default = "//examples:incompatible_link_once"),
- },
- toolchains = ["@bazel_tools//tools/cpp:toolchain_type"], # copybara-use-repo-external-label
- fragments = ["cpp"],
- incompatible_use_toolchain_transition = True,
-)
-
-for_testing_dont_use_check_if_target_under_path = _check_if_target_under_path
diff --git a/scripts/apex_compare.sh b/scripts/apex_compare.sh
new file mode 100755
index 00000000..741b3d66
--- /dev/null
+++ b/scripts/apex_compare.sh
@@ -0,0 +1,208 @@
+#! /bin/bash -eu
+
+# Compares two APEX files.
+# This script is aimed at regression testing. It allows comparing an
+# APEX target built by Bazel to the same target built by Soong.
+# The first of its arguments is the reference APEX (the one built by
+# Soong), the second is "our" APEX (built by Bazel).
+#
+# An APEX is a ZIP archive, so we treat each APEX as a file system and
+# compare these two file systems. The script displays:
+# - missing files (those in the reference APEX missing from our APEX)
+# - extra files (those only in our APEX)
+# - for each file present in both, their difference.
+# The main part of an APEX is an image file (payload.img), which is an
+# image of a filesystem in EXT2 format. The script "mounts" these
+# images and then compares them side by side.
+#
+# This script relies on the presence of an executable (binary/script)
+# to "mount" a file of certain formats as file systems. It runs this
+# executable as follows:
+# * mount ZIPFILE at DIR:
+# view_file_as_fs zip ZIPFILE DIR
+# * unmount ZIPFILE at DIR:
+# view_file_as_fs -u zip DIR
+# * mount EXT2 image IMGFILE at DIR:
+# view_file_as_fs ext2 IMGFILE DIR
+# * unmount EXT2 image IMGFILE at DIR:
+# view_file_as_fs -u ext2 DIR
+#
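+# Example invocation (the APEX paths below are illustrative, not actual
+# build outputs):
+#   ./apex_compare.sh soong_out/com.android.foo.apex bazel_out/com.android.foo.apex
+#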
+
+function die() { format=$1; shift; printf "$format\n" $@; exit 1; }
+
+# Delouse
+(($# == 2)) || die "usage: ${0##*/} REF_APEX OUR_APEX"
+declare -r ref_apex=$1 our_apex=$2
+for f in $ref_apex $our_apex; do
+ [[ -f $f ]] || die "$f does not exist"
+done
+
+# Maybe we are lucky.
+cmp -s $ref_apex $our_apex && exit
+
+declare -r file_as_fs_viewer=$(which view_file_as_fs)
+if [[ -z "${file_as_fs_viewer}" ]]; then
+ cat <<"EOF"
+You need to have file-as-filesystem viewer application `view_file_as_fs`
+on the PATH. If you have FUSE's fuse-ext2 and fuse-zip installed, you
+can save the script below as view_file_as_fs:
+
+#!/bin/bash -eu
+#
+# Mounts a file as a read-only filesystem or unmounts such previously
+# mounted file system.
+# This script can mount a zip file or a file containing an ext2 image
+# as a file system. It requires the presence of fuse-zip and fuse-ext2
+# FUSE packages.
+function die() { format=$1; shift; printf "$format\n" $@; exit 1; }
+function usage() {
+ die "Usage:\n ${0##*/} {ext2|zip} FILE MOUNT-POINT\nor\n ${0##*/} -u {ext2|zip} MOUNT-POINT"
+}
+
+declare umount=
+while getopts "u" opt; do
+ case $opt in
+ u) umount=t ;;
+ ?) usage
+ esac
+done
+
+shift $(($OPTIND-1))
+if [[ -n "$umount" ]]; then
+ (($#==2)) || usage
+ mount | grep -q "on $2 " && umount "$2"
+else
+ (($#==3)) || usage
+ declare -r file="$2" mt="$3"
+ [[ -d "$mt" && -z "$(ls -1A $mt)" ]] || die "$mt should be an empty directory"
+ case "$1" in
+ ext2) fuse-ext2 "$file" "$mt" ;;
+ zip)
+ [[ -f $file ]] || die "$file is not a file" # Because fuse-zip silently mounts it as empty
+ fuse-zip "$file" "$mt" ;;
+ *) usage ;;
+ esac
+fi
+EOF
+ exit 1
+fi
+
+# "Mounts" file as filesystem and prints the sorted list of files in it.
+function mount_and_list() {
+ $file_as_fs_viewer $1 $2 $3 2>/dev/null
+  find $3 -type f -printf "%P\n" | sort
+}
+
+function cleanup() {
+ for d in $fuse_dir/*.img; do
+ $file_as_fs_viewer -u ext2 $d || /bin/true
+ done
+ for d in $fuse_dir/*.apex; do
+ $file_as_fs_viewer -u zip $d || /bin/true
+ done
+ rm -rf $fuse_dir
+}
+
+function dump_proto() {
+ protoc --decode $1 $2
+}
+
+function dump_buildinfo() {
+ dump_proto apex.proto.ApexBuildInfo system/apex/proto/apex_build_info.proto
+}
+
+function dump_apex_manifest() {
+ dump_proto apex.proto.ApexManifest system/apex/proto/apex_manifest.proto
+}
+
+function compare_images() {
+ local -r ref_img=$1 our_img=$2
+
+ # Mount each APEX and save its sorted contents. Classify the contents
+ mount_and_list ext2 $ref_img $fuse_dir/ref.img >$fuse_dir/ref.img.list
+ mount_and_list ext2 $our_img $fuse_dir/our.img >$fuse_dir/our.img.list
+ . <(classify $fuse_dir/ref.img.list $fuse_dir/our.img.list; /bin/true)
+
+ # Now we have missing/extra/common holding respective file lists. Compare
+ ((${#missing[@]}==0)) || \
+ { printf "Missing image files:"; printf " %s" ${missing[@]}; printf "\n"; }
+ ((${#extra[@]}==0)) || \
+ { printf "Extra image files:"; printf " %s" ${extra[@]}; printf "\n"; }
+ for f in "${common[@]}"; do
+ cmp -s $fuse_dir/{ref,our}.img/$f && continue
+ echo " $f" in image differs:
+ case $f in
+ etc/init.rc)
+ diff $fuse_dir/{ref,our}.img/$f || /bin/true
+ ;;
+ apex_manifest.pb)
+      diff <(dump_apex_manifest <$fuse_dir/ref.img/$f) <(dump_apex_manifest <$fuse_dir/our.img/$f) || /bin/true
+ ;;
+ *)
+ # TODO: should do more than just size comparison.
+ sizes=($(stat --format "%s" $fuse_dir/{ref,our}.img/$f))
+ delta=$((${sizes[1]}-${sizes[0]}))
+ (($delta==0)) || printf " size differs: %d (%d)\n" ${sizes[1]} $delta
+ ;;
+ esac
+ done
+}
+
+# Prints a script that sets the `missing`/`extra`/`common` shell
+# variables to arrays containing the corresponding files, i.e. its
+# output is
+#   declare -a missing=() extra=() common=()
+# missing+=(missing_file)
+# extra+=(extra_file)
+# common+=(common_file)
+# .....
+function classify() {
+ comm $1 $2 | sed -nr \
+ -e '1ideclare -a missing=() extra=() common=()' \
+ -e '/^\t\t/{s/\t\t(.*)/common+=(\1)/p;d}' \
+ -e '/^\t/{s/^\t(.*)/extra+=(\1)/p;d}' \
+ -e 's/(.*)/missing+=(\1)/p'; /bin/true
+}
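+
+# For example (hypothetical file lists): with a reference list containing
+# `a` and `b` and "our" list containing `b` and `c`, classify emits
+#   declare -a missing=() extra=() common=()
+#   missing+=(a)
+#   common+=(b)
+#   extra+=(c)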
+
+fuse_dir=$(mktemp -d --tmpdir apexfuse.XXXXX)
+mkdir -p $fuse_dir/{our,ref}.{apex,img}
+trap cleanup EXIT
+
+# Mount each APEX and save its sorted contents. Classify the contents
+mount_and_list zip $ref_apex $fuse_dir/ref.apex >$fuse_dir/ref.apex.list
+mount_and_list zip $our_apex $fuse_dir/our.apex >$fuse_dir/our.apex.list
+. <(classify $fuse_dir/ref.apex.list $fuse_dir/our.apex.list; /bin/true)
+
+# Now we have missing/extra/common holding respective file lists. Compare
+((${#missing[@]}==0)) || { printf "Missing files:"; printf " %s" ${missing[@]}; printf "\n"; }
+((${#extra[@]}==0)) || { printf "Extra files:"; printf " %s" ${extra[@]}; printf "\n"; }
+
+for f in "${common[@]}"; do
+ cmp -s $fuse_dir/{ref,our}.apex/$f && continue
+ # File differs, compare known file types intelligently
+ case $f in
+ AndroidManifest.xml)
+ echo $f differs:
+ diff \
+        <(aapt dump xmltree $ref_apex AndroidManifest.xml) \
+        <(aapt dump xmltree $our_apex AndroidManifest.xml) || /bin/true
+ ;;
+ apex_build_info.pb)
+ echo $f differs:
+ diff <(dump_buildinfo <$fuse_dir/ref.apex/$f) <(dump_buildinfo <$fuse_dir/our.apex/$f) || /bin/true
+ ;;
+ manifest.pb)
+ echo $f differs:
+      diff <(dump_apex_manifest <$fuse_dir/ref.apex/$f) <(dump_apex_manifest <$fuse_dir/our.apex/$f) || /bin/true
+ ;;
+ apex_payload.img)
+ echo image $f differs, mounting it:
+ compare_images $fuse_dir/{ref,our}.apex/$f
+ ;;
+ META-INF/*)
+      # Ignore these. They are derived from the rest;
+      # showing their difference does not help.
+ ;;
+ *) echo $f; diff $fuse_dir/{ref,our}.apex/$f || /bin/true
+ esac
+done
diff --git a/scripts/bp2build-progress/README.md b/scripts/bp2build-progress/README.md
deleted file mode 100644
index 61ffef51..00000000
--- a/scripts/bp2build-progress/README.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# bp2build progress graphs
-
-This directory contains tools to generate reports and .png graphs of the
-bp2build conversion progress, for any module.
-
-This tool relies on `json-module-graph` and `bp2build` to be buildable targets
-for this branch.
-
-## Prerequisites
-
-* `/usr/bin/dot`: turning dot graphviz files into .pngs
-* Optional: `/usr/bin/jq`: running the query scripts over the json-module-graph.
-
-Tip: `--use_queryview=true` allows running `bp2build-progress.py` without `jq`.
-
-## Instructions
-
-# Generate the report for a module, e.g. adbd
-
-```
-./bp2build-progress.py report -m adbd
-```
-
-or:
-
-```
-./bp2build-progress.py report -m adbd --use_queryview=true
-```
-
-# Generate the report for a module, e.g. adbd
-
-```
-./bp2build-progress.py graph -m adbd > /tmp/graph.in && dot -Tpng -o /tmp/graph.png /tmp/graph.in
-```
-
-or:
-
-```
-./bp2build-progress.py graph -m adbd --use_queryview=true > /tmp/graph.in && dot -Tpng -o /tmp/graph.png /tmp/graph.in
-```
diff --git a/scripts/bp2build-progress/bp2build-module-dep-infos.py b/scripts/bp2build-progress/bp2build-module-dep-infos.py
deleted file mode 100755
index 95cc1b72..00000000
--- a/scripts/bp2build-progress/bp2build-module-dep-infos.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A script to produce a csv report of all modules of a given type.
-
-There is one output row per module of the input type, each column corresponds
-to one of the fields of the _ModuleTypeInfo named tuple described below.
-The script allows to ignore certain dependency edges based on the target module
-name, or the dependency tag name.
-
-Usage:
- ./bp2build-module-dep-infos.py -m <module type>
- --ignore_by_name <modules to ignore>
- --ignore_by_tag <dependency tags to ignore>
-
-"""
-
-import argparse
-import collections
-import csv
-import dependency_analysis
-import sys
-
-_ModuleTypeInfo = collections.namedtuple(
- "_ModuleTypeInfo",
- [
- # map of module type to the set of properties used by modules
- # of the given type in the dependency tree.
- "type_to_properties",
- # [java modules only] list of source file extensions used by this module.
- "java_source_extensions",
- ])
-
-_DependencyRelation = collections.namedtuple("_DependencyRelation", [
- "transitive_dependency",
- "top_level_module",
-])
-
-
-def _get_java_source_extensions(module):
- out = set()
- if "Module" not in module:
- return out
- if "Java" not in module["Module"]:
- return out
- if "SourceExtensions" not in module["Module"]["Java"]:
- return out
- if module["Module"]["Java"]["SourceExtensions"]:
- out.update(module["Module"]["Java"]["SourceExtensions"])
- return out
-
-
-def _get_set_properties(module):
- set_properties = set()
- if "Module" not in module:
- return set_properties
- if "Android" not in module["Module"]:
- return set_properties
- if "SetProperties" not in module["Module"]["Android"]:
- return set_properties
- for prop in module["Module"]["Android"]["SetProperties"]:
- set_properties.add(prop["Name"])
- return set_properties
-
-
-def _should_ignore(module, ignored_names):
- return (dependency_analysis.is_windows_variation(module) or
- module["Name"] in ignored_names or
- dependency_analysis.ignore_kind(module["Type"]))
-
-def _update_infos(module_name, type_infos, module_graph_map, ignored_dep_names):
- module = module_graph_map[module_name]
- if _should_ignore(module, ignored_dep_names) or module_name in type_infos:
- return
- for dep in module["Deps"]:
- dep_name = dep["Name"]
- if dep_name == module_name:
- continue
- _update_infos(dep_name, type_infos, module_graph_map, ignored_dep_names)
-
- java_source_extensions = _get_java_source_extensions(module)
- type_to_properties = collections.defaultdict(set)
- if module["Type"]:
- type_to_properties[module["Type"]].update(_get_set_properties(module))
- for dep in module["Deps"]:
- dep_name = dep["Name"]
- if _should_ignore(module_graph_map[dep_name], ignored_dep_names):
- continue
- if dep_name == module_name:
- continue
- for dep_type, dep_type_properties in type_infos[dep_name].type_to_properties.items():
- type_to_properties[dep_type].update(dep_type_properties)
- java_source_extensions.update(type_infos[dep_name].java_source_extensions)
- type_infos[module_name] = _ModuleTypeInfo(
- type_to_properties=type_to_properties,
- java_source_extensions=java_source_extensions)
-
-
-def module_type_info_from_json(module_graph, module_type, ignored_dep_names):
- """Builds a map of module name to _ModuleTypeInfo for each module of module_type.
-
- Dependency edges pointing to modules in ignored_dep_names are not followed.
- """
- module_graph_map = dict()
- module_stack = []
- for module in module_graph:
- # Windows variants have incomplete dependency information in the json module graph.
- if dependency_analysis.is_windows_variation(module):
- continue
- module_graph_map[module["Name"]] = module
- if module["Type"] == module_type:
- module_stack.append(module["Name"])
- # dictionary of module name to _ModuleTypeInfo.
- type_infos = {}
- for module_name in module_stack:
- # post-order traversal of the dependency graph builds the type_infos
- # dictionary from the leaves so that common dependencies are visited
- # only once.
- _update_infos(module_name, type_infos, module_graph_map, ignored_dep_names)
-
- return {
- name: info
- for name, info in type_infos.items()
- if module_graph_map[name]["Type"] == module_type
- }
-
-
-def main():
- parser = argparse.ArgumentParser(description="")
- parser.add_argument("--module_type", "-m", help="name of Soong module type.")
- parser.add_argument(
- "--ignore_by_name",
- type=str,
- default="",
- required=False,
- help="Comma-separated list. When building the tree of transitive dependencies, will not follow dependency edges pointing to module names listed by this flag."
- )
- args = parser.parse_args()
-
- module_type = args.module_type
- ignore_by_name = args.ignore_by_name
-
- module_graph = dependency_analysis.get_json_module_type_info(module_type)
- type_infos = module_type_info_from_json(module_graph, module_type,
- ignore_by_name.split(","))
- writer = csv.writer(sys.stdout)
- writer.writerow([
- "module name",
- "properties",
- "java source extensions",
- ])
- for module, module_type_info in type_infos.items():
- writer.writerow([
- module,
- ("[\"%s\"]" % '"\n"'.join([
- "%s: %s" % (mtype, ",".join(properties)) for mtype, properties in
- module_type_info.type_to_properties.items()
- ]) if len(module_type_info.type_to_properties) else "[]"),
- ("[\"%s\"]" % '", "'.join(module_type_info.java_source_extensions)
- if len(module_type_info.java_source_extensions) else "[]"),
- ])
-
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/bp2build-progress/bp2build-progress.py b/scripts/bp2build-progress/bp2build-progress.py
deleted file mode 100755
index 822d0720..00000000
--- a/scripts/bp2build-progress/bp2build-progress.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A json-module-graph postprocessing script to generate a bp2build progress tracker.
-
-Usage:
- ./bp2build-progress.py [report|graph] -m <module name>
-
-Example:
-
- To generate a report on the `adbd` module, run:
- ./bp2build-progress report -m adbd
-
- To generate a graph on the `adbd` module, run:
- ./bp2build-progress graph -m adbd > graph.in && dot -Tpng -o graph.png
- graph.in
-
-"""
-
-import argparse
-import collections
-import datetime
-import dependency_analysis
-import os.path
-import queue
-import subprocess
-import sys
-
-_ModuleInfo = collections.namedtuple("_ModuleInfo", [
- "name",
- "kind",
- "dirname",
-])
-
-_ReportData = collections.namedtuple("_ReportData", [
- "input_module",
- "all_unconverted_modules",
- "blocked_modules",
- "dirs_with_unconverted_modules",
- "kind_of_unconverted_modules",
- "converted",
-])
-
-
-def combine_report_data(data):
- ret = _ReportData(
- input_module=set(),
- all_unconverted_modules=collections.defaultdict(set),
- blocked_modules=collections.defaultdict(set),
- dirs_with_unconverted_modules=set(),
- kind_of_unconverted_modules=set(),
- converted=set(),
- )
- for item in data:
- ret.input_module.add(item.input_module)
- for key, value in item.all_unconverted_modules.items():
- ret.all_unconverted_modules[key].update(value)
- for key, value in item.blocked_modules.items():
- ret.blocked_modules[key].update(value)
- ret.dirs_with_unconverted_modules.update(item.dirs_with_unconverted_modules)
- ret.kind_of_unconverted_modules.update(item.kind_of_unconverted_modules)
- if len(ret.converted) == 0:
- ret.converted.update(item.converted)
- return ret
-
-
-# Generate a dot file containing the transitive closure of the module.
-def generate_dot_file(modules, converted, module):
- DOT_TEMPLATE = """
-digraph mygraph {{
- node [shape=box];
-
- %s
-}}
-"""
-
- make_node = lambda module, color: \
- ('"{name}" [label="{name}\\n{kind}" color=black, style=filled, '
- "fillcolor={color}]").format(name=module.name, kind=module.kind, color=color)
- make_edge = lambda module, dep: \
- '"%s" -> "%s"' % (module.name, dep)
-
- # Check that all modules in the argument are in the list of converted modules
- all_converted = lambda modules: all(map(lambda m: m in converted, modules))
-
- dot_entries = []
-
- for module, deps in modules.items():
- if module.name in converted:
- # Skip converted modules (nodes)
- continue
- elif module.name not in converted:
- if all_converted(deps):
- dot_entries.append(make_node(module, "yellow"))
- else:
- dot_entries.append(make_node(module, "tomato"))
-
- # Print all edges for this module
- for dep in list(deps):
- # Skip converted deps (edges)
- if dep not in converted:
- dot_entries.append(make_edge(module, dep))
-
- print(DOT_TEMPLATE % "\n ".join(dot_entries))
-
-
-# Generate a report for each module in the transitive closure, and the blockers for each module
-def generate_report_data(modules, converted, input_module):
- # Map of [number of unconverted deps] to list of entries,
- # with each entry being the string: "<module>: <comma separated list of unconverted modules>"
- blocked_modules = collections.defaultdict(set)
-
- # Map of unconverted modules to the modules they're blocking
- # (i.e. reverse deps)
- all_unconverted_modules = collections.defaultdict(set)
-
- dirs_with_unconverted_modules = set()
- kind_of_unconverted_modules = set()
-
- for module, deps in sorted(modules.items()):
- unconverted_deps = set(dep for dep in deps if dep not in converted)
- for dep in unconverted_deps:
- all_unconverted_modules[dep].add(module)
-
- unconverted_count = len(unconverted_deps)
- if module.name not in converted:
- report_entry = "{name} [{kind}] [{dirname}]: {unconverted_deps}".format(
- name=module.name,
- kind=module.kind,
- dirname=module.dirname,
- unconverted_deps=", ".join(sorted(unconverted_deps)))
- blocked_modules[unconverted_count].add(report_entry)
- dirs_with_unconverted_modules.add(module.dirname)
- kind_of_unconverted_modules.add(module.kind)
-
- return _ReportData(
- input_module=input_module,
- all_unconverted_modules=all_unconverted_modules,
- blocked_modules=blocked_modules,
- dirs_with_unconverted_modules=dirs_with_unconverted_modules,
- kind_of_unconverted_modules=kind_of_unconverted_modules,
- converted=converted,
- )
-
-
-def generate_report(report_data):
- report_lines = []
- input_modules = sorted(report_data.input_module)
-
- report_lines.append("# bp2build progress report for: %s\n" % input_modules)
- report_lines.append("Ignored module types: %s\n" %
- sorted(dependency_analysis.IGNORED_KINDS))
- report_lines.append("# Transitive dependency closure:")
-
- for count, modules in sorted(report_data.blocked_modules.items()):
- report_lines.append("\n%d unconverted deps remaining:" % count)
- for module_string in sorted(modules):
- report_lines.append(" " + module_string)
-
- report_lines.append("\n")
- report_lines.append("# Unconverted deps of {}:\n".format(input_modules))
- for count, dep in sorted(
- ((len(unconverted), dep)
- for dep, unconverted in report_data.all_unconverted_modules.items()),
- reverse=True):
- report_lines.append("%s: blocking %d modules" % (dep, count))
-
- report_lines.append("\n")
- report_lines.append("# Dirs with unconverted modules:\n\n{}".format("\n".join(
- sorted(report_data.dirs_with_unconverted_modules))))
-
- report_lines.append("\n")
- report_lines.append("# Kinds with unconverted modules:\n\n{}".format(
- "\n".join(sorted(report_data.kind_of_unconverted_modules))))
-
- report_lines.append("\n")
- report_lines.append("# Converted modules:\n\n%s" %
- "\n".join(sorted(report_data.converted)))
-
- report_lines.append("\n")
- report_lines.append(
- "Generated by: https://cs.android.com/android/platform/superproject/+/master:build/bazel/scripts/bp2build-progress/bp2build-progress.py"
- )
- report_lines.append("Generated at: %s" %
- datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S %z"))
- print("\n".join(report_lines))
-
-
-def adjacency_list_from_json(module_graph, ignore_by_name, top_level_module):
- # The set of ignored modules. These modules (and their dependencies) are not shown
- # in the graph or report.
- ignored = set()
-
- # A map of module name to _ModuleInfo
- name_to_info = dict()
- module_graph_map = dict()
- q = queue.Queue()
-
- # Do a single pass to find all top-level modules to be ignored
- for module in module_graph:
- name = module["Name"]
- if dependency_analysis.is_windows_variation(module):
- continue
- if ignore_kind(module["Type"]) or name in ignore_by_name:
- ignored.add(module["Name"])
- continue
- name_to_info[name] = _ModuleInfo(
- name=name,
- kind=module["Type"],
- dirname=os.path.dirname(module["Blueprint"]))
- module_graph_map[module["Name"]] = module
- if module["Name"] == top_level_module:
- q.put(module["Name"])
-
- # An adjacency list for all modules in the transitive closure, excluding ignored modules.
- module_adjacency_list = {}
- visited = set()
- # Create the adjacency list.
- while not q.empty():
- module_name = q.get()
- module = module_graph_map[module_name]
- visited.add(module_name)
- if module_name in ignored:
- continue
- if dependency_analysis.is_windows_variation(module):
- # ignore the windows variations of modules
- continue
-
- module_info = name_to_info[module_name]
- module_adjacency_list[module_info] = set()
- for dep in module["Deps"]:
- dep_name = dep["Name"]
- if dep_name in ignored or dep_name == module_name:
- continue
- module_adjacency_list[module_info].add(dep_name)
- if dep_name not in visited:
- q.put(dep_name)
-
- return module_adjacency_list
-
-
-def ignore_kind(kind):
- return kind in dependency_analysis.IGNORED_KINDS or "defaults" in kind
-
-
-def bazel_target_to_dir(full_target):
- dirname, _ = full_target.split(":")
- return dirname[2:]
-
-
-def adjacency_list_from_queryview_xml(module_graph, ignore_by_name,
- top_level_module):
- # The set of ignored modules. These modules (and their dependencies) are
- # not shown in the graph or report.
- ignored = set()
-
- # A map of module name to ModuleInfo
- name_to_info = dict()
-
- # queryview embeds variant in long name, keep a map of the name with vaiarnt
- # to just name
- name_with_variant_to_name = dict()
-
- module_graph_map = dict()
- q = queue.Queue()
-
- for module in module_graph:
- ignore = False
- if module.tag != "rule":
- continue
- kind = module.attrib["class"]
- name_with_variant = module.attrib["name"]
- name = None
- variant = ""
- for attr in module:
- attr_name = attr.attrib["name"]
- if attr_name == "soong_module_name":
- name = attr.attrib["value"]
- elif attr_name == "soong_module_variant":
- variant = attr.attrib["value"]
- elif attr_name == "soong_module_type" and kind == "generic_soong_module":
- kind = attr.attrib["value"]
- # special handling for filegroup srcs, if a source has the same name as
- # the module, we don't convert it
- elif kind == "filegroup" and attr_name == "srcs":
- for item in attr:
- if item.attrib["value"] == name:
- ignore = True
- if name in ignore_by_name:
- ignore = True
-
- if ignore_kind(kind) or variant.startswith("windows") or ignore:
- ignored.add(name_with_variant)
- else:
- if name == top_level_module:
- q.put(name_with_variant)
- name_with_variant_to_name.setdefault(name_with_variant, name)
- name_to_info.setdefault(
- name,
- _ModuleInfo(
- name=name,
- kind=kind,
- dirname=bazel_target_to_dir(name_with_variant),
- ))
- module_graph_map[name_with_variant] = module
-
- # An adjacency list for all modules in the transitive closure, excluding ignored modules.
- module_adjacency_list = dict()
- visited = set()
- while not q.empty():
- name_with_variant = q.get()
- module = module_graph_map[name_with_variant]
- if module.tag != "rule":
- continue
- visited.add(name_with_variant)
- if name_with_variant in ignored:
- continue
-
- name = name_with_variant_to_name[name_with_variant]
- module_info = name_to_info[name]
- module_adjacency_list[module_info] = set()
- for attr in module:
- if attr.tag != "rule-input":
- continue
- dep_name_with_variant = attr.attrib["name"]
- if dep_name_with_variant in ignored:
- continue
- dep_name = name_with_variant_to_name[dep_name_with_variant]
- if name == dep_name:
- continue
- if dep_name_with_variant not in visited:
- q.put(dep_name_with_variant)
- module_adjacency_list[module_info].add(dep_name)
-
- return module_adjacency_list
-
-
-def get_module_adjacency_list(top_level_module, use_queryview, ignore_by_name):
- # The main module graph containing _all_ modules in the Soong build,
- # and the list of converted modules.
- try:
- module_graph = dependency_analysis.get_queryview_module_info(
- top_level_module
- ) if use_queryview else dependency_analysis.get_json_module_info(
- top_level_module)
- converted = dependency_analysis.get_bp2build_converted_modules()
- except subprocess.CalledProcessError as err:
- print("Error running: '%s':", " ".join(err.cmd))
- print("Output:\n%s" % err.output.decode("utf-8"))
- print("Error:\n%s" % err.stderr.decode("utf-8"))
- sys.exit(-1)
-
- module_adjacency_list = None
- if use_queryview:
- module_adjacency_list = adjacency_list_from_queryview_xml(
- module_graph, ignore_by_name, top_level_module)
- else:
- module_adjacency_list = adjacency_list_from_json(module_graph,
- ignore_by_name,
- top_level_module)
-
- return module_adjacency_list, converted
-
-
-def main():
- parser = argparse.ArgumentParser(description="")
- parser.add_argument("mode", help="mode: graph or report")
- parser.add_argument(
- "--module",
- "-m",
- action="append",
- help="name(s) of Soong module(s). Multiple modules only supported for report"
- )
- parser.add_argument(
- "--use_queryview",
- type=bool,
- default=False,
- required=False,
- help="whether to use queryview or module_info")
- parser.add_argument(
- "--ignore_by_name",
- type=str,
- default="",
- required=False,
- help="Comma-separated list. When building the tree of transitive dependencies, will not follow dependency edges pointing to module names listed by this flag."
- )
- args = parser.parse_args()
-
- if len(args.module) > 1 and args.mode != "report":
- print("Can only support one module with mode {}", args.mode)
-
- mode = args.mode
- use_queryview = args.use_queryview
- ignore_by_name = args.ignore_by_name
-
- report_infos = []
- for top_level_module in args.module:
- module_adjacency_list, converted = get_module_adjacency_list(
- top_level_module, use_queryview, ignore_by_name)
-
- if mode == "graph":
- generate_dot_file(module_adjacency_list, converted, top_level_module)
- elif mode == "report":
- report_infos.append(
- generate_report_data(module_adjacency_list, converted,
- top_level_module))
- else:
- raise RuntimeError("unknown mode: %s" % mode)
-
- if mode == "report":
- combined_data = combine_report_data(report_infos)
- generate_report(combined_data)
-
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/bp2build-progress/dependency_analysis.py b/scripts/bp2build-progress/dependency_analysis.py
deleted file mode 100644
index 6987c10e..00000000
--- a/scripts/bp2build-progress/dependency_analysis.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Utility functions to produce module or module type dependency graphs using json-module-graph or queryview."""
-
-import json
-import os
-import os.path
-import subprocess
-import xml.etree.ElementTree
-
-# This list of module types are omitted from the report and graph
-# for brevity and simplicity. Presence in this list doesn't mean
-# that they shouldn't be converted, but that they are not that useful
-# to be recorded in the graph or report currently.
-IGNORED_KINDS = set([
- "license_kind",
- "license",
- "cc_defaults",
- "cc_prebuilt_object",
- "cc_prebuilt_library_headers",
- "cc_prebuilt_library_shared",
- "cc_prebuilt_library_static",
- "cc_prebuilt_library_static",
- "cc_prebuilt_library",
- "java_defaults",
- "ndk_prebuilt_static_stl",
- "ndk_library",
-])
-
-SRC_ROOT_DIR = os.path.abspath(__file__ + "/../../../../..")
-
-
-def _build_with_soong(target):
- subprocess.check_output(
- [
- "build/soong/soong_ui.bash",
- "--make-mode",
- "--skip-soong-tests",
- target,
- ],
- cwd=SRC_ROOT_DIR,
- env={
- # Use aosp_arm as the canonical target product.
- "TARGET_PRODUCT": "aosp_arm",
- "TARGET_BUILD_VARIANT": "userdebug",
- },
- )
-
-
-def get_queryview_module_info(module):
- """Returns the list of transitive dependencies of input module as built by queryview."""
- _build_with_soong("queryview")
-
- result = subprocess.check_output(
- [
- "tools/bazel", "query", "--config=ci", "--config=queryview",
- "--output=xml",
- 'deps(attr("soong_module_name", "^{}$", //...))'.format(module)
- ],
- cwd=SRC_ROOT_DIR,
- )
- return xml.etree.ElementTree.fromstring(result)
-
-
-def get_json_module_info(module):
- """Returns the list of transitive dependencies of input module as provided by Soong's json module graph."""
- _build_with_soong("json-module-graph")
- # Run query.sh on the module graph for the top level module
- result = subprocess.check_output(
- [
- "build/bazel/json_module_graph/query.sh", "fullTransitiveDeps",
- "out/soong/module-graph.json", module
- ],
- cwd=SRC_ROOT_DIR,
- )
- return json.loads(result)
-
-
-def get_bp2build_converted_modules():
- """ Returns the list of modules that bp2build can currently convert. """
- _build_with_soong("bp2build")
- # Parse the list of converted module names from bp2build
- with open(
- os.path.join(
- SRC_ROOT_DIR,
- "out/soong/soong_injection/metrics/converted_modules.txt")) as f:
- # Read line by line, excluding comments.
- # Each line is a module name.
- ret = [line.strip() for line in f.readlines() if not line.startswith("#")]
- return set(ret)
-
-
-def get_json_module_type_info(module_type):
- """Returns the combined transitive dependency closures of all modules of module_type."""
- _build_with_soong("json-module-graph")
- # Run query.sh on the module graph for the top level module type
- result = subprocess.check_output(
- [
- "build/bazel/json_module_graph/query.sh",
- "fullTransitiveModuleTypeDeps", "out/soong/module-graph.json",
- module_type
- ],
- cwd=SRC_ROOT_DIR,
- )
- return json.loads(result)
-
-
-def is_windows_variation(module):
- """Returns True if input module's variant is Windows.
-
- Args:
- module: an entry parsed from Soong's json-module-graph
- """
- dep_variations = module.get("Variations")
- dep_variation_os = ""
- if dep_variations != None:
- dep_variation_os = dep_variations.get("os")
- return dep_variation_os == "windows"
-
-
-def ignore_kind(kind):
- return kind in IGNORED_KINDS or "defaults" in kind
diff --git a/scripts/bp2build_progress/BUILD.bazel b/scripts/bp2build_progress/BUILD.bazel
new file mode 100644
index 00000000..af7c5e48
--- /dev/null
+++ b/scripts/bp2build_progress/BUILD.bazel
@@ -0,0 +1,73 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+py_library(
+ name = "dependency_analysis",
+ srcs = ["dependency_analysis.py"],
+ visibility = ["//visibility:public"],
+)
+
+py_library(
+ name = "queryview_xml",
+ testonly = True,
+ srcs = ["queryview_xml.py"],
+ visibility = ["//visibility:public"],
+)
+
+py_library(
+ name = "soong_module_json",
+ testonly = True,
+ srcs = ["soong_module_json.py"],
+ visibility = ["//visibility:public"],
+)
+
+py_test(
+ name = "dependency_analysis_test",
+ srcs = ["dependency_analysis_test.py"],
+ python_version = "PY3",
+ deps = [
+ ":dependency_analysis",
+ ":queryview_xml",
+ ":soong_module_json",
+ ],
+)
+
+py_binary(
+ name = "bp2build_progress",
+ srcs = ["bp2build_progress.py"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":dependency_analysis",
+ "//build/soong/ui/metrics/bp2build_progress_metrics_proto:bp2build_py_proto",
+ ],
+)
+
+py_test(
+ name = "bp2build_progress_test",
+ srcs = ["bp2build_progress_test.py"],
+ python_version = "PY3",
+ deps = [
+ ":bp2build_progress",
+ ":dependency_analysis",
+ ":queryview_xml",
+ ":soong_module_json",
+ ],
+)
+
+py_binary(
+ name = "bp2build_module_dep_infos",
+ srcs = ["bp2build_module_dep_infos.py"],
+ visibility = ["//visibility:public"],
+ deps = [":dependency_analysis"],
+)
diff --git a/scripts/bp2build_progress/README.md b/scripts/bp2build_progress/README.md
new file mode 100644
index 00000000..c9750f0a
--- /dev/null
+++ b/scripts/bp2build_progress/README.md
@@ -0,0 +1,48 @@
+# bp2build progress graphs
+
+This directory contains tools to generate reports and .png graphs of the
+bp2build conversion progress, for any module.
+
+This tool relies on `json-module-graph` and `bp2build` being buildable targets
+on this branch.
+
+## Prerequisites
+
+* `/usr/bin/dot`: turning dot graphviz files into .pngs
+
+Tip: pass `--use-queryview` to run `bp2build_progress.py` with queryview.
+
+## Instructions
+
+# Generate the report for a module, e.g. adbd
+
+```sh
+b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+ -- report -m <module-name>
+```
+
+or:
+
+```sh
+b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+ -- report -m <module-name> --use-queryview
+```
+
+When running in report mode, you can also write results to a proto with the flag
+`--proto-file`.
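+
+For example (the output path is only an illustration):
+
+```sh
+b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+  -- report -m <module-name> --proto-file /tmp/bp2build_progress.pb
+```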
+
+# Generate the graph for a module, e.g. adbd
+
+```sh
+b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+ -- graph -m adbd > /tmp/graph.in && \
+ dot -Tpng -o /tmp/graph.png /tmp/graph.in
+```
+
+or:
+
+```sh
+b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+ -- graph -m adbd --use-queryview > /tmp/graph.in && \
+ dot -Tpng -o /tmp/graph.png /tmp/graph.in
+```
diff --git a/scripts/bp2build_progress/bp2build_module_dep_infos.py b/scripts/bp2build_progress/bp2build_module_dep_infos.py
new file mode 100755
index 00000000..c189a98b
--- /dev/null
+++ b/scripts/bp2build_progress/bp2build_module_dep_infos.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A script to produce a csv report of all modules of a given type.
+
+There is one output row per module of the input type; each column corresponds
+to one of the fields of the _ModuleTypeInfo named tuple described below.
+The script allows ignoring certain dependency edges based on the target module
+name or the dependency tag name.
+
+Usage:
+  ./bp2build_module_dep_infos.py -m <module type>
+     --ignore-by-name <modules to ignore>
+
+"""
+
+import argparse
+import collections
+import csv
+import dependency_analysis
+import sys
+
+_ModuleTypeInfo = collections.namedtuple(
+ "_ModuleTypeInfo",
+ [
+ # map of module type to the set of properties used by modules
+ # of the given type in the dependency tree.
+ "type_to_properties",
+ # [java modules only] list of source file extensions used by this module.
+ "java_source_extensions",
+ ],
+)
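+
+# Illustrative example (hypothetical values, not taken from any real build):
+#   _ModuleTypeInfo(
+#       type_to_properties={"java_library": {"srcs", "static_libs"}},
+#       java_source_extensions={".java", ".kt"},
+#   )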
+
+def _get_java_source_extensions(module):
+ out = set()
+ if "Module" not in module:
+ return out
+ if "Java" not in module["Module"]:
+ return out
+ if "SourceExtensions" not in module["Module"]["Java"]:
+ return out
+ if module["Module"]["Java"]["SourceExtensions"]:
+ out.update(module["Module"]["Java"]["SourceExtensions"])
+ return out
+
+
+def module_type_info_from_json(
+ module_graph, module_type, ignored_dep_names, ignore_java_auto_deps
+):
+ """Builds a map of module name to _ModuleTypeInfo for each module of module_type.
+
+ Dependency edges pointing to modules in ignored_dep_names are not followed.
+ """
+
+ modules_of_type = set()
+
+ def filter_by_type(json):
+ if json["Type"] == module_type:
+ modules_of_type.add(json["Name"])
+ return True
+ return False
+
+ # dictionary of module name to _ModuleTypeInfo.
+
+ type_infos = {}
+
+ def update_infos(module, deps):
+ module_name = module["Name"]
+ info = type_infos.get(
+ module_name,
+ _ModuleTypeInfo(
+ java_source_extensions=set(),
+ type_to_properties=collections.defaultdict(set),
+ ))
+
+ java_source_extensions = _get_java_source_extensions(module)
+
+ if module["Type"]:
+ info.type_to_properties[module["Type"]].update(
+ dependency_analysis.get_property_names(module))
+
+ for dep_name in deps:
+ for dep_type, dep_type_properties in type_infos[
+ dep_name].type_to_properties.items():
+ info.type_to_properties[dep_type].update(dep_type_properties)
+ java_source_extensions.update(
+ type_infos[dep_name].java_source_extensions)
+
+ info.java_source_extensions.update(java_source_extensions)
+ # for a module, collect all properties and java source extensions specified by
+ # transitive dependencies and the module itself
+ type_infos[module_name] = info
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ module_graph, ignored_dep_names, ignore_java_auto_deps, filter_by_type, update_infos)
+
+ return {
+ name: info for name, info in type_infos.items() if name in modules_of_type
+ }
+
+
+def _write_output(file_handle, type_infos):
+ writer = csv.writer(file_handle)
+ writer.writerow([
+ "module name",
+ "properties",
+ "java source extensions",
+ ])
+ for module, module_type_info in type_infos.items():
+ writer.writerow([
+ module,
+ ("[\"%s\"]" % '"\n"'.join([
+ "%s: %s" % (mtype, ",".join(properties)) for mtype, properties in
+ module_type_info.type_to_properties.items()
+ ]) if len(module_type_info.type_to_properties) else "[]"),
+ ("[\"%s\"]" % '", "'.join(module_type_info.java_source_extensions)
+ if len(module_type_info.java_source_extensions) else "[]"),
+ ])
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--module-type", "-m", help="name of Soong module type.")
+ parser.add_argument(
+ "--ignore-by-name",
+ default="",
+ help=(
+ "Comma-separated list. When building the tree of transitive"
+ " dependencies, will not follow dependency edges pointing to module"
+ " names listed by this flag."
+ ),
+ )
+ parser.add_argument(
+ "--ignore-java-auto-deps",
+ action="store_true",
+ help="whether to ignore automatically added java deps",
+ )
+ args = parser.parse_args()
+
+ module_type = args.module_type
+ ignore_by_name = args.ignore_by_name
+
+ module_graph = dependency_analysis.get_json_module_type_info(module_type)
+ type_infos = module_type_info_from_json(
+ module_graph,
+ module_type,
+ ignore_by_name.split(","),
+ args.ignore_java_auto_deps,
+ )
+
+ _write_output(sys.stdout, type_infos)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/bp2build_progress/bp2build_progress.py b/scripts/bp2build_progress/bp2build_progress.py
new file mode 100755
index 00000000..d6e450b4
--- /dev/null
+++ b/scripts/bp2build_progress/bp2build_progress.py
@@ -0,0 +1,530 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A json-module-graph postprocessing script to generate a bp2build progress tracker.
+
+Usage:
+ b run :bp2build_progress [report|graph] -m <module name>
+
+Example:
+
+ To generate a report on the `adbd` module, run:
+ b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+      -- report -m adbd
+
+ To generate a graph on the `adbd` module, run:
+ b run //build/bazel/scripts/bp2build_progress:bp2build_progress \
+ -- graph -m adbd > /tmp/graph.in && \
+ dot -Tpng -o /tmp/graph.png /tmp/graph.in
+"""
+
+import argparse
+import collections
+import dataclasses
+import datetime
+import os.path
+import subprocess
+import sys
+import xml
+from typing import Dict, List, Set, Optional
+
+import bp2build_pb2
+import dependency_analysis
+
+
+@dataclasses.dataclass(frozen=True, order=True)
+class ModuleInfo:
+ name: str
+ kind: str
+ dirname: str
+ created_by: Optional[str]
+ num_deps: int = 0
+ converted: bool = False
+
+ def __str__(self):
+ converted = " (converted)" if self.converted else ""
+ return f"{self.name} [{self.kind}] [{self.dirname}]{converted}"
+
+ def short_string(self, converted: Set[str]):
+ converted = " (c)" if self.is_converted(converted) else ""
+ return f"{self.name}{converted}"
+
+ def is_converted(self, converted: Set[str]):
+ return self.name in converted
+
+ def is_converted_or_skipped(self, converted: Set[str]):
+ if self.is_converted(converted):
+ return True
+ # these are implementation details of another module type that can never be
+ # created in a BUILD file
+ return ".go_android/soong" in self.kind and (
+ self.kind.endswith("__loadHookModule") or
+ self.kind.endswith("__topDownMutatorModule"))
+
+
+@dataclasses.dataclass(frozen=True, order=True)
+class InputModule:
+ module: ModuleInfo
+ num_deps: int = 0
+ num_unconverted_deps: int = 0
+
+ def __str__(self):
+ total = self.num_deps
+ converted = self.num_deps - self.num_unconverted_deps
+    percent = 100  # a module with no deps counts as fully converted
+ if self.num_deps > 0:
+ percent = converted / self.num_deps * 100
+ return f"{self.module.name}: {percent:.1f}% ({converted}/{total}) converted"
+
+
+@dataclasses.dataclass(frozen=True)
+class ReportData:
+ input_modules: Set[InputModule]
+ total_deps: Set[ModuleInfo]
+ unconverted_deps: Set[str]
+ all_unconverted_modules: Dict[str, Set[ModuleInfo]]
+ blocked_modules: Dict[ModuleInfo, Set[str]]
+ dirs_with_unconverted_modules: Set[str]
+ kind_of_unconverted_modules: Set[str]
+ converted: Set[str]
+ show_converted: bool
+
+
+# Generate a dot file containing the transitive closure of the module.
+def generate_dot_file(modules: Dict[ModuleInfo, Set[ModuleInfo]],
+ converted: Set[str], show_converted: bool):
+ # Check that all modules in the argument are in the list of converted modules
+ all_converted = lambda modules: all(
+ m.is_converted(converted) for m in modules)
+
+ dot_entries = []
+
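+  # Node color legend (matching the branches below): dodgerblue = already
+  # converted (only emitted when show_converted is set), yellow = unconverted
+  # but all of its deps are converted, tomato = unconverted with at least one
+  # unconverted dep.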
+ for module, deps in sorted(modules.items()):
+
+ if module.is_converted(converted):
+ if show_converted:
+ color = "dodgerblue"
+ else:
+ continue
+ elif all_converted(deps):
+ color = "yellow"
+ else:
+ color = "tomato"
+
+ dot_entries.append(
+ f'"{module.name}" [label="{module.name}\\n{module.kind}" color=black, style=filled, '
+ f"fillcolor={color}]")
+ dot_entries.extend(
+ f'"{module.name}" -> "{dep.name}"' for dep in sorted(deps)
+ if show_converted or not dep.is_converted(converted))
+
+ return """
+digraph mygraph {{
+ node [shape=box];
+
+ %s
+}}
+""" % "\n ".join(dot_entries)
+
+
+# Generate a report for each module in the transitive closure, and the blockers for each module
+def generate_report_data(modules: Dict[ModuleInfo, Set[ModuleInfo]],
+ converted: Set[str],
+ input_modules_names: Set[str],
+ show_converted: bool = False) -> ReportData:
+  # Map of module to the set of its unconverted deps (dep names, or their
+  # short strings when show_converted is set)
+ blocked_modules = collections.defaultdict(set)
+
+ # Map of unconverted modules to the modules they're blocking
+ # (i.e. reverse deps)
+ all_unconverted_modules = collections.defaultdict(set)
+
+ dirs_with_unconverted_modules = set()
+ kind_of_unconverted_modules = set()
+
+ input_all_deps = set()
+ input_unconverted_deps = set()
+ input_modules = set()
+
+ for module, deps in sorted(modules.items()):
+ unconverted_deps = set(
+ dep.name for dep in deps if not dep.is_converted_or_skipped(converted))
+
+    # replace the deps count with the transitive deps count rather than the direct deps count
+ module = ModuleInfo(
+ module.name,
+ module.kind,
+ module.dirname,
+ module.created_by,
+ len(deps),
+ module.is_converted(converted),
+ )
+
+ for dep in unconverted_deps:
+ all_unconverted_modules[dep].add(module)
+
+ if not module.is_converted_or_skipped(converted) or (
+ show_converted and not module.is_converted_or_skipped(set())):
+ if show_converted:
+ full_deps = set(f"{dep.short_string(converted)}" for dep in deps)
+ blocked_modules[module].update(full_deps)
+ else:
+ blocked_modules[module].update(unconverted_deps)
+
+ if not module.is_converted_or_skipped(converted):
+ dirs_with_unconverted_modules.add(module.dirname)
+ kind_of_unconverted_modules.add(module.kind)
+
+ if module.name in input_modules_names:
+ input_modules.add(InputModule(module, len(deps), len(unconverted_deps)))
+ input_all_deps.update(deps)
+ input_unconverted_deps.update(unconverted_deps)
+
+ return ReportData(
+ input_modules=input_modules,
+ total_deps=input_all_deps,
+ unconverted_deps=input_unconverted_deps,
+ all_unconverted_modules=all_unconverted_modules,
+ blocked_modules=blocked_modules,
+ dirs_with_unconverted_modules=dirs_with_unconverted_modules,
+ kind_of_unconverted_modules=kind_of_unconverted_modules,
+ converted=converted,
+ show_converted=show_converted,
+ )
+
+
+def generate_proto(report_data, file_name):
+ message = bp2build_pb2.Bp2buildConversionProgress(
+ root_modules=[m.module.name for m in report_data.input_modules],
+ num_deps=len(report_data.total_deps),
+ )
+ for module, unconverted_deps in report_data.blocked_modules.items():
+ message.unconverted.add(
+ name=module.name,
+ directory=module.dirname,
+ type=module.kind,
+ unconverted_deps=unconverted_deps,
+ num_deps=module.num_deps,
+ )
+
+ with open(file_name, "wb") as f:
+ f.write(message.SerializeToString())
+
+
+def generate_report(report_data):
+ report_lines = []
+ input_module_str = ", ".join(
+ str(i) for i in sorted(report_data.input_modules))
+
+ report_lines.append("# bp2build progress report for: %s\n" % input_module_str)
+
+ if report_data.show_converted:
+ report_lines.append(
+ "# progress report includes data both for converted and unconverted modules"
+ )
+
+ total = len(report_data.total_deps)
+ unconverted = len(report_data.unconverted_deps)
+ converted = total - unconverted
+ percent = converted / total * 100
+ report_lines.append(f"Percent converted: {percent:.2f} ({converted}/{total})")
+ report_lines.append(f"Total unique unconverted dependencies: {unconverted}")
+
+ report_lines.append("Ignored module types: %s\n" %
+ sorted(dependency_analysis.IGNORED_KINDS))
+ report_lines.append("# Transitive dependency closure:")
+
+ current_count = -1
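+ # Group blocked modules by their number of unconverted deps, in ascending order.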
+ for module, unconverted_deps in sorted(
+ report_data.blocked_modules.items(), key=lambda x: len(x[1])):
+ count = len(unconverted_deps)
+ if current_count != count:
+ report_lines.append(f"\n{count} unconverted deps remaining:")
+ current_count = count
+ report_lines.append("{module}: {deps}".format(
+ module=module, deps=", ".join(sorted(unconverted_deps))))
+
+ report_lines.append("\n")
+ report_lines.append("# Unconverted deps of {}:\n".format(input_module_str))
+ for count, dep in sorted(
+ ((len(unconverted), dep)
+ for dep, unconverted in report_data.all_unconverted_modules.items()),
+ reverse=True):
+ report_lines.append("%s: blocking %d modules" % (dep, count))
+
+ report_lines.append("\n")
+ report_lines.append("# Dirs with unconverted modules:\n\n{}".format("\n".join(
+ sorted(report_data.dirs_with_unconverted_modules))))
+
+ report_lines.append("\n")
+ report_lines.append("# Kinds with unconverted modules:\n\n{}".format(
+ "\n".join(sorted(report_data.kind_of_unconverted_modules))))
+
+ report_lines.append("\n")
+ report_lines.append("# Converted modules:\n\n%s" %
+ "\n".join(sorted(report_data.converted)))
+
+ report_lines.append("\n")
+ report_lines.append(
+ "Generated by: https://cs.android.com/android/platform/superproject/+/master:build/bazel/scripts/bp2build_progress/bp2build_progress.py"
+ )
+ report_lines.append("Generated at: %s" %
+ datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S %z"))
+
+ return "\n".join(report_lines)
+
+
+def adjacency_list_from_json(
+ module_graph: ...,
+ ignore_by_name: List[str],
+ ignore_java_auto_deps: bool,
+ top_level_modules: List[str],
+ collect_transitive_dependencies: bool = True,
+) -> Dict[ModuleInfo, Set[ModuleInfo]]:
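+ """Builds a map from each module to its deps (transitive by default) from Soong's json module graph, rooted at top_level_modules."""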
+ def filter_by_name(json):
+ return json["Name"] in top_level_modules
+
+ module_adjacency_list = collections.defaultdict(set)
+ name_to_info = {}
+
+ def collect_dependencies(module, deps_names):
+ module_info = None
+ name = module["Name"]
+ name_to_info.setdefault(
+ name,
+ ModuleInfo(
+ name=name,
+ created_by=module["CreatedBy"],
+ kind=module["Type"],
+ dirname=os.path.dirname(module["Blueprint"]),
+ num_deps=len(deps_names),
+ ))
+
+ module_info = name_to_info[name]
+
+ # ensure module_info is added to the adjacency list even if it has no deps
+ module_adjacency_list[module_info].update(set())
+ for dep in deps_names:
+ # this may occur if there is a cycle between a module and its created_by
+ # module
+ if dep not in name_to_info:
+ continue
+ dep_module_info = name_to_info[dep]
+ module_adjacency_list[module_info].add(dep_module_info)
+ if collect_transitive_dependencies:
+ module_adjacency_list[module_info].update(
+ module_adjacency_list.get(dep_module_info, set()))
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ module_graph, ignore_by_name, ignore_java_auto_deps, filter_by_name, collect_dependencies)
+
+ return module_adjacency_list
+
+
+def adjacency_list_from_queryview_xml(
+ module_graph: xml.etree.ElementTree,
+ ignore_by_name: List[str],
+ top_level_modules: List[str],
+ collect_transitive_dependencies: bool = True
+) -> Dict[ModuleInfo, Set[ModuleInfo]]:
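+ """Builds the same module-to-deps map as adjacency_list_from_json, but from queryview XML output."""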
+
+ def filter_by_name(module):
+ return module.name in top_level_modules
+
+ module_adjacency_list = collections.defaultdict(set)
+ name_to_info = {}
+
+ def collect_dependencies(module, deps_names):
+ module_info = None
+ name_to_info.setdefault(
+ module.name,
+ ModuleInfo(
+ name=module.name,
+ kind=module.kind,
+ dirname=module.dirname,
+ # created_by is not available from queryview; pass it explicitly so it
+ # is not forgotten when updating num_deps
+ created_by=None,
+ num_deps=len(deps_names),
+ ))
+ module_info = name_to_info[module.name]
+
+ # ensure module_info is added to the adjacency list even if it has no deps
+ module_adjacency_list[module_info].update(set())
+ for dep in deps_names:
+ dep_module_info = name_to_info[dep]
+ module_adjacency_list[module_info].add(dep_module_info)
+ if collect_transitive_dependencies:
+ module_adjacency_list[module_info].update(
+ module_adjacency_list.get(dep_module_info, set()))
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ module_graph, ignore_by_name, filter_by_name, collect_dependencies)
+
+ return module_adjacency_list
+
+
+def get_module_adjacency_list(
+ top_level_modules: List[str],
+ use_queryview: bool,
+ ignore_by_name: List[str],
+ ignore_java_auto_deps: bool = False,
+ collect_transitive_dependencies: bool = True,
+ banchan_mode: bool = False) -> Dict[ModuleInfo, Set[ModuleInfo]]:
+ # The main module graph containing _all_ modules in the Soong build,
+ # and the list of converted modules.
+ try:
+ if use_queryview:
+ module_graph = dependency_analysis.get_queryview_module_info(
+ top_level_modules, banchan_mode)
+ module_adjacency_list = adjacency_list_from_queryview_xml(
+ module_graph, ignore_by_name, top_level_modules,
+ collect_transitive_dependencies)
+ else:
+ module_graph = dependency_analysis.get_json_module_info(banchan_mode)
+ module_adjacency_list = adjacency_list_from_json(
+ module_graph,
+ ignore_by_name,
+ ignore_java_auto_deps,
+ top_level_modules,
+ collect_transitive_dependencies,
+ )
+ except subprocess.CalledProcessError as err:
+ sys.exit(f"""Error running: '{' '.join(err.cmd)}':"
+Stdout:
+{err.stdout.decode('utf-8') if err.stdout else ''}
+Stderr:
+{err.stderr.decode('utf-8') if err.stderr else ''}""")
+
+ return module_adjacency_list
+
+
+def add_created_by_to_converted(
+ converted: Set[str],
+ module_adjacency_list: Dict[ModuleInfo, Set[ModuleInfo]]) -> Set[str]:
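+ """Returns the converted set expanded with modules whose created_by chain leads to a converted module."""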
+ modules_by_name = {m.name: m for m in module_adjacency_list.keys()}
+
+ converted_modules = set()
+ converted_modules.update(converted)
+
+ def _update_converted(module_name):
+ if module_name in converted_modules:
+ return True
+ if module_name not in modules_by_name:
+ return False
+ module = modules_by_name[module_name]
+ if module.created_by and _update_converted(module.created_by):
+ converted_modules.add(module_name)
+ return True
+ return False
+
+ for module in modules_by_name.keys():
+ _update_converted(module)
+
+ return converted_modules
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("mode", help="mode: graph or report")
+ parser.add_argument(
+ "--module",
+ "-m",
+ action="append",
+ required=True,
+ help="name(s) of Soong module(s). Multiple modules only supported for report"
+ )
+ parser.add_argument(
+ "--use-queryview",
+ action="store_true",
+ help="whether to use queryview or module_info")
+ parser.add_argument(
+ "--ignore-by-name",
+ default="",
+ help=(
+ "Comma-separated list. When building the tree of transitive"
+ " dependencies, will not follow dependency edges pointing to module"
+ " names listed by this flag."
+ ),
+ )
+ parser.add_argument(
+ "--ignore-java-auto-deps",
+ action="store_true",
+ help="whether to ignore automatically added java deps",
+ )
+ parser.add_argument(
+ "--banchan",
+ action="store_true",
+ help="whether to run Soong in a banchan configuration rather than lunch",
+ )
+ parser.add_argument(
+ "--proto-file",
+ help="Path to write proto output",
+ )
+ parser.add_argument(
+ "--out-file",
+ "-o",
+ type=argparse.FileType("w"),
+ default="-",
+ help="Path to write output, if omitted, writes to stdout",
+ )
+ parser.add_argument(
+ "--show-converted",
+ "-s",
+ action="store_true",
+ help="Show bp2build-converted modules in addition to the unconverted dependencies to see full dependencies post-migration. By default converted dependencies are not shown",
+ )
+ args = parser.parse_args()
+
+ if len(args.module) > 1 and args.mode == "graph":
+ sys.exit(f"Can only support one module with mode {args.mode}")
+ if args.proto_file and args.mode == "graph":
+ sys.exit(f"Proto file only supported for report mode, not {args.mode}")
+
+ mode = args.mode
+ use_queryview = args.use_queryview
+ ignore_by_name = args.ignore_by_name.split(",")
+ banchan_mode = args.banchan
+ modules = set(args.module)
+
+ converted = dependency_analysis.get_bp2build_converted_modules()
+
+ module_adjacency_list = get_module_adjacency_list(
+ modules,
+ use_queryview,
+ ignore_by_name,
+ collect_transitive_dependencies=mode != "graph",
+ banchan_mode=banchan_mode)
+
+ converted = add_created_by_to_converted(converted, module_adjacency_list)
+
+ output_file = args.out_file
+ if mode == "graph":
+ dot_file = generate_dot_file(module_adjacency_list, converted,
+ args.show_converted)
+ output_file.write(dot_file)
+ elif mode == "report":
+ report_data = generate_report_data(module_adjacency_list, converted,
+ modules, args.show_converted)
+ report = generate_report(report_data)
+ output_file.write(report)
+ if args.proto_file:
+ generate_proto(report_data, args.proto_file)
+ else:
+ raise RuntimeError("unknown mode: %s" % mode)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/bp2build_progress/bp2build_progress_test.py b/scripts/bp2build_progress/bp2build_progress_test.py
new file mode 100644
index 00000000..7a1e8009
--- /dev/null
+++ b/scripts/bp2build_progress/bp2build_progress_test.py
@@ -0,0 +1,414 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for bp2build-progress."""
+
+import bp2build_progress
+import collections
+import dependency_analysis
+import queryview_xml
+import soong_module_json
+import unittest
+import unittest.mock
+
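+# Test fixtures: a -> {b, c}, b -> d, c -> e; f and g have no deps.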
+_queryview_graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'type1', dep_names=['//pkg:b', '//other:c']),
+ queryview_xml.make_module('//pkg:b', 'b', 'type2', dep_names=['//pkg:d']),
+ queryview_xml.make_module('//pkg:d', 'd', 'type2'),
+ queryview_xml.make_module(
+ '//other:c', 'c', 'type2', dep_names=['//other:e']),
+ queryview_xml.make_module('//other:e', 'e', 'type3'),
+ queryview_xml.make_module('//pkg2:f', 'f', 'type4'),
+ queryview_xml.make_module('//pkg3:g', 'g', 'type5'),
+])
+
+_soong_module_graph = [
+ soong_module_json.make_module(
+ 'a',
+ 'type1',
+ blueprint='pkg/Android.bp',
+ deps=[soong_module_json.make_dep('b'),
+ soong_module_json.make_dep('c')]),
+ soong_module_json.make_module(
+ 'b',
+ 'type2',
+ blueprint='pkg/Android.bp',
+ deps=[soong_module_json.make_dep('d')]),
+ soong_module_json.make_module('d', 'type2', blueprint='pkg/Android.bp'),
+ soong_module_json.make_module(
+ 'c',
+ 'type2',
+ blueprint='other/Android.bp',
+ deps=[soong_module_json.make_dep('e')]),
+ soong_module_json.make_module('e', 'type3', blueprint='other/Android.bp'),
+ soong_module_json.make_module('f', 'type4', blueprint='pkg2/Android.bp'),
+ soong_module_json.make_module('g', 'type5', blueprint='pkg3/Android.bp'),
+]
+
+_soong_module_graph_created_by_no_loop = [
+ soong_module_json.make_module(
+ 'a', 'type1', blueprint='pkg/Android.bp', created_by='b'),
+ soong_module_json.make_module('b', 'type2', blueprint='pkg/Android.bp'),
+]
+
+_soong_module_graph_created_by_loop = [
+ soong_module_json.make_module(
+ 'a',
+ 'type1',
+ deps=[soong_module_json.make_dep('b')],
+ blueprint='pkg/Android.bp'),
+ soong_module_json.make_module(
+ 'b', 'type2', blueprint='pkg/Android.bp', created_by='a'),
+]
+
+
+class Bp2BuildProgressTest(unittest.TestCase):
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_queryview_module_info',
+ autospec=True,
+ return_value=_queryview_graph)
+ def test_get_module_adjacency_list_queryview_transitive_deps(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(
+ ['a', 'f'], True, set(), False, True, False
+ )
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by=None)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by=None)
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by=None)
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type3', dirname='other', num_deps=0, created_by=None)
+ f = bp2build_progress.ModuleInfo(
+ name='f', kind='type4', dirname='pkg2', num_deps=0, created_by=None)
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a] = set([b, c, d, e])
+ expected_adjacency_dict[b] = set([d])
+ expected_adjacency_dict[c] = set([e])
+ expected_adjacency_dict[d].update(set())
+ expected_adjacency_dict[e].update(set())
+ expected_adjacency_dict[f].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_queryview_module_info',
+ autospec=True,
+ return_value=_queryview_graph)
+ def test_get_module_adjacency_list_queryview_direct_deps(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(
+ ['a', 'f'], True, set(), False, False
+ )
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by=None)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by=None)
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by=None)
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type3', dirname='other', num_deps=0, created_by=None)
+ f = bp2build_progress.ModuleInfo(
+ name='f', kind='type4', dirname='pkg2', num_deps=0, created_by=None)
+
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a] = set([b, c])
+ expected_adjacency_dict[b] = set([d])
+ expected_adjacency_dict[c] = set([e])
+ expected_adjacency_dict[d].update(set())
+ expected_adjacency_dict[e].update(set())
+ expected_adjacency_dict[f].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_json_module_info',
+ autospec=True,
+ return_value=_soong_module_graph)
+ def test_get_module_adjacency_list_soong_module_transitive_deps(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(
+ ['a', 'f'], False, set(), False, True, False
+ )
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by='')
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by='')
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by='')
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by='')
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type3', dirname='other', num_deps=0, created_by='')
+ f = bp2build_progress.ModuleInfo(
+ name='f', kind='type4', dirname='pkg2', num_deps=0, created_by='')
+
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a] = set([b, c, d, e])
+ expected_adjacency_dict[b] = set([d])
+ expected_adjacency_dict[c] = set([e])
+ expected_adjacency_dict[d].update(set())
+ expected_adjacency_dict[e].update(set())
+ expected_adjacency_dict[f].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_json_module_info',
+ autospec=True,
+ return_value=_soong_module_graph)
+ def test_get_module_adjacency_list_soong_module_direct_deps(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(['a', 'f'],
+ False, set(),
+ False, False)
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by='')
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by='')
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by='')
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by='')
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type3', dirname='other', num_deps=0, created_by='')
+ f = bp2build_progress.ModuleInfo(
+ name='f', kind='type4', dirname='pkg2', num_deps=0, created_by='')
+
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a] = set([b, c])
+ expected_adjacency_dict[b] = set([d])
+ expected_adjacency_dict[c] = set([e])
+ expected_adjacency_dict[d].update(set())
+ expected_adjacency_dict[e].update(set())
+ expected_adjacency_dict[f].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_json_module_info',
+ autospec=True,
+ return_value=_soong_module_graph_created_by_no_loop)
+ def test_get_module_adjacency_list_soong_module_created_by(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(['a', 'f'],
+ False, set(),
+ True, False)
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=1, created_by='b')
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=0, created_by='')
+
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a].update(set([b]))
+ expected_adjacency_dict[b].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ @unittest.mock.patch(
+ 'dependency_analysis.get_json_module_info',
+ autospec=True,
+ return_value=_soong_module_graph_created_by_loop)
+ def test_get_module_adjacency_list_soong_module_created_by_loop(self, _):
+ adjacency_dict = bp2build_progress.get_module_adjacency_list(['a', 'f'],
+ False, set(),
+ True, False)
+
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=1, created_by='')
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by='a')
+
+ expected_adjacency_dict = collections.defaultdict(set)
+ expected_adjacency_dict[a].update(set([b]))
+ expected_adjacency_dict[b].update(set())
+ self.assertDictEqual(adjacency_dict, expected_adjacency_dict)
+
+ def test_generate_report_data(self):
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=4, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by=None)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by=None)
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by=None)
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type3', dirname='other', num_deps=0, created_by=None)
+ f = bp2build_progress.ModuleInfo(
+ name='f', kind='type4', dirname='pkg2', num_deps=2, created_by=None)
+ g = bp2build_progress.ModuleInfo(
+ name='g', kind='type4', dirname='pkg2', num_deps=2, created_by=None)
+
+ module_graph = collections.defaultdict(set)
+ module_graph[a] = set([b, c, d, e])
+ module_graph[b] = set([d])
+ module_graph[c] = set([e])
+ module_graph[d].update(set())
+ module_graph[e].update(set())
+ module_graph[f].update(set([b, g]))
+ module_graph[g].update(set())
+
+ report_data = bp2build_progress.generate_report_data(
+ module_graph, {'d', 'e', 'g'}, {'a', 'f'})
+
+ all_unconverted_modules = collections.defaultdict(set)
+ all_unconverted_modules['b'].update({a, f})
+ all_unconverted_modules['c'].update({a})
+
+ blocked_modules = collections.defaultdict(set)
+ blocked_modules[a].update({'b', 'c'})
+ blocked_modules[b].update(set())
+ blocked_modules[c].update(set())
+ blocked_modules[f].update(set({'b'}))
+
+ expected_report_data = bp2build_progress.ReportData(
+ input_modules={
+ bp2build_progress.InputModule(a, 4, 2),
+ bp2build_progress.InputModule(f, 2, 1)
+ },
+ total_deps={b, c, d, e, g},
+ unconverted_deps={'b', 'c'},
+ all_unconverted_modules=all_unconverted_modules,
+ blocked_modules=blocked_modules,
+ dirs_with_unconverted_modules={'pkg', 'other', 'pkg2'},
+ kind_of_unconverted_modules={'type1', 'type2', 'type4'},
+ converted={'d', 'e', 'g'},
+ show_converted=False,
+ )
+
+ self.assertEqual(report_data, expected_report_data)
+
+ def test_generate_report_data_show_converted(self):
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg2', num_deps=0, created_by=None, converted=True)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type3', dirname='other', num_deps=0, created_by=None)
+
+ module_graph = collections.defaultdict(set)
+ module_graph[a] = set([b, c])
+ module_graph[b].update(set())
+ module_graph[c].update(set())
+
+ report_data = bp2build_progress.generate_report_data(
+ module_graph, {'b'}, {'a'}, show_converted=True)
+
+ all_unconverted_modules = collections.defaultdict(set)
+ all_unconverted_modules['c'].update({a})
+
+ blocked_modules = collections.defaultdict(set)
+ blocked_modules[a].update({'b (c)', 'c'})
+ blocked_modules[b].update(set())
+ blocked_modules[c].update(set())
+
+ expected_report_data = bp2build_progress.ReportData(
+ input_modules={
+ bp2build_progress.InputModule(a, 2, 1),
+ },
+ total_deps={b, c},
+ unconverted_deps={'c'},
+ all_unconverted_modules=all_unconverted_modules,
+ blocked_modules=blocked_modules,
+ dirs_with_unconverted_modules={'pkg', 'other'},
+ kind_of_unconverted_modules={'type1', 'type3'},
+ converted={'b'},
+ show_converted=True,
+ )
+
+ self.assertEqual(report_data, expected_report_data)
+
+ def test_generate_dot_file(self):
+ self.maxDiff = None
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by=None)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by=None)
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by=None)
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type2', dirname='other', num_deps=0, created_by=None)
+
+ module_graph = collections.defaultdict(set)
+ module_graph[a] = set([b, c])
+ module_graph[b] = set([d])
+ module_graph[c] = set([e])
+ module_graph[d] = set([])
+ module_graph[e] = set([])
+
+ dot_graph = bp2build_progress.generate_dot_file(module_graph, {'e'}, False)
+
+ expected_dot_graph = """
+digraph mygraph {{
+ node [shape=box];
+
+ "a" [label="a\\ntype1" color=black, style=filled, fillcolor=tomato]
+ "a" -> "b"
+ "a" -> "c"
+ "b" [label="b\\ntype2" color=black, style=filled, fillcolor=tomato]
+ "b" -> "d"
+ "c" [label="c\\ntype2" color=black, style=filled, fillcolor=yellow]
+ "d" [label="d\\ntype2" color=black, style=filled, fillcolor=yellow]
+}}
+"""
+ self.assertEqual(dot_graph, expected_dot_graph)
+
+ def test_generate_dot_file_show_converted(self):
+ self.maxDiff = None
+ a = bp2build_progress.ModuleInfo(
+ name='a', kind='type1', dirname='pkg', num_deps=2, created_by=None)
+ b = bp2build_progress.ModuleInfo(
+ name='b', kind='type2', dirname='pkg', num_deps=1, created_by=None)
+ c = bp2build_progress.ModuleInfo(
+ name='c', kind='type2', dirname='other', num_deps=1, created_by=None)
+ d = bp2build_progress.ModuleInfo(
+ name='d', kind='type2', dirname='pkg', num_deps=0, created_by=None)
+ e = bp2build_progress.ModuleInfo(
+ name='e', kind='type2', dirname='other', num_deps=0, created_by=None)
+
+ module_graph = collections.defaultdict(set)
+ module_graph[a] = set([b, c])
+ module_graph[b] = set([d])
+ module_graph[c] = set([e])
+ module_graph[d] = set([])
+ module_graph[e] = set([])
+
+ dot_graph = bp2build_progress.generate_dot_file(module_graph, {'e'}, True)
+
+ expected_dot_graph = """
+digraph mygraph {{
+ node [shape=box];
+
+ "a" [label="a\\ntype1" color=black, style=filled, fillcolor=tomato]
+ "a" -> "b"
+ "a" -> "c"
+ "b" [label="b\\ntype2" color=black, style=filled, fillcolor=tomato]
+ "b" -> "d"
+ "c" [label="c\\ntype2" color=black, style=filled, fillcolor=yellow]
+ "c" -> "e"
+ "d" [label="d\\ntype2" color=black, style=filled, fillcolor=yellow]
+ "e" [label="e\\ntype2" color=black, style=filled, fillcolor=dodgerblue]
+}}
+"""
+ self.assertEqual(dot_graph, expected_dot_graph)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/scripts/bp2build_progress/dependency_analysis.py b/scripts/bp2build_progress/dependency_analysis.py
new file mode 100644
index 00000000..fe2f44f2
--- /dev/null
+++ b/scripts/bp2build_progress/dependency_analysis.py
@@ -0,0 +1,466 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utility functions to produce module or module type dependency graphs using json-module-graph or queryview."""
+
+from typing import Set
+import collections
+import dataclasses
+import json
+import os
+import os.path
+import subprocess
+import sys
+import xml.etree.ElementTree
+
+
+@dataclasses.dataclass(frozen=True, order=True)
+class _ModuleKey:
+ """_ModuleKey uniquely identifies a module by name nad variations."""
+ name: str
+ variations: list
+
+ def __str__(self):
+ return f"{self.name}, {self.variations}"
+
+ def __hash__(self):
+ return (self.name + str(self.variations)).__hash__()
+
+
+# Module types in this list are omitted from the report and graph
+# for brevity and simplicity. Presence in this list doesn't mean
+# that they shouldn't be converted, only that they are currently not
+# useful to record in the graph or report.
+IGNORED_KINDS = set([
+ "cc_defaults",
+ "hidl_package_root", # not being converted, contents converted as part of hidl_interface
+ "java_defaults",
+ "license",
+ "license_kind",
+])
+
+# queryview doesn't have information on the type of deps, so we explicitly skip
+# prebuilt types
+_QUERYVIEW_IGNORE_KINDS = set([
+ "android_app_import",
+ "android_library_import",
+ "cc_prebuilt_library",
+ "cc_prebuilt_library_headers",
+ "cc_prebuilt_library_shared",
+ "cc_prebuilt_library_static",
+ "cc_prebuilt_library_static",
+ "cc_prebuilt_object",
+ "java_import",
+ "java_import_host",
+ "java_sdk_library_import",
+])
+
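+# Root of the Android source tree, five directory levels above this file.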
+SRC_ROOT_DIR = os.path.abspath(__file__ + "/../../../../..")
+
+LUNCH_ENV = {
+ # Use aosp_arm as the canonical target product.
+ "TARGET_PRODUCT": "aosp_arm",
+ "TARGET_BUILD_VARIANT": "userdebug",
+}
+
+BANCHAN_ENV = {
+ # Use module_arm64 as the canonical banchan target product.
+ "TARGET_PRODUCT": "module_arm64",
+ "TARGET_BUILD_VARIANT": "eng",
+ # just needs to be non-empty; the specific module does not matter for
+ # Soong analysis purposes
+ "TARGET_BUILD_APPS": "all",
+}
+
+
+def _build_with_soong(target, banchan_mode=False):
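+  """Runs soong_ui.bash to build the given target (e.g. queryview or json-module-graph)."""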
+ subprocess.check_output(
+ [
+ "build/soong/soong_ui.bash",
+ "--make-mode",
+ "--skip-soong-tests",
+ target,
+ ],
+ cwd=SRC_ROOT_DIR,
+ env=BANCHAN_ENV if banchan_mode else LUNCH_ENV,
+ )
+
+
+def get_properties(json_module):
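+  """Returns the properties set on a json-module-graph module as a name-to-value dict."""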
+ set_properties = {}
+ if "Module" not in json_module:
+ return set_properties
+ if "Android" not in json_module["Module"]:
+ return set_properties
+ if "SetProperties" not in json_module["Module"]["Android"]:
+ return set_properties
+
+ for prop in json_module['Module']['Android']['SetProperties']:
+ if prop["Values"]:
+ value = prop["Values"]
+ else:
+ value = prop["Value"]
+ set_properties[prop["Name"]] = value
+ return set_properties
+
+
+def get_property_names(json_module):
+ return get_properties(json_module).keys()
+
+
+def get_queryview_module_info(modules, banchan_mode):
+ """Returns the list of transitive dependencies of input module as built by queryview."""
+ _build_with_soong("queryview", banchan_mode)
+
+ queryview_xml = subprocess.check_output(
+ [
+ "build/bazel/bin/bazel",
+ "query",
+ "--config=ci",
+ "--config=queryview",
+ "--output=xml",
+ # union of queries to get the deps of all Soong modules with the given names
+ " + ".join(f'deps(attr("soong_module_name", "^{m}$", //...))'
+ for m in modules)
+ ],
+ cwd=SRC_ROOT_DIR,
+ )
+ try:
+ return xml.etree.ElementTree.fromstring(queryview_xml)
+ except xml.etree.ElementTree.ParseError as err:
+ sys.exit(f"""Could not parse XML:
+{queryview_xml}
+ParseError: {err}""")
+
+
+def get_json_module_info(banchan_mode=False):
+ """Returns the list of transitive dependencies of input module as provided by Soong's json module graph."""
+ _build_with_soong("json-module-graph", banchan_mode)
+ try:
+ with open(os.path.join(SRC_ROOT_DIR,"out/soong/module-graph.json")) as f:
+ return json.load(f)
+ except json.JSONDecodeError as err:
+ sys.exit(f"""Could not decode json:
+out/soong/module-graph.json
+JSONDecodeError: {err}""")
+
+
+def _ignore_json_module(json_module, ignore_by_name):
+ # windows is not a priority currently
+ if is_windows_variation(json_module):
+ return True
+ if ignore_kind(json_module['Type']):
+ return True
+ if json_module['Name'] in ignore_by_name:
+ return True
+ # for filegroups whose name is the same as their single source, we do not
+ # migrate the filegroup and instead rely on the file name being exported
+ if json_module['Type'] == 'filegroup':
+ set_properties = get_properties(json_module)
+ srcs = set_properties.get('Srcs', [])
+ if len(srcs) == 1:
+ return json_module['Name'] in srcs
+ return False
+
+
+def visit_json_module_graph_post_order(
+ module_graph, ignore_by_name, ignore_java_auto_deps, filter_predicate, visit
+):
+ # The set of ignored modules. These modules (and their dependencies) are not shown
+ # in the graph or report.
+ ignored = set()
+
+ # name to all module variants
+ module_graph_map = {}
+ root_module_keys = []
+ name_to_keys = collections.defaultdict(set)
+
+ # Do a single pass to find all top-level modules to be ignored
+ for module in module_graph:
+ name = module["Name"]
+ key = _ModuleKey(name, module["Variations"])
+ if _ignore_json_module(module, ignore_by_name):
+ ignored.add(key)
+ continue
+ name_to_keys[name].add(key)
+ module_graph_map[key] = module
+ if filter_predicate(module):
+ root_module_keys.append(key)
+
+ visited = set()
+
+ def json_module_graph_post_traversal(module_key):
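+ # Recursively visit deps (and created_by modules) before calling visit, so modules are visited in post-order.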
+ if module_key in ignored or module_key in visited:
+ return
+ visited.add(module_key)
+
+ deps = set()
+ module = module_graph_map[module_key]
+ created_by = module["CreatedBy"]
+ to_visit = set()
+
+ if created_by:
+ for key in name_to_keys[created_by]:
+ if key in ignored:
+ continue
+ # treat created_by as a dep so it appears as a blocker; otherwise the
+ # module would be disconnected from the traversal graph despite having a
+ # direct relationship to a module that must be addressed in the migration
+ deps.add(created_by)
+ json_module_graph_post_traversal(key)
+
+ for dep in module["Deps"]:
+ if ignore_json_dep(
+ dep, module["Name"], ignored, ignore_java_auto_deps
+ ):
+ continue
+
+ dep_name = dep["Name"]
+ deps.add(dep_name)
+ dep_key = _ModuleKey(dep_name, dep["Variations"])
+
+ if dep_key not in visited:
+ json_module_graph_post_traversal(dep_key)
+
+ visit(module, deps)
+
+ for module_key in root_module_keys:
+ json_module_graph_post_traversal(module_key)
+
+
+QueryviewModule = collections.namedtuple("QueryviewModule", [
+ "name",
+ "kind",
+ "variant",
+ "dirname",
+ "deps",
+ "srcs",
+])
+
+
+def _bazel_target_to_dir(full_target):
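+  """Converts a Bazel label such as //path/to/pkg:name into the package directory path/to/pkg."""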
+ dirname, _ = full_target.split(":")
+ return dirname[len("//"):] # discard prefix
+
+
+def _get_queryview_module(name_with_variant, module, kind):
+ name = None
+ variant = ""
+ deps = []
+ srcs = []
+ for attr in module:
+ attr_name = attr.attrib["name"]
+ if attr.tag == "rule-input":
+ deps.append(attr_name)
+ elif attr_name == "soong_module_name":
+ name = attr.attrib["value"]
+ elif attr_name == "soong_module_variant":
+ variant = attr.attrib["value"]
+ elif attr_name == "soong_module_type" and kind == "generic_soong_module":
+ kind = attr.attrib["value"]
+ elif attr_name == "srcs":
+ for item in attr:
+ srcs.append(item.attrib["value"])
+
+ return QueryviewModule(
+ name=name,
+ kind=kind,
+ variant=variant,
+ dirname=_bazel_target_to_dir(name_with_variant),
+ deps=deps,
+ srcs=srcs,
+ )
+
+
+def _ignore_queryview_module(module, ignore_by_name):
+ if module.name in ignore_by_name:
+ return True
+ if ignore_kind(module.kind, queryview=True):
+ return True
+ # special handling for filegroup srcs: if a source has the same name as
+ # the filegroup module, we don't convert it
+ if module.kind == "filegroup" and module.name in module.srcs:
+ return True
+ return module.variant.startswith("windows")
+
+
+def visit_queryview_xml_module_graph_post_order(module_graph, ignored_by_name,
+ filter_predicate, visit):
+ # The set of ignored modules. These modules (and their dependencies) are
+ # not shown in the graph or report.
+ ignored = set()
+
+ # queryview embeds the variant in the long name; keep a map from the name
+ # with variant to just the name
+ name_with_variant_to_name = dict()
+
+ module_graph_map = dict()
+ to_visit = []
+
+ for module in module_graph:
+ ignore = False
+ if module.tag != "rule":
+ continue
+ kind = module.attrib["class"]
+ name_with_variant = module.attrib["name"]
+
+ qv_module = _get_queryview_module(name_with_variant, module, kind)
+
+ if _ignore_queryview_module(qv_module, ignored_by_name):
+ ignored.add(name_with_variant)
+ continue
+
+ if filter_predicate(qv_module):
+ to_visit.append(name_with_variant)
+
+ name_with_variant_to_name.setdefault(name_with_variant, qv_module.name)
+ module_graph_map[name_with_variant] = qv_module
+
+ visited = set()
+
+ def queryview_module_graph_post_traversal(name_with_variant):
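+ # Recursively visit deps before calling visit, so modules are visited in post-order.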
+ module = module_graph_map[name_with_variant]
+ if name_with_variant in ignored or name_with_variant in visited:
+ return
+ visited.add(name_with_variant)
+
+ name = name_with_variant_to_name[name_with_variant]
+
+ deps = set()
+ for dep_name_with_variant in module.deps:
+ if dep_name_with_variant in ignored:
+ continue
+ dep_name = name_with_variant_to_name[dep_name_with_variant]
+ if dep_name == "prebuilt_" + name:
+ continue
+ if dep_name_with_variant not in visited:
+ queryview_module_graph_post_traversal(dep_name_with_variant)
+
+ if name != dep_name:
+ deps.add(dep_name)
+
+ visit(module, deps)
+
+ for name_with_variant in to_visit:
+ queryview_module_graph_post_traversal(name_with_variant)
+
+
+def get_bp2build_converted_modules() -> Set[str]:
+ """ Returns the list of modules that bp2build can currently convert. """
+ _build_with_soong("bp2build")
+ # Parse the list of converted module names from bp2build
+ with open(
+ os.path.join(SRC_ROOT_DIR,
+ "out/soong/soong_injection/metrics/converted_modules.txt"),
+ "r") as f:
+ # Read line by line, excluding comments.
+ # Each line is a module name.
+ ret = set(line.strip() for line in f if not line.strip().startswith("#"))
+ return ret
+
+
+def get_json_module_type_info(module_type):
+ """Returns the combined transitive dependency closures of all modules of module_type."""
+ _build_with_soong("json-module-graph")
+ # Run query.sh on the module graph for the top level module type
+ result = subprocess.check_output(
+ [
+ "build/bazel/json_module_graph/query.sh",
+ "fullTransitiveModuleTypeDeps", "out/soong/module-graph.json",
+ module_type
+ ],
+ cwd=SRC_ROOT_DIR,
+ )
+ return json.loads(result)
+
+
+def is_windows_variation(module):
+ """Returns True if input module's variant is Windows.
+
+ Args:
+ module: an entry parsed from Soong's json-module-graph
+ """
+ dep_variations = module.get("Variations")
+ dep_variation_os = ""
+ if dep_variations is not None:
+ for v in dep_variations:
+ if v["Mutator"] == "os":
+ dep_variation_os = v["Variation"]
+ return dep_variation_os == "windows"
+
+
+def ignore_kind(kind, queryview=False):
+ if queryview and kind in _QUERYVIEW_IGNORE_KINDS:
+ return True
+ return kind in IGNORED_KINDS or "defaults" in kind
+
+
+def is_prebuilt_to_source_dep(dep):
+ # Soong always adds a dependency from a source module to its corresponding
+ # prebuilt module, if it exists.
+ # https://cs.android.com/android/platform/superproject/+/master:build/soong/android/prebuilt.go;l=395-396;drc=5d6fa4d8571d01a6e5a63a8b7aa15e61f45737a9
+ # This makes it appear that the prebuilt is a transitive dependency regardless
+ # of whether it is actually necessary. Skip these edges to keep the graph
+ # limited to modules that are actually used in the build.
+ return dep["Tag"] == "android.prebuiltDependencyTag {BaseDependencyTag:{}}"
+
+
+def _is_java_auto_dep(dep):
+ # Soong automatically adds a number of dependencies for Java modules, making
+ # it difficult to understand the actual dependencies; remove the deps that
+ # were not specified by the user
+ tag = dep["Tag"]
+ if not tag:
+ return False
+ return (
+ (
+ tag.startswith("java.dependencyTag")
+ and (
+ "name:proguard-raise" in tag
+ or "name:bootclasspath" in tag
+ or "name:system modules" in tag
+ or "name:framework-res" in tag
+ or "name:sdklib" in tag
+ or "name:java9lib" in tag
+ )
+ or (
+ tag.startswith("java.usesLibraryDependencyTag")
+ or tag.startswith("java.hiddenAPIStubsDependencyTag")
+ )
+ )
+ or (
+ tag.startswith("android.sdkMemberDependencyTag")
+ or tag.startswith("java.scopeDependencyTag")
+ )
+ or tag.startswith("dexpreopt.dex2oatDependencyTag")
+ )
+
+
+def ignore_json_dep(dep, module_name, ignored_keys, ignore_java_auto_deps):
+ """Whether to ignore a json dependency based on heuristics.
+
+ Args:
+ dep: dependency struct from an entry in Soong's json-module-graph
+ module_name: name of the module this is a dependency of
+ ignored_keys: a set of _ModuleKey to ignore
+ ignore_java_auto_deps: whether to ignore deps that Soong adds automatically for Java modules
+ """
+ if is_prebuilt_to_source_dep(dep):
+ return True
+ if ignore_java_auto_deps and _is_java_auto_dep(dep):
+ return True
+ name = dep["Name"]
+ return (
+ _ModuleKey(name, dep["Variations"]) in ignored_keys or name == module_name
+ )
diff --git a/scripts/bp2build_progress/dependency_analysis_test.py b/scripts/bp2build_progress/dependency_analysis_test.py
new file mode 100755
index 00000000..a966d683
--- /dev/null
+++ b/scripts/bp2build_progress/dependency_analysis_test.py
@@ -0,0 +1,558 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for dependency_analysis.py."""
+
+import dependency_analysis
+import queryview_xml
+import soong_module_json
+import unittest
+
+
+class DependencyAnalysisTest(unittest.TestCase):
+
+ def test_visit_json_module_graph_post_order_visits_all_in_post_order(self):
+ graph = [
+ soong_module_json.make_module('q', 'module', [
+ soong_module_json.make_dep('a'),
+ soong_module_json.make_dep('b'),
+ ]),
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep('b'),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module('b', 'module', [
+ soong_module_json.make_dep('d'),
+ ]),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['d', 'b', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_skips_ignored_by_name_and_transitive(
+ self):
+ graph = [
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep('b'),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module('b', 'module', [
+ soong_module_json.make_dep('d'),
+ ]),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set('b'), False, only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_skips_defaults_and_transitive(
+ self):
+ graph = [
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep('b'),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module('b', 'module_defaults', [
+ soong_module_json.make_dep('d'),
+ ]),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_skips_windows_and_transitive(
+ self):
+ windows_variation = soong_module_json.make_variation('os', 'windows')
+ graph = [
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep('b', variations=[windows_variation]),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module(
+ 'b',
+ 'module',
+ [
+ soong_module_json.make_dep('d'),
+ ],
+ variations=[windows_variation],
+ ),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_skips_prebuilt_tag_deps(self):
+ graph = [
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep(
+ 'b', 'android.prebuiltDependencyTag {BaseDependencyTag:{}}'),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module('b', 'module', [
+ soong_module_json.make_dep('d'),
+ ]),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_no_infinite_loop_for_self_dep(
+ self):
+ graph = [
+ soong_module_json.make_module('a', 'module',
+ [soong_module_json.make_dep('a')]),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_visits_all_variants(self):
+ graph = [
+ soong_module_json.make_module(
+ 'a',
+ 'module',
+ [
+ soong_module_json.make_dep('b'),
+ ],
+ variations=[soong_module_json.make_variation('m', '1')],
+ ),
+ soong_module_json.make_module(
+ 'a',
+ 'module',
+ [
+ soong_module_json.make_dep('c'),
+ ],
+ variations=[soong_module_json.make_variation('m', '2')],
+ ),
+ soong_module_json.make_module('b', 'module', [
+ soong_module_json.make_dep('d'),
+ ]),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['d', 'b', 'a', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_skips_filegroup_with_src_same_as_name(self):
+ graph = [
+ soong_module_json.make_module(
+ 'a',
+ 'filegroup',
+ [
+ soong_module_json.make_dep('b'),
+ ],
+ json_props=[
+ soong_module_json.make_property(
+ name='Srcs',
+ values=['other_file'],
+ ),
+ ],
+ ),
+ soong_module_json.make_module(
+ 'b',
+ 'filegroup',
+ json_props=[
+ soong_module_json.make_property(
+ name='Srcs',
+ values=['b'],
+ ),
+ ],
+ ),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_json_module_graph_post_order_include_created_by(self):
+ graph = [
+ soong_module_json.make_module('a', 'module', [
+ soong_module_json.make_dep('b'),
+ soong_module_json.make_dep('c'),
+ ]),
+ soong_module_json.make_module('b', 'module', created_by='d'),
+ soong_module_json.make_module('c', 'module', [
+ soong_module_json.make_dep('e'),
+ ]),
+ soong_module_json.make_module('d', 'module', []),
+ soong_module_json.make_module('e', 'module', []),
+ ]
+
+ def only_a(json):
+ return json['Name'] == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module['Name'])
+
+ dependency_analysis.visit_json_module_graph_post_order(
+ graph, set(), False, only_a, visit)
+
+ expected_visited = ['d', 'b', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_visits_all(self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'module', dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['d', 'b', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_ignore_by_name(
+ self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'module', dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set('b'), only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_default(self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'module_defaults', dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_cc_prebuilt(self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'cc_prebuilt_library', dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_filegroup_duplicate_name(
+ self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'filegroup', dep_names=['//pkg:d'], srcs=['b']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_windows(self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a', 'a', 'module', dep_names=['//pkg:b', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b',
+ 'b',
+ 'module',
+ dep_names=['//pkg:d'],
+ variant='windows-x86'),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_self_dep_no_infinite_loop(
+ self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a',
+ 'a',
+ 'module',
+ dep_names=['//pkg:b--variant1', '//pkg:c']),
+ queryview_xml.make_module(
+ '//pkg:b--variant1',
+ 'b',
+ 'module',
+ variant='variant1',
+ dep_names=['//pkg:b--variant2']),
+ queryview_xml.make_module(
+ '//pkg:b--variant2',
+ 'b',
+ 'module',
+ variant='variant2',
+ dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['d', 'b', 'b', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+ def test_visit_queryview_xml_module_graph_post_order_skips_prebuilt_with_same_name(
+ self):
+ graph = queryview_xml.make_graph([
+ queryview_xml.make_module(
+ '//pkg:a',
+ 'a',
+ 'module',
+ dep_names=['//other_pkg:prebuilt_a', '//pkg:b', '//pkg:c']),
+ queryview_xml.make_module('//other_pkg:prebuilt_a', 'prebuilt_a',
+ 'prebuilt_module'),
+ queryview_xml.make_module(
+ '//pkg:b', 'b', 'module', dep_names=['//pkg:d']),
+ queryview_xml.make_module(
+ '//pkg:c', 'c', 'module', dep_names=['//pkg:e']),
+ queryview_xml.make_module('//pkg:d', 'd', 'module'),
+ queryview_xml.make_module('//pkg:e', 'e', 'module'),
+ ])
+
+ def only_a(module):
+ return module.name == 'a'
+
+ visited_modules = []
+
+ def visit(module, _):
+ visited_modules.append(module.name)
+
+ dependency_analysis.visit_queryview_xml_module_graph_post_order(
+ graph, set(), only_a, visit)
+
+ expected_visited = ['d', 'b', 'e', 'c', 'a']
+ self.assertListEqual(visited_modules, expected_visited)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/scripts/bp2build_progress/queryview_xml.py b/scripts/bp2build_progress/queryview_xml.py
new file mode 100644
index 00000000..e86153f3
--- /dev/null
+++ b/scripts/bp2build_progress/queryview_xml.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Generate queryview xml data for testing purposes."""
+
+import xml.etree.ElementTree as ElementTree
+
+
+def make_module(full_name,
+ name,
+ kind,
+ variant='',
+ dep_names=[],
+ soong_module_type=None,
+ srcs=None):
+ rule = ElementTree.Element('rule', attrib={'class': kind, 'name': full_name})
+ ElementTree.SubElement(
+ rule, 'string', attrib={
+ 'name': 'soong_module_name',
+ 'value': name
+ })
+ ElementTree.SubElement(
+ rule, 'string', attrib={
+ 'name': 'soong_module_variant',
+ 'value': variant
+ })
+ if soong_module_type:
+ ElementTree.SubElement(
+ rule,
+ 'string',
+ attrib={
+ 'name': 'soong_module_type',
+ 'value': soong_module_type
+ })
+ for dep in dep_names:
+ ElementTree.SubElement(rule, 'rule-input', attrib={'name': dep})
+
+ if not srcs:
+ return rule
+
+ src_element = ElementTree.SubElement(rule, 'list', attrib={'name': 'srcs'})
+ for src in srcs:
+ ElementTree.SubElement(src_element, 'string', attrib={'value': src})
+
+ return rule
+
+
+def make_graph(modules):
+ graph = ElementTree.Element('query', attrib={'version': '2'})
+ graph.extend(modules)
+ return graph
diff --git a/scripts/bp2build_progress/soong_module_json.py b/scripts/bp2build_progress/soong_module_json.py
new file mode 100644
index 00000000..4b26d979
--- /dev/null
+++ b/scripts/bp2build_progress/soong_module_json.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Generate module graph json data for testing purposes."""
+
+
+def make_dep(name, tag=None, variations=None):
+ return {
+ 'Name': name,
+ 'Tag': tag,
+ 'Variations': variations,
+ }
+
+
+def make_variation(mutator, variation):
+ return {
+ 'Mutator': mutator,
+ 'Variation': variation,
+ }
+
+
+def make_module(name,
+ typ,
+ deps=[],
+ blueprint='',
+ variations=None,
+ created_by='',
+ json_props=[]):
+ return {
+ 'Name': name,
+ 'Type': typ,
+ 'Blueprint': blueprint,
+ 'CreatedBy': created_by,
+ 'Deps': deps,
+ 'Variations': variations,
+ 'Module': {
+ 'Android': {
+ 'SetProperties': json_props,
+ },
+ },
+ }
+
+
+def make_property(name, value='', values=None):
+ return {
+ 'Name': name,
+ 'Value': value,
+ 'Values': values,
+ }
diff --git a/scripts/difftool/BUILD.bazel b/scripts/difftool/BUILD.bazel
index 719bc412..b93cca07 100644
--- a/scripts/difftool/BUILD.bazel
+++ b/scripts/difftool/BUILD.bazel
@@ -1,7 +1,21 @@
-filegroup (
- name = "collect_zip",
- srcs = [":collect"],
- output_group = "python_zip_file",
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+filegroup(
+ name = "collect_zip",
+ srcs = [":collect"],
+ output_group = "python_zip_file",
)
py_binary(
@@ -10,10 +24,10 @@ py_binary(
python_version = "PY3",
)
-filegroup (
- name = "difftool_zip",
- srcs = [":difftool"],
- output_group = "python_zip_file",
+filegroup(
+ name = "difftool_zip",
+ srcs = [":difftool"],
+ output_group = "python_zip_file",
)
py_library(
@@ -22,17 +36,26 @@ py_library(
"clangcompile.py",
"commands.py",
],
+ deps = [
+ "//build/bazel/scripts/difftool/diffs",
+ ],
)
py_test(
name = "difftool_test",
srcs = ["difftool_test.py"],
- deps = [":difftool", ":collect"],
+ deps = [
+ ":collect",
+ ":difftool",
+ ],
)
py_binary(
name = "difftool",
srcs = ["difftool.py"],
- deps = [":difftool_commands", ":collect"],
python_version = "PY3",
+ deps = [
+ ":collect",
+ ":difftool_commands",
+ ],
)
diff --git a/scripts/difftool/README.md b/scripts/difftool/README.md
index a06f725f..f1c3736d 100644
--- a/scripts/difftool/README.md
+++ b/scripts/difftool/README.md
@@ -20,7 +20,7 @@ Use of these tools requires a multistep process:
```
3. Build using the new build system:
```
- $ USE_BAZEL_ANALYSIS=1 m libc
+ $ m --bazel-mode-dev libc
```
4. Collect results to a tmp directory.
```
diff --git a/scripts/difftool/action_diff_notebook.py b/scripts/difftool/action_diff_notebook.py
new file mode 100755
index 00000000..e6cdfc11
--- /dev/null
+++ b/scripts/difftool/action_diff_notebook.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Provides useful diff information for build artifacts.
+
+This file is intended to be used like a Jupyter notebook. Since there isn't a
+one-to-one pairing between Soong intermediate artifacts and Bazel intermediate
+artifacts, I've found it's easiest to automate some of the diffing while
+leaving room for manual selection of what targets/artifacts to compare.
+
+In this file, the runnable sections are separated by the `# %%` identifier, and
+a compatible editor should be able to run those code blocks independently. I
+used VSCode during development, but this functionality also exists in other
+editors via plugins.
+
+There are some comments throughout to give an idea of how this notebook can be
+used.
+"""
+
+# %%
+import os
+import pathlib
+
+# This script should be run from the $TOP directory
+ANDROID_CHECKOUT_PATH = pathlib.Path(".").resolve()
+os.chdir(ANDROID_CHECKOUT_PATH)
+
+# %%
+import subprocess
+
+os.chdir(os.path.join(ANDROID_CHECKOUT_PATH, "build/bazel/scripts/difftool"))
+import difftool
+import commands
+import importlib
+
+# Python doesn't reload packages that have already been imported unless you
+# use importlib to explicitly reload them
+importlib.reload(difftool)
+importlib.reload(commands)
+os.chdir(ANDROID_CHECKOUT_PATH)
+
+# %%
+LUNCH_TARGET = "aosp_arm64"
+TARGET_BUILD_VARIANT = "userdebug"
+
+subprocess.run([
+ "build/soong/soong_ui.bash",
+ "--make-mode",
+ f"TARGET_PRODUCT={LUNCH_TARGET}",
+ f"TARGET_BUILD_VARIANT={TARGET_BUILD_VARIANT}",
+ "--skip-soong-tests",
+ "bp2build",
+ "nothing",
+])
+
+
+# %%
+def get_bazel_actions(
+ *, expr: str, config: str, mnemonic: str, additional_args: list[str] = []
+):
+ return difftool.collect_commands_bazel(
+ expr, config, mnemonic, *additional_args
+ )
+
+
+def get_ninja_actions(*, lunch_target: str, target: str, mnemonic: str):
+ ninja_output = difftool.collect_commands_ninja(
+ pathlib.Path(f"out/combined-{lunch_target}.ninja").resolve(),
+ pathlib.Path(target),
+ pathlib.Path("prebuilts/build-tools/linux-x86/bin/ninja").resolve(),
+ )
+ return [l for l in ninja_output if mnemonic in l]
+
+
+# %%
+# Example 1: Comparing link actions
+# This example gets all of the "CppLink" actions from the adb_test module, and
+# also gets the build actions that are needed to build the same module
+# through Ninja.
+#
+# After getting the action lists from each build tool, you can inspect the list
+# to find the particular action you're interested in diffing. In this case, there
+# was only 1 CppLink action from Bazel. The corresponding link action from Ninja
+# happened to be the last one (this is pretty typical).
+#
+# Then we set a new variable to keep track of each of these action strings.
+
+bzl_actions = get_bazel_actions(
+ config="linux_x86_64",
+ expr="//packages/modules/adb:adb_test__test_binary_unstripped",
+ mnemonic="CppLink",
+)
+ninja_actions = get_ninja_actions(
+ lunch_target=LUNCH_TARGET,
+ target="out/soong/.intermediates/packages/modules/adb/adb_test/linux_glibc_x86_64/adb_test",
+ mnemonic="clang++",
+)
+bazel_action = bzl_actions[0]["arguments"]
+ninja_action = ninja_actions[-1].split()
+
+# %%
+# Example 2: Comparing compile actions
+# This example is similar and gets all of the "CppCompile" actions from the
+# internal sub-target of adb_test. There is a "CppCompile" action for every
+# .cc file that goes into the target, so we just pick one of these files and
+# get the corresponding compile action from Ninja for this file.
+#
+# Similarly, we select an action from the Bazel list and its corresponding
+# Ninja action.
+
+# bzl_actions = get_bazel_actions(
+# config="linux_x86_64",
+# expr="//packages/modules/adb:adb_test__test_binary__internal_root_cpp",
+# mnemonic="CppCompile",
+# )
+# ninja_actions = get_ninja_actions(
+# lunch_target=LUNCH_TARGET,
+# target="out/soong/.intermediates/packages/modules/adb/adb_test/linux_glibc_x86_64/obj/packages/modules/adb/adb_io_test.o",
+# mnemonic="clang++",
+# )
+# bazel_action = bzl_actions[0]["arguments"]
+# ninja_action = ninja_actions[-1].split()
+
+# %%
+# Example 3: more complex expressions in the Bazel action
+# This example gets all of the "CppCompile" actions from the deps of everything
+# under the //packages/modules/adb package, but it uses the additional_args
+# to exclude "manual" internal targets.
+
+# bzl_actions = get_bazel_actions(
+# config="linux_x86_64",
+# expr="deps(//packages/modules/adb/...)",
+# mnemonic="CppCompile",
+# additional_args=[
+# "--build_tag_filters=-manual",
+# ],
+# )
+
+# %%
+# Once we have the command-line string for each action from Bazel and Ninja,
+# we can use difftool to parse and compare the actions.
+ninja_action = commands.expand_rsp(ninja_action)
+bzl_rich_commands = difftool.rich_command_info(" ".join(bazel_action))
+ninja_rich_commands = difftool.rich_command_info(" ".join(ninja_action))
+
+print("Bazel args:")
+print(" \\\n\t".join([bzl_rich_commands.tool] + bzl_rich_commands.args))
+print("Soong args:")
+print(" \\\n\t".join([ninja_rich_commands.tool] + ninja_rich_commands.args))
+
+bzl_only = bzl_rich_commands.compare(ninja_rich_commands)
+soong_only = ninja_rich_commands.compare(bzl_rich_commands)
+print("In Bazel, not Soong:")
+print(bzl_only)
+print("In Soong, not Bazel:")
+print(soong_only)
+
+# %%
+# Now that we've diffed the action strings, it is sometimes useful to also
+# diff the paths that go into the action. This helps us narrow down diffs
+# in a module that originate in its dependencies. This section attempts
+# to match paths from the Bazel action to corresponding paths in the Ninja
+# action, and then runs difftool on these paths.
+bzl_paths, _ = commands.extract_paths_from_action_args(bazel_action)
+ninja_paths, _ = commands.extract_paths_from_action_args(ninja_action)
+unmatched_paths = []
+for p1, p2 in commands.match_paths(bzl_paths, ninja_paths).items():
+ if p2 is None:
+ unmatched_paths.append(p1)
+ continue
+ diff = difftool.file_differences(
+ pathlib.Path(p1).resolve(),
+ pathlib.Path(p2).resolve(),
+ level=difftool.DiffLevel.FINE,
+ )
+ for row in diff:
+ print(row)
+if unmatched_paths:
+ # Since the test for file paths looks for existing files, this matching won't
+ # work if the Soong artifacts don't exist.
+ print(
+ "Found some Bazel paths that didn't have a good match in Soong "
+ + "intermediates. Did you run `m`?"
+ )
+ print("Unmatched paths:")
+ for i in unmatched_paths:
+ print("\t" + i)
diff --git a/scripts/difftool/clangcompile.py b/scripts/difftool/clangcompile.py
index 8bae76e5..06831ed9 100644
--- a/scripts/difftool/clangcompile.py
+++ b/scripts/difftool/clangcompile.py
@@ -13,18 +13,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License."""
-
"""Helpers pertaining to clang compile actions."""
import collections
-import difflib
import pathlib
import subprocess
-from typing import Callable
from commands import CommandInfo
from commands import flag_repr
from commands import is_flag_starts_with
from commands import parse_flag_groups
+from diffs.diff import ExtractInfo
+from diffs.context import ContextDiff
+from diffs.nm import NmSymbolDiff
+from diffs.bloaty import BloatyDiff
class ClangCompileInfo(CommandInfo):
@@ -83,6 +84,22 @@ class ClangCompileInfo(CommandInfo):
s += self._str_for_field("Misc", self.misc_flags)
return s
+ def compare(self, other):
+    """Computes the difference in arguments from another ClangCompileInfo."""
+ diffs = ClangCompileInfo(self.tool, [])
+ diffs.i_includes = [i for i in self.i_includes if i not in other.i_includes]
+ diffs.iquote_includes = [
+ i for i in self.iquote_includes if i not in other.iquote_includes
+ ]
+ diffs.isystem_includes = [
+ i for i in self.isystem_includes if i not in other.isystem_includes
+ ]
+ diffs.defines = [i for i in self.defines if i not in other.defines]
+ diffs.warnings = [i for i in self.warnings if i not in other.warnings]
+ diffs.file_flags = [i for i in self.file_flags if i not in other.file_flags]
+ diffs.misc_flags = [i for i in self.misc_flags if i not in other.misc_flags]
+ return diffs
+
def _is_src_group(x):
"""Returns true if the given flag group describes a source file."""
@@ -96,7 +113,8 @@ def _custom_flag_group(x):
flag group for clang compile. (For example, `-c` is a single-arg flag for
clang compiles, but may not be for other tools.)
- See commands.parse_flag_groups documentation for signature details."""
+ See commands.parse_flag_groups documentation for signature details.
+ """
if x.startswith("-I") and len(x) > 2:
return ("I", x[2:])
if x.startswith("-W") and len(x) > 2:
@@ -137,51 +155,71 @@ def _process_includes(includes):
return result
-# given a file, give a list of "information" about it
-ExtractInfo = Callable[[pathlib.Path], list[str]]
+def _external_tool(*args) -> ExtractInfo:
+ return lambda file: subprocess.run(
+ [*args, str(file)
+ ], check=True, capture_output=True, encoding="utf-8").stdout.splitlines()
+
+# TODO(usta) use nm as a data dependency
+def nm_differences(left_path: pathlib.Path,
+ right_path: pathlib.Path) -> list[str]:
+ """Returns differences in symbol tables.
-def _diff(left_path: pathlib.Path, right_path: pathlib.Path, tool_name: str,
- tool: ExtractInfo) -> list[str]:
- """Returns a list of strings describing differences in `.o` files.
Returns the empty list if these files are deemed "similar enough".
+ """
+ return NmSymbolDiff(_external_tool("nm"),
+ "symbol tables").diff(left_path, right_path)
- The given files must exist and must be object (.o) files."""
- errors = []
- left = tool(left_path)
- right = tool(right_path)
- comparator = difflib.context_diff(left, right)
- difflines = list(comparator)
- if difflines:
- err = "\n".join(difflines)
- errors.append(
- f"{left_path}\ndiffers from\n{right_path}\nper {tool_name}:\n{err}")
- return errors
+# TODO(usta) use readelf as a data dependency
+def elf_differences(left_path: pathlib.Path,
+ right_path: pathlib.Path) -> list[str]:
+ """Returns differences in elf headers.
+
+ Returns the empty list if these files are deemed "similar enough".
+ The given files must exist and must be object (.o) files.
+ """
+ return ContextDiff(_external_tool("readelf", "-h"),
+ "elf headers").diff(left_path, right_path)
-def _external_tool(*args) -> ExtractInfo:
- return lambda file: subprocess.run([*args, str(file)],
- check=True, capture_output=True,
- encoding="utf-8").stdout.splitlines()
+# TODO(usta) use bloaty as a data dependency
+def bloaty_differences(left_path: pathlib.Path,
+ right_path: pathlib.Path) -> list[str]:
+ """Returns differences in symbol and section tables.
-# TODO(usta) use nm as a data dependency
-def nm_differences(left_path: pathlib.Path, right_path: pathlib.Path) -> list[
- str]:
- """Returns differences in symbol tables.
Returns the empty list if these files are deemed "similar enough".
- The given files must exist and must be object (.o) files."""
- return _diff(left_path, right_path, "symbol tables", _external_tool("nm"))
+ The given files must exist and must be object (.o) files.
+ """
+ return _bloaty_differences(left_path, right_path)
-# TODO(usta) use readelf as a data dependency
-def elf_differences(left_path: pathlib.Path, right_path: pathlib.Path) -> list[
- str]:
- """Returns differences in elf headers.
+# TODO(usta) use bloaty as a data dependency
+def bloaty_differences_compileunits(left_path: pathlib.Path,
+ right_path: pathlib.Path) -> list[str]:
+ """Returns differences in symbol and section tables.
+
Returns the empty list if these files are deemed "similar enough".
- The given files must exist and must be object (.o) files."""
- return _diff(left_path, right_path, "elf headers",
- _external_tool("readelf", "-h"))
+ The given files must exist and must be object (.o) files.
+ """
+ return _bloaty_differences(left_path, right_path, True)
+
+
+# TODO(usta) use bloaty as a data dependency
+def _bloaty_differences(left_path: pathlib.Path,
+ right_path: pathlib.Path,
+ debug=False) -> list[str]:
+ symbols = BloatyDiff(
+ "symbol tables", "symbols",
+ has_debug_symbols=debug).diff(left_path, right_path)
+ sections = BloatyDiff(
+ "section tables", "sections",
+ has_debug_symbols=debug).diff(left_path, right_path)
+ segments = BloatyDiff(
+ "segment tables", "segments",
+ has_debug_symbols=debug).diff(left_path, right_path)
+ return symbols + sections + segments
diff --git a/scripts/difftool/commands.py b/scripts/difftool/commands.py
index 2c05dad4..a02dd4c0 100644
--- a/scripts/difftool/commands.py
+++ b/scripts/difftool/commands.py
@@ -13,9 +13,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License."""
-
"""Helper functions and types for command processing for difftool."""
+import os
+import pathlib
+
class CommandInfo:
"""Contains information about an action commandline."""
@@ -52,7 +54,8 @@ def parse_flag_groups(args, custom_flag_group=None):
group abiding by the above convention, or None to use non-custom logic.
This may be required to accurately parse arg groups. For example, `-a b` may
be either a one-arg group `-a` followed by a positonal group `b`, or a two-arg
- group `-a b`."""
+ group `-a b`.
+ """
flag_groups = []
i = 0
@@ -72,7 +75,7 @@ def parse_flag_groups(args, custom_flag_group=None):
# Look for a two-arg group if there are at least 2 elements left.
if i < len(args) - 1:
- g = two_arg_group(args[i], args[i+1])
+ g = two_arg_group(args[i], args[i + 1])
if g is not None:
flag_groups += [g]
i += 2
@@ -107,7 +110,8 @@ def two_arg_group(a, b):
Returns:
A tuple of the two args without hyphens if they belong to a single
- flag, or None if they do not. """
+ flag, or None if they do not.
+ """
if a.startswith("-") and (not b.startswith("-")):
return (remove_hyphens(a), b)
else:
@@ -122,7 +126,8 @@ def one_arg_group(x):
Returns:
A tuple of `(key, value)` of the flag group, if the arg comprises a
- complete flag group, or None if it does not."""
+ complete flag group, or None if it does not.
+ """
tokens = x.split("=")
if len(tokens) == 2:
return (remove_hyphens(tokens[0]), tokens[1])
@@ -143,3 +148,90 @@ def flag_repr(x):
else:
return x
+
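+# A hypothetical example: given arglist ["clang++", "@obj/foo.rsp", "-o", "a.o"]
+# (paths illustrative), expand_rsp replaces the "@obj/foo.rsp" entry with the
+# whitespace-separated tokens read from obj/foo.rsp and leaves the other
+# arguments untouched.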
+def expand_rsp(arglist: list[str]) -> list[str]:
+ expanded_command = []
+ for arg in arglist:
+ if len(arg) > 4 and arg[-4:] == ".rsp":
+ if arg[0] == "@":
+ arg = arg[1:]
+ with open(arg) as f:
+        expanded_command.extend(
+            [token for line in f.readlines() for token in line.split()])
+ else:
+ expanded_command.append(arg)
+ return expanded_command
+
+
+def should_ignore_path_argument(arg) -> bool:
+ if arg.startswith("bazel-out"):
+ return True
+ if arg.startswith("out/soong/.intermediates"):
+ return True
+ return False
+
+
+def extract_paths_from_action_args(
+ args: list[str],
+) -> tuple[list[pathlib.Path], list[str]]:
+ paths = []
+ other_args = []
+ for arg in args:
+ p = pathlib.Path(arg)
+ if p.is_file():
+ paths.append(p)
+ else:
+ other_args.append(arg)
+ return paths, other_args
+
+
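+# For example, sanitize_bazel_path("liblibfoo_bp2build_cc_library_static")
+# yields Path("libfoo"): the extra "lib" prefix and the bp2build static
+# library suffix that Bazel adds are stripped so the name can be matched
+# against its Soong counterpart.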
+def sanitize_bazel_path(path: str) -> pathlib.Path:
+ if path[:3] == "lib":
+ path = path[3:]
+ path = path.replace("_bp2build_cc_library_static", "")
+ return pathlib.Path(path)
+
+
+def find_matching_path(
+ path: pathlib.Path, other_paths: list[pathlib.Path]
+) -> pathlib.Path:
+ multiple_best_matches = False
+ best = (0, None)
+ for op in other_paths:
+ common = os.path.commonpath([path, op])
+ similarity = len(common.split(os.sep)) if common else 0
+ if similarity == best[0]:
+ multiple_best_matches = True
+ if similarity > best[0]:
+ multiple_best_matches = False
+ best = (similarity, op)
+ if multiple_best_matches:
+ print(
+ f"WARNING: path `{path}` had multiple best matches in list"
+ f" `{other_paths}`"
+ )
+ return best[1]
+
+
+def _reverse_path(p: pathlib.Path) -> str:
+ return os.path.join(*reversed(os.path.normpath(p).split(os.sep)))
+
+
+def _reverse_paths(paths: list[pathlib.Path]) -> list[pathlib.Path]:
+ return [_reverse_path(p) for p in paths]
+
+
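+# match_paths works on reversed paths (e.g. "out/soong/foo.o" becomes
+# "foo.o/soong/out") so that os.path.commonpath in find_matching_path measures
+# similarity starting from the file name rather than from the repository root,
+# which is what matters when pairing Bazel outputs with Soong outputs.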
+def match_paths(
+ bazel_paths: list[str], soong_paths: list[str]
+) -> dict[str, str]:
+ reversed_bazel_paths = _reverse_paths(bazel_paths)
+ reversed_soong_paths = _reverse_paths(soong_paths)
+ closest_path = {p: (0, None) for p in reversed_bazel_paths}
+ for bp in reversed_bazel_paths:
+ bp_soong_name = sanitize_bazel_path(bp)
+ closest_path[bp] = find_matching_path(bp_soong_name, reversed_soong_paths)
+ matched_paths = {}
+ for path, match in closest_path.items():
+ p1 = _reverse_path(path)
+ p2 = _reverse_path(match) if match is not None else None
+ matched_paths[p1] = p2
+ return matched_paths
diff --git a/scripts/bp2build-progress/BUILD.bazel b/scripts/difftool/diffs/BUILD.bazel
index 460308a4..bc2d222b 100644
--- a/scripts/bp2build-progress/BUILD.bazel
+++ b/scripts/difftool/diffs/BUILD.bazel
@@ -13,21 +13,12 @@
# limitations under the License.
py_library(
- name = "dependency_analysis",
- srcs = ["dependency_analysis.py"],
- visibility = ["//visibility:public"],
-)
-
-py_binary(
- name = "bp2build-progress",
- srcs = ["bp2build-progress.py"],
- visibility = ["//visibility:public"],
- deps = [":dependency_analysis"],
-)
-
-py_binary(
- name = "bp2build-module-dep-infos",
- srcs = ["bp2build-module-dep-infos.py"],
- visibility = ["//visibility:public"],
- deps = [":dependency_analysis"],
+ name = "diffs",
+ srcs = [
+ "bloaty.py",
+ "context.py",
+ "diff.py",
+ "nm.py",
+ ],
+ visibility = ["//build/bazel/scripts/difftool:__subpackages__"],
)
diff --git a/scripts/difftool/diffs/bloaty.py b/scripts/difftool/diffs/bloaty.py
new file mode 100644
index 00000000..06cea3d9
--- /dev/null
+++ b/scripts/difftool/diffs/bloaty.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import csv
+import pathlib
+import subprocess
+from diffs.diff import Diff, ExtractInfo
+
+
+class BloatyDiff(Diff):
+ """BloatyDiff compares the sizes of symbols present in cc objects
+
+ Bloaty McBloatface (bloaty) is used to discover size differences in object
+ files or cc binaries. This diff returns a list of symbols which are new or
+ larger in one file than the other.
+
+ The output does not distinguish between new symbols and ones that are simply
+ larger, so this output is best combined with the NmSymbolDiff to see which
+ symbols are new.
+
+ Example bloaty output (note: compileunits may not always be available):
+ $ bloaty --csv -d compileunits,symbols $BAZEL_OBJ -- $LEGACY_OBJ
+ compileunits,symbols,vmsize,filesize
+ external/zstd/lib/compress/zstd_fast.c,ZSTD_compressBlock_doubleFast_extDict_generic,6240,6344
+ external/zstd/lib/compress/zstd_fast.c,ZSTD_compressBlock_lazy_dictMatchState,-3428,-3551
+
+ The first entry is a symbol that is larger in the Bazel version of the binary,
+ and the second entry is a symbol that is larger in the Soong version of the
+ binary.
+ """
+ def __init__(self, tool_name, data_source, has_debug_symbols=False):
+ self.tool_name = tool_name
+ self.data_source = data_source
+ self.has_debug_symbols = has_debug_symbols
+
+ def _print_diff_row(self, row, ignore_keys):
+ attrs = sorted({
+ k: v
+ for k, v in row.items()
+ if k not in ignore_keys
+ }.items())
+ return row[self.data_source] + ": { " + ", ".join(f"{a[0]}: {a[1]}" for a in attrs) + " }"
+
+ def _collect_diff_compileunits(self, diffreader: csv.DictReader):
+ # maps from compileunit to list of diff rows
+ left_bigger = collections.defaultdict(list)
+ right_bigger = collections.defaultdict(list)
+
+ for row in diffreader:
+ compileunit = row["compileunits"]
+ if len(compileunit) > 0 and compileunit[0] == "[":
+ continue
+ filesize = row["filesize"]
+ if int(filesize) < 0:
+ left_bigger[compileunit].append(row)
+ elif int(filesize) > 0:
+ right_bigger[compileunit].append(row)
+
+ def print_diff_dict(dict):
+ lines = []
+ for compileunit, data in sorted(dict.items()):
+ lines.append("\t" + compileunit + ":")
+ rows = []
+ for row in data:
+ if row[self.data_source] and row[self.data_source][0] == "[":
+ continue
+          rows.append("\t\t" + self._print_diff_row(row, ignore_keys=[self.data_source, "compileunits"]))
+ lines.extend(sorted(rows))
+ return "\n".join(lines)
+
+ return print_diff_dict(left_bigger), print_diff_dict(right_bigger)
+
+ def _collect_diff(self, diffreader):
+ left_bigger = []
+ right_bigger = []
+
+ for row in diffreader:
+ filesize = row["filesize"]
+ if int(filesize) > 0:
+ left_bigger.append(row)
+ elif int(filesize) < 0:
+ right_bigger.append(row)
+
+ left_errors = "\n".join(["\t" + self._print_diff_row(row, ignore_keys=[self.data_source]) for row in left_bigger])
+ right_errors = "\n".join(["\t" + self._print_diff_row(row, ignore_keys=[self.data_source]) for row in right_bigger])
+ return left_errors, right_errors
+
+ def diff(self, left_path: pathlib.Path, right_path: pathlib.Path) -> list[str]:
+ try:
+ diff_csv = subprocess.run(["bloaty",
+ "--csv",
+ "-n", "0",
+ "-w",
+ "-d",
+ self.data_source + (",compileunits" if self.has_debug_symbols else ""),
+ str(left_path),
+ "--",
+ str(right_path)],
+ check=True, capture_output=True,
+ encoding="utf-8").stdout.splitlines()
+ except subprocess.CalledProcessError as e:
+ print("ERROR: bloaty tool returned non-zero exit status")
+ if self.has_debug_symbols:
+ print("ERROR: do objects contain debug symbols?")
+ raise e
+
+ diffreader = csv.DictReader(diff_csv)
+
+ if self.has_debug_symbols:
+ left_bigger, right_bigger = self._collect_diff_compileunits(diffreader)
+ else:
+ left_bigger, right_bigger = self._collect_diff(diffreader)
+
+ errors = []
+ if left_bigger:
+ errors.append(f"the following {self.data_source} are either unique or larger in\n{left_path}\n than those in\n{right_path}:\n{left_bigger}")
+ if right_bigger:
+ errors.append(f"the following {self.data_source} are either unique or larger in\n{right_path}\n than those in\n{left_path}:\n{right_bigger}")
+
+ return errors
diff --git a/scripts/difftool/diffs/context.py b/scripts/difftool/diffs/context.py
new file mode 100644
index 00000000..7152d109
--- /dev/null
+++ b/scripts/difftool/diffs/context.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import difflib
+import pathlib
+from diffs.diff import Diff, ExtractInfo
+
+
+class ContextDiff(Diff):
+ def __init__(self, tool: ExtractInfo, tool_name: str):
+ self.tool = tool
+ self.tool_name = tool_name
+
+ def diff(self, left_path: pathlib.Path, right_path: pathlib.Path) -> list[str]:
+ errors = []
+
+ left = self.tool(left_path)
+ right = self.tool(right_path)
+ comparator = difflib.context_diff(left, right)
+ difflines = list(comparator)
+ if difflines:
+ err = "\n".join(difflines)
+ errors.append(
+ f"{left_path}\ndiffers from\n{right_path}\nper {self.tool_name}:\n{err}")
+ return errors
diff --git a/scripts/difftool/diffs/diff.py b/scripts/difftool/diffs/diff.py
new file mode 100644
index 00000000..b40fdc5b
--- /dev/null
+++ b/scripts/difftool/diffs/diff.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC, abstractmethod
+import pathlib
+from typing import Callable
+
+# given a file, give a list of "information" about it
+ExtractInfo = Callable[[pathlib.Path], list[str]]
+
+class Diff(ABC):
+ @abstractmethod
+  def diff(self, left_path: pathlib.Path, right_path: pathlib.Path) -> list[str]:
+ """Returns a list of strings describing differences in `.o` files.
+ Returns the empty list if these files are deemed "similar enough".
+ """
+ pass
diff --git a/scripts/difftool/diffs/nm.py b/scripts/difftool/diffs/nm.py
new file mode 100644
index 00000000..c5ea519f
--- /dev/null
+++ b/scripts/difftool/diffs/nm.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import re
+import subprocess
+from diffs.diff import Diff, ExtractInfo
+
+class _Symbol:
+ """Data structure to hold a symbol as specified by nm
+
+ Equality of symbols is based on their name and attributes.
+
+ The self._addr property is excluded from comparisons in this class
+ because the location of a symbol in one binary is not a useful
+ difference from another binary.
+ """
+ def __init__(self, name, addr, attr):
+ self.name = name
+ self._addr = addr
+ self.attr = attr
+ def __hash__(self):
+ return (self.name + self.attr).__hash__()
+ def __eq__(self, other):
+ return self.name == other.name and self.attr == other.attr
+ def __repr__(self):
+ return f"{self.name}{{{self.attr}}}"
+
+
+class NmSymbolDiff(Diff):
+  """Compares symbols in the symbol table output by nm
+
+ Example nm output:
+ 0000000000000140 t GetExceptionSummary
+ U ExpandableStringInitialize
+ U ExpandableStringRelease
+ 0000000000000cf0 T jniCreateString
+
+ The first column is the address of the symbol in the binary, the second
+ column is an attribute associated with the symbol (see man nm for details),
+ and the last column is the demangled symbol name.
+ """
+ _nm_re = re.compile(r"^(\w+)?\s+([a-zA-Z])\s(\S+)$")
+
+ def __init__(self, tool: ExtractInfo, tool_name: str):
+ self.tool = tool
+ self.tool_name = tool_name
+
+  @staticmethod
+  def _read_symbols(nm_output):
+ symbols = set()
+ for line in nm_output:
+ match = NmSymbolDiff._nm_re.match(line)
+ if match:
+ symbols.add(_Symbol(match.group(3), match.group(1), match.group(2)))
+ return symbols
+
+ def diff(self, left_path: pathlib.Path, right_path: pathlib.Path) -> list[str]:
+ left_nm = subprocess.run(["nm", left_path], capture_output=True, encoding="utf-8").stdout.splitlines()
+ right_nm = subprocess.run(["nm", right_path], capture_output=True, encoding="utf-8").stdout.splitlines()
+ left_symbols = NmSymbolDiff._read_symbols(left_nm)
+ right_symbols = NmSymbolDiff._read_symbols(right_nm)
+
+ left_only = []
+ for s in left_symbols:
+ if s not in right_symbols:
+ left_only.append(s)
+ right_only = []
+ for s in right_symbols:
+ if s not in left_symbols:
+ right_only.append(s)
+
+ errors = []
+ if left_only:
+ errors.append(f"symbols in {left_path} not in {right_path}:")
+ errors.extend("\t" + str(s) for s in left_only)
+ if right_only:
+ errors.append(f"symbols in {right_path} not in {left_path}:")
+ errors.extend("\t" + str(s) for s in right_only)
+
+ return errors
diff --git a/scripts/difftool/difftool.py b/scripts/difftool/difftool.py
index 3622f4a6..a67d0ecf 100755
--- a/scripts/difftool/difftool.py
+++ b/scripts/difftool/difftool.py
@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""Provides useful diff information for build artifacts.
Uses collected build artifacts from two separate build invocations to
@@ -40,6 +39,7 @@ its analysis.
import argparse
import enum
import functools
+import json
import os
import pathlib
import re
@@ -88,14 +88,24 @@ class EnumAction(argparse.Action):
class ArtifactType(enum.Enum):
+ AUTO_INFER_FROM_SUFFIX = 0
CC_OBJECT = 1
CC_SHARED_LIBRARY = 2
+ CC_OBJECT_WITH_DEBUG_SYMBOLS = 3
OTHER = 99
+FILE_TYPE_CHOICES = {
+ "auto": ArtifactType.AUTO_INFER_FROM_SUFFIX,
+ "object": ArtifactType.CC_OBJECT,
+ "object_with_debug_symbols": ArtifactType.CC_OBJECT_WITH_DEBUG_SYMBOLS,
+ "shared_library": ArtifactType.CC_SHARED_LIBRARY,
+}
+
+
def _artifact_type(file_path):
ext = file_path.suffix
- if ext == ".o":
+ if ext in [".o", ".a"]:
return ArtifactType.CC_OBJECT
elif ext == ".so":
return ArtifactType.CC_SHARED_LIBRARY
@@ -104,50 +114,100 @@ def _artifact_type(file_path):
# TODO(usta) use libdiff
-def literal_diff(left_path: pathlib.Path, right_path: pathlib.Path) -> list[
- str]:
- return subprocess.run(["diff", str(left_path), str(right_path)],
- check=False, capture_output=True,
- encoding="utf-8").stdout.splitlines()
+def literal_diff(left_path: pathlib.Path,
+ right_path: pathlib.Path) -> list[str]:
+ return subprocess.run(
+ ["diff", str(left_path), str(right_path)],
+ check=False,
+ capture_output=True,
+ encoding="utf-8").stdout.splitlines()
@functools.cache
-def _diff_fns(artifact_type: ArtifactType, level: DiffLevel) -> list[
- DiffFunction]:
+def _diff_fns(artifact_type: ArtifactType,
+ level: DiffLevel) -> list[DiffFunction]:
fns = []
- if artifact_type == ArtifactType.CC_OBJECT:
+ if artifact_type in [
+ ArtifactType.CC_OBJECT, ArtifactType.CC_OBJECT_WITH_DEBUG_SYMBOLS
+ ]:
fns.append(clangcompile.nm_differences)
if level >= DiffLevel.WARNING:
fns.append(clangcompile.elf_differences)
+ if artifact_type == ArtifactType.CC_OBJECT_WITH_DEBUG_SYMBOLS:
+ fns.append(clangcompile.bloaty_differences_compileunits)
+ else:
+ fns.append(clangcompile.bloaty_differences)
else:
fns.append(literal_diff)
return fns
-def collect_commands(ninja_file_path: pathlib.Path,
- output_file_path: pathlib.Path) -> list[str]:
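+# Runs `bazel aquery` over the given query expression with --output=jsonproto
+# and the given --config, and returns the parsed actions whose mnemonic
+# matches `mnemonic`.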
+def collect_commands_bazel(expr: str, config: str, mnemonic: str, *args):
+ bazel_tool_path = pathlib.Path("build/bazel/bin/bazel").resolve().absolute()
+ bazel_proc = subprocess.run(
+ [
+ bazel_tool_path,
+ "aquery",
+ "--curses=no",
+ "--config=bp2build",
+ "--output=jsonproto",
+ f"--config={config}",
+ *args,
+ f"{expr}",
+ ],
+ capture_output=True,
+ encoding="utf-8",
+ )
+ print(bazel_proc.stderr)
+ actions_json = json.loads(bazel_proc.stdout)
+ return [a for a in actions_json["actions"] if a["mnemonic"] == mnemonic]
+
+
+def collect_commands_ninja(ninja_file_path: pathlib.Path,
+ output_file_path: pathlib.Path,
+ ninja_tool_path: pathlib.Path) -> list[str]:
"""Returns a list of all command lines required to build the file at given
+
output_file_path_string, as described by the ninja file present at
- ninja_file_path_string."""
+ ninja_file_path_string.
+ """
- ninja_tool_path = pathlib.Path(
- "prebuilts/build-tools/linux-x86/bin/ninja").resolve()
- wd = os.getcwd()
- os.chdir(ninja_file_path.parent.absolute())
- result = subprocess.check_output([str(ninja_tool_path),
- "-f", ninja_file_path.name,
- "-t", "commands",
- str(output_file_path)]).decode("utf-8")
- os.chdir(wd)
+ result = subprocess.check_output([
+ str(ninja_tool_path), "-f", ninja_file_path, "-t", "commands",
+ str(output_file_path)
+ ]).decode("utf-8")
return result.splitlines()
-def file_differences(left_path: pathlib.Path, right_path: pathlib.Path,
- level=DiffLevel.SEVERE) -> list[str]:
+def collect_commands(ninja_file_path: pathlib.Path,
+ output_file_path: pathlib.Path) -> list[str]:
+ ninja_tool_path = pathlib.Path(
+ "prebuilts/build-tools/linux-x86/bin/ninja").resolve()
+ wd = os.getcwd()
+ try:
+ os.chdir(ninja_file_path.parent.absolute())
+ return collect_commands_ninja(
+ ninja_file_path.name,
+ output_file_path,
+ ninja_tool_path,
+ )
+ except Exception as e:
+ raise e
+ finally:
+ os.chdir(wd)
+
+
+def file_differences(
+ left_path: pathlib.Path,
+ right_path: pathlib.Path,
+ level=DiffLevel.SEVERE,
+ file_type=ArtifactType.AUTO_INFER_FROM_SUFFIX) -> list[str]:
"""Returns differences between the two given files.
- Returns the empty list if these files are deemed "similar enough"."""
+
+ Returns the empty list if these files are deemed "similar enough".
+ """
errors = []
if not left_path.is_file():
@@ -157,13 +217,14 @@ def file_differences(left_path: pathlib.Path, right_path: pathlib.Path,
if errors:
return errors
- left_type = _artifact_type(left_path)
- right_type = _artifact_type(right_path)
- if left_type != right_type:
- errors += ["file types differ: %s and %s" % (left_type, right_type)]
- return errors
+ if file_type is ArtifactType.AUTO_INFER_FROM_SUFFIX:
+ file_type = _artifact_type(left_path)
+ right_type = _artifact_type(right_path)
+ if file_type != right_type:
+ errors += ["file types differ: %s and %s" % (file_type, right_type)]
+ return errors
- for fn in _diff_fns(left_type, level):
+ for fn in _diff_fns(file_type, level):
errors += fn(left_path, right_path)
return errors
@@ -202,6 +263,14 @@ cd_rm_prefix_pattern = re.compile("^cd [^&]* &&( )+rm [^&]* && (.*)$")
comment_suffix_pattern = re.compile("(.*) # .*")
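+# Strips any RBE wrapper prefix (e.g. a rewrapper invocation and its flags;
+# names illustrative) from a tokenized command by keeping everything from the
+# first token that ends with one of tool_endings ("clang"/"clang++" at the
+# call site below) onwards, or returns None if no such token is found.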
+def _remove_rbe_tokens(tokens, tool_endings):
+ for i in range(len(tokens)):
+ for ending in tool_endings:
+ if tokens[i].endswith(ending):
+ return tokens[i:]
+ return None
+
+
def rich_command_info(raw_command):
"""Returns a command info object describing the raw command string."""
cmd = raw_command.strip()
@@ -216,6 +285,9 @@ def rich_command_info(raw_command):
if m is not None:
cmd = m.group(1)
tokens = cmd.split()
+ tokens_without_rbe = _remove_rbe_tokens(tokens, ["clang", "clang++"])
+ if tokens_without_rbe:
+ tokens = tokens_without_rbe
tool = tokens[0]
args = tokens[1:]
@@ -229,42 +301,60 @@ def rich_command_info(raw_command):
def main():
parser = argparse.ArgumentParser(description="")
- parser.add_argument("--level",
- action=EnumAction,
- default=DiffLevel.SEVERE,
- type=DiffLevel,
- help="the level of differences to be considered." +
- "Diffs below the specified level are ignored.")
- parser.add_argument("--verbose", "-v",
- action=argparse.BooleanOptionalAction,
- default=False,
- help="log verbosely.")
- parser.add_argument("left_dir",
- help="the 'left' directory to compare build outputs " +
- "from. This must be the target of an invocation " +
- "of collect.py.")
- parser.add_argument("--left_file", "-l", dest="left_file", default=None,
- help="the output file (relative to execution root) for " +
- "the 'left' build invocation.")
- parser.add_argument("right_dir",
- help="the 'right' directory to compare build outputs " +
- "from. This must be the target of an invocation " +
- "of collect.py.")
- parser.add_argument("--right_file", "-r", dest="right_file", default=None,
- help="the output file (relative to execution root) " +
- "for the 'right' build invocation.")
- parser.add_argument("--allow_missing_file",
- action=argparse.BooleanOptionalAction,
- default=False,
- help="allow a missing output file; this is useful to " +
- "compare actions even in the absence of " +
- "an output file.")
+ parser.add_argument(
+ "--level",
+ action=EnumAction,
+ default=DiffLevel.SEVERE,
+ type=DiffLevel,
+      help="the level of differences to be considered. " +
+ "Diffs below the specified level are ignored.")
+ parser.add_argument(
+ "--verbose",
+ "-v",
+ action=argparse.BooleanOptionalAction,
+ default=False,
+ help="log verbosely.")
+ parser.add_argument(
+ "left_dir",
+ help="the 'left' directory to compare build outputs " +
+ "from. This must be the target of an invocation of collect.py.")
+ parser.add_argument(
+ "--left_file",
+ "-l",
+ dest="left_file",
+ default=None,
+ help="the output file (relative to execution root) for " +
+ "the 'left' build invocation.")
+ parser.add_argument(
+ "right_dir",
+ help="the 'right' directory to compare build outputs " +
+ "from. This must be the target of an invocation of collect.py.")
+ parser.add_argument(
+ "--right_file",
+ "-r",
+ dest="right_file",
+ default=None,
+ help="the output file (relative to execution root) " +
+ "for the 'right' build invocation.")
+ parser.add_argument(
+ "--file_type",
+ dest="file_type",
+ default="auto",
+ choices=FILE_TYPE_CHOICES.keys(),
+ help="the type of file being diffed (overrides automatic " +
+ "filetype resolution)")
+ parser.add_argument(
+ "--allow_missing_file",
+ action=argparse.BooleanOptionalAction,
+ default=False,
+ help="allow a missing output file; this is useful to " +
+ "compare actions even in the absence of an output file.")
args = parser.parse_args()
level = args.level
left_diffinfo = pathlib.Path(args.left_dir).joinpath(COLLECTION_INFO_FILENAME)
- right_diffinfo = pathlib.Path(args.right_dir).joinpath(
- COLLECTION_INFO_FILENAME)
+ right_diffinfo = pathlib.Path(
+ args.right_dir).joinpath(COLLECTION_INFO_FILENAME)
left_ninja_name, left_file = parse_collection_info(left_diffinfo)
right_ninja_name, right_file = parse_collection_info(right_diffinfo)
@@ -288,7 +378,8 @@ def main():
if not right_path.is_file():
raise RuntimeError("Expected file %s was not found. " % right_path)
- file_diff_errors = file_differences(left_path, right_path, level)
+ file_diff_errors = file_differences(left_path, right_path, level,
+ FILE_TYPE_CHOICES[args.file_type])
if file_diff_errors:
for err in file_diff_errors:
@@ -301,11 +392,11 @@ def main():
right_commands = collect_commands(right_ninja_path, right_file)
right_command_info = rich_command_info(right_commands[-1])
print("======== ACTION COMPARISON: ========")
- print("=== LEFT:\n")
- print(left_command_info)
+ print("=== LEFT ONLY:\n")
+ print(left_command_info.compare(right_command_info))
print()
- print("=== RIGHT:\n")
- print(right_command_info)
+ print("=== RIGHT ONLY:\n")
+ print(right_command_info.compare(left_command_info))
print()
sys.exit(1)
else:
diff --git a/scripts/elf_compare.sh b/scripts/elf_compare.sh
new file mode 100755
index 00000000..8b053444
--- /dev/null
+++ b/scripts/elf_compare.sh
@@ -0,0 +1,126 @@
+#! /bin/bash -eu
+
+# Compare object files the linker used to build given binary for
+# two different configurations.
+# As an example, suppose we want to compare the `adbd` binary that is
+# included in the `com.android.adbd` APEX. We first build this APEX
+# with Soong and rename the build tree to `out.ref`:
+# $ m com.android.adbd && mv out out.ref
+# Then we build it again with a mixed build and rename the build tree
+# to `out.mix`:
+# $ m --bazel-mode-staging com.android.adbd && mv out out.mix
+# Now we can run this script to compare `adbd` binaries between
+# two builds as follows:
+# $ elf_compare.sh adbd out.ref out.mix
+# Note that we refer to the first of the two build directories as
+# 'reference' and to the second one as 'our'.
+#
+# There are two ways to specify the binaries to compare:
+# * elf_compare.sh REFDIR REFELF OURDIR OURELF
+#   Compare REFDIR/**/REFELF (i.e., the file in REFDIR whose path ends
+#   with REFELF) to OURDIR/**/OURELF
+# * elf_compare.sh ELF REFDIR OURDIR
+#   This is a shortcut:
+#   if ELF ends with .so, the same as
+#     elf_compare.sh REFDIR ELF OURDIR ELF
+#   otherwise the same as
+#     elf_compare.sh REFDIR ELF OURDIR ELF.unstripped
+#
+# Overall, the process is as follows:
+# * For each build, extract the list of the input objects and
+#   map each such object to a configuration-independent key
+# * Compare the maps. For each common key, use `elfdiff` to compare
+# the ELF files
+function die() { format=$1; shift; printf "$format\n" $@; exit 1; }
+
+case $# in
+ 3) declare -r refelf=$1 refdir=$2 ourdir=$3
+ [[ ${ourelf:=$refelf} =~ .so$ ]] || ourelf=$ourelf.unstripped ;;
+ 4) declare -r refdir=$1 refelf=$2 ourdir=$3 ourelf=$4 ;;
+ *) die "usage:\n ${0##*/} ELF REFDIR OURDIR\nor\n ${0##*/} REFDIR REFELF OURDIR OURELF" ;;
+esac
+[[ -d $refdir ]] || die "$refdir is not a build directory"
+[[ -d $ourdir ]] || die "$ourdir is not a build directory"
+
+declare -r elf_input_files="${0%/*}"/elf_input_files.sh
+
+# Outputs a script that initializes an associative array with the
+# given name, mapping object keys to their paths inside the tree.
+# Ignore prebuilts and .so files.
+# Normalize library names, as in Bazel they sometimes start with
+# `liblib` instead of `lib` and may end with `_bp2build_cc_library_static`.
+# It's a rather ugly sed script.
+# Anyways, the output script looks like this:
+# declare -A <name>=(
+# ["libfoo.a(abc.o)"]="<path>/libfoo(abc.o)"
+# ....
+# )
+function objects_map() {
+ local -r name=$1 out_dir=$2 prefix="${3:-}"
+ grep -v -e '^prebuilts/' -e '\.so$' | sed -nr \
+ -e "1ideclare -A $name=(" \
+ -e "s|^|$prefix|" \
+ -e "s|^out/|$out_dir/|" \
+ -e '/_bp2build_cc_library_static\.a/s|(.*)/(lib)?(lib[^/]*)(_bp2build_cc_library_static\.a)\((.+)\)$|["\3.a(\5)"]="\1/\2\3\4(\5)"|p' \
+ -e '/_bp2build_cc_library_static\.a/!s|(.*)/(lib)?(lib[^/]*)\((.+)\)$|["\3(\4)"]="\1/\2\3(\4)"|p' \
+ -e 's|(.*)/([^/]*\.s?o)$|["\2"]="\1/\2"|p' \
+ -e '$i)'
+}
+
+declare -r reffiles=$(mktemp --suffix=.ref) ourfiles=$(mktemp --suffix=.our)
+declare -r comparator=$(mktemp /tmp/elfdiff.XXXXXX)
+trap 'rm -f $ourfiles $reffiles $comparator' EXIT
+
+# Initialize `ref_objects` to be objects map for ref build
+"$elf_input_files" $refelf $refdir >$reffiles || exit 1
+. <(objects_map ref_objects $refdir <$reffiles )
+
+# Initialize `our_objects` to be objects map for our build
+"$elf_input_files" $ourelf $ourdir >$ourfiles || exit 1
+declare -r bazel_prefix=out/bazel/output/execroot/__main__/
+. <(objects_map our_objects $ourdir $bazel_prefix <$ourfiles )
+
+# Minor re-keying of `our_objects` (e.g., Soong's `main.o` is
+# Bazel's `libadbd__internal_root.lo(main.o)`)
+declare -Ar equivalences=(
+ ["libadbd__internal_root.lo(main.o)"]="main.o"
+ ["libadbd__internal_root.lo(libbuildversion.o)"]="libbuildversion.a(libbuildversion.o)"
+ ["crtend.o"]="crtend_android.o")
+for k in "${!equivalences[@]}"; do
+ if [[ -v "our_objects[$k]" ]]; then
+ our_objects["${equivalences[$k]}"]="${our_objects[$k]}"
+ unset "our_objects[$k]"
+ fi
+done
+
+declare -a missing extra common
+# Compare the keys from `ref_objects` and `our_objects` and output a script
+# that initializes the `missing`, `extra` and `common` arrays to the keys
+# found only in `ref_objects`, only in `our_objects`, and in both, resp.
+function classify() {
+ comm <(printf "%s\n" "${!ref_objects[@]}" | sort) <(printf "%s\n" "${!our_objects[@]}" | sort) \
+ | sed -nr '/^\t\t/{s|^\t\t(.*)|common+=("\1")|p;d};/^\t/{s|^\t(.*)|extra+=("\1")|p;d};s|(.*)|missing+=("\1")|p'
+}
+
+. <(classify)
+if [[ -v missing ]]; then
+ printf "The following input object files are missing:\n"
+ for o in "${missing[@]}"; do
+ printf " %s\n" "${ref_objects[$o]}"
+ done
+fi
+
+if [[ -v extra ]]; then
+ printf "The following input object files are extra:\n"
+ for o in "${extra[@]}"; do
+ printf " %s\n" "${our_objects[$o]}"
+ done
+fi
+
+# Build the ELF file comparator; it is a Go binary.
+declare -r elfdiff=android/bazel/mkcompare/elfdiff/...
+GOWORK=$PWD/build/bazel/mkcompare/go.work go build -o $comparator $elfdiff || exit 1
+
+# Output ELF file pairs to compare and feed them to the parallel executor.
+for o in "${common[@]}"; do echo "${ref_objects[$o]} ${our_objects[$o]}"; done |\
+ parallel --colsep ' ' $comparator {1} {2}
diff --git a/scripts/elf_input_files.sh b/scripts/elf_input_files.sh
new file mode 100755
index 00000000..7cfd8151
--- /dev/null
+++ b/scripts/elf_input_files.sh
@@ -0,0 +1,68 @@
+#! /bin/bash -eu
+
+# This script prints the linker trace for a given ELF file.
+# It extracts the command that built this ELF file from the
+# build log (the verbose.log.gz file in the given output directory),
+# appends the `-t` linker option to it in order to print its input
+# files, and runs this command.
+# This script can be used when we want to compare ELF executables
+# built by two different configurations (e.g., conventional and mixed
+# builds). In this case, we run the build for one configuration, then
+# rename the `out` directory to something else, and then run the build
+# for the other configuration. To accommodate this scenario, an optional
+# second argument specifies the renamed output directory. The linker
+# command then runs inside nsjail, which maps the renamed output
+# directory to `out`.
+
+function die() { format=$1; shift; printf >&2 "$format\n" $@; exit 1; }
+function usage() {
+ die "usage: ${0##*/} [-v] ELF [DIR]"
+}
+
+# Delouse
+declare show_command=
+while getopts "v" opt; do
+ case $opt in
+ v) show_command=t ;;
+ *) usage ;;
+ esac
+done
+shift $(($OPTIND-1))
+(($# >= 1)) || usage
+
+declare -r elf="$1"; shift
+declare -r outdir="${1:-out}"
+[[ -d "$outdir" ]] || die "$outdir does not exist"
+[[ -f "$outdir/verbose.log.gz" ]] || \
+ die "$outdir does not contain Android build (verbose.log.gz is missing)"
+
+function zgrep_command() {
+ zgrep -e "bin/clang\+\+.* -o [^ ]*$elf " $outdir/verbose.log.gz
+}
+
+# Locate the command that builds this ELF file and write it to
+# a temporary file, editing it on the way:
+# * remove step number (`[nn/NN]`) prefix
+# * linker should write to the bit bucket
+# * add `-Wl,-t` (linker's `-t` option)
+cmdfile=$(mktemp); trap 'rm -f $cmdfile' EXIT
+zgrep_command |\
+ sed -r 's| -o ([^ ]+) | -Wl,-t -o /dev/null |;s|^\[.*\]||' > $cmdfile
+[[ -z "${show_command}" ]] || cat $cmdfile >&2
+[[ -s $cmdfile ]] || die "no ELF file ending with $elf was built in $outdir"
+(($(wc -l $cmdfile | cut -f1 -d ' ') == 1)) || \
+ { printf >&2 "Multiple elf files ending with $elf were built in $outdir:\n";
+ die " %s" $(zgrep_command | sed -r 's|.* -o ([^ ]+) .*|\1|'); }
+
+# Run the linker (i.e., the command we have written into $cmdfile). Its output
+# is the list of the object files it read. If the output directory has been
+# renamed, run it inside `nsjail`, mapping the output directory to `out/`.
+if [[ "$outdir" == out ]]; then
+ /bin/bash $cmdfile
+else
+ prebuilts/build-tools/linux-x86/bin/nsjail \
+ -Mo -q -e -t 0 -B / -B /tmp -B $(realpath $outdir):$PWD/out \
+ --cwd $PWD --skip_setsid --keep_caps --disable_clone_newcgroup --disable_clone_newnet \
+ --rlimit_as soft --rlimit_core soft --rlimit_cpu soft --rlimit_fsize soft --rlimit_nofile soft \
+ --proc_rw --hostname "$(hostname)" -- /bin/bash $cmdfile
+fi
diff --git a/scripts/gen_build_number.sh b/scripts/gen_build_number.sh
index 80085acd..7a4dea11 100755
--- a/scripts/gen_build_number.sh
+++ b/scripts/gen_build_number.sh
@@ -17,17 +17,23 @@
# Must execute at the root of workspace.
# https://docs.bazel.build/versions/main/command-line-reference.html#flag--workspace_status_command
+# Common script utilities
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../make/shell_utils.sh
+require_top
+
+
if [[ ! -f "WORKSPACE" ]]; then
echo "ERROR: gen_build_number.sh must be executed at the root of Bazel workspace." >&2
exit 1
fi
+
# TODO(b/228463719): figure out how to get the path properly.
-BUILD_NUMBER_FILE=out/soong/build_number.txt
+BUILD_NUMBER_FILE=$(getoutdir)/soong/build_number.txt
if [[ -f ${BUILD_NUMBER_FILE} ]]; then
BUILD_NUMBER=$(cat ${BUILD_NUMBER_FILE})
else
BUILD_NUMBER=eng.${USER:0:6}.$(date '+%Y%m%d.%H%M%S')
fi
-echo "BUILD_NUMBER ${BUILD_NUMBER}" \ No newline at end of file
+echo "BUILD_NUMBER ${BUILD_NUMBER}"
diff --git a/scripts/incremental_build/BUILD.bazel b/scripts/incremental_build/BUILD.bazel
new file mode 100644
index 00000000..fec95c09
--- /dev/null
+++ b/scripts/incremental_build/BUILD.bazel
@@ -0,0 +1,52 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+py_binary(
+ name = "incremental_build",
+ srcs = [
+ "cuj_catalog.py",
+ "incremental_build.py",
+ "pretty.py",
+ "ui.py",
+ ],
+ main = "incremental_build.py",
+ python_version = "PY3",
+ deps = [":perf_metrics"],
+)
+
+py_library(
+ name = "util",
+ srcs = ["util.py"],
+)
+
+py_test(
+ name = "util_test",
+ srcs = ["util_test.py"],
+ deps = [":util"],
+)
+
+py_library(
+ name = "perf_metrics",
+ srcs = ["perf_metrics.py"],
+ deps = [
+ ":util",
+ "//build/soong/ui/metrics:metrics-py-proto",
+ ],
+)
+
+py_test(
+ name = "perf_metrics_test",
+ srcs = ["perf_metrics_test.py"],
+ deps = [":perf_metrics"],
+)
diff --git a/scripts/incremental_build/README.md b/scripts/incremental_build/README.md
new file mode 100644
index 00000000..9a54314b
--- /dev/null
+++ b/scripts/incremental_build/README.md
@@ -0,0 +1,16 @@
+# How to Use
+
+The most basic invocation, e.g. `incremental_build.sh -- libc`, is logically
+equivalent to
+
+1. running `m --skip-soong-tests libc` and then
+2. parsing `$OUTDIR/soong_metrics` and `$OUTDIR/bp2build_metrics.pb` files
+3. adding timing-related metrics from those files
+   into `out/timing_logs/metrics.csv`
+4. repeating 1-3 for each CUJ
+
+CUJs are defined in `cuj_catalog.py`.
+Each row in `metrics.csv` has the timings of various "phases" of a build.
+
+Try `incremental_build.sh --help` and `canonical_perf.sh --help` for help on
+usage.
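+
+For example, a single-CUJ run against `libc` (the flags mirror how
+`canonical_perf.sh` drives this tool; treat them as illustrative and check
+`--help` for the authoritative options) could look like:
+
+```
+incremental_build.sh --cujs 'modify bionic/.*/stdio.cpp' -- libc
+```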
diff --git a/scripts/incremental_build/canonical_perf.sh b/scripts/incremental_build/canonical_perf.sh
new file mode 100755
index 00000000..97e47d43
--- /dev/null
+++ b/scripts/incremental_build/canonical_perf.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Gather and print top-line performance metrics for the android build
+#
+readonly TOP="$(realpath "$(dirname "$0")/../../../..")"
+
+usage() {
+ cat <<EOF
+usage: $0 [-l LOG_DIR] [BUILD_TYPES]
+  -l LOG_DIR should be outside of the source tree (in particular, not in out/),
+ because the whole tree will be cleaned during testing.
+example:
+ $0 soong prod
+EOF
+ exit 1
+}
+
+declare -a build_types
+while getopts "l:" opt; do
+ case "$opt" in
+ l) log_dir=$OPTARG ;;
+ ?) usage ;;
+ esac
+done
+shift $((OPTIND - 1))
+readonly -a build_types=("$@")
+
+log_dir=${log_dir:-"$TOP/../timing-$(date +%b%d-%H%M)"}
+
+function build() {
+ date
+ set -x
+ if ! "$TOP/build/bazel/scripts/incremental_build/incremental_build.sh" \
+ --ignore-repo-diff --log-dir "$log_dir" \
+ ${build_types:+--build-types "${build_types[@]}"} \
+ "$@"; then
+ echo "See logs for errors"
+ exit 1
+ fi
+ set +x
+}
+build --cujs clean 'create bionic/unreferenced.txt' 'modify Android.bp' -- droid
+build --cujs 'modify bionic/.*/stdio.cpp' --append-csv libc
+build --cujs 'modify .*/adb/daemon/main.cpp' --append-csv adbd
+build --cujs 'modify frameworks/.*/View.java' --append-csv framework
+
diff --git a/scripts/incremental_build/cuj_catalog.py b/scripts/incremental_build/cuj_catalog.py
new file mode 100644
index 00000000..7d2de238
--- /dev/null
+++ b/scripts/incremental_build/cuj_catalog.py
@@ -0,0 +1,494 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import dataclasses
+import enum
+import functools
+import io
+import logging
+import os
+import shutil
+import tempfile
+import textwrap
+import uuid
+from enum import Enum
+from pathlib import Path
+from typing import Callable, Optional
+from typing import Final
+from typing import TypeAlias
+
+import util
+import ui
+
+"""
+Provides some representative CUJs. If you want to run a build manually but
+still have its metrics collated into the metrics.csv file, use
+`perf_metrics.py` as a stand-alone step after your build.
+"""
+
+
+class BuildResult(Enum):
+ SUCCESS = enum.auto()
+ FAILED = enum.auto()
+ TEST_FAILURE = enum.auto()
+
+
+Action: TypeAlias = Callable[[], None]
+Verifier: TypeAlias = Callable[[], None]
+
+
+def skip_when_soong_only(func: Verifier) -> Verifier:
+ """A decorator for Verifiers that are not applicable to soong-only builds"""
+
+ def wrapper():
+ if InWorkspace.ws_counterpart(util.get_top_dir()).exists():
+ func()
+
+ return wrapper
+
+
+@skip_when_soong_only
+def verify_symlink_forest_has_only_symlink_leaves():
+ """Verifies that symlink forest has only symlinks or directories but no
+ files except for merged BUILD.bazel files"""
+
+ top_in_ws = InWorkspace.ws_counterpart(util.get_top_dir())
+
+ for root, dirs, files in os.walk(top_in_ws, topdown=True, followlinks=False):
+ for file in files:
+ if file == 'symlink_forest_version' and top_in_ws.samefile(root):
+ continue
+ f = Path(root).joinpath(file)
+ if file != 'BUILD.bazel' and not f.is_symlink():
+ raise AssertionError(f'{f} unexpected')
+
+ logging.info('VERIFIED Symlink Forest has no real files except BUILD.bazel')
+
+
+@dataclasses.dataclass(frozen=True)
+class CujStep:
+ verb: str
+ """a human-readable description"""
+ apply_change: Action
+ """user action(s) that are performed prior to a build attempt"""
+ verify: Verifier = verify_symlink_forest_has_only_symlink_leaves
+ """post-build assertions, i.e. tests.
+ Should raise `Exception` for failures.
+ """
+
+
+@dataclasses.dataclass(frozen=True)
+class CujGroup:
+ """A sequence of steps to be performed, such that at the end of all steps the
+ initial state of the source tree is attained.
+ NO attempt is made to achieve atomicity programmatically. It is left as the
+ responsibility of the user.
+ """
+ description: str
+ steps: list[CujStep]
+
+ def __str__(self) -> str:
+ if len(self.steps) < 2:
+ return f'{self.steps[0].verb} {self.description}'.strip()
+ return ' '.join(
+ [f'({chr(ord("a") + i)}) {step.verb} {self.description}'.strip() for
+ i, step in enumerate(self.steps)])
+
+
+Warmup: Final[CujGroup] = CujGroup('WARMUP',
+ [CujStep('no change', lambda: None)])
+
+
+class InWorkspace(Enum):
+ """For a given file in the source tree, the counterpart in the symlink forest
+ could be one of these kinds.
+ """
+ SYMLINK = enum.auto()
+ NOT_UNDER_SYMLINK = enum.auto()
+ UNDER_SYMLINK = enum.auto()
+ OMISSION = enum.auto()
+
+ @staticmethod
+ def ws_counterpart(src_path: Path) -> Path:
+ return util.get_out_dir().joinpath('soong/workspace').joinpath(
+ de_src(src_path))
+
+ def verifier(self, src_path: Path) -> Verifier:
+ @skip_when_soong_only
+ def f():
+ ws_path = InWorkspace.ws_counterpart(src_path)
+ actual: Optional[InWorkspace] = None
+ if ws_path.is_symlink():
+ actual = InWorkspace.SYMLINK
+ if not ws_path.exists():
+ logging.warning('Dangling symlink %s', ws_path)
+ elif not ws_path.exists():
+ actual = InWorkspace.OMISSION
+ else:
+ for p in ws_path.parents:
+ if not p.is_relative_to(util.get_out_dir()):
+ actual = InWorkspace.NOT_UNDER_SYMLINK
+ break
+ if p.is_symlink():
+ actual = InWorkspace.UNDER_SYMLINK
+ break
+
+ if self != actual:
+ raise AssertionError(
+ f'{ws_path} expected {self.name} but got {actual.name}')
+ logging.info(f'VERIFIED {de_src(ws_path)} {self.name}')
+
+ return f
+
+
+def de_src(p: Path) -> str:
+ return str(p.relative_to(util.get_top_dir()))
+
+
+def src(p: str) -> Path:
+ return util.get_top_dir().joinpath(p)
+
+
+def modify_revert(file: Path, text: str = '//BOGUS line\n') -> CujGroup:
+ """
+ :param file: the file to be modified and reverted
+ :param text: the text to be appended to the file to modify it
+ :return: A pair of CujSteps, where the first modifies the file and the
+ second reverts the modification
+ """
+ if not file.exists():
+ raise RuntimeError(f'{file} does not exist')
+
+ def add_line():
+ with open(file, mode="a") as f:
+ f.write(text)
+
+ def revert():
+ with open(file, mode="rb+") as f:
+ # assume UTF-8
+ f.seek(-len(text), io.SEEK_END)
+ f.truncate()
+
+ return CujGroup(de_src(file), [
+ CujStep('modify', add_line),
+ CujStep('revert', revert)
+ ])
+
+
+def create_delete(file: Path, ws: InWorkspace,
+ text: str = '//Test File: safe to delete\n') -> CujGroup:
+ """
+ :param file: the file to be created and deleted
+  :param ws: the expectation for the counterpart file in the symlink
+  forest (aka the synthetic bazel workspace) when it's created
+ :param text: the content of the file
+  :return: A pair of CujSteps, where the first creates the file and the
+ second deletes it
+ """
+ missing_dirs = [f for f in file.parents if not f.exists()]
+ shallowest_missing_dir = missing_dirs[-1] if len(missing_dirs) else None
+
+ def create():
+ if file.exists():
+ raise RuntimeError(
+ f'File {file} already exists. Interrupted an earlier run?\n'
+ 'TIP: `repo status` and revert changes!!!')
+ file.parent.mkdir(parents=True, exist_ok=True)
+ file.touch(exist_ok=False)
+ with open(file, mode="w") as f:
+ f.write(text)
+
+ def delete():
+ if shallowest_missing_dir:
+ shutil.rmtree(shallowest_missing_dir)
+ else:
+ file.unlink(missing_ok=False)
+
+ return CujGroup(de_src(file), [
+ CujStep('create', create, ws.verifier(file)),
+ CujStep('delete', delete, InWorkspace.OMISSION.verifier(file)),
+ ])
+
+
+def create_delete_bp(bp_file: Path) -> CujGroup:
+ """
+ This is basically the same as "create_delete" but with canned content for
+ an Android.bp file.
+ """
+ return create_delete(
+ bp_file, InWorkspace.SYMLINK,
+ 'filegroup { name: "test-bogus-filegroup", srcs: ["**/*.md"] }')
+
+
+def delete_restore(original: Path, ws: InWorkspace) -> CujGroup:
+ """
+ :param original: The file to be deleted then restored
+ :param ws: When restored, expectation for the file's counterpart in the
+ symlink forest (aka synthetic bazel workspace)
+ :return: A pair of CujSteps, where the first deletes a file and the second
+ restores it
+ """
+ tempdir = Path(tempfile.gettempdir())
+ if tempdir.is_relative_to(util.get_top_dir()):
+ raise SystemExit(f'Temp dir {tempdir} is under source tree')
+ if tempdir.is_relative_to(util.get_out_dir()):
+ raise SystemExit(f'Temp dir {tempdir} is under '
+ f'OUT dir {util.get_out_dir()}')
+ copied = tempdir.joinpath(f'{original.name}-{uuid.uuid4()}.bak')
+
+ def move_to_tempdir_to_mimic_deletion():
+ logging.warning('MOVING %s TO %s', de_src(original), copied)
+ original.rename(copied)
+
+ return CujGroup(de_src(original), [
+ CujStep('delete',
+ move_to_tempdir_to_mimic_deletion,
+ InWorkspace.OMISSION.verifier(original)),
+ CujStep('restore',
+ lambda: copied.rename(original),
+ ws.verifier(original))
+ ])
+
+
+def replace_link_with_dir(p: Path):
+ """Create a file, replace it with a non-empty directory, delete it"""
+ cd = create_delete(p, InWorkspace.SYMLINK)
+ create_file: CujStep
+ delete_file: CujStep
+ create_file, delete_file, *tail = cd.steps
+ assert len(tail) == 0
+
+ # an Android.bp is always a symlink in the workspace and thus its parent
+ # will be a directory in the workspace
+ create_dir: CujStep
+ delete_dir: CujStep
+ create_dir, delete_dir, *tail = create_delete_bp(
+ p.joinpath('Android.bp')).steps
+ assert len(tail) == 0
+
+ def replace_it():
+ delete_file.apply_change()
+ create_dir.apply_change()
+
+ return CujGroup(cd.description, [
+ create_file,
+ CujStep(f'{de_src(p)}/Android.bp instead of',
+ replace_it,
+ create_dir.verify),
+ delete_dir
+ ])
+
+
+def _sequence(*vs: Verifier) -> Verifier:
+ def f():
+ for v in vs:
+ v()
+
+ return f
+
+
+def content_verifiers(
+ ws_build_file: Path, content: str) -> (Verifier, Verifier):
+ def search() -> bool:
+ with open(ws_build_file, "r") as f:
+ for line in f:
+ if line == content:
+ return True
+ return False
+
+ @skip_when_soong_only
+ def contains():
+ if not search():
+ raise AssertionError(
+ f'{de_src(ws_build_file)} expected to contain {content}')
+ logging.info(f'VERIFIED {de_src(ws_build_file)} contains {content}')
+
+ @skip_when_soong_only
+ def does_not_contain():
+ if search():
+ raise AssertionError(
+ f'{de_src(ws_build_file)} not expected to contain {content}')
+ logging.info(f'VERIFIED {de_src(ws_build_file)} does not contain {content}')
+
+ return contains, does_not_contain
+
+
+def modify_revert_kept_build_file(build_file: Path) -> CujGroup:
+ content = f'//BOGUS {uuid.uuid4()}\n'
+ step1, step2, *tail = modify_revert(build_file, content).steps
+ assert len(tail) == 0
+ ws_build_file = InWorkspace.ws_counterpart(build_file).with_name(
+ 'BUILD.bazel')
+  merge_prover, merge_disprover = content_verifiers(ws_build_file, content)
+ return CujGroup(de_src(build_file), [
+ CujStep(step1.verb,
+ step1.apply_change,
+ _sequence(step1.verify, merge_prover)),
+ CujStep(step2.verb,
+ step2.apply_change,
+ _sequence(step2.verify, merge_disprover))
+ ])
+
+
+def create_delete_kept_build_file(build_file: Path) -> CujGroup:
+ content = f'//BOGUS {uuid.uuid4()}\n'
+ ws_build_file = InWorkspace.ws_counterpart(build_file).with_name(
+ 'BUILD.bazel')
+ if build_file.name == 'BUILD.bazel':
+ ws = InWorkspace.NOT_UNDER_SYMLINK
+ elif build_file.name == 'BUILD':
+ ws = InWorkspace.SYMLINK
+ else:
+ raise RuntimeError(f'Illegal name for a build file {build_file}')
+
+  merge_prover, merge_disprover = content_verifiers(ws_build_file, content)
+
+ step1: CujStep
+ step2: CujStep
+ step1, step2, *tail = create_delete(build_file, ws, content).steps
+ assert len(tail) == 0
+ return CujGroup(de_src(build_file), [
+ CujStep(step1.verb,
+ step1.apply_change,
+ _sequence(step1.verify, merge_prover)),
+ CujStep(step2.verb,
+ step2.apply_change,
+ _sequence(step2.verify, merge_disprover))
+ ])
+
+
+def create_delete_unkept_build_file(build_file) -> CujGroup:
+ content = f'//BOGUS {uuid.uuid4()}\n'
+ ws_build_file = InWorkspace.ws_counterpart(build_file).with_name(
+ 'BUILD.bazel')
+ step1: CujStep
+ step2: CujStep
+ step1, step2, *tail = create_delete(
+ build_file, InWorkspace.SYMLINK, content).steps
+ assert len(tail) == 0
+  _, merge_disprover = content_verifiers(ws_build_file, content)
+ return CujGroup(de_src(build_file), [
+ CujStep(step1.verb,
+ step1.apply_change,
+ _sequence(step1.verify, merge_disprover)),
+ CujStep(step2.verb,
+ step2.apply_change,
+ _sequence(step2.verify, merge_disprover))
+ ])
+
+
+NON_LEAF = '*/*'
+"""If `a/*/*` is a valid path `a` is not a leaf directory"""
+LEAF = '!*/*'
+"""If `a/*/*` is not a valid path `a` is a leaf directory, i.e. has no other
+non-empty sub-directories"""
+PKG = ['Android.bp', '!BUILD', '!BUILD.bazel']
+"""limiting the candidate to Android.bp file with no sibling bazel files"""
+PKG_FREE = ['!**/Android.bp', '!**/BUILD', '!**/BUILD.bazel']
+"""no Android.bp or BUILD or BUILD.bazel file anywhere"""
+
+
+def _kept_build_cujs() -> list[CujGroup]:
+ # Bp2BuildKeepExistingBuildFile(build/bazel) is True(recursive)
+ kept = src('build/bazel')
+ pkg = util.any_dir_under(kept, *PKG)
+ examples = [pkg.joinpath('BUILD'),
+ pkg.joinpath('BUILD.bazel')]
+
+ return [
+ *[create_delete_kept_build_file(build_file) for build_file in examples],
+ create_delete(pkg.joinpath('BUILD/kept-dir'), InWorkspace.SYMLINK),
+ modify_revert_kept_build_file(util.any_file_under(kept, 'BUILD'))]
+
+
+def _unkept_build_cujs() -> list[CujGroup]:
+ # Bp2BuildKeepExistingBuildFile(bionic) is False(recursive)
+ unkept = src('bionic')
+ pkg = util.any_dir_under(unkept, *PKG)
+ return [
+ *[create_delete_unkept_build_file(build_file) for build_file in [
+ pkg.joinpath('BUILD'),
+ pkg.joinpath('BUILD.bazel'),
+ ]],
+ *[create_delete(build_file, InWorkspace.OMISSION) for build_file in [
+ unkept.joinpath('bogus-unkept/BUILD'),
+ unkept.joinpath('bogus-unkept/BUILD.bazel'),
+ ]],
+ create_delete(pkg.joinpath('BUILD/unkept-dir'), InWorkspace.SYMLINK)
+ ]
+
+
+@functools.cache
+def get_cujgroups() -> list[CujGroup]:
+ # we are choosing "package" directories that have Android.bp but
+ # not BUILD nor BUILD.bazel because
+ # we can't tell if ShouldKeepExistingBuildFile would be True or not
+ pkg, p_why = util.any_match(NON_LEAF, *PKG)
+ pkg_free, f_why = util.any_match(NON_LEAF, *PKG_FREE)
+ leaf_pkg_free, _ = util.any_match(LEAF, *PKG_FREE)
+ ancestor, a_why = util.any_match('!Android.bp', '!BUILD', '!BUILD.bazel',
+ '**/Android.bp')
+ logging.info(textwrap.dedent(f'''Choosing:
+ package: {de_src(pkg)} has {p_why}
+ package ancestor: {de_src(ancestor)} has {a_why} but no direct Android.bp
+ package free: {de_src(pkg_free)} has {f_why} but no Android.bp anywhere
+ leaf package free: {de_src(leaf_pkg_free)} has neither Android.bp nor sub-dirs
+ '''))
+
+ android_bp_cujs = [
+ modify_revert(src('Android.bp')),
+ *[create_delete_bp(d.joinpath('Android.bp')) for d in
+ [ancestor, pkg_free, leaf_pkg_free]]
+ ]
+ mixed_build_launch_cujs = [
+ modify_revert(src('bionic/libc/tzcode/asctime.c')),
+ modify_revert(src('bionic/libc/stdio/stdio.cpp')),
+ modify_revert(src('packages/modules/adb/daemon/main.cpp')),
+ modify_revert(src('frameworks/base/core/java/android/view/View.java')),
+ ]
+ unreferenced_file_cujs = [
+ *[create_delete(d.joinpath('unreferenced.txt'), InWorkspace.SYMLINK) for
+ d in [ancestor, pkg]],
+ *[create_delete(d.joinpath('unreferenced.txt'), InWorkspace.UNDER_SYMLINK)
+ for d
+ in [pkg_free, leaf_pkg_free]]
+ ]
+
+ def clean():
+ if ui.get_user_input().log_dir.is_relative_to(util.get_top_dir()):
+ raise AssertionError(
+ f'specify a different LOG_DIR: {ui.get_user_input().log_dir}')
+ if util.get_out_dir().exists():
+ shutil.rmtree(util.get_out_dir())
+
+ return [
+ CujGroup('', [CujStep('clean', clean)]),
+ Warmup,
+
+ create_delete(src('bionic/libc/tzcode/globbed.c'),
+ InWorkspace.UNDER_SYMLINK),
+
+ # TODO (usta): find targets that should be affected
+ *[delete_restore(f, InWorkspace.SYMLINK) for f in [
+ util.any_file('version_script.txt'),
+ util.any_file('AndroidManifest.xml')]],
+
+ *unreferenced_file_cujs,
+ *mixed_build_launch_cujs,
+ *android_bp_cujs,
+ *_unkept_build_cujs(),
+ *_kept_build_cujs(),
+ replace_link_with_dir(pkg.joinpath('bogus.txt')),
+ # TODO(usta): add a dangling symlink
+ ]
diff --git a/scripts/incremental_build/incremental_build.py b/scripts/incremental_build/incremental_build.py
new file mode 100644
index 00000000..b43e0fb1
--- /dev/null
+++ b/scripts/incremental_build/incremental_build.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A tool for running builds (soong or b) and measuring the time taken.
+"""
+import datetime
+import functools
+import hashlib
+import logging
+import os
+import subprocess
+import sys
+import textwrap
+import time
+from pathlib import Path
+from typing import Final
+from typing import Mapping
+
+import cuj_catalog
+import perf_metrics
+import pretty
+import ui
+import util
+
+MAX_RUN_COUNT: int = 5
+
+
+@functools.cache
+def _prepare_env() -> (Mapping[str, str], str):
+ def get_soong_build_ninja_args():
+ ninja_args = os.environ.get('NINJA_ARGS') or ''
+ if ninja_args != '':
+ ninja_args += ' '
+ ninja_args += '-d explain --quiet'
+ if util.is_ninja_dry_run(ninja_args):
+ global MAX_RUN_COUNT
+ MAX_RUN_COUNT = 1
+      logging.warning(
+        f'Dry ninja run (NINJA_ARGS={ninja_args}); limiting to a single run')
+ return ninja_args
+
+ def get_soong_ui_ninja_args():
+ soong_ui_ninja_args = os.environ.get('SOONG_UI_NINJA_ARGS') or ''
+ if util.is_ninja_dry_run(soong_ui_ninja_args):
+ sys.exit('"-n" in SOONG_UI_NINJA_ARGS would not update build.ninja etc')
+
+ if soong_ui_ninja_args != '':
+ soong_ui_ninja_args += ' '
+ soong_ui_ninja_args += '-d explain --quiet'
+ return soong_ui_ninja_args
+
+ overrides: Mapping[str, str] = {
+ 'NINJA_ARGS': get_soong_build_ninja_args(),
+ 'SOONG_UI_NINJA_ARGS': get_soong_ui_ninja_args()
+ }
+ env = {**os.environ, **overrides}
+ # TODO: Switch to oriole when it works
+ default_product: Final[str] = 'cf_x86_64_phone' \
+ if util.get_top_dir().joinpath('vendor/google/build').exists() \
+ else 'aosp_cf_x86_64_phone'
+ target_product = os.environ.get('TARGET_PRODUCT') or default_product
+ variant = os.environ.get('TARGET_BUILD_VARIANT') or 'eng'
+
+ if target_product != default_product or variant != 'eng':
+ if util.is_interactive_shell():
+ response = input(f'Are you sure you want {target_product}-{variant} '
+ f'and not {default_product}-eng? [Y/n]')
+ if response.upper() != 'Y':
+ sys.exit(1)
+ else:
+ logging.warning(
+ f'Using {target_product}-{variant} instead of {default_product}-eng')
+ env['TARGET_PRODUCT'] = target_product
+ env['TARGET_BUILD_VARIANT'] = variant
+ pretty_env_str = [f'{k}={v}' for (k, v) in env.items()]
+ pretty_env_str.sort()
+ return env, '\n'.join(pretty_env_str)
+
+
+def _build_file_sha() -> str:
+ build_file = util.get_out_dir().joinpath('soong/build.ninja')
+ if not build_file.exists():
+ return '--'
+ with open(build_file, mode="rb") as f:
+ h = hashlib.sha256()
+ for block in iter(lambda: f.read(4096), b''):
+ h.update(block)
+ return h.hexdigest()[0:8]
+
+
+def _build_file_size() -> int:
+ build_file = util.get_out_dir().joinpath('soong/build.ninja')
+ return os.path.getsize(build_file) if build_file.exists() else 0
+
+
+BuildInfo = dict[str, any]
+
+
+def _build(build_type: ui.BuildType, run_dir: Path) -> (int, BuildInfo):
+ logfile = run_dir.joinpath('output.txt')
+ logging.info('TIP: to see the log:\n tail -f "%s"', logfile)
+ cmd = [*build_type.value, *ui.get_user_input().targets]
+ logging.info('Command: %s', cmd)
+ env, env_str = _prepare_env()
+ ninja_log_file = util.get_out_dir().joinpath('.ninja_log')
+
+ def get_action_count() -> int:
+ if not ninja_log_file.exists():
+ return 0
+ with open(ninja_log_file, 'r') as ninja_log:
+ # subtracting 1 to account for "# ninja log v5" in the first line
+ return sum(1 for _ in ninja_log) - 1
+
+ def recompact_ninja_log():
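+      # recompacting drops duplicate/stale entries from .ninja_log so that the
+      # before/after line-count delta approximates the actions run by this
+      # build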
+ subprocess.run([
+ util.get_top_dir().joinpath(
+ 'prebuilts/build-tools/linux-x86/bin/ninja'),
+ '-f',
+ util.get_out_dir().joinpath(
+ f'combined-{env.get("TARGET_PRODUCT", "aosp_arm")}.ninja'),
+ '-t', 'recompact'],
+ check=False, cwd=util.get_top_dir(), shell=False,
+ stdout=f, stderr=f)
+
+ with open(logfile, mode='w') as f:
+ action_count_before = get_action_count()
+ if action_count_before > 0:
+ recompact_ninja_log()
+ action_count_before = get_action_count()
+ f.write(f'Command: {cmd}\n')
+ f.write(f'Environment Variables:\n{textwrap.indent(env_str, " ")}\n\n\n')
+ f.flush()
+ start_ns = time.perf_counter_ns()
+ p = subprocess.run(cmd, check=False, cwd=util.get_top_dir(), env=env,
+ shell=False, stdout=f, stderr=f)
+ elapsed_ns = time.perf_counter_ns() - start_ns
+ action_count_after = get_action_count()
+
+ return (p.returncode, {
+ 'build_type': build_type.to_flag(),
+ 'build.ninja': _build_file_sha(),
+ 'build.ninja.size': _build_file_size(),
+ 'targets': ' '.join(ui.get_user_input().targets),
+ 'log': str(run_dir.relative_to(ui.get_user_input().log_dir)),
+ 'ninja_explains': util.count_explanations(logfile),
+ 'actions': action_count_after - action_count_before,
+ 'time': util.hhmmss(datetime.timedelta(microseconds=elapsed_ns / 1000))
+ })
+
+
+def _run_cuj(run_dir: Path, build_type: ui.BuildType,
+ cujstep: cuj_catalog.CujStep, desc: str, run) -> BuildInfo:
+ run_dir.mkdir(parents=True, exist_ok=False)
+ (exit_code, build_info) = _build(build_type, run_dir)
+ # if build was successful, run test
+ if exit_code != 0:
+ build_result = cuj_catalog.BuildResult.FAILED.name
+ else:
+ try:
+ cujstep.verify()
+ build_result = cuj_catalog.BuildResult.SUCCESS.name
+ except Exception as e:
+ logging.error(e)
+ build_result = (cuj_catalog.BuildResult.TEST_FAILURE.name +
+ ':' + str(e))
+ # summarize
+ log_desc = desc if run == 0 else f'rebuild-{run} after {desc}'
+ build_info = {
+ 'description': log_desc,
+ 'build_result': build_result
+ } | build_info
+ logging.info('%s after %s: %s',
+ build_info["build_result"], build_info["time"], log_desc)
+ return build_info
+
+
+def main():
+ """
+ Run provided target(s) under various CUJs and collect metrics.
+ In pseudocode:
+ time build <target> with m or b
+ collect metrics
+ for each cuj:
+ make relevant changes
+ time rebuild
+ collect metrics
+ revert those changes
+ time rebuild
+ collect metrics
+ """
+ user_input = ui.get_user_input()
+
+ logging.warning(textwrap.dedent('''
+ If you kill this process, make sure to revert unwanted changes.
+ TIP: If you have no local changes of interest you may
+ `repo forall -p -c git reset --hard` and
+ `repo forall -p -c git clean --force` and even
+ `m clean && rm -rf out`
+ '''))
+
+ run_dir_gen = util.next_path(user_input.log_dir.joinpath(util.RUN_DIR_PREFIX))
+
+ def run_cuj_group(cuj_group: cuj_catalog.CujGroup):
+ for cujstep in cuj_group.steps:
+ desc = cujstep.verb
+ desc = f'{desc} {cuj_group.description}'.strip()
+ desc = f'{desc} {user_input.description}'.strip()
+ logging.info('START %s %s [%s]', build_type.name,
+ ' '.join(user_input.targets), desc)
+ cujstep.apply_change()
+ for run in range(0, MAX_RUN_COUNT):
+ run_dir = next(run_dir_gen)
+ build_info = _run_cuj(run_dir, build_type, cujstep, desc, run)
+ perf_metrics.archive_run(run_dir, build_info)
+ if build_info['ninja_explains'] == 0:
+ break
+ logging.info(' DONE %s %s [%s]', build_type.name,
+ ' '.join(user_input.targets), desc)
+
+ for build_type in user_input.build_types:
+ # warm-up run reduces variations attributable to OS caches
+ run_cuj_group(cuj_catalog.Warmup)
+ for i in user_input.chosen_cujgroups:
+ run_cuj_group(cuj_catalog.get_cujgroups()[i])
+
+ perf_metrics.tabulate_metrics_csv(user_input.log_dir)
+ perf_metrics.display_tabulated_metrics(user_input.log_dir)
+ pretty.summarize_metrics(user_input.log_dir)
+ pretty.display_summarized_metrics(user_input.log_dir)
+
+
+if __name__ == '__main__':
+ logging.root.setLevel(logging.INFO)
+ main()
diff --git a/scripts/incremental_build/incremental_build.sh b/scripts/incremental_build/incremental_build.sh
new file mode 100755
index 00000000..491549d3
--- /dev/null
+++ b/scripts/incremental_build/incremental_build.sh
@@ -0,0 +1,12 @@
+#!/bin/bash -eu
+readonly TOP="$(realpath "$(dirname "$0")/../../../..")"
+"$TOP/build/soong/soong_ui.bash" \
+ --build-mode \
+ --all-modules \
+ --dir="$(pwd)" \
+ --skip-soong-tests \
+ bp2build
+
+ANDROID_BUILD_TOP=$TOP "$TOP/build/bazel/bin/bazel" run --config=bp2build --verbose_failures //build/bazel/scripts/incremental_build -- "$@"
+
+# Alternatively, we could use python_zip_file, https://github.com/bazelbuild/bazel/pull/9453
diff --git a/scripts/incremental_build/perf_metrics.py b/scripts/incremental_build/perf_metrics.py
new file mode 100644
index 00000000..64512e73
--- /dev/null
+++ b/scripts/incremental_build/perf_metrics.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import dataclasses
+import datetime
+import glob
+import json
+import logging
+import re
+import shutil
+import subprocess
+import textwrap
+from pathlib import Path
+from typing import Iterable
+
+from bp2build_metrics_proto.bp2build_metrics_pb2 import Bp2BuildMetrics
+from metrics_proto.metrics_pb2 import MetricsBase
+from metrics_proto.metrics_pb2 import PerfInfo
+from metrics_proto.metrics_pb2 import SoongBuildMetrics
+
+import util
+
+
+@dataclasses.dataclass
+class PerfInfoOrEvent:
+ """
+ A duck-typed union of `soong_build_metrics.PerfInfo` and
+ `soong_build_bp2build_metrics.Event` protobuf message types
+ """
+ name: str
+ real_time: datetime.timedelta
+ start_time: datetime.datetime
+ description: str = '' # Bp2BuildMetrics#Event doesn't have description
+
+ def __post_init__(self):
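+    # int values from the protos are treated as nanoseconds and converted to
+    # datetime.timedelta / datetime.datetime respectively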
+ if isinstance(self.real_time, int):
+ self.real_time = datetime.timedelta(microseconds=self.real_time / 1000)
+ if isinstance(self.start_time, int):
+ epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+ self.start_time = epoch + datetime.timedelta(
+ microseconds=self.start_time / 1000)
+
+
+SOONG_PB = 'soong_metrics'
+SOONG_BUILD_PB = 'soong_build_metrics.pb'
+BP2BUILD_PB = 'bp2build_metrics.pb'
+
+
+def _copy_pbs_to(d: Path):
+ soong_pb = util.get_out_dir().joinpath(SOONG_PB)
+ soong_build_pb = util.get_out_dir().joinpath(SOONG_BUILD_PB)
+ bp2build_pb = util.get_out_dir().joinpath(BP2BUILD_PB)
+ if soong_pb.exists():
+ shutil.copy(soong_pb, d.joinpath(SOONG_PB))
+ if soong_build_pb.exists():
+ shutil.copy(soong_build_pb, d.joinpath(SOONG_BUILD_PB))
+ if bp2build_pb.exists():
+ shutil.copy(bp2build_pb, d.joinpath(BP2BUILD_PB))
+
+
+def archive_run(d: Path, build_info: dict[str, any]):
+ _copy_pbs_to(d)
+ with open(d.joinpath(util.BUILD_INFO_JSON), 'w') as f:
+ json.dump(build_info, f, indent=True)
+
+
+def read_pbs(d: Path) -> dict[str, str]:
+ """
+  Reads metrics data from the pb files under the given run directory
+  (previously copied there by `archive_run`).
+  Soong_build event names may contain a "mixed_build" component. To normalize
+  the event names between mixed builds and soong-only builds, convert
+  `soong_build/soong_build.xyz` and `soong_build/soong_build.mixed_build.xyz`
+  both to simply `soong_build/*.xyz`.
+ """
+ soong_pb = d.joinpath(SOONG_PB)
+ soong_build_pb = d.joinpath(SOONG_BUILD_PB)
+ bp2build_pb = d.joinpath(BP2BUILD_PB)
+
+ events: list[PerfInfoOrEvent] = []
+
+ def extract_perf_info(root_obj):
+ for field_name in dir(root_obj):
+ if field_name.startswith('__'):
+ continue
+ field_value = getattr(root_obj, field_name)
+ if isinstance(field_value, Iterable):
+ for item in field_value:
+ if not isinstance(item, PerfInfo):
+ break
+ events.append(
+ PerfInfoOrEvent(item.name, item.real_time, item.start_time,
+ item.description))
+
+ if soong_pb.exists():
+ metrics_base = MetricsBase()
+ with open(soong_pb, "rb") as f:
+ metrics_base.ParseFromString(f.read())
+ extract_perf_info(metrics_base)
+
+ if soong_build_pb.exists():
+ soong_build_metrics = SoongBuildMetrics()
+ with open(soong_build_pb, "rb") as f:
+ soong_build_metrics.ParseFromString(f.read())
+ extract_perf_info(soong_build_metrics)
+
+ if bp2build_pb.exists():
+ bp2build_metrics = Bp2BuildMetrics()
+ with open(bp2build_pb, "rb") as f:
+ bp2build_metrics.ParseFromString(f.read())
+ for event in bp2build_metrics.events:
+ events.append(
+ PerfInfoOrEvent(event.name, event.real_time, event.start_time, ''))
+
+ events.sort(key=lambda e: e.start_time)
+
+ def normalize(desc: str) -> str:
+ return re.sub(r'^(?:soong_build|mixed_build)', '*', desc)
+
+ return {f'{m.name}/{normalize(m.description)}': util.hhmmss(m.real_time) for m
+ in events}
+
+
+Row = dict[str, any]
+
+
+def _get_column_headers(rows: list[Row], allow_cycles: bool) -> list[str]:
+ """
+  Basically a topological sort of column headers. For each Row, the column order
+ can be thought of as a partial view of a chain of events in chronological
+ order. It's a partial view because not all events may have needed to occur for
+ a build.
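+  For example (mirroring perf_metrics_test.py), rows with the key sequences
+  ('a', 'c') and ('b', 'd') merge into the header order ['a', 'b', 'c', 'd'].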
+ """
+
+ @dataclasses.dataclass
+ class Column:
+ header: str
+ indegree: int
+ nexts: set[str]
+
+ def __str__(self):
+ return f'#{self.indegree}->{self.header}->{self.nexts}'
+
+ def dfs(self, target: str, visited: set[str] = None) -> list[str]:
+ if not visited:
+ visited = set()
+ if target == self.header and self.header in visited:
+ return [self.header]
+ for n in self.nexts:
+ if n in visited:
+ continue
+ visited.add(n)
+ next_col = all_cols[n]
+ path = next_col.dfs(target, visited)
+ if path:
+ return [self.header, *path]
+ return []
+
+ all_cols: dict[str, Column] = {}
+ for row in rows:
+ prev_col = None
+ for col in row:
+ if col not in all_cols:
+ column = Column(col, 0, set())
+ all_cols[col] = column
+ if prev_col is not None and col not in prev_col.nexts:
+ all_cols[col].indegree += 1
+ prev_col.nexts.add(col)
+ prev_col = all_cols[col]
+
+ acc = []
+ entries = [c for c in all_cols.values()]
+ while len(entries) > 0:
+ # sorting alphabetically to break ties for concurrent events
+ entries.sort(key=lambda c: c.header, reverse=True)
+ entries.sort(key=lambda c: c.indegree, reverse=True)
+ entry = entries.pop()
+ # take only one to maintain alphabetical sort
+ if entry.indegree != 0:
+ cycle = '->'.join(entry.dfs(entry.header))
+ s = f'event ordering has a cycle {cycle}'
+ logging.warning(s)
+ if not allow_cycles:
+ raise ValueError(s)
+ acc.append(entry.header)
+ for n in entry.nexts:
+ n = all_cols.get(n)
+ if n is not None:
+ n.indegree -= 1
+ else:
+ if not allow_cycles:
+ raise ValueError(f'unexpected error for: {n}')
+ return acc
+
+
+def get_build_info_and_perf(d: Path) -> dict[str, any]:
+ perf = read_pbs(d)
+ build_info_json = d.joinpath(util.BUILD_INFO_JSON)
+ if not build_info_json.exists():
+ return perf
+ with open(build_info_json, 'r') as f:
+ build_info = json.load(f)
+ return build_info | perf
+
+
+def tabulate_metrics_csv(log_dir: Path):
+ rows: list[dict[str, any]] = []
+ dirs = glob.glob(f'{util.RUN_DIR_PREFIX}*', root_dir=log_dir)
+ dirs.sort(key=lambda x: int(x[1 + len(util.RUN_DIR_PREFIX):]))
+ for d in dirs:
+ d = log_dir.joinpath(d)
+ row = get_build_info_and_perf(d)
+ rows.append(row)
+
+ headers: list[str] = _get_column_headers(rows, allow_cycles=True)
+
+ def row2line(r):
+ return ','.join([str(r.get(col) or '') for col in headers])
+
+ lines = [','.join(headers)]
+ lines.extend(row2line(r) for r in rows)
+
+ with open(log_dir.joinpath(util.METRICS_TABLE), mode='wt') as f:
+ f.writelines(f'{line}\n' for line in lines)
+
+
+def display_tabulated_metrics(log_dir: Path):
+ cmd_str = util.get_cmd_to_display_tabulated_metrics(log_dir)
+ output = subprocess.check_output(cmd_str, shell=True, text=True)
+ logging.info(textwrap.dedent(f'''
+ %s
+ TIPS:
+ 1 To view key metrics in metrics.csv:
+ %s
+ 2 To view column headers:
+ %s
+ '''), output, cmd_str, util.get_csv_columns_cmd(log_dir))
diff --git a/scripts/incremental_build/perf_metrics_test.py b/scripts/incremental_build/perf_metrics_test.py
new file mode 100644
index 00000000..119bdc10
--- /dev/null
+++ b/scripts/incremental_build/perf_metrics_test.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import dataclasses
+import unittest
+
+from perf_metrics import _get_column_headers
+
+
+def to_row(concatenated_keys: str) -> dict:
+ return {c: None for c in concatenated_keys}
+
+
+class PerfMetricsTest(unittest.TestCase):
+ """Tests utility functions. This is not Perf Test itself."""
+
+ def test_get_column_headers(self):
+
+ @dataclasses.dataclass
+ class Example:
+ # each string = concatenated keys of the row object
+ row_keysets: list[str]
+ # concatenated headers
+ expected_headers: str
+
+ examples: list[Example] = [
+ Example(['a'], 'a'),
+ Example(['ac', 'bd'], 'abcd'),
+ Example(['abe', 'cde'], 'abcde'),
+ Example(['ab', 'ba'], 'ab'),
+ Example(['abcde', 'edcba'], 'abcde'),
+ Example(['ac', 'abc'], 'abc')
+ ]
+ for e in examples:
+ rows = [to_row(kz) for kz in e.row_keysets]
+ expected_headers = [*e.expected_headers]
+ with self.subTest(rows=rows, expected_headers=expected_headers):
+ self.assertEqual(_get_column_headers(rows, allow_cycles=True),
+ expected_headers)
+
+ def test_cycles(self):
+ examples = [
+ (['ab', 'ba'], 'a->b->a'),
+ (['abcd', 'db'], 'b->c->d->b')
+ ]
+ for (e, cycle) in examples:
+ rows = [to_row(kz) for kz in e]
+ with self.subTest(rows=rows, cycle=cycle):
+ with self.assertRaisesRegex(ValueError,
+ f'event ordering has a cycle {cycle}'):
+ _get_column_headers(rows, allow_cycles=False)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/scripts/incremental_build/pretty.py b/scripts/incremental_build/pretty.py
new file mode 100644
index 00000000..ca7c6243
--- /dev/null
+++ b/scripts/incremental_build/pretty.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import csv
+import datetime
+import logging
+import re
+import statistics
+import subprocess
+import textwrap
+from pathlib import Path
+from typing import Callable
+from typing import TextIO
+
+import util
+
+
+def normalize_rebuild(line: dict) -> dict:
+  line['description'] = re.sub(r'^(rebuild)-\d+(.*)$', '\\1\\2',
+ line['description'])
+ return line
+
+
+def groupby(xs: list[dict], keyfn: Callable[[dict], str]) -> dict[
+ str, list[dict]]:
+ grouped = {}
+ for x in xs:
+ k = keyfn(x)
+ grouped.setdefault(k, []).append(x)
+ return grouped
+
+
+def write_table(out: TextIO, rows: list[list[str]]):
+ for r in rows:
+ for c in r:
+ out.write(str(c) + ',')
+ out.write('\n')
+ return
+
+
+def _get_build_types(xs: list[dict]) -> list[str]:
+ build_types = []
+ for x in xs:
+ b = x["build_type"]
+ if b not in build_types:
+ build_types.append(b)
+ return build_types
+
+
+def summarize_metrics(log_dir: Path):
+ filename = log_dir if log_dir.is_file() else log_dir.joinpath(
+ util.METRICS_TABLE)
+ with open(filename) as f:
+ csv_lines = [normalize_rebuild(line) for line in csv.DictReader(f)]
+
+ lines: list[dict] = []
+ for line in csv_lines:
+ if line["build_result"] == "FAILED":
+ logging.warning(f"{line['description']} / {line['build_type']}")
+ else:
+ lines.append(line)
+
+ build_types = _get_build_types(lines)
+ headers = ["cuj", "targets"] + build_types
+ rows: list[list[str]] = [headers]
+
+ by_cuj = groupby(lines, lambda l: l["description"])
+ for (cuj, cuj_rows) in by_cuj.items():
+ for (targets, target_rows) in groupby(cuj_rows,
+ lambda l: l["targets"]).items():
+ row = [cuj, targets]
+ by_build_type = groupby(target_rows, lambda l: l["build_type"])
+ for build_type in build_types:
+ selected_lines = by_build_type.get(build_type)
+ if not selected_lines:
+ row.append('')
+ else:
+ times = [util.period_to_seconds(sl['time']) for sl in selected_lines]
+ cell = util.hhmmss(
+ datetime.timedelta(seconds=statistics.median(times)))
+ if len(selected_lines) > 1:
+ cell = f'{cell}[N={len(selected_lines)}]'
+ row.append(cell)
+ rows.append(row)
+
+ with open(log_dir.joinpath(util.SUMMARY_TABLE), mode='wt') as f:
+ write_table(f, rows)
+
+
+def display_summarized_metrics(log_dir: Path):
+ f = log_dir.joinpath(util.SUMMARY_TABLE)
+ cmd = f'grep -v rebuild {f} | grep -v WARMUP | column -t -s,'
+ output = subprocess.check_output(cmd, shell=True, text=True)
+ logging.info(textwrap.dedent(f'''
+ %s
+ TIPS:
+ To view condensed summary:
+ %s
+ '''), output, cmd)
diff --git a/scripts/incremental_build/ui.py b/scripts/incremental_build/ui.py
new file mode 100644
index 00000000..f3bab40d
--- /dev/null
+++ b/scripts/incremental_build/ui.py
@@ -0,0 +1,210 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import dataclasses
+import functools
+import logging
+import os
+import re
+import sys
+import textwrap
+from datetime import date
+from enum import Enum
+from pathlib import Path
+from typing import Optional
+
+import cuj_catalog
+import util
+
+
+class BuildType(Enum):
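+  # `_ignore_` keeps `_soong_cmd` from becoming an enum member; it is only the
+  # shared command prefix for the soong-based build types below.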
+ _ignore_ = '_soong_cmd'
+ _soong_cmd = ['build/soong/soong_ui.bash',
+ '--make-mode',
+ '--skip-soong-tests']
+ SOONG_ONLY = [*_soong_cmd, 'BUILD_BROKEN_DISABLE_BAZEL=true']
+ MIXED_PROD = [*_soong_cmd, '--bazel-mode']
+ MIXED_STAGING = [*_soong_cmd, '--bazel-mode-staging']
+ MIXED_DEV = [*_soong_cmd, '--bazel-mode-dev']
+ B = ['build/bazel/bin/b', 'build']
+ B_ANDROID = [*B, '--config=android']
+
+ @staticmethod
+ def from_flag(s: str) -> list['BuildType']:
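+    # case-insensitive substring match against enum names, e.g. 'prod' selects
+    # MIXED_PROD and 'b' selects both B and B_ANDROID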
+ chosen: list[BuildType] = []
+ for e in BuildType:
+ if s.lower() in e.name.lower():
+ chosen.append(e)
+ if len(chosen) == 0:
+ raise RuntimeError(f'no such build type: {s}')
+ return chosen
+
+ def to_flag(self):
+ return self.name.lower()
+
+
+@dataclasses.dataclass(frozen=True)
+class UserInput:
+ build_types: list[BuildType]
+ chosen_cujgroups: list[int]
+ description: Optional[str]
+ log_dir: Path
+ targets: list[str]
+
+
+@functools.cache
+def get_user_input() -> UserInput:
+ cujgroups = cuj_catalog.get_cujgroups()
+
+ def validate_cujgroups(input_str: str) -> list[int]:
+ if input_str.isnumeric():
+ i = int(input_str)
+ if 0 <= i < len(cujgroups):
+ return [i]
+ else:
+ pattern = re.compile(input_str)
+
+ def matches(cujgroup: cuj_catalog.CujGroup) -> bool:
+ for cujstep in cujgroup.steps:
+ # because we should run all cujsteps in a group we will select
+ # a group if any of its steps match the pattern
+ if pattern.search(f'{cujstep.verb} {cujgroup.description}'):
+ return True
+ return False
+
+ matching_cuj_groups = [i for i, cujgroup in enumerate(cujgroups) if
+ matches(cujgroup)]
+ if len(matching_cuj_groups):
+ return matching_cuj_groups
+ raise argparse.ArgumentError(
+ argument=None,
+      message=f'Invalid input: "{input_str}"; '
+              f'expected an index < {len(cujgroups)} '
+              'or a regex pattern matching a CUJ description')
+
+ # importing locally here to avoid chances of cyclic import
+ import incremental_build
+ p = argparse.ArgumentParser(
+ formatter_class=argparse.RawTextHelpFormatter,
+ description='' +
+ textwrap.dedent(incremental_build.__doc__) +
+ textwrap.dedent(incremental_build.main.__doc__))
+
+ cuj_list = '\n'.join(
+ [f'{i:2}: {cujgroup}' for i, cujgroup in enumerate(cujgroups)])
+ p.add_argument('-c', '--cujs', nargs='+',
+ type=validate_cujgroups,
+ help='Index number(s) for the CUJ(s) from the following list. '
+                      'Or substring matches for the CUJ description. '
+ f'Note the ordering will be respected:\n{cuj_list}')
+ p.add_argument('-C', '--exclude-cujs', nargs='+',
+ type=validate_cujgroups,
+ help='Index number(s) or substring match(es) for the CUJ(s) '
+ 'to be excluded')
+ p.add_argument('-d', '--description', type=str, default='',
+ help='Any additional tag/description for the set of builds')
+
+ log_levels = dict(getattr(logging, '_levelToName')).values()
+ p.add_argument('-v', '--verbosity', choices=log_levels, default='INFO',
+ help='Log level. Defaults to %(default)s')
+ default_log_dir = util.get_top_dir().parent.joinpath(
+ f'timing-{date.today().strftime("%b%d")}')
+ p.add_argument('-l', '--log-dir', type=Path, default=default_log_dir,
+ help=textwrap.dedent(f'''
+ Directory for timing logs. Defaults to %(default)s
+ TIPS:
+ 1 Specify a directory outside of the source tree
+ 2 To view key metrics in metrics.csv:
+ {util.get_cmd_to_display_tabulated_metrics(default_log_dir)}
+ 3 To view column headers:
+ {util.get_csv_columns_cmd(default_log_dir)}''').strip())
+ def_build_types = [BuildType.SOONG_ONLY,
+ BuildType.MIXED_PROD,
+ BuildType.MIXED_STAGING]
+ p.add_argument('-b', '--build-types', nargs='+',
+ type=BuildType.from_flag,
+ default=[def_build_types],
+ help=f'Defaults to {[b.to_flag() for b in def_build_types]}. '
+ f'Choose from {[e.name.lower() for e in BuildType]}')
+ p.add_argument('--ignore-repo-diff', default=False, action='store_true',
+ help='Skip "repo status" check')
+ p.add_argument('--append-csv', default=False, action='store_true',
+ help='Add results to existing spreadsheet')
+ p.add_argument('targets', nargs='*', default=['nothing'],
+ help='Targets to run, e.g. "libc adbd". '
+ 'Defaults to %(default)s')
+
+ options = p.parse_args()
+
+ if options.verbosity:
+ logging.root.setLevel(options.verbosity)
+
+ if options.cujs and options.exclude_cujs:
+ sys.exit('specify either --cujs or --exclude-cujs not both')
+ chosen_cujgroups: list[int]
+ if options.exclude_cujs:
+ exclusions: list[int] = [i for sublist in options.exclude_cujs for i in
+ sublist]
+ chosen_cujgroups = [i for i in range(0, len(cujgroups)) if
+ i not in exclusions]
+ elif options.cujs:
+ chosen_cujgroups = [i for sublist in options.cujs for i in sublist]
+ else:
+ chosen_cujgroups = [i for i in range(0, len(cujgroups))]
+
+ bazel_labels: list[str] = [target for target in options.targets if
+ target.startswith('//')]
+ if 0 < len(bazel_labels) < len(options.targets):
+ sys.exit(f'Don\'t mix bazel labels {bazel_labels} with soong targets '
+ f'{[t for t in options.targets if t not in bazel_labels]}')
+ if os.getenv('BUILD_BROKEN_DISABLE_BAZEL') is not None:
+ raise RuntimeError(f'use -b {BuildType.SOONG_ONLY.to_flag()} '
+ f'instead of BUILD_BROKEN_DISABLE_BAZEL')
+ build_types: list[BuildType] = [i for sublist in options.build_types for i in
+ sublist]
+  if len(bazel_labels) > 0:
+    non_b = [b for b in build_types if
+             b != BuildType.B and b != BuildType.B_ANDROID]
+    if len(non_b) > 0:
+      raise RuntimeError(f'bazel labels cannot be used with {non_b}')
+
+ pretty_str = '\n'.join(
+ [f'{i:2}: {cujgroups[i]}' for i in chosen_cujgroups])
+ logging.info(f'%d CUJs chosen:\n%s', len(chosen_cujgroups), pretty_str)
+
+ if not options.ignore_repo_diff and util.has_uncommitted_changes():
+    error_message = 'THERE ARE UNCOMMITTED CHANGES (TIP: repo status). ' \
+                    'Use --ignore-repo-diff to skip this check.'
+ if not util.is_interactive_shell():
+ sys.exit(error_message)
+ response = input(f'{error_message}\nContinue?[Y/n]')
+ if response.upper() != 'Y':
+ sys.exit(1)
+
+ log_dir = Path(options.log_dir).resolve()
+ if not options.append_csv and log_dir.exists():
+ error_message = f'{log_dir} already exists. ' \
+ 'Use --append-csv to skip this check.'
+ if not util.is_interactive_shell():
+ sys.exit(error_message)
+ response = input(f'{error_message}\nContinue?[Y/n]')
+ if response.upper() != 'Y':
+ sys.exit(1)
+
+ return UserInput(
+ build_types=build_types,
+ chosen_cujgroups=chosen_cujgroups,
+ description=options.description,
+ log_dir=Path(options.log_dir).resolve(),
+ targets=options.targets)
diff --git a/scripts/incremental_build/util.py b/scripts/incremental_build/util.py
new file mode 100644
index 00000000..6a3fef44
--- /dev/null
+++ b/scripts/incremental_build/util.py
@@ -0,0 +1,278 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import csv
+import datetime
+import functools
+import glob
+import logging
+import os
+import re
+import subprocess
+import sys
+from datetime import date
+from pathlib import Path
+from typing import Final
+from typing import Generator
+
+INDICATOR_FILE: Final[str] = 'build/soong/soong_ui.bash'
+METRICS_TABLE: Final[str] = 'metrics.csv'
+SUMMARY_TABLE: Final[str] = 'summary.csv'
+RUN_DIR_PREFIX: Final[str] = 'run'
+BUILD_INFO_JSON: Final[str] = 'build_info.json'
+
+
+@functools.cache
+def _is_important(column) -> bool:
+ patterns = {
+ 'description', 'build_type', r'build\.ninja(\.size)?', 'targets',
+ 'log', 'actions', 'time',
+    'soong/soong', 'bp2build/', 'symlink_forest/', r'soong_build/\*',
+    r'soong_build/\*\.bazel', 'kati/kati build', 'ninja/ninja'
+ }
+ for pattern in patterns:
+ if re.fullmatch(pattern, column):
+ return True
+ return False
+
+
+def get_csv_columns_cmd(d: Path) -> str:
+ """
+ :param d: the log directory
+ :return: a quick shell command to view columns in metrics.csv
+ """
+ csv_file = d.joinpath(METRICS_TABLE)
+ return f'head -n 1 "{csv_file.absolute()}" | sed "s/,/\\n/g" | nl'
+
+
+def get_cmd_to_display_tabulated_metrics(d: Path) -> str:
+ """
+ :param d: the log directory
+ :return: a quick shell command to view some collected metrics
+ """
+ csv_file = d.joinpath(METRICS_TABLE)
+ headers: list[str] = []
+ if csv_file.exists():
+ with open(csv_file) as r:
+ reader = csv.DictReader(r)
+ headers = reader.fieldnames or []
+
+ columns: list[int] = [i for i, h in enumerate(headers) if _is_important(h)]
+ f = ','.join(str(i + 1) for i in columns)
+ return f'grep -v rebuild- "{csv_file}" | grep -v FAILED | ' \
+ f'cut -d, -f{f} | column -t -s,'
+
+
+@functools.cache
+def get_top_dir(d: Path = Path('.').absolute()) -> Path:
+ """Get the path to the root of the Android source tree"""
+ top_dir = os.environ.get('ANDROID_BUILD_TOP')
+ if top_dir:
+    logging.info('ANDROID BUILD TOP = %s', top_dir)
+ return Path(top_dir)
+ logging.debug('Checking if Android source tree root is %s', d)
+ if d.parent == d:
+    sys.exit('Unable to find ROOT source directory; specifically, '
+ f'{INDICATOR_FILE} not found anywhere. '
+ 'Try `m nothing` and `repo sync`')
+ if d.joinpath(INDICATOR_FILE).is_file():
+ logging.info('ANDROID BUILD TOP assumed to be %s', d)
+ return d
+ return get_top_dir(d.parent)
+
+
+@functools.cache
+def get_out_dir() -> Path:
+ out_dir = os.environ.get('OUT_DIR')
+ return Path(out_dir) if out_dir else get_top_dir().joinpath('out')
+
+
+@functools.cache
+def get_default_log_dir() -> Path:
+ return get_top_dir().parent.joinpath(
+ f'timing-{date.today().strftime("%b%d")}')
+
+
+def is_interactive_shell() -> bool:
+ return sys.__stdin__.isatty() and sys.__stdout__.isatty() \
+ and sys.__stderr__.isatty()
+
+
+# see test_next_path_helper() for examples
+def _next_path_helper(basename: str) -> str:
+ name = re.sub(r'(?<=-)\d+(?=(\..*)?$)', lambda d: str(int(d.group(0)) + 1),
+ basename)
+ if name == basename:
+ name = re.sub(r'(\..*)$', r'-1\1', name, 1)
+ if name == basename:
+ name = f'{name}-1'
+ return name
+
+
+def next_path(path: Path) -> Generator[Path, None, None]:
+ """
+  Yields Paths with an increasing numeric suffix appended to the name,
+  e.g. next_path(Path('a.txt')) yields a-5.txt if a-4.txt already exists
+ """
+ path.parent.mkdir(parents=True, exist_ok=True)
+ while True:
+ name = _next_path_helper(path.name)
+ path = path.parent.joinpath(name)
+ if not path.exists():
+ yield path
+
+
+def has_uncommitted_changes() -> bool:
+ """
+ effectively a quick 'repo status' that fails fast
+ if any project has uncommitted changes
+ """
+ for cmd in ['diff', 'diff --staged']:
+ diff = subprocess.run(
+ args=f'repo forall -c git {cmd} --quiet --exit-code'.split(),
+ cwd=get_top_dir(), text=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ if diff.returncode != 0:
+ return True
+ return False
+
+
+@functools.cache
+def is_ninja_dry_run(ninja_args: str = None) -> bool:
+ if ninja_args is None:
+ ninja_args = os.environ.get('NINJA_ARGS') or ''
+ ninja_dry_run = re.compile(r'(?:^|\s)-n\b')
+ return ninja_dry_run.search(ninja_args) is not None
+
+
+def count_explanations(process_log_file: Path) -> int:
+ """
+ Builds are run with '-d explain' flag and ninja's explanations for running
+ build statements (except for phony outputs) are counted. The explanations
+ help debugging. The count is an over-approximation of actions run, but it
+ will be ZERO for a no-op build.
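+  A hypothetical counted line looks like
+    ninja explain: out/.../foo.o is dirty
+  whereas phony-output explanations such as
+    ninja explain: edge with output ... is a phony output, so is always dirty
+  are excluded.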
+ """
+ explanations = 0
+ pattern = re.compile(
+ r'^ninja explain:(?! edge with output .* is a phony output,'
+ r' so is always dirty$)')
+ with open(process_log_file) as f:
+ for line in f:
+ if pattern.match(line):
+ explanations += 1
+ return explanations
+
+
+def is_git_repo(p: Path) -> bool:
+ """checks if p is in a directory that's under git version control"""
+ git = subprocess.run(args=f'git remote'.split(), cwd=p,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ return git.returncode == 0
+
+
+def any_file(pattern: str) -> Path:
+ return any_file_under(get_top_dir(), pattern)
+
+
+def any_file_under(root: Path, pattern: str) -> Path:
+ if pattern.startswith('!'):
+ raise RuntimeError(f'provide a filename instead of {pattern}')
+ d, files = any_match_under(get_top_dir() if root is None else root, pattern)
+ files = [d.joinpath(f) for f in files]
+ try:
+ file = next(f for f in files if f.is_file())
+ return file
+ except StopIteration:
+ raise RuntimeError(f'no file matched {pattern}')
+
+
+def any_dir_under(root: Path, *patterns: str) -> Path:
+ d, _ = any_match_under(root, *patterns)
+ return d
+
+
+def any_match(*patterns: str) -> (Path, list[str]):
+ return any_match_under(get_top_dir(), *patterns)
+
+
+@functools.cache
+def any_match_under(root: Path, *patterns: str) -> (Path, list[str]):
+ """
+  :param root: the first directory to start searching from
+  :param patterns: glob patterns to match, or to exclude when prefixed with "!"
+  :return: the directory and the sub-paths matching the patterns
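+  Example: any_match_under(get_top_dir(), 'Android.bp', '!BUILD') returns the
+  first git project (in BFS order) containing an Android.bp but no BUILD file,
+  together with ['Android.bp'].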
+ """
+ bfs: list[Path] = [root]
+ while len(bfs) > 0:
+ first = bfs.pop(0)
+ if is_git_repo(first):
+ matches: list[str] = []
+ for pattern in patterns:
+ negate = pattern.startswith('!')
+ if negate:
+ pattern = pattern.removeprefix('!')
+ try:
+ found_match = next(
+ glob.iglob(pattern, root_dir=first, recursive=True))
+ except StopIteration:
+ found_match = None
+ if negate and found_match is not None:
+ break
+ if not negate:
+ if found_match is None:
+ break
+ else:
+ matches.append(found_match)
+ else:
+ return Path(first), matches
+
+ def should_visit(c: os.DirEntry) -> bool:
+ return c.is_dir() and not (c.is_symlink() or
+ '.' in c.name or
+ 'test' in c.name or
+ Path(c.path) == get_out_dir())
+
+ children = [Path(c.path) for c in os.scandir(first) if should_visit(c)]
+ children.sort()
+ bfs.extend(children)
+ raise RuntimeError(f'No suitable directory for {patterns}')
+
+
+def hhmmss(t: datetime.timedelta) -> str:
+ """pretty prints time periods, prefers mm:ss.sss and resorts to hh:mm:ss.sss
+ only if t >= 1 hour.
+ Examples: 02:12.231, 00:00.512, 00:01:11.321, 1:12:13.121
+ See unit test for more examples."""
+ h, f = divmod(t.seconds, 60 * 60)
+ m, f = divmod(f, 60)
+ s = f + t.microseconds / 1000_000
+ return f'{h}:{m:02d}:{s:06.3f}' if h else f'{m:02d}:{s:06.3f}'
+
+
+def period_to_seconds(s: str) -> float:
+ """converts a time period into seconds. The input is expected to be in the
+ format used by hhmmss().
+  Example: 02:05.000 -> 125.0
+ See unit test for more examples."""
+ if s == '':
+ return 0.0
+ acc = 0.0
+ while True:
+ [left, *right] = s.split(':', 1)
+ acc = acc * 60 + float(left)
+ if right:
+ s = right[0]
+ else:
+ return acc
diff --git a/scripts/incremental_build/util_test.py b/scripts/incremental_build/util_test.py
new file mode 100644
index 00000000..b117bd74
--- /dev/null
+++ b/scripts/incremental_build/util_test.py
@@ -0,0 +1,107 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+import os
+import unittest
+
+from util import _next_path_helper
+from util import any_match
+from util import get_top_dir
+from util import hhmmss
+from util import period_to_seconds
+
+
+class UtilTest(unittest.TestCase):
+ def test_next_path_helper(self):
+ examples = [
+ ('output', 'output-1'),
+ ('output.x', 'output-1.x'),
+ ('output.x.y', 'output-1.x.y'),
+ ('output-1', 'output-2'),
+ ('output-9', 'output-10'),
+ ('output-10', 'output-11'),
+ ]
+ for (pattern, expected) in examples:
+ with self.subTest(msg=pattern, pattern=pattern, expected=expected):
+ self.assertEqual(_next_path_helper(pattern), expected)
+
+ def test_any_match(self):
+ path, matches = any_match('root.bp')
+ self.assertEqual(matches, ['root.bp'])
+ self.assertEqual(path, get_top_dir().joinpath('build/soong'))
+
+ path, matches = any_match('!Android.bp', '!BUILD',
+ 'scripts/incremental_build/incremental_build.py')
+ self.assertEqual(matches,
+ ['scripts/incremental_build/incremental_build.py'])
+ self.assertEqual(path, get_top_dir().joinpath('build/bazel'))
+
+ path, matches = any_match('BUILD', 'README.md')
+ self.assertEqual(matches, ['BUILD', 'README.md'])
+ self.assertTrue(path.joinpath('BUILD').exists())
+ self.assertTrue(path.joinpath('README.md').exists())
+
+ path, matches = any_match('BUILD', '!README.md')
+ self.assertEqual(matches, ['BUILD'])
+ self.assertTrue(path.joinpath('BUILD').exists())
+ self.assertFalse(path.joinpath('README.md').exists())
+
+ path, matches = any_match('!*.bazel', '*')
+ self.assertGreater(len(matches), 0)
+ children = os.listdir(path)
+ self.assertGreater(len(children), 0)
+ for child in children:
+ self.assertFalse(child.endswith('.bazel'))
+
+ path, matches = any_match('*/BUILD', '*/README.md')
+ self.assertGreater(len(matches), 0)
+ for m in matches:
+ self.assertTrue(path.joinpath(m).exists())
+
+ path, matches = any_match('!**/BUILD', '**/*.cpp')
+ self.assertEqual(len(matches), 1)
+ self.assertTrue(path.joinpath(matches[0]).exists())
+ self.assertTrue(matches[0].endswith('.cpp'))
+ for _, dirs, files in os.walk(path):
+ self.assertFalse('BUILD' in dirs)
+ self.assertFalse('BUILD' in files)
+
+ def test_hhmmss(self):
+ examples = [
+ (datetime.timedelta(seconds=(2 * 60 + 5)), '02:05.000'),
+ (datetime.timedelta(seconds=(3600 + 23 * 60 + 45.897898)),
+ '1:23:45.898'),
+ ]
+ for (ts, expected) in examples:
+ with self.subTest(ts=ts, expected=expected):
+ self.assertEqual(hhmmss(ts), expected)
+
+ def test_period_to_seconds(self):
+ examples = [
+ ('02:05.000', 2 * 60 + 5),
+ ('1:23:45.898', 3600 + 23 * 60 + 45.898),
+ ('1.898', 1.898),
+ ('0.3', 0.3),
+ ('0', 0),
+ ('0:00', 0),
+ ('0:00:00', 0),
+ ('', 0)
+ ]
+ for (ts, expected) in examples:
+ with self.subTest(ts=ts, expected=expected):
+ self.assertEqual(period_to_seconds(ts), expected)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/scripts/milestone-2/demo.sh b/scripts/milestone-2/demo.sh
index 3f63841f..68baf965 100755
--- a/scripts/milestone-2/demo.sh
+++ b/scripts/milestone-2/demo.sh
@@ -55,7 +55,7 @@ function log() {
# Ensure that this script uses the checked-in Bazel binary.
function bazel() {
- "${AOSP_ROOT}/tools/bazel" "$@"
+ "${AOSP_ROOT}/build/bazel/bin/bazel" "$@"
}
# Run the bp2build converter to generate BUILD files into out/soong/bp2build.
diff --git a/scripts/mkmodules_diff.sh b/scripts/mkmodules_diff.sh
new file mode 100755
index 00000000..84f09e50
--- /dev/null
+++ b/scripts/mkmodules_diff.sh
@@ -0,0 +1,35 @@
+#!/bin/bash -eu
+#
+# Compares Android-TARGET.mk files generated by the mixed build
+# against the same file generated by the reference build.
+# This is a wrapper around the build/bazel/mkcompare tool.
+# Usage:
+#   mkmodules_diff [--bazel-mode-staging] <mkcompare_option> ...
+# The Android-TARGET.mk files being compared are those for the product
+# defined by the TARGET_PRODUCT and TARGET_BUILD_VARIANT environment
+# variables.
+# Without the --bazel-mode-staging option, the mixed build is run with
+# the --bazel-mode-dev option.
+# The output can be safely redirected to a file; it does not include
+# the noise from the build.
+
+trap 'printf "FAILED: $BASH_COMMAND (rc=%s)\n" $? >&2' ERR
+declare -r builder=build/soong/soong_ui.bash
+[[ -x ${builder} ]] || \
+ { echo "current directory should be the root of the Android source tree"; exit 1; }
+export ANDROID_QUIET_BUILD=yes
+declare -a mkargs
+declare bazel_mode=--bazel-mode-dev
+for a in "$@"; do
+ if [[ "$a" =~ ^--bazel-mode ]]; then
+ bazel_mode="$a"
+ else
+ mkargs+=("$a")
+ fi
+done
+declare -r mkmod_file="out/soong/Android-${TARGET_PRODUCT?TARGET_PRODUCT not set}.mk"
+${builder} --make-mode nothing >/dev/null
+mv ${mkmod_file} ${mkmod_file}.ref
+${builder} --make-mode "${bazel_mode}" nothing >/dev/null
+GOWORK=$PWD/build/bazel/mkcompare/go.work \
+ go run android/bazel/mkcompare/cmd ${mkargs[@]} ${mkmod_file}.ref ${mkmod_file}
diff --git a/scripts/print_analysis_metrics.py b/scripts/print_analysis_metrics.py
new file mode 100755
index 00000000..e5d8199e
--- /dev/null
+++ b/scripts/print_analysis_metrics.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A tool to print human-readable metrics information regarding the last build.
+
+By default, the consumed file will be $OUT_DIR/soong_build_metrics.pb. You may
+pass in a different file using the metrics_file positional argument.
+"""
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+
+class Event(object):
+ """Contains nested event data.
+
+ Fields:
+ name: The short name of this event, e.g. the 'b' in an event called a.b.
+ children: Nested events.
+ start_time_relative_ns: Start time in nanoseconds, relative to the earliest event.
+ duration_ns: Duration of this event, including time spent in children.
+ """
+
+ def __init__(self, name):
+ self.name = name
+ self.children = list()
+ self.start_time_relative_ns = 0
+ self.duration_ns = 0
+
+ def get_child(self, name):
+ "Get a child called 'name' or return None"
+ for child in self.children:
+ if child.name == name:
+ return child
+ return None
+
+ def get_or_add_child(self, name):
+ "Get a child called 'name', or if it isn't there, add it and return it."
+ child = self.get_child(name)
+ if not child:
+ child = Event(name)
+ self.children.append(child)
+ return child
+
+
+def _get_proto_output_file():
+ """Returns the location of the proto file used for analyzing out/soong_build_metrics.pb.
+
+ This corresponds to soong/ui/metrics/metrics_proto/metrics.proto.
+ """
+ return os.getenv("ANDROID_BUILD_TOP"
+ ) + "/build/soong/ui/metrics/metrics_proto/metrics.proto"
+
+
+def _get_default_output_file():
+ """Returns the filepath for the build output."""
+ out_dir = os.getenv("OUT_DIR")
+ if not out_dir:
+ out_dir = "out"
+ build_top = os.getenv("ANDROID_BUILD_TOP")
+ if not build_top:
+ raise Exception(
+ "$ANDROID_BUILD_TOP not found in environment. Have you run lunch?")
+ return os.path.join(build_top, out_dir, "soong_build_metrics.pb")
+
+
+def _make_nested_events(root_event, event):
+ """Splits the event into its '.' separated name parts, and adds Event objects for it to the
+
+ synthetic root_event event.
+ """
+ node = root_event
+ for sub_event in event["description"].split("."):
+ node = node.get_or_add_child(sub_event)
+ node.start_time_relative_ns = event["start_time_relative_ns"]
+ node.duration_ns = event["real_time"]
+
+
+def _write_events(out, events, parent=None):
+ """Writes the list of events.
+
+ Args:
+ out: The stream to write to
+ events: The list of events to write
+ parent: Prefix parent's name
+ """
+ for event in events:
+ _write_event(out, event, parent)
+
+
+def _write_event(out, event, parent=None):
+ "Writes an event. See _write_events for args."
+ full_event_name = parent + "." + event.name if parent else event.name
+ out.write(
+ "%(start)9s %(duration)9s %(name)s\n" % {
+ "start": _format_ns(event.start_time_relative_ns),
+ "duration": _format_ns(event.duration_ns),
+ "name": full_event_name,
+ })
+ _write_events(out, event.children, full_event_name)
+
+
+def _format_ns(duration_ns):
+ "Pretty print duration in nanoseconds"
+ return "%.02fs" % (duration_ns / 1_000_000_000)
+
+
+def _save_file(data, file):
+ # Write the raw bytes and close the file even if the write raises.
+ with open(file, "wb") as f:
+ f.write(data)
+
+
+def main():
+ # Parse args
+ parser = argparse.ArgumentParser(description="")
+ parser.add_argument(
+ "metrics_file",
+ nargs="?",
+ default=_get_default_output_file(),
+ help="The soong_metrics file created as part of the last build. " +
+ "Defaults to out/soong_build_metrics.pb")
+ parser.add_argument(
+ "--save-proto-output-file",
+ nargs="?",
+ default="",
+ help="(Optional) The file to save the output of the printproto command to."
+ )
+ args = parser.parse_args()
+
+ # Check the metrics file
+ metrics_file = args.metrics_file
+ if not os.path.exists(metrics_file):
+ raise Exception("File " + metrics_file + " not found. Did you run a build?")
+
+ # Check the proto definition file
+ proto_file = _get_proto_output_file()
+ if not os.path.exists(proto_file):
+ raise Exception(
+ "Proto definition " + proto_file + " not found. Is your checkout complete?")
+
+ # Load the metrics file from the out dir
+ cmd = r"""printproto --proto2 --raw_protocol_buffer --json \
+ --json_accuracy_loss_reaction=ignore \
+ --message=soong_build_metrics.SoongBuildMetrics --multiline \
+ --proto=""" + proto_file + " " + metrics_file
+ json_out = subprocess.check_output(cmd, shell=True)
+
+ if args.save_proto_output_file != "":
+ _save_file(json_out, args.save_proto_output_file)
+
+ build_output = json.loads(json_out)
+
+ # Bail if there are no events
+ raw_events = build_output.get("events")
+ if not raw_events:
+ print("No events to display")
+ return
+
+ # Update the start times to be based on the first event
+ first_time_ns = min([event["start_time"] for event in raw_events])
+ for event in raw_events:
+ event["start_time_relative_ns"] = event["start_time"] - first_time_ns
+
+ # Sort by start time so the nesting also is sorted by time
+ raw_events.sort(key=lambda x: x["start_time_relative_ns"])
+
+ # Synthetic root that is never shown, so the input doesn't need a single top-level event
+ fake_root_event = Event("<root>")
+
+ # Convert the flat event list into the tree
+ for event in raw_events:
+ _make_nested_events(fake_root_event, event)
+
+ # Output the results
+ print(" start duration")
+
+ _write_events(sys.stdout, fake_root_event.children)
+
+
+if __name__ == "__main__":
+ main()
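As a quick illustration of how the dotted event names above are nested (a sketch only, reusing the Event class and _make_nested_events from this file; the event values are made up):

    root = Event("<root>")
    _make_nested_events(root, {
        "description": "mixed_build.prepare",
        "start_time_relative_ns": 0,
        "real_time": 1_500_000_000,
    })
    # root now holds one child named "mixed_build", which in turn holds "prepare";
    # only the leaf ("prepare") is given the start time and the 1.50s duration.
    assert root.get_child("mixed_build").get_child("prepare").duration_ns == 1_500_000_000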
diff --git a/scripts/queryview-bottom-up.sh b/scripts/queryview-bottom-up.sh
index 0ad2a1dd..f6b6dca2 100755
--- a/scripts/queryview-bottom-up.sh
+++ b/scripts/queryview-bottom-up.sh
@@ -68,7 +68,7 @@ set -euo pipefail
function bazel() {
# We're in <root>/build/bazel/scripts
AOSP_ROOT="$(dirname $0)/../../.."
- "${AOSP_ROOT}/tools/bazel" "$@"
+ "${AOSP_ROOT}/build/bazel/bin/bazel" "$@"
}
T=${1:-//bionic/libc:libc--android_arm_armv7-a-neon_shared}
diff --git a/scripts/run_apex_tests.sh b/scripts/run_apex_tests.sh
index f6f5a4f3..9e955b0f 100755
--- a/scripts/run_apex_tests.sh
+++ b/scripts/run_apex_tests.sh
@@ -4,43 +4,12 @@
set -o pipefail
-# TODO: Refactor build/make/envsetup.sh to make gettop() available elsewhere
-function gettop
-{
- # Function uses potentially uninitialzied variables
- set +u
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../make/shell_utils.sh
+require_top
- local TOPFILE=build/bazel/bazel.sh
- if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
- # The following circumlocution ensures we remove symlinks from TOP.
- (cd "$TOP"; PWD= /bin/pwd)
- else
- if [ -f $TOPFILE ] ; then
- # The following circumlocution (repeated below as well) ensures
- # that we record the true directory name and not one that is
- # faked up with symlink names.
- PWD= /bin/pwd
- else
- local HERE=$PWD
- local T=
- while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
- \cd ..
- T=`PWD= /bin/pwd -P`
- done
- \cd "$HERE"
- if [ -f "$T/$TOPFILE" ]; then
- echo "$T"
- fi
- fi
- fi
-
- set -u
-}
-
-AOSP_ROOT=`gettop`
# Generate BUILD files into out/soong/bp2build
-"${AOSP_ROOT}/build/soong/soong_ui.bash" --make-mode BP2BUILD_VERBOSE=1 bp2build --skip-soong-tests
+"${TOP}/build/soong/soong_ui.bash" --make-mode BP2BUILD_VERBOSE=1 bp2build --skip-soong-tests
BUILD_FLAGS_LIST=(
--color=no
@@ -63,10 +32,10 @@ BUILD_TARGETS_LIST=(
BUILD_TARGETS="${BUILD_TARGETS_LIST[@]}"
echo "Building APEXes with Bazel..."
-${AOSP_ROOT}/tools/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_x86 -k ${BUILD_TARGETS}
-${AOSP_ROOT}/tools/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_x86_64 -k ${BUILD_TARGETS}
-${AOSP_ROOT}/tools/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_arm -k ${BUILD_TARGETS}
-${AOSP_ROOT}/tools/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_arm64 -k ${BUILD_TARGETS}
+${TOP}/build/bazel/bin/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_x86 -k ${BUILD_TARGETS}
+${TOP}/build/bazel/bin/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_x86_64 -k ${BUILD_TARGETS}
+${TOP}/build/bazel/bin/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_arm -k ${BUILD_TARGETS}
+${TOP}/build/bazel/bin/bazel --max_idle_secs=5 build ${BUILD_FLAGS} --platforms //build/bazel/platforms:android_arm64 -k ${BUILD_TARGETS}
set +x
echo
diff --git a/tests/apex/BUILD b/tests/apex/BUILD
index 560d4b85..3964ce7b 100644
--- a/tests/apex/BUILD
+++ b/tests/apex/BUILD
@@ -1,44 +1,216 @@
+load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
+load(":apex_compression_test.bzl", "apex_compression_test")
load(":apex_diff_test.bzl", "apex_diff_test")
-load(":apex_test.bzl", "apex_compression_test")
-load(":apex_aab_test.bzl", "apex_aab_test")
+load(":apex_package_name_test.bzl", "apex_package_name_test")
-apex_diff_test(
- name = "com.android.tzdata",
- apex1 = "//system/timezone/apex:com.android.tzdata.apex",
- apex2 = "@make_injection//:target/product/generic/system/apex/com.android.tzdata.apex",
-)
-
-apex_diff_test(
- name = "build.bazel.examples.apex.minimal",
- apex1 = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal.apex",
- apex2 = "@make_injection//:target/product/generic/system/product/apex/build.bazel.examples.apex.minimal.apex",
-)
-
-apex_diff_test(
- name = "com.android.adbd_uncompressed",
- apex1 = "//packages/modules/adb/apex:com.android.adbd.apex",
- apex2 = "@make_injection//:target/product/generic/system/apex/com.android.adbd.capex",
+filegroup(
+ name = "com.android.adbd.capex",
+ srcs = ["//packages/modules/adb/apex:com.android.adbd"],
+ output_group = "signed_compressed_output",
+ tags = ["manual"],
)
+# Test that the compressed apex and uncompressed apex have the same contents
apex_diff_test(
- name = "com.android.adbd_compressed",
- apex1 = "//packages/modules/adb/apex:com.android.adbd.capex",
- apex2 = "@make_injection//:target/product/generic/system/apex/com.android.adbd.capex",
+ name = "com.android.adbd_compressed_vs_uncompressed",
+ apex1 = ":com.android.adbd.capex",
+ apex2 = "//packages/modules/adb/apex:com.android.adbd",
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
)
apex_compression_test(
name = "build.bazel.examples.apex.minimal_apex",
- apex = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal.apex",
+ apex = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal",
compressed = False,
)
+filegroup(
+ name = "build.bazel.examples.apex.minimal_compressed.capex",
+ srcs = ["//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal_compressed"],
+ output_group = "signed_compressed_output",
+ tags = ["manual"],
+)
+
apex_compression_test(
name = "build.bazel.examples.apex.minimal_capex",
- apex = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal_compressed.capex",
+ apex = ":build.bazel.examples.apex.minimal_compressed.capex",
compressed = True,
)
-apex_aab_test(
- name = "build.bazel.examples.apex.minimal_mainline-module",
+genrule(
+ name = "build.bazel.examples.apex.minimal_android_manifest",
+ testonly = True,
+ srcs = ["//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal"],
+ outs = ["minimal_apex_android_manifest.txt"],
+ cmd = "$(location //frameworks/base/tools/aapt2) dump xmltree --file AndroidManifest.xml $< > $@",
+ tools = ["//frameworks/base/tools/aapt2"],
+)
+
+diff_test(
+ name = "minimal_apex_android_manifest_test",
+ file1 = "minimal_apex_android_manifest.txt",
+ file2 = "minimal_apex_android_manifest.golden",
+)
+
+genrule(
+ name = "adbd_aab_zipinfo",
+ testonly = True,
+ srcs = ["//build/bazel/vendor/google:com.android.adbd_apex_aab"],
+ outs = ["adbd_aab_zipinfo.actual"],
+ cmd = "$(location @bazel_tools//tools/zip:zipper) v $< | sort > $@",
+ tools = ["@bazel_tools//tools/zip:zipper"],
+)
+
+diff_test(
+ name = "aab_filelist_diff_test",
+ file1 = "adbd_aab_zipinfo.golden",
+ file2 = "adbd_aab_zipinfo.actual",
+)
+
+py_binary(
+ name = "apks_binary_arches",
+ srcs = ["apks_binary_arches.py"],
+)
+
+genrule(
+ name = "adbd_apks_arches",
+ testonly = True,
+ srcs = ["//build/bazel/vendor/google:com.android.adbd_apks"],
+ outs = ["adb_apks_arches.actual"],
+ cmd = " ".join([
+ "$(location :apks_binary_arches)",
+ "--deapexer-path $(location //system/apex/tools:deapexer)",
+ "--readelf-path $(location //prebuilts/clang/host/linux-x86:llvm-readelf)",
+ "--debugfs-path $(location //external/e2fsprogs/debugfs)",
+ "--blkid-path $(location //external/e2fsprogs/misc:blkid)",
+ "--fsckerofs-path $(location //external/erofs-utils:fsck.erofs)",
+ "$<",
+ "$@",
+ ]),
+ tags = ["manual"],
+ tools = [
+ ":apks_binary_arches",
+ "//external/e2fsprogs/debugfs",
+ "//external/e2fsprogs/misc:blkid",
+ "//external/erofs-utils:fsck.erofs",
+ "//prebuilts/clang/host/linux-x86:llvm-readelf",
+ "//system/apex/tools:deapexer",
+ ],
+)
+
+diff_test(
+ name = "apks_arch_diff_test",
+ file1 = "adb_apks_arches.golden",
+ file2 = "adb_apks_arches.actual",
+)
+
+genrule(
+ name = "adbd_apks_zipinfo",
+ testonly = True,
+ srcs = ["//build/bazel/vendor/google:com.android.adbd_apks"],
+ outs = ["adbd_apks_zipinfo.actual"],
+ cmd = "$(location @bazel_tools//tools/zip:zipper) v $< | sort > $@",
+ tools = ["@bazel_tools//tools/zip:zipper"],
+)
+
+diff_test(
+ name = "apks_filelist_diff_test",
+ file1 = "adbd_apks_zipinfo.golden",
+ file2 = "adbd_apks_zipinfo.actual",
+)
+
+apex_package_name_test(
+ name = "build.bazel.examples.apex.minimal_package_name",
apex = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal",
-) \ No newline at end of file
+ expected_package_name = "build.bazel.examples.apex.minimal",
+)
+
+apex_package_name_test(
+ name = "build.bazel.examples.apex.override.minimal_package_name",
+ apex = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.override.minimal",
+ expected_package_name = "build.bazel.examples.apex.override.minimal",
+)
+
+apex_package_name_test(
+ name = "com.android.adbd_package_name",
+ apex = "//packages/modules/adb/apex:com.android.adbd",
+ expected_package_name = "com.android.adbd",
+)
+
+genrule(
+ name = "com.android.adbd_manifest_json",
+ srcs = ["//packages/modules/adb/apex:com.android.adbd"],
+ outs = ["com.android.adbd_manifest.json"],
+ cmd = "$(location //system/apex/tools:deapexer) info $< > $@",
+ tools = ["//system/apex/tools:deapexer"],
+)
+
+diff_test(
+ name = "com.android.adbd_manifest_diff_test",
+ file1 = "com.android.adbd_manifest.json",
+ file2 = "com.android.adbd_manifest.json.golden",
+)
+
+filegroup(
+ name = "minimal_apex_coverage_files",
+ srcs = ["//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal"],
+ output_group = "coverage_files",
+)
+
+genrule(
+ name = "minimal_apex_using_txt",
+ srcs = [":minimal_apex_coverage_files"],
+ outs = ["minimal_apex_using.txt"],
+ cmd = "cat $(location :minimal_apex_coverage_files) | sort > $@",
+)
+
+diff_test(
+ name = "minimal_apex_symbols_used_by_apex_diff_test",
+ file1 = ":minimal_apex_using.txt",
+ file2 = ":minimal_apex_using.txt.golden",
+ target_compatible_with = [
+ "//build/bazel/platforms/os:android",
+ "//build/bazel/platforms/arch:arm64",
+ ],
+)
+
+filegroup(
+ name = "minimal_apex_java_coverage_files",
+ srcs = ["//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal"],
+ output_group = "java_coverage_files",
+)
+
+genrule(
+ name = "minimal_apex_using_xml",
+ srcs = [":minimal_apex_java_coverage_files"],
+ outs = ["minimal_apex_using.xml"],
+ cmd = "cat $(location :minimal_apex_java_coverage_files) | sort > $@",
+)
+
+diff_test(
+ name = "minimal_apex_java_symbols_used_by_apex_diff_test",
+ file1 = ":minimal_apex_using.xml",
+ file2 = ":minimal_apex_using.xml.golden",
+ target_compatible_with = [
+ "//build/bazel/platforms/os:android",
+ "//build/bazel/platforms/arch:arm64",
+ ],
+)
+
+filegroup(
+ name = "com.android.adbd_backing_libs",
+ srcs = ["//packages/modules/adb/apex:com.android.adbd"],
+ output_group = "backing_libs",
+)
+
+diff_test(
+ name = "com.android.adbd_backing_libs_diff_test",
+ file1 = ":com.android.adbd_backing_libs",
+ file2 = "com.android.adbd_backing.txt.golden",
+)
+
+filegroup(
+ name = "com.android.adbd_installed_files",
+ srcs = ["//packages/modules/adb/apex:com.android.adbd"],
+ output_group = "installed_files",
+)
diff --git a/tests/apex/adb_apks_arches.golden b/tests/apex/adb_apks_arches.golden
new file mode 100644
index 00000000..3c7bd0bc
--- /dev/null
+++ b/tests/apex/adb_apks_arches.golden
@@ -0,0 +1,14 @@
+standalone-arm64_v8a.apex:
+ lib64/libcutils.so: AArch64
+standalone-armeabi_v7a.apex:
+ lib/libcutils.so: ARM
+standalone-armeabi_v7a.arm64_v8a.apex:
+ lib/libcutils.so: ARM
+ lib64/libcutils.so: AArch64
+standalone-x86.apex:
+ lib/libcutils.so: Intel 80386
+standalone-x86.x86_64.apex:
+ lib/libcutils.so: Intel 80386
+ lib64/libcutils.so: Advanced Micro Devices X86-64
+standalone-x86_64.apex:
+ lib64/libcutils.so: Advanced Micro Devices X86-64
diff --git a/tests/apex/adbd_aab_zipinfo.golden b/tests/apex/adbd_aab_zipinfo.golden
new file mode 100644
index 00000000..b016eaec
--- /dev/null
+++ b/tests/apex/adbd_aab_zipinfo.golden
@@ -0,0 +1,18 @@
+f 777 BundleConfig.pb
+f 777 base/apex.pb
+f 777 base/apex/arm64-v8a.armeabi-v7a.build_info.pb
+f 777 base/apex/arm64-v8a.armeabi-v7a.img
+f 777 base/apex/arm64-v8a.build_info.pb
+f 777 base/apex/arm64-v8a.img
+f 777 base/apex/armeabi-v7a.build_info.pb
+f 777 base/apex/armeabi-v7a.img
+f 777 base/apex/x86.build_info.pb
+f 777 base/apex/x86.img
+f 777 base/apex/x86_64.build_info.pb
+f 777 base/apex/x86_64.img
+f 777 base/apex/x86_64.x86.build_info.pb
+f 777 base/apex/x86_64.x86.img
+f 777 base/assets.pb
+f 777 base/assets/NOTICE.html.gz
+f 777 base/manifest/AndroidManifest.xml
+f 777 base/root/apex_manifest.pb
diff --git a/tests/apex/adbd_apks_zipinfo.golden b/tests/apex/adbd_apks_zipinfo.golden
new file mode 100644
index 00000000..bdd56807
--- /dev/null
+++ b/tests/apex/adbd_apks_zipinfo.golden
@@ -0,0 +1,7 @@
+f 666 standalones/standalone-arm64_v8a.apex
+f 666 standalones/standalone-armeabi_v7a.apex
+f 666 standalones/standalone-armeabi_v7a.arm64_v8a.apex
+f 666 standalones/standalone-x86.apex
+f 666 standalones/standalone-x86.x86_64.apex
+f 666 standalones/standalone-x86_64.apex
+f 666 toc.pb
diff --git a/tests/apex/apex_aab_test.bzl b/tests/apex/apex_aab_test.bzl
index 566f4313..f88a6df6 100644
--- a/tests/apex/apex_aab_test.bzl
+++ b/tests/apex/apex_aab_test.bzl
@@ -1,22 +1,20 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-load("//build/bazel/rules/apex:mainline_modules.bzl", "apex_aab")
-
-def apex_aab_test(name, apex, **kwargs):
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/bazel/rules/apex:apex_aab.bzl", "apex_aab")
+
+def apex_aab_test(name, apex, golden):
"""Diff the .aab generated by Bazel and Soong"""
aab_name = name + "_apex_aab"
@@ -29,13 +27,13 @@ def apex_aab_test(name, apex, **kwargs):
name = name + "_wrapper_sh_lib",
data = [
":" + aab_name,
- "build.bazel.examples.apex.minimal.aab",
+ golden,
],
)
args = [
"$(location //build/bazel/tests/apex:" + aab_name + ")",
- "$(location build.bazel.examples.apex.minimal.aab)",
+ "$(location %s)" % golden,
]
native.sh_test(
@@ -46,7 +44,8 @@ def apex_aab_test(name, apex, **kwargs):
":" + name + "_wrapper_sh_lib",
"@bazel_tools//tools/zip:zipper",
":" + aab_name,
- "build.bazel.examples.apex.minimal.aab",
+ golden,
],
args = args,
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
)
diff --git a/tests/apex/apex_aab_test.sh b/tests/apex/apex_aab_test.sh
deleted file mode 100755
index d28cea4b..00000000
--- a/tests/apex/apex_aab_test.sh
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/bash
-
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-set -xeuo pipefail
-
-readonly arg_aab_filepath=$1
-readonly arg_prebuilt_aab_filepath=$2
-
-readonly ZIPPER=$(rlocation bazel_tools/tools/zip/zipper/zipper)
-readonly -a AAB_FILES=(
- "BundleConfig.pb"
- "base/apex.pb"
- "base/apex/arm64-v8a.build_info.pb"
- "base/apex/arm64-v8a.img"
- "base/apex/armeabi-v7a.build_info.pb"
- "base/apex/armeabi-v7a.img"
- "base/apex/x86.build_info.pb"
- "base/apex/x86.img"
- "base/apex/x86_64.build_info.pb"
- "base/apex/x86_64.img"
- "base/manifest/AndroidManifest.xml"
- "base/root/apex_manifest.pb"
-)
-readonly -a EXCLUDE_FILES=(
- # The following files are 1)not in bazel built abb file; 2)not same as the
- # ones created by Soong, so exclude them in diff to make the test case pass.
- #(TODO: b/190817312) assets/NOTICE.html.gz is not in bazel built aab file.
- "assets"
- "NOTICE.html.gz"
- #(TODO: b/222587783) base/assets.pb is not in bazel built aab file
- "assets.pb"
- #(TODO: b/222588072) all .img files are different
- "*.img"
- #(TODO: b/222588241) all .build_info.pb files are different
- "*.build_info.pb"
- #(TODO: b/222588061) base/root/apex_manifest.pb
- "apex_manifest.pb"
- #(TODO: b/222587792) base/manifest/AndroidManifest.xml
- # two bytes are different, prebuilt has 0x20, bazel built has 0x1f
- "AndroidManifest.xml"
-)
-
-# Check if .aab file contains specified files
-function aab_contains_files() {
- local aab_filepath=$1
- shift
- local expected_files=("$@")
- local aab_entries=$($ZIPPER v "$aab_filepath")
- for file in "${expected_files[@]}"; do
- if ! echo -e "$aab_entries" | grep "$file"; then
- echo "Failed to find file $file in $aab_filepath"
- exit 1
- fi
- done
-}
-
-# Test .aab file contains required files
-function test_aab_contains_required_files() {
- if [ "${arg_aab_filepath: -4}" != ".aab" ]; then
- echo "@arg_aab_filepath does not have .aab as extension."
- exit 1
- fi
- aab_contains_files "$arg_aab_filepath" "${AAB_FILES[@]}"
-}
-
-function test_aab_files_diff() {
- local prebuilt_aab_file_dir=$(dirname "$arg_prebuilt_aab_filepath")
-
- local extracted_prebuilt_aab_dir=$(mktemp -d -p "$prebuilt_aab_file_dir" prebuilt_XXXXXX)
- $ZIPPER x "$arg_prebuilt_aab_filepath" -d "$extracted_prebuilt_aab_dir"
-
- local extracted_aab_dir=$(mktemp -d -p "$prebuilt_aab_file_dir" aab_XXXXXX)
- $ZIPPER x "$arg_aab_filepath" -d "$extracted_aab_dir"
-
- local diff_exclude=
- for pattern in "${EXCLUDE_FILES[@]}"; do
- diff_exclude="$diff_exclude -x $pattern"
- done
-
- if ! diff -w $diff_exclude -r $extracted_prebuilt_aab_dir $extracted_aab_dir; then
- echo ".aab file content is not same as the prebuilt one."
- exit 1
- fi
-
- rm -rf "${extracted_prebuilt_aab_dir}"
- rm -rf "${extracted_aab_dir}"
-}
-
-test_aab_contains_required_files
-test_aab_files_diff
-
-echo "Passed all test cases." \ No newline at end of file
diff --git a/tests/apex/apex_compression_test.bzl b/tests/apex/apex_compression_test.bzl
new file mode 100644
index 00000000..4a5f1246
--- /dev/null
+++ b/tests/apex/apex_compression_test.bzl
@@ -0,0 +1,41 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def apex_compression_test(name, apex, compressed):
+ """This verifies APEX or compressed APEX file:
+ 1) has the correct file extension name
+ 2) contains the required files specified by the APEX file format
+ """
+
+ native.sh_library(
+ name = name + "_wrapper_sh_lib",
+ data = [apex],
+ )
+
+ args = ["$(location " + apex + ")"]
+ if compressed:
+ args.append("compressed")
+
+ native.sh_test(
+ name = name,
+ srcs = ["apex_compression_test.sh"],
+ deps = ["@bazel_tools//tools/bash/runfiles"],
+ data = [
+ ":" + name + "_wrapper_sh_lib",
+ "@bazel_tools//tools/zip:zipper",
+ apex,
+ ],
+ args = args,
+ target_compatible_with = ["//build/bazel/platforms/os:android"],
+ )
diff --git a/tests/apex/apex_test.sh b/tests/apex/apex_compression_test.sh
index 78f89eb6..fe5e9808 100755
--- a/tests/apex/apex_test.sh
+++ b/tests/apex/apex_compression_test.sh
@@ -26,6 +26,7 @@ readonly -a APEX_FILES=(
"AndroidManifest.xml"
"apex_payload.img"
"apex_pubkey"
+ "assets/NOTICE.html.gz"
"META-INF/CERT\.SF"
"META-INF/CERT\.RSA"
"META-INF/MANIFEST\.MF"
diff --git a/tests/apex/apex_diff_test.bzl b/tests/apex/apex_diff_test.bzl
index e809f943..b7def314 100644
--- a/tests/apex/apex_diff_test.bzl
+++ b/tests/apex/apex_diff_test.bzl
@@ -1,35 +1,41 @@
load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
-def apex_diff_test(name, apex1, apex2, expected_diff=None, **kwargs):
+def apex_diff_test(
+ name,
+ apex1,
+ apex2,
+ target_compatible_with = None,
+ expected_diff = None):
"""A test that compares the content list of two APEXes, determined by `deapexer`."""
native.genrule(
name = name + "_apex1_deapex",
tools = [
- "@make_injection//:host/linux-x86/bin/deapexer",
+ "//system/apex/tools:deapexer",
"//external/e2fsprogs/debugfs:debugfs",
],
srcs = [apex1],
outs = [name + ".apex1.txt"],
- cmd = "$(location @make_injection//:host/linux-x86/bin/deapexer) --debugfs_path=$(location //external/e2fsprogs/debugfs:debugfs) list $< > $@",
+ cmd = "$(location //system/apex/tools:deapexer) --debugfs_path=$(location //external/e2fsprogs/debugfs:debugfs) list $< > $@",
)
native.genrule(
name = name + "_apex2_deapex",
tools = [
- "@make_injection//:host/linux-x86/bin/deapexer",
+ "//system/apex/tools:deapexer",
"//external/e2fsprogs/debugfs:debugfs",
],
srcs = [apex2],
outs = [name + ".apex2.txt"],
- cmd = "$(location @make_injection//:host/linux-x86/bin/deapexer) --debugfs_path=$(location //external/e2fsprogs/debugfs:debugfs) list $< > $@",
+ cmd = "$(location //system/apex/tools:deapexer) --debugfs_path=$(location //external/e2fsprogs/debugfs:debugfs) list $< > $@",
)
if expected_diff == None:
diff_test(
- name = name + "_content_diff_test",
+ name = name,
file1 = name + ".apex1.txt",
file2 = name + ".apex2.txt",
+ target_compatible_with = target_compatible_with,
)
else:
# Make our own diff to compare against the expected one
@@ -44,7 +50,8 @@ def apex_diff_test(name, apex1, apex2, expected_diff=None, **kwargs):
cmd_bash = "diff $(SRCS) > $@ || true",
)
diff_test(
- name = name + "_content_diff_test",
+ name = name,
file1 = name + ".apex1.apex2.diff.txt",
file2 = expected_diff,
+ target_compatible_with = target_compatible_with,
)
diff --git a/tests/apex/apex_package_name_test.bzl b/tests/apex/apex_package_name_test.bzl
new file mode 100644
index 00000000..4fadc547
--- /dev/null
+++ b/tests/apex/apex_package_name_test.bzl
@@ -0,0 +1,38 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
+
+def apex_package_name_test(name, apex, expected_package_name):
+ """Compare the actual package name of an apex using aapt2."""
+ native.genrule(
+ name = name + "_actual_package_name",
+ tools = ["//frameworks/base/tools/aapt2"],
+ srcs = [apex],
+ outs = [name + "_actual_package_name.txt"],
+ cmd = "$(location //frameworks/base/tools/aapt2) dump packagename $< > $@",
+ tags = ["manual"],
+ )
+
+ native.genrule(
+ name = name + "_expected_package_name",
+ outs = [name + "expected_package_name.txt"],
+ cmd = "echo " + expected_package_name + " > $@",
+ tags = ["manual"],
+ )
+
+ diff_test(
+ name = name,
+ file1 = name + "_actual_package_name",
+ file2 = name + "_expected_package_name",
+ )
diff --git a/tests/apex/apex_test.bzl b/tests/apex/apex_test.bzl
deleted file mode 100644
index a9181ec1..00000000
--- a/tests/apex/apex_test.bzl
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-Copyright (C) 2022 The Android Open Source Project
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-def apex_compression_test(name, apex, compressed, **kwargs):
- """This verifies APEX or compressed APEX file:
- 1) has the correct file extension name
- 2) contains the required files specified by the APEX file format
- """
-
- native.sh_library(
- name = name + "_wrapper_sh_lib",
- data = [apex],
- )
-
- args = ["$(location " + apex + ")"]
- if compressed:
- args.append("compressed")
-
- native.sh_test(
- name = name,
- srcs = ["apex_test.sh"],
- deps = ["@bazel_tools//tools/bash/runfiles"],
- data = [
- ":" + name + "_wrapper_sh_lib",
- "@bazel_tools//tools/zip:zipper",
- apex,
- ],
- args = args,
- )
diff --git a/tests/apex/apks_binary_arches.py b/tests/apex/apks_binary_arches.py
new file mode 100644
index 00000000..6cf0f7d8
--- /dev/null
+++ b/tests/apex/apks_binary_arches.py
@@ -0,0 +1,77 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="This program takes an .apks file and outputs a text file containing the " +
+ "names of all the libcutils.so files found in each contained apex, and their architectures.")
+ parser.add_argument('--deapexer-path', required=True)
+ parser.add_argument('--readelf-path', required=True)
+ parser.add_argument('--debugfs-path', required=True)
+ parser.add_argument('--blkid-path', required=True)
+ parser.add_argument('--fsckerofs-path', required=True)
+ parser.add_argument('apks')
+ parser.add_argument('output')
+ args = parser.parse_args()
+
+ with tempfile.TemporaryDirectory() as d:
+ with zipfile.ZipFile(args.apks) as zip:
+ zip.extractall(d)
+ result = ''
+ for name in sorted(os.listdir(os.path.join(d, 'standalones'))):
+ extractedDir = os.path.join(d, 'standalones', name+'_extracted')
+ subprocess.run([
+ args.deapexer_path,
+ '--debugfs_path',
+ args.debugfs_path,
+ '--blkid_path',
+ args.blkid_path,
+ '--fsckerofs_path',
+ args.fsckerofs_path,
+ 'extract',
+ os.path.join(d, 'standalones', name),
+ extractedDir,
+ ], check=True)
+
+ result += name + ':\n'
+ all_files = []
+ for root, _, files in os.walk(extractedDir):
+ for f in files:
+ if f == 'libcutils.so':
+ all_files.append(os.path.join(root, f))
+ all_files.sort()
+ for f in all_files:
+ readOutput = subprocess.check_output([
+ args.readelf_path,
+ '-h',
+ f,
+ ], text=True)
+ arch = [x.strip().removeprefix('Machine:').strip() for x in readOutput.split('\n') if x.strip().startswith('Machine:')]
+ if len(arch) != 1:
+ sys.exit(f"Expected 1 arch, got {arch}")
+ rel = os.path.relpath(f, extractedDir)
+ result += f' {rel}: {arch[0]}\n'
+
+ with open(args.output, 'w') as f:
+ f.write(result)
+
+if __name__ == "__main__":
+ main()
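The "Machine:" extraction above can be exercised in isolation; a small sketch with a canned readelf -h excerpt (the sample text is illustrative, not captured from a real run):

    sample = (
        "ELF Header:\n"
        "  Class:                             ELF64\n"
        "  Machine:                           AArch64\n"
        "  Version:                           0x1\n"
    )
    # Same comprehension as in main(): keep only the 'Machine:' line and strip the label.
    arch = [x.strip().removeprefix('Machine:').strip()
            for x in sample.split('\n') if x.strip().startswith('Machine:')]
    assert arch == ['AArch64']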
diff --git a/tests/apex/build.bazel.examples.apex.minimal.aab b/tests/apex/build.bazel.examples.apex.minimal.aab
deleted file mode 100644
index 4edd3a20..00000000
--- a/tests/apex/build.bazel.examples.apex.minimal.aab
+++ /dev/null
Binary files differ
diff --git a/tests/apex/com.android.adbd_backing.txt.golden b/tests/apex/com.android.adbd_backing.txt.golden
new file mode 100644
index 00000000..95212975
--- /dev/null
+++ b/tests/apex/com.android.adbd_backing.txt.golden
@@ -0,0 +1 @@
+libadb_pairing_auth.so libadb_pairing_connection.so libadb_pairing_server.so libadbconnection_client.so libbase.so libc++.so libcrypto.so libcrypto_utils.so libcutils.so
diff --git a/tests/apex/com.android.adbd_installed-files.txt.golden b/tests/apex/com.android.adbd_installed-files.txt.golden
new file mode 100644
index 00000000..63d86553
--- /dev/null
+++ b/tests/apex/com.android.adbd_installed-files.txt.golden
@@ -0,0 +1,11 @@
+1803948 ./bin/adbd
+772404 ./lib/libcrypto.so
+443480 ./lib/libc++.so
+291756 ./lib/libadb_pairing_connection.so
+174552 ./lib/libbase.so
+124604 ./lib/libadb_pairing_auth.so
+124220 ./lib/libadbconnection_client.so
+60712 ./lib/libcutils.so
+36688 ./lib/libadb_pairing_server.so
+3956 ./lib/libcrypto_utils.so
+181 ./etc/init.rc
diff --git a/tests/apex/com.android.adbd_manifest.json.golden b/tests/apex/com.android.adbd_manifest.json.golden
new file mode 100644
index 00000000..8a0736da
--- /dev/null
+++ b/tests/apex/com.android.adbd_manifest.json.golden
@@ -0,0 +1,19 @@
+{
+ "name": "com.android.adbd",
+ "version": "339990000",
+ "provideNativeLibs": [
+ "libadb_pairing_auth.so",
+ "libadb_pairing_connection.so",
+ "libadb_pairing_server.so",
+ "libadbconnection_client.so"
+ ],
+ "requireNativeLibs": [
+ "libadbd_auth.so",
+ "libadbd_fs.so",
+ "libc.so",
+ "libdl.so",
+ "liblog.so",
+ "libm.so",
+ "libselinux.so"
+ ]
+}
diff --git a/tests/apex/minimal_apex_android_manifest.golden b/tests/apex/minimal_apex_android_manifest.golden
new file mode 100644
index 00000000..16a322b8
--- /dev/null
+++ b/tests/apex/minimal_apex_android_manifest.golden
@@ -0,0 +1,13 @@
+N: android=http://schemas.android.com/apk/res/android (line=2)
+ E: manifest (line=2)
+ A: http://schemas.android.com/apk/res/android:versionCode(0x0101021b)=1
+ A: http://schemas.android.com/apk/res/android:compileSdkVersion(0x01010572)=33
+ A: http://schemas.android.com/apk/res/android:compileSdkVersionCodename(0x01010573)="UpsideDownCake" (Raw: "UpsideDownCake")
+ A: package="build.bazel.examples.apex.minimal" (Raw: "build.bazel.examples.apex.minimal")
+ A: platformBuildVersionCode=33
+ A: platformBuildVersionName="UpsideDownCake" (Raw: "UpsideDownCake")
+ E: uses-sdk (line=0)
+ A: http://schemas.android.com/apk/res/android:minSdkVersion(0x0101020c)=30
+ A: http://schemas.android.com/apk/res/android:targetSdkVersion(0x01010270)=10000
+ E: application (line=5)
+ A: http://schemas.android.com/apk/res/android:hasCode(0x0101000c)=false
diff --git a/tests/apex/minimal_apex_using.txt.golden b/tests/apex/minimal_apex_using.txt.golden
new file mode 100644
index 00000000..c3a1d94d
--- /dev/null
+++ b/tests/apex/minimal_apex_using.txt.golden
@@ -0,0 +1,151 @@
+
+_Unwind_DeleteException@LIBC_R
+_Unwind_GetIP@LIBC_R
+_Unwind_GetLanguageSpecificData@LIBC_R
+_Unwind_GetRegionStart@LIBC_R
+_Unwind_RaiseException@LIBC_R
+_Unwind_Resume@LIBC_R
+_Unwind_SetGR@LIBC_R
+_Unwind_SetIP@LIBC_R
+__ctype_get_mb_cur_max@LIBC
+__cxa_atexit@LIBC
+__cxa_finalize@LIBC
+__cxa_finalize@LIBC
+__cxa_finalize@LIBC
+__cxa_finalize@LIBC
+__cxa_thread_atexit_impl@LIBC
+__errno@LIBC
+__fwrite_chk@LIBC_N
+__gnu_strerror_r@LIBC
+__libc_init@LIBC
+__open_2@LIBC
+__read_chk@LIBC
+__stack_chk_fail@LIBC
+__stack_chk_fail@LIBC
+__stack_chk_fail@LIBC
+__stack_chk_fail@LIBC
+__stack_chk_fail@LIBC
+__strlen_chk@LIBC
+__system_property_get@LIBC
+__vsnprintf_chk@LIBC
+abort@LIBC
+android_set_abort_message@LIBC
+btowc@LIBC
+calloc@LIBC
+clock_gettime@LIBC
+close@LIBC
+closelog@LIBC
+fflush@LIBC
+fprintf@LIBC
+fputc@LIBC
+free@LIBC
+freelocale@LIBC
+fwrite@LIBC
+getauxval@LIBC
+getc@LIBC
+iswalpha@LIBC
+iswblank@LIBC
+iswcntrl@LIBC
+iswdigit@LIBC
+iswlower@LIBC
+iswprint@LIBC
+iswpunct@LIBC
+iswspace@LIBC
+iswupper@LIBC
+iswxdigit@LIBC
+localeconv@LIBC
+malloc@LIBC
+mbrlen@LIBC
+mbrtowc@LIBC
+mbsnrtowcs@LIBC
+mbsrtowcs@LIBC
+mbtowc@LIBC
+memchr@LIBC
+memcmp@LIBC
+memcpy@LIBC
+memmove@LIBC
+memset@LIBC
+memset@LIBC
+memset@LIBC
+memset@LIBC
+memset@LIBC
+nanosleep@LIBC
+newlocale@LIBC
+openlog@LIBC
+posix_memalign@LIBC
+pthread_cond_broadcast@LIBC
+pthread_cond_destroy@LIBC
+pthread_cond_signal@LIBC
+pthread_cond_timedwait@LIBC
+pthread_cond_wait@LIBC
+pthread_detach@LIBC
+pthread_equal@LIBC
+pthread_getspecific@LIBC
+pthread_join@LIBC
+pthread_key_create@LIBC
+pthread_mutex_destroy@LIBC
+pthread_mutex_init@LIBC
+pthread_mutex_lock@LIBC
+pthread_mutex_trylock@LIBC
+pthread_mutex_unlock@LIBC
+pthread_mutexattr_destroy@LIBC
+pthread_mutexattr_init@LIBC
+pthread_mutexattr_settype@LIBC
+pthread_once@LIBC
+pthread_self@LIBC
+pthread_setspecific@LIBC
+realloc@LIBC
+sched_yield@LIBC
+setlocale@LIBC
+snprintf@LIBC
+sscanf@LIBC
+strcmp@LIBC
+strcoll@LIBC
+strftime@LIBC
+strlen@LIBC
+strlen@LIBC
+strlen@LIBC
+strlen@LIBC
+strncmp@LIBC
+strtod@LIBC
+strtod_l@LIBC_O
+strtof@LIBC
+strtof_l@LIBC_O
+strtol@LIBC
+strtold@LIBC
+strtold_l@LIBC
+strtoll@LIBC
+strtoll_l@LIBC
+strtoul@LIBC
+strtoull@LIBC
+strtoull_l@LIBC
+strxfrm@LIBC
+swprintf@LIBC
+sysconf@LIBC
+syslog@LIBC
+towlower@LIBC
+towupper@LIBC
+ungetc@LIBC
+uselocale@LIBC
+vasprintf@LIBC
+vfprintf@LIBC
+vsnprintf@LIBC
+vsscanf@LIBC
+wcrtomb@LIBC
+wcscoll@LIBC
+wcslen@LIBC
+wcsnrtombs@LIBC
+wcstod@LIBC
+wcstof@LIBC
+wcstol@LIBC
+wcstold@LIBC
+wcstoll@LIBC
+wcstoul@LIBC
+wcstoull@LIBC
+wcsxfrm@LIBC
+wctob@LIBC
+wmemchr@LIBC
+wmemcmp@LIBC
+wmemcpy@LIBC
+wmemmove@LIBC
+wmemset@LIBC
diff --git a/tests/apex/minimal_apex_using.xml.golden b/tests/apex/minimal_apex_using.xml.golden
new file mode 100644
index 00000000..24788eb4
--- /dev/null
+++ b/tests/apex/minimal_apex_using.xml.golden
@@ -0,0 +1,2 @@
+</externals>
+<externals>
diff --git a/tests/bionic/BUILD b/tests/bionic/BUILD
index 8c6e3236..3de64ff4 100644
--- a/tests/bionic/BUILD
+++ b/tests/bionic/BUILD
@@ -1,4 +1,4 @@
-load("@soong_injection//cc_toolchain:constants.bzl", "constants")
+load("@soong_injection//cc_toolchain:config_constants.bzl", "constants")
# This test requires bp2build to run and the generated BUILD files in the source tree.
sh_test(
@@ -13,7 +13,7 @@ sh_test(
"//bionic/linker:ld-android_bp2build_cc_library_static",
"//prebuilts/clang/host/linux-x86:test_tools",
],
- env = {"CLANG_DEFAULT_VERSION": constants.CLANG_DEFAULT_VERSION},
+ env = {"ClangVersion": constants.ClangVersion},
deps = ["@bazel_tools//tools/bash/runfiles"],
)
@@ -24,6 +24,6 @@ sh_test(
"//bionic/libc",
"//bionic/libc:libc_unstripped",
],
- env = {"CLANG_DEFAULT_VERSION": constants.CLANG_DEFAULT_VERSION},
+ env = {"ClangVersion": constants.ClangVersion},
deps = ["@bazel_tools//tools/bash/runfiles"],
)
diff --git a/tests/bionic/verify_bionic_outputs.sh b/tests/bionic/verify_bionic_outputs.sh
index 0001ff56..dec1e18e 100755
--- a/tests/bionic/verify_bionic_outputs.sh
+++ b/tests/bionic/verify_bionic_outputs.sh
@@ -18,8 +18,8 @@ set -euo pipefail
source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
-READELF="$(rlocation __main__/prebuilts/clang/host/linux-x86/${CLANG_DEFAULT_VERSION}/bin/llvm-readelf)"
-NM="$(rlocation __main__/prebuilts/clang/host/linux-x86/${CLANG_DEFAULT_VERSION}/bin/llvm-nm)"
+READELF="$(rlocation __main__/prebuilts/clang/host/linux-x86/${ClangVersion}/bin/llvm-readelf)"
+NM="$(rlocation __main__/prebuilts/clang/host/linux-x86/${ClangVersion}/bin/llvm-nm)"
# This should be abstracted to a unit-test library when it has more uses.
function assert_contains_regex() {
diff --git a/tests/partitions/BUILD b/tests/partitions/BUILD
new file mode 100644
index 00000000..fbb4dcf1
--- /dev/null
+++ b/tests/partitions/BUILD
@@ -0,0 +1,9 @@
+load(":image_contents_test.bzl", "image_contents_test")
+
+image_contents_test(
+ name = "image_contains_apex_test",
+ expected = "//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal",
+ image = "//build/bazel/examples/partitions:system_image",
+ path = "/system/apex/build.bazel.examples.apex.minimal.apex",
+ tags = ["manual"], # b/234509030
+)
diff --git a/tests/partitions/image_contents_test.bzl b/tests/partitions/image_contents_test.bzl
new file mode 100644
index 00000000..06cf868f
--- /dev/null
+++ b/tests/partitions/image_contents_test.bzl
@@ -0,0 +1,31 @@
+load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
+
+def image_contents_test(
+ name,
+ image,
+ path,
+ expected,
+ target_compatible_with = None,
+ tags = []):
+ """A test that extracts a file from a disk image file, and then asserts that it's identical to some other file."""
+
+ extracted_path = name + path.replace("/", "_") + "_extracted.bin"
+
+ native.genrule(
+ name = name + "_extracted",
+ tools = [
+ "//external/e2fsprogs/debugfs:debugfs",
+ ],
+ srcs = [image],
+ outs = [extracted_path],
+ cmd = "$(location //external/e2fsprogs/debugfs:debugfs) -R 'dump " + path + " $@' $<",
+ tags = ["manual"],
+ )
+
+ diff_test(
+ name = name,
+ file1 = extracted_path,
+ file2 = expected,
+ target_compatible_with = target_compatible_with,
+ tags = tags,
+ )
diff --git a/tests/products/BUILD b/tests/products/BUILD
new file mode 100644
index 00000000..22dd0163
--- /dev/null
+++ b/tests/products/BUILD
@@ -0,0 +1,88 @@
+load("@//build/bazel/product_config:android_product.bzl", "android_product")
+load("@//build/bazel/tests/products:aosp_arm.variables.bzl", _soong_variables_arm = "variables")
+load("@//build/bazel/tests/products:aosp_arm64.variables.bzl", _soong_variables_arm64 = "variables")
+load("@//build/bazel/tests/products:aosp_x86.variables.bzl", _soong_variables_x86 = "variables")
+load("@//build/bazel/tests/products:aosp_x86_64.variables.bzl", _soong_variables_x86_64 = "variables")
+load("@bazel_skylib//lib:dicts.bzl", "dicts")
+
+package(default_visibility = [
+ "@//build/bazel/product_config:__subpackages__",
+])
+
+# This package contains pregenerated soong.variables files for the aosp_<arch> products, used to
+# make platforms for testing. This is an optimization: we could generate these directly from source
+# at build time, but that would add time to every `m nothing`. Converting the product config
+# makefiles to Starlark and checking them in would also solve this performance issue.
+#
+# This is also where we define platforms that set certain product config variables to specific
+# values for testing. Unfortunately, we cannot just transition on a single product config variable,
+# due to limitations in Bazel.
+
+android_product(
+ name = "aosp_arm_for_testing",
+ soong_variables = _soong_variables_arm,
+)
+
+android_product(
+ name = "aosp_arm_for_testing_custom_linker_alignment",
+ soong_variables = dicts.add(
+ _soong_variables_arm,
+ {"DeviceMaxPageSizeSupported": "65536"},
+ ),
+)
+
+android_product(
+ name = "aosp_arm64_for_testing",
+ soong_variables = _soong_variables_arm64,
+)
+
+android_product(
+ name = "aosp_arm64_for_testing_no_compression",
+ soong_variables = dicts.add(
+ _soong_variables_arm64,
+ {"CompressedApex": False},
+ ),
+)
+
+android_product(
+ name = "aosp_arm64_for_testing_unbundled_build",
+ soong_variables = dicts.add(
+ _soong_variables_arm64,
+ {"Unbundled_build": True},
+ ),
+)
+
+android_product(
+ name = "aosp_arm64_for_testing_with_overrides_and_app_cert",
+ soong_variables = dicts.add(
+ _soong_variables_arm64,
+ {
+ "ManifestPackageNameOverrides": [
+ "apex_certificate_label_with_overrides:another",
+ "package_name_override_from_config:another.package",
+ ],
+ "CertificateOverrides": [
+ "apex_certificate_label_with_overrides:another.certificate",
+ ],
+ "DefaultAppCertificate": "build/bazel/rules/apex/testdata/devkey",
+ },
+ ),
+)
+
+android_product(
+ name = "aosp_arm64_for_testing_custom_linker_alignment",
+ soong_variables = dicts.add(
+ _soong_variables_arm64,
+ {"DeviceMaxPageSizeSupported": "16384"},
+ ),
+)
+
+android_product(
+ name = "aosp_x86_for_testing",
+ soong_variables = _soong_variables_x86,
+)
+
+android_product(
+ name = "aosp_x86_64_for_testing",
+ soong_variables = _soong_variables_x86_64,
+)
diff --git a/tests/products/aosp_arm.variables.bzl b/tests/products/aosp_arm.variables.bzl
new file mode 100644
index 00000000..75d59e75
--- /dev/null
+++ b/tests/products/aosp_arm.variables.bzl
@@ -0,0 +1,242 @@
+variables = json.decode('''{
+ "Make_suffix": "-aosp_arm",
+ "BuildId": "AOSP.MASTER",
+ "BuildNumberFile": "build_number.txt",
+ "Platform_version_name": "UpsideDownCake",
+ "Platform_sdk_version": 33,
+ "Platform_sdk_codename": "UpsideDownCake",
+ "Platform_sdk_final": false,
+ "Platform_sdk_extension_version": 3,
+ "Platform_base_sdk_extension_version": 3,
+ "Platform_version_active_codenames": ["UpsideDownCake"],
+ "Platform_security_patch": "2023-03-05",
+ "Platform_preview_sdk_version": "1",
+ "Platform_base_os": "",
+ "Platform_version_last_stable": "13",
+ "Platform_version_known_codenames": "Base,Base11,Cupcake,Donut,Eclair,Eclair01,EclairMr1,Froyo,Gingerbread,GingerbreadMr1,Honeycomb,HoneycombMr1,HoneycombMr2,IceCreamSandwich,IceCreamSandwichMr1,JellyBean,JellyBeanMr1,JellyBeanMr2,Kitkat,KitkatWatch,Lollipop,LollipopMr1,M,N,NMr1,O,OMr1,P,Q,R,S,Sv2,Tiramisu,UpsideDownCake",
+ "Platform_min_supported_target_sdk_version": "23",
+ "Allow_missing_dependencies": false,
+ "Unbundled_build": false,
+ "Unbundled_build_apps": [],
+ "Unbundled_build_image": false,
+ "Always_use_prebuilt_sdks": false,
+ "Debuggable": true,
+ "Eng": false,
+ "DeviceName": "generic",
+ "DeviceProduct": "aosp_arm",
+ "DeviceArch": "arm",
+ "DeviceArchVariant": "armv7-a-neon",
+ "DeviceCpuVariant": "generic",
+ "DeviceAbi": ["armeabi-v7a","armeabi"],
+ "DeviceSecondaryArch": "",
+ "DeviceSecondaryArchVariant": "",
+ "DeviceSecondaryCpuVariant": "",
+ "DeviceSecondaryAbi": [],
+ "DeviceMaxPageSizeSupported": "4096",
+ "Aml_abis": false,
+ "Ndk_abis": false,
+ "NativeBridgeArch": "",
+ "NativeBridgeArchVariant": "",
+ "NativeBridgeCpuVariant": "",
+ "NativeBridgeAbi": [],
+ "NativeBridgeRelativePath": "",
+ "NativeBridgeSecondaryArch": "",
+ "NativeBridgeSecondaryArchVariant": "",
+ "NativeBridgeSecondaryCpuVariant": "",
+ "NativeBridgeSecondaryAbi": [],
+ "NativeBridgeSecondaryRelativePath": "",
+ "HostArch": "x86_64",
+ "HostSecondaryArch": "x86",
+ "HostStaticBinaries": false,
+ "HostMusl": false,
+ "CrossHost": "windows",
+ "CrossHostArch": "x86",
+ "CrossHostSecondaryArch": "x86_64",
+ "DeviceResourceOverlays": ["device/generic/goldfish/overlay"],
+ "ProductResourceOverlays": [],
+ "EnforceRROTargets": ["*"],
+ "EnforceRROExcludedOverlays": [],
+ "AAPTCharacteristics": "emulator",
+ "AAPTConfig": ["en_US,af_ZA,am_ET,ar_EG,ar_XB,as_IN,az_AZ,be_BY,bg_BG,bn_BD,bs_BA,ca_ES,cs_CZ,da_DK,de_DE,el_GR,en_AU,en_CA,en_GB,en_IN,en_XA,es_ES,es_US,et_EE,eu_ES,fa_IR,fi_FI,fr_CA,fr_FR,gl_ES,gu_IN,hi_IN,hr_HR,hu_HU,hy_AM,in_ID,is_IS,it_IT,iw_IL,ja_JP,ka_GE,kk_KZ,km_KH,kn_IN,ko_KR,ky_KG,lo_LA,lt_LT,lv_LV,mk_MK,ml_IN,mn_MN,mr_IN,ms_MY,my_MM,nb_NO,ne_NP,nl_NL,or_IN,pa_IN,pl_PL,pt_BR,pt_PT,ro_RO,ru_RU,si_LK,sk_SK,sl_SI,sq_AL,sr_Latn_RS,sr_RS,sv_SE,sw_TZ,ta_IN,te_IN,th_TH,tl_PH,tr_TR,uk_UA,ur_PK,uz_UZ,vi_VN,zh_CN,zh_HK,zh_TW,zu_ZA,"],
+ "AAPTPreferredConfig": "",
+ "AAPTPrebuiltDPI": [],
+ "DefaultAppCertificate": "",
+ "MainlineSepolicyDevCertificates": "build/make/target/product/security/",
+ "AppsDefaultVersionName": "UpsideDownCake",
+ "SanitizeHost": [],
+ "SanitizeDevice": [],
+ "SanitizeDeviceDiag": [],
+ "SanitizeDeviceArch": [],
+ "Safestack": false,
+ "EnableCFI": true,
+ "CFIExcludePaths": [],
+ "CFIIncludePaths": ["device/generic/goldfish/wifi/wpa_supplicant_8_lib","device/google/cuttlefish/guest/libs/wpa_supplicant_8_lib","external/tinyxml2","external/wpa_supplicant_8","frameworks/av/camera","frameworks/av/media","frameworks/av/services","frameworks/minikin","hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib","hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib","hardware/interfaces/nfc","hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib","hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib","hardware/interfaces/keymaster","hardware/interfaces/security","packages/modules/Bluetooth/system","system/chre","system/core/libnetutils","system/libziparchive","system/gatekeeper","system/keymaster","system/nfc","system/security"],
+ "IntegerOverflowExcludePaths": [],
+ "MemtagHeapExcludePaths": [],
+ "MemtagHeapAsyncIncludePaths": [],
+ "MemtagHeapSyncIncludePaths": [],
+ "DisableScudo": false,
+ "ClangTidy": false,
+ "TidyChecks": "",
+ "JavaCoveragePaths": [],
+ "JavaCoverageExcludePaths": [],
+ "GcovCoverage": false,
+ "ClangCoverage": false,
+ "ClangCoverageContinuousMode": false,
+ "NativeCoveragePaths": [],
+ "NativeCoverageExcludePaths": [],
+ "SamplingPGO": false,
+ "ArtUseReadBarrier": true,
+ "Binder32bit": false,
+ "BtConfigIncludeDir": "",
+ "DeviceKernelHeaders": [],
+ "DeviceVndkVersion": "current",
+ "Platform_vndk_version": "UpsideDownCake",
+ "ProductVndkVersion": "current",
+ "ExtraVndkVersions": ["29","30","31","32","33"],
+ "DeviceSystemSdkVersions": ["UpsideDownCake"],
+ "RecoverySnapshotVersion": "",
+ "Platform_systemsdk_versions": ["28","29","30","31","32","33","UpsideDownCake"],
+ "Malloc_not_svelte": true,
+ "Malloc_zero_contents": true,
+ "Malloc_pattern_fill_contents": false,
+ "Override_rs_driver": "",
+ "UncompressPrivAppDex": true,
+ "ModulesLoadedByPrivilegedModules": [],
+ "BootJars": ["com.android.art:core-oj","com.android.art:core-libart","com.android.art:okhttp","com.android.art:bouncycastle","com.android.art:apache-xml","platform:framework-minus-apex","platform:framework-graphics","platform:ext","platform:telephony-common","platform:voip-common","platform:ims-common","com.android.i18n:core-icu4j"],
+ "ApexBootJars": ["com.android.adservices:framework-adservices","com.android.adservices:framework-sdksandbox","com.android.appsearch:framework-appsearch","com.android.btservices:framework-bluetooth","com.android.conscrypt:conscrypt","com.android.ipsec:android.net.ipsec.ike","com.android.media:updatable-media","com.android.mediaprovider:framework-mediaprovider","com.android.ondevicepersonalization:framework-ondevicepersonalization","com.android.os.statsd:framework-statsd","com.android.permission:framework-permission","com.android.permission:framework-permission-s","com.android.scheduling:framework-scheduling","com.android.sdkext:framework-sdkextensions","com.android.tethering:framework-connectivity","com.android.tethering:framework-connectivity-t","com.android.tethering:framework-tethering","com.android.uwb:framework-uwb","com.android.virt:framework-virtualization","com.android.wifi:framework-wifi"],
+ "VndkUseCoreVariant": false,
+ "VndkSnapshotBuildArtifacts": false,
+ "DirectedVendorSnapshot": false,
+ "VendorSnapshotModules": {
+ },
+ "DirectedRecoverySnapshot": false,
+ "RecoverySnapshotModules": {
+ },
+ "VendorSnapshotDirsIncluded": [],
+ "VendorSnapshotDirsExcluded": [],
+ "RecoverySnapshotDirsIncluded": [],
+ "RecoverySnapshotDirsExcluded": [],
+ "HostFakeSnapshotEnabled": false,
+ "MultitreeUpdateMeta": false,
+ "Treble_linker_namespaces": true,
+ "Enforce_vintf_manifest": true,
+ "Uml": false,
+ "VendorPath": "vendor",
+ "OdmPath": "vendor/odm",
+ "VendorDlkmPath": "vendor/vendor_dlkm",
+ "OdmDlkmPath": "vendor/odm_dlkm",
+ "SystemDlkmPath": "system_dlkm",
+ "ProductPath": "system/product",
+ "SystemExtPath": "system/system_ext",
+ "MinimizeJavaDebugInfo": false,
+ "UseGoma": false,
+ "UseRBE": false,
+ "UseRBEJAVAC": false,
+ "UseRBER8": false,
+ "UseRBED8": false,
+ "Arc": false,
+ "NamespacesToExport": ["device/generic/goldfish","device/generic/goldfish-opengl","hardware/google/camera","hardware/google/camera/devices/EmulatedCamera","device/generic/goldfish","device/generic/goldfish-opengl"],
+ "PgoAdditionalProfileDirs": [],
+ "BoardPlatVendorPolicy": [],
+ "BoardReqdMaskPolicy": [],
+ "BoardSystemExtPublicPrebuiltDirs": [],
+ "BoardSystemExtPrivatePrebuiltDirs": [],
+ "BoardProductPublicPrebuiltDirs": [],
+ "BoardProductPrivatePrebuiltDirs": [],
+ "BoardVendorSepolicyDirs": ["system/bt/vendor_libs/linux/sepolicy"],
+ "BoardOdmSepolicyDirs": [],
+ "BoardVendorDlkmSepolicyDirs": [],
+ "BoardOdmDlkmSepolicyDirs": [],
+ "BoardSystemDlkmSepolicyDirs": [],
+ "SystemExtPublicSepolicyDirs": [],
+ "SystemExtPrivateSepolicyDirs": [],
+ "BoardSepolicyM4Defs": [],
+ "BoardSepolicyVers": "10000.0",
+ "SystemExtSepolicyPrebuiltApiDir": "",
+ "ProductSepolicyPrebuiltApiDir": "",
+ "PlatformSepolicyVersion": "10000.0",
+ "TotSepolicyVersion": "10000.0",
+ "PlatformSepolicyCompatVersions": ["28.0","29.0","30.0","31.0","32.0","33.0"],
+ "Flatten_apex": false,
+ "ForceApexSymlinkOptimization": false,
+ "DexpreoptGlobalConfig": "out/soong/dexpreopt.config",
+ "WithDexpreopt": true,
+ "ManifestPackageNameOverrides": [],
+ "PackageNameOverrides": [],
+ "CertificateOverrides": [],
+ "ApexGlobalMinSdkVersionOverride": "",
+ "EnforceSystemCertificate": false,
+ "EnforceSystemCertificateAllowList": [],
+ "ProductHiddenAPIStubs": [],
+ "ProductHiddenAPIStubsSystem": [],
+ "ProductHiddenAPIStubsTest": [],
+ "ProductPublicSepolicyDirs": [],
+ "ProductPrivateSepolicyDirs": [],
+ "TargetFSConfigGen": [],
+ "MissingUsesLibraries": ["com.google.android.ble","com.google.android.media.effects","com.google.android.wearable"],
+ "VendorVars": {
+ "ANDROID": {
+ "BOARD_USES_ODMIMAGE": "",
+ "BOARD_USES_RECOVERY_AS_BOOT": "",
+ "IS_TARGET_MIXED_SEPOLICY": "",
+ "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT": "",
+ "SYSTEMUI_OPTIMIZE_JAVA": "true",
+ "SYSTEMUI_USE_COMPOSE": "false",
+ "SYSTEM_OPTIMIZE_JAVA": "true",
+ "TARGET_DYNAMIC_64_32_MEDIASERVER": "",
+ "TARGET_ENABLE_MEDIADRM_64": "true",
+ "include_nonpublic_framework_api": "false",
+ "module_build_from_source": "true"
+ },
+ "art_module": {
+ "source_build": "true"
+ },
+ "bluetooth_module": {
+ "source_build": "true"
+ },
+ "permission_module": {
+ "source_build": "true"
+ },
+ "rkpd_module": {
+ "source_build": "true"
+ },
+ "uwb_module": {
+ "source_build": "true"
+ },
+ "wifi_module": {
+ "source_build": "true"
+ }
+ },
+ "EnforceProductPartitionInterface": true,
+ "DeviceCurrentApiLevelForVendorModules": "current",
+ "EnforceInterPartitionJavaSdkLibrary": false,
+ "InterPartitionJavaLibraryAllowList": [],
+ "InstallExtraFlattenedApexes": true,
+ "CompressedApex": true,
+ "TrimmedApex": false,
+ "BoardUsesRecoveryAsBoot": false,
+ "BoardKernelBinaries": [],
+ "BoardKernelModuleInterfaceVersions": [],
+ "BoardMoveRecoveryResourcesToVendorBoot": false,
+ "PrebuiltHiddenApiDir": "",
+ "ShippingApiLevel": "31",
+ "BuildBrokenClangProperty": false,
+ "BuildBrokenClangAsFlags": false,
+ "BuildBrokenClangCFlags": false,
+ "BuildBrokenDepfile": false,
+ "BuildBrokenEnforceSyspropOwner": false,
+ "BuildBrokenTrebleSyspropNeverallow": false,
+ "BuildBrokenUsesSoongPython2Modules": false,
+ "BuildBrokenVendorPropertyNamespace": false,
+ "BuildBrokenInputDirModules": [],
+ "BuildDebugfsRestrictionsEnabled": false,
+ "RequiresInsecureExecmemForSwiftshader": false,
+ "SelinuxIgnoreNeverallows": false,
+ "SepolicySplit": true,
+ "SepolicyFreezeTestExtraDirs": [],
+ "SepolicyFreezeTestExtraPrebuiltDirs": [],
+ "GenerateAidlNdkPlatformBackend": false,
+ "IgnorePrefer32OnDevice": false,
+ "IncludeTags": []
+}''')
diff --git a/tests/products/aosp_arm64.variables.bzl b/tests/products/aosp_arm64.variables.bzl
new file mode 100644
index 00000000..3b2e97a0
--- /dev/null
+++ b/tests/products/aosp_arm64.variables.bzl
@@ -0,0 +1,243 @@
+variables = json.decode('''{
+ "Make_suffix": "-aosp_arm64",
+ "BuildId": "AOSP.MASTER",
+ "BuildNumberFile": "build_number.txt",
+ "Platform_version_name": "UpsideDownCake",
+ "Platform_sdk_version": 33,
+ "Platform_sdk_codename": "UpsideDownCake",
+ "Platform_sdk_final": false,
+ "Platform_sdk_extension_version": 3,
+ "Platform_base_sdk_extension_version": 3,
+ "Platform_version_active_codenames": ["UpsideDownCake"],
+ "Platform_security_patch": "2023-03-05",
+ "Platform_preview_sdk_version": "1",
+ "Platform_base_os": "",
+ "Platform_version_last_stable": "13",
+ "Platform_version_known_codenames": "Base,Base11,Cupcake,Donut,Eclair,Eclair01,EclairMr1,Froyo,Gingerbread,GingerbreadMr1,Honeycomb,HoneycombMr1,HoneycombMr2,IceCreamSandwich,IceCreamSandwichMr1,JellyBean,JellyBeanMr1,JellyBeanMr2,Kitkat,KitkatWatch,Lollipop,LollipopMr1,M,N,NMr1,O,OMr1,P,Q,R,S,Sv2,Tiramisu,UpsideDownCake",
+ "Platform_min_supported_target_sdk_version": "23",
+ "Allow_missing_dependencies": false,
+ "Unbundled_build": false,
+ "Unbundled_build_apps": [],
+ "Unbundled_build_image": false,
+ "Always_use_prebuilt_sdks": false,
+ "Debuggable": true,
+ "Eng": false,
+ "DeviceName": "generic_arm64",
+ "DeviceProduct": "aosp_arm64",
+ "DeviceArch": "arm64",
+ "DeviceArchVariant": "armv8-a",
+ "DeviceCpuVariant": "generic",
+ "DeviceAbi": ["arm64-v8a"],
+ "DeviceSecondaryArch": "arm",
+ "DeviceSecondaryArchVariant": "armv8-a",
+ "DeviceSecondaryCpuVariant": "generic",
+ "DeviceSecondaryAbi": ["armeabi-v7a","armeabi"],
+ "DeviceMaxPageSizeSupported": "4096",
+ "Aml_abis": false,
+ "Ndk_abis": false,
+ "NativeBridgeArch": "",
+ "NativeBridgeArchVariant": "",
+ "NativeBridgeCpuVariant": "",
+ "NativeBridgeAbi": [],
+ "NativeBridgeRelativePath": "",
+ "NativeBridgeSecondaryArch": "",
+ "NativeBridgeSecondaryArchVariant": "",
+ "NativeBridgeSecondaryCpuVariant": "",
+ "NativeBridgeSecondaryAbi": [],
+ "NativeBridgeSecondaryRelativePath": "",
+ "HostArch": "x86_64",
+ "HostSecondaryArch": "x86",
+ "HostStaticBinaries": false,
+ "HostMusl": false,
+ "CrossHost": "windows",
+ "CrossHostArch": "x86",
+ "CrossHostSecondaryArch": "x86_64",
+ "DeviceResourceOverlays": ["device/generic/goldfish/overlay"],
+ "ProductResourceOverlays": [],
+ "EnforceRROTargets": ["*"],
+ "EnforceRROExcludedOverlays": [],
+ "AAPTCharacteristics": "emulator",
+ "AAPTConfig": ["en_US,af_ZA,am_ET,ar_EG,ar_XB,as_IN,az_AZ,be_BY,bg_BG,bn_BD,bs_BA,ca_ES,cs_CZ,da_DK,de_DE,el_GR,en_AU,en_CA,en_GB,en_IN,en_XA,es_ES,es_US,et_EE,eu_ES,fa_IR,fi_FI,fr_CA,fr_FR,gl_ES,gu_IN,hi_IN,hr_HR,hu_HU,hy_AM,in_ID,is_IS,it_IT,iw_IL,ja_JP,ka_GE,kk_KZ,km_KH,kn_IN,ko_KR,ky_KG,lo_LA,lt_LT,lv_LV,mk_MK,ml_IN,mn_MN,mr_IN,ms_MY,my_MM,nb_NO,ne_NP,nl_NL,or_IN,pa_IN,pl_PL,pt_BR,pt_PT,ro_RO,ru_RU,si_LK,sk_SK,sl_SI,sq_AL,sr_Latn_RS,sr_RS,sv_SE,sw_TZ,ta_IN,te_IN,th_TH,tl_PH,tr_TR,uk_UA,ur_PK,uz_UZ,vi_VN,zh_CN,zh_HK,zh_TW,zu_ZA,"],
+ "AAPTPreferredConfig": "",
+ "AAPTPrebuiltDPI": [],
+ "DefaultAppCertificate": "",
+ "MainlineSepolicyDevCertificates": "build/make/target/product/security/",
+ "AppsDefaultVersionName": "UpsideDownCake",
+ "SanitizeHost": [],
+ "SanitizeDevice": [],
+ "SanitizeDeviceDiag": [],
+ "SanitizeDeviceArch": [],
+ "Safestack": false,
+ "EnableCFI": true,
+ "CFIExcludePaths": [],
+ "CFIIncludePaths": ["device/generic/goldfish/wifi/wpa_supplicant_8_lib","device/google/cuttlefish/guest/libs/wpa_supplicant_8_lib","external/tinyxml2","external/wpa_supplicant_8","frameworks/av/camera","frameworks/av/media","frameworks/av/services","frameworks/minikin","hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib","hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib","hardware/interfaces/nfc","hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib","hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib","hardware/interfaces/keymaster","hardware/interfaces/security","packages/modules/Bluetooth/system","system/chre","system/core/libnetutils","system/libziparchive","system/gatekeeper","system/keymaster","system/nfc","system/security"],
+ "IntegerOverflowExcludePaths": [],
+ "MemtagHeapExcludePaths": [],
+ "MemtagHeapAsyncIncludePaths": [],
+ "MemtagHeapSyncIncludePaths": [],
+ "DisableScudo": false,
+ "ClangTidy": false,
+ "TidyChecks": "",
+ "JavaCoveragePaths": [],
+ "JavaCoverageExcludePaths": [],
+ "GcovCoverage": false,
+ "ClangCoverage": false,
+ "ClangCoverageContinuousMode": false,
+ "NativeCoveragePaths": [],
+ "NativeCoverageExcludePaths": [],
+ "SamplingPGO": false,
+ "ArtUseReadBarrier": true,
+ "Binder32bit": false,
+ "BtConfigIncludeDir": "",
+ "DeviceKernelHeaders": [],
+ "DeviceVndkVersion": "current",
+ "Platform_vndk_version": "UpsideDownCake",
+ "ProductVndkVersion": "current",
+ "ExtraVndkVersions": ["29","30","31","32","33"],
+ "DeviceSystemSdkVersions": ["UpsideDownCake"],
+ "RecoverySnapshotVersion": "",
+ "Platform_systemsdk_versions": ["28","29","30","31","32","33","UpsideDownCake"],
+ "Malloc_not_svelte": true,
+ "Malloc_zero_contents": true,
+ "Malloc_pattern_fill_contents": false,
+ "Override_rs_driver": "",
+ "UncompressPrivAppDex": true,
+ "ModulesLoadedByPrivilegedModules": [],
+ "BootJars": ["com.android.art:core-oj","com.android.art:core-libart","com.android.art:okhttp","com.android.art:bouncycastle","com.android.art:apache-xml","platform:framework-minus-apex","platform:framework-graphics","platform:ext","platform:telephony-common","platform:voip-common","platform:ims-common","com.android.i18n:core-icu4j"],
+ "ApexBootJars": ["com.android.adservices:framework-adservices","com.android.adservices:framework-sdksandbox","com.android.appsearch:framework-appsearch","com.android.btservices:framework-bluetooth","com.android.conscrypt:conscrypt","com.android.ipsec:android.net.ipsec.ike","com.android.media:updatable-media","com.android.mediaprovider:framework-mediaprovider","com.android.ondevicepersonalization:framework-ondevicepersonalization","com.android.os.statsd:framework-statsd","com.android.permission:framework-permission","com.android.permission:framework-permission-s","com.android.scheduling:framework-scheduling","com.android.sdkext:framework-sdkextensions","com.android.tethering:framework-connectivity","com.android.tethering:framework-connectivity-t","com.android.tethering:framework-tethering","com.android.uwb:framework-uwb","com.android.virt:framework-virtualization","com.android.wifi:framework-wifi"],
+ "VndkUseCoreVariant": false,
+ "VndkSnapshotBuildArtifacts": false,
+ "DirectedVendorSnapshot": false,
+ "VendorSnapshotModules": {
+ },
+ "DirectedRecoverySnapshot": false,
+ "RecoverySnapshotModules": {
+ },
+ "VendorSnapshotDirsIncluded": [],
+ "VendorSnapshotDirsExcluded": [],
+ "RecoverySnapshotDirsIncluded": [],
+ "RecoverySnapshotDirsExcluded": [],
+ "HostFakeSnapshotEnabled": false,
+ "MultitreeUpdateMeta": false,
+ "Treble_linker_namespaces": true,
+ "Enforce_vintf_manifest": true,
+ "Uml": false,
+ "VendorPath": "vendor",
+ "OdmPath": "vendor/odm",
+ "VendorDlkmPath": "vendor/vendor_dlkm",
+ "OdmDlkmPath": "vendor/odm_dlkm",
+ "SystemDlkmPath": "system_dlkm",
+ "ProductPath": "system/product",
+ "SystemExtPath": "system/system_ext",
+ "MinimizeJavaDebugInfo": false,
+ "UseGoma": false,
+ "UseRBE": false,
+ "UseRBEJAVAC": false,
+ "UseRBER8": false,
+ "UseRBED8": false,
+ "Arc": false,
+ "NamespacesToExport": ["device/generic/goldfish","device/generic/goldfish-opengl","hardware/google/camera","hardware/google/camera/devices/EmulatedCamera"],
+ "PgoAdditionalProfileDirs": [],
+ "BoardPlatVendorPolicy": [],
+ "BoardReqdMaskPolicy": [],
+ "BoardSystemExtPublicPrebuiltDirs": [],
+ "BoardSystemExtPrivatePrebuiltDirs": [],
+ "BoardProductPublicPrebuiltDirs": [],
+ "BoardProductPrivatePrebuiltDirs": [],
+ "BoardVendorSepolicyDirs": ["system/bt/vendor_libs/linux/sepolicy","build/make/target/board/generic_arm64/sepolicy"],
+ "BoardOdmSepolicyDirs": [],
+ "BoardVendorDlkmSepolicyDirs": [],
+ "BoardOdmDlkmSepolicyDirs": [],
+ "BoardSystemDlkmSepolicyDirs": [],
+ "SystemExtPublicSepolicyDirs": [],
+ "SystemExtPrivateSepolicyDirs": [],
+ "BoardSepolicyM4Defs": [],
+ "BoardSepolicyVers": "10000.0",
+ "SystemExtSepolicyPrebuiltApiDir": "",
+ "ProductSepolicyPrebuiltApiDir": "",
+ "PlatformSepolicyVersion": "10000.0",
+ "TotSepolicyVersion": "10000.0",
+ "PlatformSepolicyCompatVersions": ["28.0","29.0","30.0","31.0","32.0","33.0"],
+ "Flatten_apex": false,
+ "ForceApexSymlinkOptimization": false,
+ "DexpreoptGlobalConfig": "out/soong/dexpreopt.config",
+ "WithDexpreopt": true,
+ "ManifestPackageNameOverrides": [],
+ "PackageNameOverrides": [],
+ "CertificateOverrides": [],
+ "ApexGlobalMinSdkVersionOverride": "",
+ "EnforceSystemCertificate": false,
+ "EnforceSystemCertificateAllowList": [],
+ "ProductHiddenAPIStubs": [],
+ "ProductHiddenAPIStubsSystem": [],
+ "ProductHiddenAPIStubsTest": [],
+ "ProductPublicSepolicyDirs": [],
+ "ProductPrivateSepolicyDirs": [],
+ "TargetFSConfigGen": [],
+ "MissingUsesLibraries": ["com.google.android.ble","com.google.android.media.effects","com.google.android.wearable"],
+ "VendorVars": {
+ "ANDROID": {
+ "BOARD_USES_ODMIMAGE": "",
+ "BOARD_USES_RECOVERY_AS_BOOT": "",
+ "IS_TARGET_MIXED_SEPOLICY": "",
+ "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT": "",
+ "SYSTEMUI_OPTIMIZE_JAVA": "true",
+ "SYSTEMUI_USE_COMPOSE": "false",
+ "SYSTEM_OPTIMIZE_JAVA": "true",
+ "TARGET_DYNAMIC_64_32_MEDIASERVER": "true",
+ "TARGET_ENABLE_MEDIADRM_64": "true",
+ "avf_enabled": "true",
+ "include_nonpublic_framework_api": "false",
+ "module_build_from_source": "true"
+ },
+ "art_module": {
+ "source_build": "true"
+ },
+ "bluetooth_module": {
+ "source_build": "true"
+ },
+ "permission_module": {
+ "source_build": "true"
+ },
+ "rkpd_module": {
+ "source_build": "true"
+ },
+ "uwb_module": {
+ "source_build": "true"
+ },
+ "wifi_module": {
+ "source_build": "true"
+ }
+ },
+ "EnforceProductPartitionInterface": true,
+ "DeviceCurrentApiLevelForVendorModules": "current",
+ "EnforceInterPartitionJavaSdkLibrary": false,
+ "InterPartitionJavaLibraryAllowList": [],
+ "InstallExtraFlattenedApexes": true,
+ "CompressedApex": true,
+ "TrimmedApex": false,
+ "BoardUsesRecoveryAsBoot": false,
+ "BoardKernelBinaries": [],
+ "BoardKernelModuleInterfaceVersions": [],
+ "BoardMoveRecoveryResourcesToVendorBoot": false,
+ "PrebuiltHiddenApiDir": "",
+ "ShippingApiLevel": "31",
+ "BuildBrokenClangProperty": false,
+ "BuildBrokenClangAsFlags": false,
+ "BuildBrokenClangCFlags": false,
+ "BuildBrokenDepfile": false,
+ "BuildBrokenEnforceSyspropOwner": false,
+ "BuildBrokenTrebleSyspropNeverallow": false,
+ "BuildBrokenUsesSoongPython2Modules": false,
+ "BuildBrokenVendorPropertyNamespace": false,
+ "BuildBrokenInputDirModules": [],
+ "BuildDebugfsRestrictionsEnabled": false,
+ "RequiresInsecureExecmemForSwiftshader": false,
+ "SelinuxIgnoreNeverallows": false,
+ "SepolicySplit": true,
+ "SepolicyFreezeTestExtraDirs": [],
+ "SepolicyFreezeTestExtraPrebuiltDirs": [],
+ "GenerateAidlNdkPlatformBackend": false,
+ "IgnorePrefer32OnDevice": false,
+ "IncludeTags": []
+}''')
diff --git a/tests/products/aosp_x86.variables.bzl b/tests/products/aosp_x86.variables.bzl
new file mode 100644
index 00000000..09ee23de
--- /dev/null
+++ b/tests/products/aosp_x86.variables.bzl
@@ -0,0 +1,242 @@
+variables = json.decode('''{
+ "Make_suffix": "-aosp_x86",
+ "BuildId": "AOSP.MASTER",
+ "BuildNumberFile": "build_number.txt",
+ "Platform_version_name": "UpsideDownCake",
+ "Platform_sdk_version": 33,
+ "Platform_sdk_codename": "UpsideDownCake",
+ "Platform_sdk_final": false,
+ "Platform_sdk_extension_version": 3,
+ "Platform_base_sdk_extension_version": 3,
+ "Platform_version_active_codenames": ["UpsideDownCake"],
+ "Platform_security_patch": "2023-03-05",
+ "Platform_preview_sdk_version": "1",
+ "Platform_base_os": "",
+ "Platform_version_last_stable": "13",
+ "Platform_version_known_codenames": "Base,Base11,Cupcake,Donut,Eclair,Eclair01,EclairMr1,Froyo,Gingerbread,GingerbreadMr1,Honeycomb,HoneycombMr1,HoneycombMr2,IceCreamSandwich,IceCreamSandwichMr1,JellyBean,JellyBeanMr1,JellyBeanMr2,Kitkat,KitkatWatch,Lollipop,LollipopMr1,M,N,NMr1,O,OMr1,P,Q,R,S,Sv2,Tiramisu,UpsideDownCake",
+ "Platform_min_supported_target_sdk_version": "23",
+ "Allow_missing_dependencies": false,
+ "Unbundled_build": false,
+ "Unbundled_build_apps": [],
+ "Unbundled_build_image": false,
+ "Always_use_prebuilt_sdks": false,
+ "Debuggable": true,
+ "Eng": false,
+ "DeviceName": "generic_x86",
+ "DeviceProduct": "aosp_x86",
+ "DeviceArch": "x86",
+ "DeviceArchVariant": "x86",
+ "DeviceCpuVariant": "",
+ "DeviceAbi": ["x86"],
+ "DeviceSecondaryArch": "",
+ "DeviceSecondaryArchVariant": "",
+ "DeviceSecondaryCpuVariant": "",
+ "DeviceSecondaryAbi": [],
+ "DeviceMaxPageSizeSupported": "4096",
+ "Aml_abis": false,
+ "Ndk_abis": false,
+ "NativeBridgeArch": "",
+ "NativeBridgeArchVariant": "",
+ "NativeBridgeCpuVariant": "",
+ "NativeBridgeAbi": [],
+ "NativeBridgeRelativePath": "",
+ "NativeBridgeSecondaryArch": "",
+ "NativeBridgeSecondaryArchVariant": "",
+ "NativeBridgeSecondaryCpuVariant": "",
+ "NativeBridgeSecondaryAbi": [],
+ "NativeBridgeSecondaryRelativePath": "",
+ "HostArch": "x86_64",
+ "HostSecondaryArch": "x86",
+ "HostStaticBinaries": false,
+ "HostMusl": false,
+ "CrossHost": "windows",
+ "CrossHostArch": "x86",
+ "CrossHostSecondaryArch": "x86_64",
+ "DeviceResourceOverlays": ["device/generic/goldfish/overlay"],
+ "ProductResourceOverlays": [],
+ "EnforceRROTargets": ["*"],
+ "EnforceRROExcludedOverlays": [],
+ "AAPTCharacteristics": "emulator",
+ "AAPTConfig": ["en_US,af_ZA,am_ET,ar_EG,ar_XB,as_IN,az_AZ,be_BY,bg_BG,bn_BD,bs_BA,ca_ES,cs_CZ,da_DK,de_DE,el_GR,en_AU,en_CA,en_GB,en_IN,en_XA,es_ES,es_US,et_EE,eu_ES,fa_IR,fi_FI,fr_CA,fr_FR,gl_ES,gu_IN,hi_IN,hr_HR,hu_HU,hy_AM,in_ID,is_IS,it_IT,iw_IL,ja_JP,ka_GE,kk_KZ,km_KH,kn_IN,ko_KR,ky_KG,lo_LA,lt_LT,lv_LV,mk_MK,ml_IN,mn_MN,mr_IN,ms_MY,my_MM,nb_NO,ne_NP,nl_NL,or_IN,pa_IN,pl_PL,pt_BR,pt_PT,ro_RO,ru_RU,si_LK,sk_SK,sl_SI,sq_AL,sr_Latn_RS,sr_RS,sv_SE,sw_TZ,ta_IN,te_IN,th_TH,tl_PH,tr_TR,uk_UA,ur_PK,uz_UZ,vi_VN,zh_CN,zh_HK,zh_TW,zu_ZA,"],
+ "AAPTPreferredConfig": "",
+ "AAPTPrebuiltDPI": [],
+ "DefaultAppCertificate": "",
+ "MainlineSepolicyDevCertificates": "build/make/target/product/security/",
+ "AppsDefaultVersionName": "UpsideDownCake",
+ "SanitizeHost": [],
+ "SanitizeDevice": [],
+ "SanitizeDeviceDiag": [],
+ "SanitizeDeviceArch": [],
+ "Safestack": false,
+ "EnableCFI": true,
+ "CFIExcludePaths": [],
+ "CFIIncludePaths": ["device/generic/goldfish/wifi/wpa_supplicant_8_lib","device/google/cuttlefish/guest/libs/wpa_supplicant_8_lib","external/tinyxml2","external/wpa_supplicant_8","frameworks/av/camera","frameworks/av/media","frameworks/av/services","frameworks/minikin","hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib","hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib","hardware/interfaces/nfc","hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib","hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib","hardware/interfaces/keymaster","hardware/interfaces/security","packages/modules/Bluetooth/system","system/chre","system/core/libnetutils","system/libziparchive","system/gatekeeper","system/keymaster","system/nfc","system/security"],
+ "IntegerOverflowExcludePaths": [],
+ "MemtagHeapExcludePaths": [],
+ "MemtagHeapAsyncIncludePaths": [],
+ "MemtagHeapSyncIncludePaths": [],
+ "DisableScudo": false,
+ "ClangTidy": false,
+ "TidyChecks": "",
+ "JavaCoveragePaths": [],
+ "JavaCoverageExcludePaths": [],
+ "GcovCoverage": false,
+ "ClangCoverage": false,
+ "ClangCoverageContinuousMode": false,
+ "NativeCoveragePaths": [],
+ "NativeCoverageExcludePaths": [],
+ "SamplingPGO": false,
+ "ArtUseReadBarrier": true,
+ "Binder32bit": false,
+ "BtConfigIncludeDir": "",
+ "DeviceKernelHeaders": [],
+ "DeviceVndkVersion": "current",
+ "Platform_vndk_version": "UpsideDownCake",
+ "ProductVndkVersion": "current",
+ "ExtraVndkVersions": ["29","30","31","32","33"],
+ "DeviceSystemSdkVersions": ["UpsideDownCake"],
+ "RecoverySnapshotVersion": "",
+ "Platform_systemsdk_versions": ["28","29","30","31","32","33","UpsideDownCake"],
+ "Malloc_not_svelte": true,
+ "Malloc_zero_contents": true,
+ "Malloc_pattern_fill_contents": false,
+ "Override_rs_driver": "",
+ "UncompressPrivAppDex": true,
+ "ModulesLoadedByPrivilegedModules": [],
+ "BootJars": ["com.android.art:core-oj","com.android.art:core-libart","com.android.art:okhttp","com.android.art:bouncycastle","com.android.art:apache-xml","platform:framework-minus-apex","platform:framework-graphics","platform:ext","platform:telephony-common","platform:voip-common","platform:ims-common","com.android.i18n:core-icu4j"],
+ "ApexBootJars": ["com.android.adservices:framework-adservices","com.android.adservices:framework-sdksandbox","com.android.appsearch:framework-appsearch","com.android.btservices:framework-bluetooth","com.android.conscrypt:conscrypt","com.android.ipsec:android.net.ipsec.ike","com.android.media:updatable-media","com.android.mediaprovider:framework-mediaprovider","com.android.ondevicepersonalization:framework-ondevicepersonalization","com.android.os.statsd:framework-statsd","com.android.permission:framework-permission","com.android.permission:framework-permission-s","com.android.scheduling:framework-scheduling","com.android.sdkext:framework-sdkextensions","com.android.tethering:framework-connectivity","com.android.tethering:framework-connectivity-t","com.android.tethering:framework-tethering","com.android.uwb:framework-uwb","com.android.virt:framework-virtualization","com.android.wifi:framework-wifi"],
+ "VndkUseCoreVariant": false,
+ "VndkSnapshotBuildArtifacts": false,
+ "DirectedVendorSnapshot": false,
+ "VendorSnapshotModules": {
+ },
+ "DirectedRecoverySnapshot": false,
+ "RecoverySnapshotModules": {
+ },
+ "VendorSnapshotDirsIncluded": [],
+ "VendorSnapshotDirsExcluded": [],
+ "RecoverySnapshotDirsIncluded": [],
+ "RecoverySnapshotDirsExcluded": [],
+ "HostFakeSnapshotEnabled": false,
+ "MultitreeUpdateMeta": false,
+ "Treble_linker_namespaces": true,
+ "Enforce_vintf_manifest": true,
+ "Uml": false,
+ "VendorPath": "vendor",
+ "OdmPath": "vendor/odm",
+ "VendorDlkmPath": "vendor/vendor_dlkm",
+ "OdmDlkmPath": "vendor/odm_dlkm",
+ "SystemDlkmPath": "system_dlkm",
+ "ProductPath": "system/product",
+ "SystemExtPath": "system/system_ext",
+ "MinimizeJavaDebugInfo": false,
+ "UseGoma": false,
+ "UseRBE": false,
+ "UseRBEJAVAC": false,
+ "UseRBER8": false,
+ "UseRBED8": false,
+ "Arc": false,
+ "NamespacesToExport": ["device/generic/goldfish","device/generic/goldfish-opengl","hardware/google/camera","hardware/google/camera/devices/EmulatedCamera","device/generic/goldfish","device/generic/goldfish-opengl"],
+ "PgoAdditionalProfileDirs": [],
+ "BoardPlatVendorPolicy": [],
+ "BoardReqdMaskPolicy": [],
+ "BoardSystemExtPublicPrebuiltDirs": [],
+ "BoardSystemExtPrivatePrebuiltDirs": [],
+ "BoardProductPublicPrebuiltDirs": [],
+ "BoardProductPrivatePrebuiltDirs": [],
+ "BoardVendorSepolicyDirs": ["system/bt/vendor_libs/linux/sepolicy"],
+ "BoardOdmSepolicyDirs": [],
+ "BoardVendorDlkmSepolicyDirs": [],
+ "BoardOdmDlkmSepolicyDirs": [],
+ "BoardSystemDlkmSepolicyDirs": [],
+ "SystemExtPublicSepolicyDirs": [],
+ "SystemExtPrivateSepolicyDirs": [],
+ "BoardSepolicyM4Defs": [],
+ "BoardSepolicyVers": "10000.0",
+ "SystemExtSepolicyPrebuiltApiDir": "",
+ "ProductSepolicyPrebuiltApiDir": "",
+ "PlatformSepolicyVersion": "10000.0",
+ "TotSepolicyVersion": "10000.0",
+ "PlatformSepolicyCompatVersions": ["28.0","29.0","30.0","31.0","32.0","33.0"],
+ "Flatten_apex": false,
+ "ForceApexSymlinkOptimization": false,
+ "DexpreoptGlobalConfig": "out/soong/dexpreopt.config",
+ "WithDexpreopt": true,
+ "ManifestPackageNameOverrides": [],
+ "PackageNameOverrides": [],
+ "CertificateOverrides": [],
+ "ApexGlobalMinSdkVersionOverride": "",
+ "EnforceSystemCertificate": false,
+ "EnforceSystemCertificateAllowList": [],
+ "ProductHiddenAPIStubs": [],
+ "ProductHiddenAPIStubsSystem": [],
+ "ProductHiddenAPIStubsTest": [],
+ "ProductPublicSepolicyDirs": [],
+ "ProductPrivateSepolicyDirs": [],
+ "TargetFSConfigGen": [],
+ "MissingUsesLibraries": ["com.google.android.ble","com.google.android.media.effects","com.google.android.wearable"],
+ "VendorVars": {
+ "ANDROID": {
+ "BOARD_USES_ODMIMAGE": "",
+ "BOARD_USES_RECOVERY_AS_BOOT": "",
+ "IS_TARGET_MIXED_SEPOLICY": "",
+ "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT": "",
+ "SYSTEMUI_OPTIMIZE_JAVA": "true",
+ "SYSTEMUI_USE_COMPOSE": "false",
+ "SYSTEM_OPTIMIZE_JAVA": "true",
+ "TARGET_DYNAMIC_64_32_MEDIASERVER": "",
+ "TARGET_ENABLE_MEDIADRM_64": "true",
+ "include_nonpublic_framework_api": "false",
+ "module_build_from_source": "true"
+ },
+ "art_module": {
+ "source_build": "true"
+ },
+ "bluetooth_module": {
+ "source_build": "true"
+ },
+ "permission_module": {
+ "source_build": "true"
+ },
+ "rkpd_module": {
+ "source_build": "true"
+ },
+ "uwb_module": {
+ "source_build": "true"
+ },
+ "wifi_module": {
+ "source_build": "true"
+ }
+ },
+ "EnforceProductPartitionInterface": true,
+ "DeviceCurrentApiLevelForVendorModules": "current",
+ "EnforceInterPartitionJavaSdkLibrary": false,
+ "InterPartitionJavaLibraryAllowList": [],
+ "InstallExtraFlattenedApexes": true,
+ "CompressedApex": true,
+ "TrimmedApex": false,
+ "BoardUsesRecoveryAsBoot": false,
+ "BoardKernelBinaries": [],
+ "BoardKernelModuleInterfaceVersions": [],
+ "BoardMoveRecoveryResourcesToVendorBoot": false,
+ "PrebuiltHiddenApiDir": "",
+ "ShippingApiLevel": "31",
+ "BuildBrokenClangProperty": false,
+ "BuildBrokenClangAsFlags": false,
+ "BuildBrokenClangCFlags": false,
+ "BuildBrokenDepfile": false,
+ "BuildBrokenEnforceSyspropOwner": false,
+ "BuildBrokenTrebleSyspropNeverallow": false,
+ "BuildBrokenUsesSoongPython2Modules": false,
+ "BuildBrokenVendorPropertyNamespace": false,
+ "BuildBrokenInputDirModules": [],
+ "BuildDebugfsRestrictionsEnabled": false,
+ "RequiresInsecureExecmemForSwiftshader": false,
+ "SelinuxIgnoreNeverallows": false,
+ "SepolicySplit": true,
+ "SepolicyFreezeTestExtraDirs": [],
+ "SepolicyFreezeTestExtraPrebuiltDirs": [],
+ "GenerateAidlNdkPlatformBackend": false,
+ "IgnorePrefer32OnDevice": false,
+ "IncludeTags": []
+}''')
diff --git a/tests/products/aosp_x86_64.variables.bzl b/tests/products/aosp_x86_64.variables.bzl
new file mode 100644
index 00000000..f6661a8b
--- /dev/null
+++ b/tests/products/aosp_x86_64.variables.bzl
@@ -0,0 +1,243 @@
+variables = json.decode('''{
+ "Make_suffix": "-aosp_x86_64",
+ "BuildId": "AOSP.MASTER",
+ "BuildNumberFile": "build_number.txt",
+ "Platform_version_name": "UpsideDownCake",
+ "Platform_sdk_version": 33,
+ "Platform_sdk_codename": "UpsideDownCake",
+ "Platform_sdk_final": false,
+ "Platform_sdk_extension_version": 3,
+ "Platform_base_sdk_extension_version": 3,
+ "Platform_version_active_codenames": ["UpsideDownCake"],
+ "Platform_security_patch": "2023-03-05",
+ "Platform_preview_sdk_version": "1",
+ "Platform_base_os": "",
+ "Platform_version_last_stable": "13",
+ "Platform_version_known_codenames": "Base,Base11,Cupcake,Donut,Eclair,Eclair01,EclairMr1,Froyo,Gingerbread,GingerbreadMr1,Honeycomb,HoneycombMr1,HoneycombMr2,IceCreamSandwich,IceCreamSandwichMr1,JellyBean,JellyBeanMr1,JellyBeanMr2,Kitkat,KitkatWatch,Lollipop,LollipopMr1,M,N,NMr1,O,OMr1,P,Q,R,S,Sv2,Tiramisu,UpsideDownCake",
+ "Platform_min_supported_target_sdk_version": "23",
+ "Allow_missing_dependencies": false,
+ "Unbundled_build": false,
+ "Unbundled_build_apps": [],
+ "Unbundled_build_image": false,
+ "Always_use_prebuilt_sdks": false,
+ "Debuggable": true,
+ "Eng": false,
+ "DeviceName": "generic_x86_64",
+ "DeviceProduct": "aosp_x86_64",
+ "DeviceArch": "x86_64",
+ "DeviceArchVariant": "x86_64",
+ "DeviceCpuVariant": "",
+ "DeviceAbi": ["x86_64"],
+ "DeviceSecondaryArch": "x86",
+ "DeviceSecondaryArchVariant": "x86_64",
+ "DeviceSecondaryCpuVariant": "",
+ "DeviceSecondaryAbi": ["x86"],
+ "DeviceMaxPageSizeSupported": "4096",
+ "Aml_abis": false,
+ "Ndk_abis": false,
+ "NativeBridgeArch": "",
+ "NativeBridgeArchVariant": "",
+ "NativeBridgeCpuVariant": "",
+ "NativeBridgeAbi": [],
+ "NativeBridgeRelativePath": "",
+ "NativeBridgeSecondaryArch": "",
+ "NativeBridgeSecondaryArchVariant": "",
+ "NativeBridgeSecondaryCpuVariant": "",
+ "NativeBridgeSecondaryAbi": [],
+ "NativeBridgeSecondaryRelativePath": "",
+ "HostArch": "x86_64",
+ "HostSecondaryArch": "x86",
+ "HostStaticBinaries": false,
+ "HostMusl": false,
+ "CrossHost": "windows",
+ "CrossHostArch": "x86",
+ "CrossHostSecondaryArch": "x86_64",
+ "DeviceResourceOverlays": ["device/generic/goldfish/overlay"],
+ "ProductResourceOverlays": [],
+ "EnforceRROTargets": ["*"],
+ "EnforceRROExcludedOverlays": [],
+ "AAPTCharacteristics": "emulator",
+ "AAPTConfig": ["en_US,af_ZA,am_ET,ar_EG,ar_XB,as_IN,az_AZ,be_BY,bg_BG,bn_BD,bs_BA,ca_ES,cs_CZ,da_DK,de_DE,el_GR,en_AU,en_CA,en_GB,en_IN,en_XA,es_ES,es_US,et_EE,eu_ES,fa_IR,fi_FI,fr_CA,fr_FR,gl_ES,gu_IN,hi_IN,hr_HR,hu_HU,hy_AM,in_ID,is_IS,it_IT,iw_IL,ja_JP,ka_GE,kk_KZ,km_KH,kn_IN,ko_KR,ky_KG,lo_LA,lt_LT,lv_LV,mk_MK,ml_IN,mn_MN,mr_IN,ms_MY,my_MM,nb_NO,ne_NP,nl_NL,or_IN,pa_IN,pl_PL,pt_BR,pt_PT,ro_RO,ru_RU,si_LK,sk_SK,sl_SI,sq_AL,sr_Latn_RS,sr_RS,sv_SE,sw_TZ,ta_IN,te_IN,th_TH,tl_PH,tr_TR,uk_UA,ur_PK,uz_UZ,vi_VN,zh_CN,zh_HK,zh_TW,zu_ZA,"],
+ "AAPTPreferredConfig": "",
+ "AAPTPrebuiltDPI": [],
+ "DefaultAppCertificate": "",
+ "MainlineSepolicyDevCertificates": "build/make/target/product/security/",
+ "AppsDefaultVersionName": "UpsideDownCake",
+ "SanitizeHost": [],
+ "SanitizeDevice": [],
+ "SanitizeDeviceDiag": [],
+ "SanitizeDeviceArch": [],
+ "Safestack": false,
+ "EnableCFI": true,
+ "CFIExcludePaths": [],
+ "CFIIncludePaths": ["device/generic/goldfish/wifi/wpa_supplicant_8_lib","device/google/cuttlefish/guest/libs/wpa_supplicant_8_lib","external/tinyxml2","external/wpa_supplicant_8","frameworks/av/camera","frameworks/av/media","frameworks/av/services","frameworks/minikin","hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib","hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib","hardware/interfaces/nfc","hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib","hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib","hardware/interfaces/keymaster","hardware/interfaces/security","packages/modules/Bluetooth/system","system/chre","system/core/libnetutils","system/libziparchive","system/gatekeeper","system/keymaster","system/nfc","system/security"],
+ "IntegerOverflowExcludePaths": [],
+ "MemtagHeapExcludePaths": [],
+ "MemtagHeapAsyncIncludePaths": [],
+ "MemtagHeapSyncIncludePaths": [],
+ "DisableScudo": false,
+ "ClangTidy": false,
+ "TidyChecks": "",
+ "JavaCoveragePaths": [],
+ "JavaCoverageExcludePaths": [],
+ "GcovCoverage": false,
+ "ClangCoverage": false,
+ "ClangCoverageContinuousMode": false,
+ "NativeCoveragePaths": [],
+ "NativeCoverageExcludePaths": [],
+ "SamplingPGO": false,
+ "ArtUseReadBarrier": true,
+ "Binder32bit": false,
+ "BtConfigIncludeDir": "",
+ "DeviceKernelHeaders": [],
+ "DeviceVndkVersion": "current",
+ "Platform_vndk_version": "UpsideDownCake",
+ "ProductVndkVersion": "current",
+ "ExtraVndkVersions": ["29","30","31","32","33"],
+ "DeviceSystemSdkVersions": ["UpsideDownCake"],
+ "RecoverySnapshotVersion": "",
+ "Platform_systemsdk_versions": ["28","29","30","31","32","33","UpsideDownCake"],
+ "Malloc_not_svelte": true,
+ "Malloc_zero_contents": true,
+ "Malloc_pattern_fill_contents": false,
+ "Override_rs_driver": "",
+ "UncompressPrivAppDex": true,
+ "ModulesLoadedByPrivilegedModules": [],
+ "BootJars": ["com.android.art:core-oj","com.android.art:core-libart","com.android.art:okhttp","com.android.art:bouncycastle","com.android.art:apache-xml","platform:framework-minus-apex","platform:framework-graphics","platform:ext","platform:telephony-common","platform:voip-common","platform:ims-common","com.android.i18n:core-icu4j"],
+ "ApexBootJars": ["com.android.adservices:framework-adservices","com.android.adservices:framework-sdksandbox","com.android.appsearch:framework-appsearch","com.android.btservices:framework-bluetooth","com.android.conscrypt:conscrypt","com.android.ipsec:android.net.ipsec.ike","com.android.media:updatable-media","com.android.mediaprovider:framework-mediaprovider","com.android.ondevicepersonalization:framework-ondevicepersonalization","com.android.os.statsd:framework-statsd","com.android.permission:framework-permission","com.android.permission:framework-permission-s","com.android.scheduling:framework-scheduling","com.android.sdkext:framework-sdkextensions","com.android.tethering:framework-connectivity","com.android.tethering:framework-connectivity-t","com.android.tethering:framework-tethering","com.android.uwb:framework-uwb","com.android.virt:framework-virtualization","com.android.wifi:framework-wifi"],
+ "VndkUseCoreVariant": false,
+ "VndkSnapshotBuildArtifacts": false,
+ "DirectedVendorSnapshot": false,
+ "VendorSnapshotModules": {
+ },
+ "DirectedRecoverySnapshot": false,
+ "RecoverySnapshotModules": {
+ },
+ "VendorSnapshotDirsIncluded": [],
+ "VendorSnapshotDirsExcluded": [],
+ "RecoverySnapshotDirsIncluded": [],
+ "RecoverySnapshotDirsExcluded": [],
+ "HostFakeSnapshotEnabled": false,
+ "MultitreeUpdateMeta": false,
+ "Treble_linker_namespaces": true,
+ "Enforce_vintf_manifest": true,
+ "Uml": false,
+ "VendorPath": "vendor",
+ "OdmPath": "vendor/odm",
+ "VendorDlkmPath": "vendor/vendor_dlkm",
+ "OdmDlkmPath": "vendor/odm_dlkm",
+ "SystemDlkmPath": "system_dlkm",
+ "ProductPath": "system/product",
+ "SystemExtPath": "system/system_ext",
+ "MinimizeJavaDebugInfo": false,
+ "UseGoma": false,
+ "UseRBE": false,
+ "UseRBEJAVAC": false,
+ "UseRBER8": false,
+ "UseRBED8": false,
+ "Arc": false,
+ "NamespacesToExport": ["device/generic/goldfish","device/generic/goldfish-opengl","hardware/google/camera","hardware/google/camera/devices/EmulatedCamera"],
+ "PgoAdditionalProfileDirs": [],
+ "BoardPlatVendorPolicy": [],
+ "BoardReqdMaskPolicy": [],
+ "BoardSystemExtPublicPrebuiltDirs": [],
+ "BoardSystemExtPrivatePrebuiltDirs": [],
+ "BoardProductPublicPrebuiltDirs": [],
+ "BoardProductPrivatePrebuiltDirs": [],
+ "BoardVendorSepolicyDirs": ["system/bt/vendor_libs/linux/sepolicy"],
+ "BoardOdmSepolicyDirs": [],
+ "BoardVendorDlkmSepolicyDirs": [],
+ "BoardOdmDlkmSepolicyDirs": [],
+ "BoardSystemDlkmSepolicyDirs": [],
+ "SystemExtPublicSepolicyDirs": [],
+ "SystemExtPrivateSepolicyDirs": [],
+ "BoardSepolicyM4Defs": [],
+ "BoardSepolicyVers": "10000.0",
+ "SystemExtSepolicyPrebuiltApiDir": "",
+ "ProductSepolicyPrebuiltApiDir": "",
+ "PlatformSepolicyVersion": "10000.0",
+ "TotSepolicyVersion": "10000.0",
+ "PlatformSepolicyCompatVersions": ["28.0","29.0","30.0","31.0","32.0","33.0"],
+ "Flatten_apex": false,
+ "ForceApexSymlinkOptimization": false,
+ "DexpreoptGlobalConfig": "out/soong/dexpreopt.config",
+ "WithDexpreopt": true,
+ "ManifestPackageNameOverrides": [],
+ "PackageNameOverrides": [],
+ "CertificateOverrides": [],
+ "ApexGlobalMinSdkVersionOverride": "",
+ "EnforceSystemCertificate": false,
+ "EnforceSystemCertificateAllowList": [],
+ "ProductHiddenAPIStubs": [],
+ "ProductHiddenAPIStubsSystem": [],
+ "ProductHiddenAPIStubsTest": [],
+ "ProductPublicSepolicyDirs": [],
+ "ProductPrivateSepolicyDirs": [],
+ "TargetFSConfigGen": [],
+ "MissingUsesLibraries": ["com.google.android.ble","com.google.android.media.effects","com.google.android.wearable"],
+ "VendorVars": {
+ "ANDROID": {
+ "BOARD_USES_ODMIMAGE": "",
+ "BOARD_USES_RECOVERY_AS_BOOT": "",
+ "IS_TARGET_MIXED_SEPOLICY": "",
+ "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT": "",
+ "SYSTEMUI_OPTIMIZE_JAVA": "true",
+ "SYSTEMUI_USE_COMPOSE": "false",
+ "SYSTEM_OPTIMIZE_JAVA": "true",
+ "TARGET_DYNAMIC_64_32_MEDIASERVER": "true",
+ "TARGET_ENABLE_MEDIADRM_64": "true",
+ "avf_enabled": "true",
+ "include_nonpublic_framework_api": "false",
+ "module_build_from_source": "true"
+ },
+ "art_module": {
+ "source_build": "true"
+ },
+ "bluetooth_module": {
+ "source_build": "true"
+ },
+ "permission_module": {
+ "source_build": "true"
+ },
+ "rkpd_module": {
+ "source_build": "true"
+ },
+ "uwb_module": {
+ "source_build": "true"
+ },
+ "wifi_module": {
+ "source_build": "true"
+ }
+ },
+ "EnforceProductPartitionInterface": true,
+ "DeviceCurrentApiLevelForVendorModules": "current",
+ "EnforceInterPartitionJavaSdkLibrary": false,
+ "InterPartitionJavaLibraryAllowList": [],
+ "InstallExtraFlattenedApexes": true,
+ "CompressedApex": true,
+ "TrimmedApex": false,
+ "BoardUsesRecoveryAsBoot": false,
+ "BoardKernelBinaries": [],
+ "BoardKernelModuleInterfaceVersions": [],
+ "BoardMoveRecoveryResourcesToVendorBoot": false,
+ "PrebuiltHiddenApiDir": "",
+ "ShippingApiLevel": "31",
+ "BuildBrokenClangProperty": false,
+ "BuildBrokenClangAsFlags": false,
+ "BuildBrokenClangCFlags": false,
+ "BuildBrokenDepfile": false,
+ "BuildBrokenEnforceSyspropOwner": false,
+ "BuildBrokenTrebleSyspropNeverallow": false,
+ "BuildBrokenUsesSoongPython2Modules": false,
+ "BuildBrokenVendorPropertyNamespace": false,
+ "BuildBrokenInputDirModules": [],
+ "BuildDebugfsRestrictionsEnabled": false,
+ "RequiresInsecureExecmemForSwiftshader": false,
+ "SelinuxIgnoreNeverallows": false,
+ "SepolicySplit": true,
+ "SepolicyFreezeTestExtraDirs": [],
+ "SepolicyFreezeTestExtraPrebuiltDirs": [],
+ "GenerateAidlNdkPlatformBackend": false,
+ "IgnorePrefer32OnDevice": false,
+ "IncludeTags": []
+}''')
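Note on the generated product variable files above: each `*.variables.bzl` file exposes a `variables` dict produced by Starlark's built-in `json.decode`, so its fields can be read with ordinary dict operations from another `.bzl` file. The following is a minimal, hypothetical sketch of such a consumer; the load label (inferred from the `//build/bazel/tests/products` package used in product_labels.bzl below) and the helper function names are illustrative assumptions, not part of this change.

# Hypothetical consumer of the generated product variables. The load label is
# an assumption based on the test package layout referenced elsewhere in this
# change; only the dict-access pattern is the point of the sketch.
load("//build/bazel/tests/products:aosp_arm64.variables.bzl", "variables")

def _device_is_64bit():
    # json.decode() yields plain Starlark dicts/lists/strings/ints/bools,
    # so fields can be read with normal indexing and membership tests.
    return variables["DeviceArch"] in ("arm64", "x86_64")

def _platform_sdk_version():
    # Platform_sdk_version is an int (33 in the files above).
    return variables.get("Platform_sdk_version", 0)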
diff --git a/tests/products/product_labels.bzl b/tests/products/product_labels.bzl
new file mode 100644
index 00000000..e1cefefc
--- /dev/null
+++ b/tests/products/product_labels.bzl
@@ -0,0 +1,11 @@
+product_labels = [
+ "@//build/bazel/tests/products:aosp_arm_for_testing",
+ "@//build/bazel/tests/products:aosp_arm_for_testing_custom_linker_alignment",
+ "@//build/bazel/tests/products:aosp_arm64_for_testing",
+ "@//build/bazel/tests/products:aosp_arm64_for_testing_custom_linker_alignment",
+ "@//build/bazel/tests/products:aosp_arm64_for_testing_no_compression",
+ "@//build/bazel/tests/products:aosp_arm64_for_testing_unbundled_build",
+ "@//build/bazel/tests/products:aosp_arm64_for_testing_with_overrides_and_app_cert",
+ "@//build/bazel/tests/products:aosp_x86_for_testing",
+ "@//build/bazel/tests/products:aosp_x86_64_for_testing",
+]
diff --git a/vendor/google/BUILD b/vendor/google/BUILD
index ce0d6e8e..5b2eeecd 100644
--- a/vendor/google/BUILD
+++ b/vendor/google/BUILD
@@ -1,19 +1,45 @@
-load("//build/bazel/rules/apex:mainline_modules.bzl", "apex_aab")
+load("//build/bazel/rules/apex:apex_aab.bzl", "apex_aab")
+
+package(default_visibility = ["//build/bazel/tests/apex:__subpackages__"])
+
+# This package contains the multi-architecture .aab bundles and .apks archives
+# for Bazel-buildable Mainline modules. They are used exclusively for
+# development and testing, not for distribution.
+#
+# We have added testonly and visibility guardrails to ensure that these do not
+# get used unintentionally.
modules = [
"//build/bazel/examples/apex/minimal:build.bazel.examples.apex.minimal",
+ "//frameworks/av/apex:com.android.media.swcodec",
+ "//packages/modules/NeuralNetworks/apex:com.android.neuralnetworks",
+ "//packages/modules/adb/apex:com.android.adbd",
+ "//system/timezone/apex:com.android.tzdata",
]
+
name_label_map = {module[module.index(":") + 1:]: module for module in modules}
[
- apex_aab(
- name = "%s_apex_aab" % name,
- mainline_module = label,
+ (
+ apex_aab(
+ name = "%s_apex_aab" % name,
+ testonly = True,
+ mainline_module = label,
+ ),
+ genrule(
+ name = "%s_apks" % name,
+ testonly = True,
+ srcs = ["%s_apex_aab" % name],
+ outs = ["%s.apks" % name],
+ cmd = "$(location //prebuilts/bundletool) build-apks --bundle $(location %s_apex_aab) --output $@" % name,
+ tools = ["//prebuilts/bundletool"],
+ ),
)
for name, label in name_label_map.items()
]
filegroup(
name = "mainline_modules",
+ testonly = True,
srcs = ["%s_apex_aab" % name for name, label in name_label_map.items()],
)
diff --git a/vendor/google/build_mainline_modules.sh b/vendor/google/build_mainline_modules.sh
index 3be4da02..f5510514 100755
--- a/vendor/google/build_mainline_modules.sh
+++ b/vendor/google/build_mainline_modules.sh
@@ -15,14 +15,14 @@
# limitations under the License.
#
-BAZEL=tools/bazel
+BAZEL=build/bazel/bin/bazel
function main() {
if [ ! -e "build/make/core/Makefile" ]; then
echo "$0 must be run from the top of the Android source tree."
exit 1
fi
- "build/soong/soong_ui.bash" --build-mode --all-modules --dir="$(pwd)" bp2build USE_BAZEL_ANALYSIS=
+ "build/soong/soong_ui.bash" --build-mode --all-modules --dir="$(pwd)" bp2build
${BAZEL} build //build/bazel/vendor/google:mainline_modules --config=bp2build
}