author    Cronet Mainline Eng <cronet-mainline-eng+copybara@google.com>    2023-04-17 14:36:25 -0800
committer Patrick Rohr <prohr@google.com>    2023-04-17 15:41:30 -0700
commit    41cb724250484f326b0bbd5f8b955eb37b3b83c6 (patch)
tree      cb7b2cbe08b2c5305a56b31649359f8141edd198 /build
parent    c175721cfcc03e339122be17d569239df9762b2b (diff)
download  cronet-41cb724250484f326b0bbd5f8b955eb37b3b83c6.tar.gz
Import Cronet version 114.0.5709.3
Project import generated by Copybara.
FolderOrigin-RevId: /tmp/copybara-origin/src
Test: none
Change-Id: I263aa6b692a17ac2471b98b6e662b26dd1327c9c
Diffstat (limited to 'build')
-rw-r--r--build/BUILD.gn6
-rw-r--r--build/OWNERS.setnoparent2
-rw-r--r--build/action_helpers.py126
-rwxr-xr-xbuild/action_helpers_unittest.py87
-rwxr-xr-xbuild/add_rts_filters.py2
-rw-r--r--build/android/BUILD.gn3
-rw-r--r--build/android/PRESUBMIT.py1
-rwxr-xr-xbuild/android/adb_command_line.py1
-rwxr-xr-xbuild/android/adb_logcat_monitor.py1
-rwxr-xr-xbuild/android/adb_system_webengine_command_line16
-rwxr-xr-xbuild/android/apk_operations.py127
-rw-r--r--build/android/apk_operations.pydeps5
-rwxr-xr-xbuild/android/asan_symbolize.py1
-rw-r--r--build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java5
-rwxr-xr-xbuild/android/diff_resource_sizes.py1
-rw-r--r--build/android/dummy_libgcc/README.md20
-rw-r--r--build/android/dummy_libgcc/libgcc.a0
-rwxr-xr-xbuild/android/dump_apk_resource_strings.py1
-rwxr-xr-xbuild/android/emma_coverage_stats.py481
-rwxr-xr-xbuild/android/emma_coverage_stats_test.py593
-rwxr-xr-xbuild/android/generate_jacoco_report.py1
-rwxr-xr-xbuild/android/gradle/generate_gradle.py157
-rwxr-xr-xbuild/android/gradle/gn_to_cmake.py690
-rw-r--r--build/android/gradle/root.jinja12
-rwxr-xr-xbuild/android/gyp/aar.py6
-rw-r--r--build/android/gyp/aar.pydeps1
-rwxr-xr-xbuild/android/gyp/aidl.py14
-rw-r--r--build/android/gyp/aidl.pydeps2
-rwxr-xr-xbuild/android/gyp/allot_native_libraries.py3
-rw-r--r--build/android/gyp/allot_native_libraries.pydeps1
-rwxr-xr-xbuild/android/gyp/apkbuilder.py74
-rw-r--r--build/android/gyp/apkbuilder.pydeps3
-rwxr-xr-xbuild/android/gyp/assert_static_initializers.py1
-rwxr-xr-xbuild/android/gyp/bundletool.py6
-rwxr-xr-xbuild/android/gyp/bytecode_processor.py33
-rw-r--r--build/android/gyp/bytecode_processor.pydeps21
-rwxr-xr-xbuild/android/gyp/bytecode_rewriter.py7
-rw-r--r--build/android/gyp/bytecode_rewriter.pydeps1
-rw-r--r--build/android/gyp/check_flag_expectations.pydeps1
-rwxr-xr-xbuild/android/gyp/compile_java.py177
-rw-r--r--build/android/gyp/compile_java.pydeps2
-rwxr-xr-xbuild/android/gyp/compile_kt.py182
-rw-r--r--build/android/gyp/compile_kt.pydeps33
-rwxr-xr-xbuild/android/gyp/compile_resources.py184
-rw-r--r--build/android/gyp/compile_resources.pydeps4
-rwxr-xr-xbuild/android/gyp/copy_ex.py24
-rw-r--r--build/android/gyp/copy_ex.pydeps1
-rwxr-xr-xbuild/android/gyp/create_app_bundle.py48
-rw-r--r--build/android/gyp/create_app_bundle.pydeps4
-rw-r--r--build/android/gyp/create_app_bundle_apks.pydeps3
-rwxr-xr-xbuild/android/gyp/create_bundle_wrapper_script.py3
-rw-r--r--build/android/gyp/create_bundle_wrapper_script.pydeps1
-rwxr-xr-xbuild/android/gyp/create_java_binary_script.py21
-rw-r--r--build/android/gyp/create_java_binary_script.pydeps1
-rwxr-xr-xbuild/android/gyp/create_r_java.py15
-rw-r--r--build/android/gyp/create_r_java.pydeps4
-rw-r--r--build/android/gyp/create_r_txt.pydeps3
-rwxr-xr-xbuild/android/gyp/create_size_info_files.py26
-rw-r--r--build/android/gyp/create_size_info_files.pydeps1
-rwxr-xr-xbuild/android/gyp/create_ui_locale_resources.py13
-rw-r--r--build/android/gyp/create_ui_locale_resources.pydeps4
-rwxr-xr-xbuild/android/gyp/create_unwind_table.py8
-rwxr-xr-xbuild/android/gyp/dex.py33
-rw-r--r--build/android/gyp/dex.pydeps3
-rwxr-xr-xbuild/android/gyp/dist_aar.py66
-rw-r--r--build/android/gyp/dist_aar.pydeps2
-rwxr-xr-xbuild/android/gyp/filter_zip.py11
-rw-r--r--build/android/gyp/filter_zip.pydeps2
-rw-r--r--build/android/gyp/finalize_apk.py2
-rwxr-xr-xbuild/android/gyp/find.py1
-rwxr-xr-xbuild/android/gyp/flatc_java.py5
-rw-r--r--build/android/gyp/flatc_java.pydeps2
-rwxr-xr-xbuild/android/gyp/gcc_preprocess.py10
-rw-r--r--build/android/gyp/gcc_preprocess.pydeps2
-rwxr-xr-xbuild/android/gyp/generate_android_wrapper.py3
-rwxr-xr-xbuild/android/gyp/generate_linker_version_script.py11
-rw-r--r--build/android/gyp/generate_linker_version_script.pydeps1
-rwxr-xr-xbuild/android/gyp/ijar.py3
-rw-r--r--build/android/gyp/ijar.pydeps1
-rwxr-xr-xbuild/android/gyp/jacoco_instr.py62
-rw-r--r--build/android/gyp/jacoco_instr.pydeps2
-rwxr-xr-xbuild/android/gyp/java_cpp_enum.py7
-rw-r--r--build/android/gyp/java_cpp_enum.pydeps2
-rwxr-xr-xbuild/android/gyp/java_cpp_features.py6
-rw-r--r--build/android/gyp/java_cpp_features.pydeps2
-rwxr-xr-xbuild/android/gyp/java_cpp_strings.py6
-rw-r--r--build/android/gyp/java_cpp_strings.pydeps2
-rwxr-xr-xbuild/android/gyp/java_google_api_keys.py3
-rw-r--r--build/android/gyp/java_google_api_keys.pydeps1
-rwxr-xr-xbuild/android/gyp/javac_output_processor.py60
-rwxr-xr-xbuild/android/gyp/jinja_template.py11
-rw-r--r--build/android/gyp/jinja_template.pydeps4
-rwxr-xr-xbuild/android/gyp/lint.py33
-rw-r--r--build/android/gyp/lint.pydeps1
-rwxr-xr-xbuild/android/gyp/merge_manifest.py78
-rw-r--r--build/android/gyp/merge_manifest.pydeps1
-rwxr-xr-xbuild/android/gyp/optimize_resources.py3
-rw-r--r--build/android/gyp/optimize_resources.pydeps1
-rwxr-xr-xbuild/android/gyp/prepare_resources.py6
-rw-r--r--build/android/gyp/prepare_resources.pydeps4
-rwxr-xr-xbuild/android/gyp/process_native_prebuilt.py3
-rw-r--r--build/android/gyp/process_native_prebuilt.pydeps1
-rwxr-xr-xbuild/android/gyp/proguard.py212
-rw-r--r--build/android/gyp/proguard.pydeps6
-rwxr-xr-xbuild/android/gyp/trace_event_bytecode_rewriter.py11
-rw-r--r--build/android/gyp/trace_event_bytecode_rewriter.pydeps1
-rwxr-xr-xbuild/android/gyp/turbine.py48
-rw-r--r--build/android/gyp/turbine.pydeps7
-rwxr-xr-xbuild/android/gyp/unused_resources.py11
-rw-r--r--build/android/gyp/unused_resources.pydeps3
-rw-r--r--build/android/gyp/util/build_utils.py305
-rw-r--r--build/android/gyp/util/diff_utils.py5
-rw-r--r--build/android/gyp/util/manifest_utils.py103
-rwxr-xr-xbuild/android/gyp/util/manifest_utils_test.py10
-rw-r--r--build/android/gyp/util/md5_check.py6
-rw-r--r--build/android/gyp/util/resources_parser.py3
-rw-r--r--build/android/gyp/util/zipalign.py45
-rwxr-xr-xbuild/android/gyp/validate_inputs.py34
-rwxr-xr-xbuild/android/gyp/validate_static_library_dex_references.py6
-rw-r--r--build/android/gyp/validate_static_library_dex_references.pydeps1
-rwxr-xr-xbuild/android/gyp/write_build_config.py322
-rw-r--r--build/android/gyp/write_build_config.pydeps3
-rwxr-xr-xbuild/android/gyp/write_native_libraries_java.py14
-rw-r--r--build/android/gyp/write_native_libraries_java.pydeps2
-rwxr-xr-xbuild/android/gyp/zip.py37
-rw-r--r--build/android/gyp/zip.pydeps2
-rwxr-xr-xbuild/android/incremental_install/generate_android_manifest.py3
-rw-r--r--build/android/incremental_install/generate_android_manifest.pydeps1
-rwxr-xr-xbuild/android/incremental_install/write_installer_json.py7
-rw-r--r--build/android/incremental_install/write_installer_json.pydeps1
-rwxr-xr-xbuild/android/lighttpd_server.py1
-rwxr-xr-xbuild/android/list_java_targets.py11
-rwxr-xr-xbuild/android/method_count.py1
-rwxr-xr-xbuild/android/print_cipd_version.py46
-rw-r--r--build/android/pylib/base/base_test_result.py8
-rw-r--r--build/android/pylib/base/output_manager.py42
-rw-r--r--build/android/pylib/constants/__init__.py6
-rw-r--r--build/android/pylib/gtest/gtest_test_instance.py1
-rw-r--r--build/android/pylib/instrumentation/test_result.py21
-rw-r--r--build/android/pylib/local/device/local_device_environment.py13
-rw-r--r--build/android/pylib/local/device/local_device_gtest_run.py4
-rw-r--r--build/android/pylib/local/device/local_device_instrumentation_test_run.py2
-rw-r--r--build/android/pylib/local/emulator/avd.py387
-rw-r--r--build/android/pylib/local/emulator/ini.py70
-rwxr-xr-xbuild/android/pylib/local/emulator/ini_test.py90
-rw-r--r--build/android/pylib/local/emulator/local_emulator_environment.py5
-rw-r--r--build/android/pylib/local/machine/local_machine_junit_test_run.py98
-rwxr-xr-xbuild/android/pylib/results/presentation/standard_gtest_merge.py1
-rw-r--r--build/android/pylib/results/presentation/test_results_presentation.pydeps2
-rw-r--r--build/android/pylib/results/report_results.py1
-rwxr-xr-xbuild/android/pylib/symbols/mock_addr2line/mock_addr2line3
-rw-r--r--build/android/pylib/utils/device_dependencies.py19
-rwxr-xr-xbuild/android/resource_sizes.py1
-rwxr-xr-xbuild/android/stacktrace/stackwalker.py1
-rwxr-xr-xbuild/android/test_runner.py28
-rw-r--r--build/android/test_runner.pydeps5
-rwxr-xr-xbuild/apple/tweak_info_plist.py1
-rw-r--r--build/args/README.txt4
-rw-r--r--build/args/headless.gn3
-rw-r--r--build/build_config.h3
-rw-r--r--build/check_gn_headers_whitelist.txt3
-rwxr-xr-xbuild/check_return_value.py3
-rw-r--r--build/chromeos/.style.yapf2
-rwxr-xr-xbuild/chromeos/test_runner.py24
-rwxr-xr-xbuild/chromeos/test_runner_test.py23
-rwxr-xr-xbuild/clobber.py59
-rwxr-xr-xbuild/clobber_unittest.py148
-rwxr-xr-xbuild/compute_build_timestamp.py3
-rw-r--r--build/config/BUILD.gn29
-rw-r--r--build/config/BUILDCONFIG.gn49
-rw-r--r--build/config/android/config.gni72
-rw-r--r--build/config/android/internal_rules.gni469
-rw-r--r--build/config/android/linker_version_script.gni4
-rw-r--r--build/config/android/rules.gni672
-rw-r--r--build/config/android/sdk.gni5
-rw-r--r--build/config/android/test/classpath_order/BUILD.gn108
-rw-r--r--build/config/android/test/classpath_order/java/res_template/values/values.xml9
-rw-r--r--build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java32
-rw-r--r--build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja28
-rw-r--r--build/config/android/test/resource_overlay/BUILD.gn2
-rw-r--r--build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java2
-rw-r--r--build/config/apple/BUILD.gn17
-rw-r--r--build/config/apple/symbols.gni2
-rw-r--r--build/config/arm.gni18
-rw-r--r--build/config/c++/BUILD.gn16
-rw-r--r--build/config/c++/c++.gni21
-rw-r--r--build/config/c++/libc++.natvis4
-rw-r--r--build/config/chrome_build.gni38
-rw-r--r--build/config/chromeos/rules.gni16
-rw-r--r--build/config/clang/BUILD.gn26
-rw-r--r--build/config/clang/clang.gni3
-rw-r--r--build/config/compiler/BUILD.gn186
-rw-r--r--build/config/compiler/compiler.gni13
-rw-r--r--build/config/compiler/pgo/BUILD.gn20
-rw-r--r--build/config/compiler/pgo/pgo.gni2
-rw-r--r--build/config/cronet/config.gni10
-rw-r--r--build/config/dcheck_always_on.gni7
-rw-r--r--build/config/features.gni2
-rw-r--r--build/config/fuchsia/BUILD.gn3
-rw-r--r--build/config/fuchsia/build_cmx_from_fragment.py49
-rwxr-xr-xbuild/config/fuchsia/build_symbol_archive.py2
-rw-r--r--build/config/fuchsia/generate_runner_scripts.gni102
-rw-r--r--build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni2
-rw-r--r--build/config/fuchsia/size_optimized_cast_receiver_args.gn2
-rw-r--r--build/config/fuchsia/sizes.gni7
-rw-r--r--build/config/fuchsia/test/README.md45
-rw-r--r--build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml8
-rw-r--r--build/config/fuchsia/test/context_provider.shard.test-cml2
-rw-r--r--build/config/fuchsia/test/fonts.shard.test-cml21
-rw-r--r--build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml5
-rw-r--r--build/config/fuchsia/test/minimum.shard.test-cml19
-rw-r--r--build/config/fuchsia/test/platform_video_codecs.shard.test-cml3
-rw-r--r--build/config/fuchsia/test/sysmem.shard.test-cml (renamed from build/config/fuchsia/test/cfv1_launcher.shard.test-cml)2
-rw-r--r--build/config/fuchsia/test/system_test_minimum.shard.test-cml46
-rw-r--r--build/config/fuchsia/test/test_ui_stack.shard.test-cml34
-rw-r--r--build/config/gclient_args.gni1
-rwxr-xr-xbuild/config/get_host_byteorder.py3
-rw-r--r--build/config/ios/BUILD.gn5
-rw-r--r--build/config/ios/asset_catalog.gni100
-rw-r--r--build/config/ios/bundle_data_from_filelist.gni24
-rw-r--r--build/config/ios/codesign.py1
-rw-r--r--build/config/ios/compile_ib_files.py1
-rw-r--r--build/config/ios/config.gni5
-rw-r--r--build/config/ios/find_signing_identity.py1
-rw-r--r--build/config/ios/ios_sdk.gni47
-rw-r--r--build/config/ios/ios_sdk_overrides.gni4
-rw-r--r--build/config/ios/ios_test_runner_wrapper.gni6
-rw-r--r--build/config/ios/rules.gni1146
-rw-r--r--build/config/ios/write_framework_hmap.py1
-rwxr-xr-xbuild/config/linux/pkg-config.py3
-rw-r--r--build/config/logging.gni4
-rw-r--r--build/config/mac/BUILD.gn12
-rw-r--r--build/config/ozone.gni2
-rw-r--r--build/config/riscv.gni19
-rw-r--r--build/config/rust.gni254
-rw-r--r--build/config/sanitizers/BUILD.gn25
-rw-r--r--build/config/sanitizers/sanitizers.gni50
-rw-r--r--build/config/siso/README.md8
-rw-r--r--build/config/siso/linux.star52
-rw-r--r--build/config/siso/main.star39
-rw-r--r--build/config/siso/simple.star45
-rw-r--r--build/config/win/BUILD.gn52
-rw-r--r--build/config/win/visual_studio_version.gni9
-rwxr-xr-xbuild/copy_test_data_ios.py3
-rwxr-xr-xbuild/cp.py2
-rwxr-xr-xbuild/detect_host_arch.py3
-rwxr-xr-xbuild/dir_exists.py2
-rw-r--r--build/docs/writing_gn_templates.md13
-rw-r--r--build/dotfile_settings.gni4
-rwxr-xr-xbuild/download_nacl_toolchains.py3
-rwxr-xr-xbuild/env_dump.py2
-rwxr-xr-xbuild/extract_from_cab.py3
-rwxr-xr-xbuild/extract_partition.py2
-rwxr-xr-xbuild/find_depot_tools.py3
-rwxr-xr-xbuild/fix_gn_headers.py3
-rw-r--r--build/fuchsia/PRESUBMIT.py4
-rwxr-xr-xbuild/fuchsia/binary_size_differ.py14
-rwxr-xr-xbuild/fuchsia/binary_size_differ_test.py2
-rwxr-xr-xbuild/fuchsia/binary_sizes.py20
-rwxr-xr-xbuild/fuchsia/binary_sizes_test.py3
-rw-r--r--build/fuchsia/boot_data.py107
-rwxr-xr-xbuild/fuchsia/boot_data_test.py46
-rw-r--r--build/fuchsia/cipd/BUILD.gn39
-rw-r--r--build/fuchsia/cipd/version.template (renamed from build/fuchsia/cipd/build_id.template)0
-rw-r--r--build/fuchsia/common.py149
-rw-r--r--build/fuchsia/common_args.py189
-rwxr-xr-xbuild/fuchsia/deploy_to_pkg_repo.py48
-rw-r--r--build/fuchsia/device_target.py404
-rwxr-xr-xbuild/fuchsia/device_target_test.py272
-rw-r--r--build/fuchsia/emu_target.py190
-rw-r--r--build/fuchsia/exit_on_sig_term.py26
-rwxr-xr-xbuild/fuchsia/ffx_session.py620
-rw-r--r--build/fuchsia/fvdl_target.py247
-rwxr-xr-xbuild/fuchsia/fvdl_target_test.py117
-rw-r--r--build/fuchsia/gcs_download.py59
-rwxr-xr-xbuild/fuchsia/gcs_download_test.py55
-rw-r--r--build/fuchsia/legacy_ermine_ctl.py25
-rw-r--r--build/fuchsia/linux_internal.sdk.sha12
-rw-r--r--build/fuchsia/log_manager.py53
-rw-r--r--build/fuchsia/net_test_server.py84
-rw-r--r--build/fuchsia/pkg_repo.py236
-rw-r--r--build/fuchsia/qemu_image.py75
-rw-r--r--build/fuchsia/qemu_target.py274
-rwxr-xr-xbuild/fuchsia/qemu_target_test.py47
-rw-r--r--build/fuchsia/remote_cmd.py150
-rw-r--r--build/fuchsia/run_test_package.py213
-rw-r--r--build/fuchsia/runner_exceptions.py82
-rwxr-xr-xbuild/fuchsia/start_emulator.py67
-rw-r--r--build/fuchsia/symbolizer.py45
-rw-r--r--build/fuchsia/target.py339
-rw-r--r--build/fuchsia/test/PRESUBMIT.py4
-rw-r--r--build/fuchsia/test/common.py313
-rwxr-xr-xbuild/fuchsia/test/common_unittests.py54
-rw-r--r--build/fuchsia/test/compatible_utils.py64
-rwxr-xr-xbuild/fuchsia/test/compatible_utils_unittests.py94
-rwxr-xr-xbuild/fuchsia/test/coveragetest.py12
-rw-r--r--build/fuchsia/test/ffx_emulator.py158
-rwxr-xr-xbuild/fuchsia/test/ffx_emulator_unittests.py49
-rw-r--r--build/fuchsia/test/ffx_integration.py162
-rwxr-xr-xbuild/fuchsia/test/flash_device.py106
-rwxr-xr-xbuild/fuchsia/test/flash_device_unittests.py140
-rw-r--r--build/fuchsia/test/lockfile.py79
-rwxr-xr-xbuild/fuchsia/test/log_manager.py20
-rwxr-xr-xbuild/fuchsia/test/log_manager_unittests.py26
-rwxr-xr-xbuild/fuchsia/test/run_executable_test.py65
-rwxr-xr-xbuild/fuchsia/test/run_test.py12
-rw-r--r--build/fuchsia/test/run_webpage_test.py60
-rwxr-xr-xbuild/fuchsia/test/start_emulator.py24
-rwxr-xr-xbuild/fuchsia/test_runner.py313
-rwxr-xr-xbuild/fuchsia/update_images.py44
-rwxr-xr-xbuild/fuchsia/update_images_test.py16
-rwxr-xr-xbuild/fuchsia/update_product_bundles.py115
-rwxr-xr-xbuild/fuchsia/update_product_bundles_test.py117
-rwxr-xr-xbuild/fuchsia/update_sdk.py15
-rwxr-xr-xbuild/fuchsia/update_sdk_test.py2
-rwxr-xr-xbuild/get_landmines.py6
-rwxr-xr-xbuild/get_symlink_targets.py2
-rw-r--r--build/gn_run_binary.py1
-rwxr-xr-xbuild/install-build-deps-android.sh37
-rwxr-xr-xbuild/install-build-deps.sh98
-rw-r--r--build/ios/PRESUBMIT.py20
-rw-r--r--build/ios/presubmit_support.py39
-rwxr-xr-xbuild/ios/presubmit_support_test.py165
-rw-r--r--build/ios/test_data/bar.html0
-rw-r--r--build/ios/test_data/basic.filelist7
-rw-r--r--build/ios/test_data/basic.globlist5
-rw-r--r--build/ios/test_data/comment.filelist2
-rw-r--r--build/ios/test_data/comment.globlist7
-rw-r--r--build/ios/test_data/different_local_path.filelist9
-rw-r--r--build/ios/test_data/different_local_path.globlist6
-rw-r--r--build/ios/test_data/duplicates.filelist7
-rw-r--r--build/ios/test_data/duplicates.globlist7
-rw-r--r--build/ios/test_data/exclusions.filelist9
-rw-r--r--build/ios/test_data/exclusions.globlist6
-rw-r--r--build/ios/test_data/extra.filelist8
-rw-r--r--build/ios/test_data/extra.globlist5
-rw-r--r--build/ios/test_data/foo.css0
-rw-r--r--build/ios/test_data/ignore_outside_globlist_dir.filelist8
-rw-r--r--build/ios/test_data/ignore_outside_globlist_dir.globlist8
-rw-r--r--build/ios/test_data/missing.filelist9
-rw-r--r--build/ios/test_data/missing.globlist8
-rw-r--r--build/ios/test_data/outside_globlist_dir.filelist8
-rw-r--r--build/ios/test_data/outside_globlist_dir.globlist6
-rw-r--r--build/ios/test_data/reorder.filelist9
-rw-r--r--build/ios/test_data/reorder.globlist6
-rw-r--r--build/ios/test_data/repository_relative.filelist9
-rw-r--r--build/ios/test_data/repository_relative.globlist6
-rw-r--r--build/ios/test_data/subdirectory/baz.txt0
-rwxr-xr-xbuild/ios/update_bundle_filelist.py318
-rw-r--r--build/lacros/README.md11
-rwxr-xr-xbuild/lacros/test_runner.py121
-rwxr-xr-xbuild/lacros/test_runner_test.py3
-rwxr-xr-xbuild/landmines.py2
-rwxr-xr-xbuild/locale_tool.py1
-rwxr-xr-xbuild/mac/find_sdk.py3
-rwxr-xr-xbuild/mac/should_use_hermetic_xcode.py1
-rw-r--r--build/nocompile.gni11
-rw-r--r--build/private_code_test/BUILD.gn47
-rw-r--r--build/private_code_test/README.md36
-rwxr-xr-xbuild/private_code_test/list_gclient_deps.py43
-rw-r--r--build/private_code_test/private_code_test.gni63
-rwxr-xr-xbuild/private_code_test/private_code_test.py135
-rwxr-xr-xbuild/protoc_java.py13
-rw-r--r--build/protoc_java.pydeps2
-rw-r--r--build/redirect_stdout.py1
-rwxr-xr-xbuild/rm.py3
-rw-r--r--build/rust/BUILD.gn47
-rw-r--r--build/rust/analyze.gni79
-rw-r--r--build/rust/cargo_crate.gni120
-rw-r--r--build/rust/clanglibs/BUILD.gn6
-rwxr-xr-xbuild/rust/clanglibs/find_clanglibs.py10
-rwxr-xr-xbuild/rust/collect_rust_sources.py64
-rwxr-xr-xbuild/rust/run_bindgen.py20
-rwxr-xr-xbuild/rust/run_build_script.py22
-rwxr-xr-xbuild/rust/run_rs_bindings_from_cc.py3
-rw-r--r--build/rust/rust_bindgen.gni157
-rw-r--r--build/rust/rust_static_library.gni33
-rw-r--r--build/rust/rust_target.gni132
-rw-r--r--build/rust/rust_unit_test.gni29
-rwxr-xr-xbuild/rust/rustc_wrapper.py32
-rw-r--r--build/rust/std/BUILD.gn247
-rw-r--r--build/rust/std/fake_root/.cargo/config.toml5
-rw-r--r--build/rust/std/fake_root/Cargo.toml16
-rw-r--r--build/rust/std/fake_root/README.md2
-rw-r--r--build/rust/std/fake_root/src/main.rs3
-rwxr-xr-xbuild/rust/std/find_std_rlibs.py59
-rw-r--r--build/rust/std/gnrt_config.toml53
-rw-r--r--build/rust/std/remap_alloc.cc6
-rw-r--r--build/rust/std/rules/BUILD.gn865
-rw-r--r--build/rust/tests/BUILD.gn65
-rw-r--r--build/rust/tests/bindgen_test/BUILD.gn1
-rw-r--r--build/rust/tests/bindgen_test/src/lib.rs1
-rw-r--r--build/rust/tests/test_aliased_deps/BUILD.gn30
-rw-r--r--build/rust/tests/test_aliased_deps/lib.rs11
-rw-r--r--build/rust/tests/test_aliased_deps/main.rs (renamed from build/config/fuchsia/test/web_instance_host.shard.test-cml)11
-rw-r--r--build/rust/tests/test_aliased_deps/real_name.rs11
-rw-r--r--build/rust/tests/test_control_flow_guard/BUILD.gn11
-rw-r--r--build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs43
-rw-r--r--build/rust/tests/test_local_std/BUILD.gn23
-rw-r--r--build/rust/tests/test_local_std/lib.rs8
-rw-r--r--build/rust/tests/test_local_std/main.rs7
-rw-r--r--build/rust/tests/test_rlib_crate/BUILD.gn32
-rw-r--r--build/rust/tests/test_rust_shared_library/BUILD.gn11
-rw-r--r--build/rust/tests/test_rust_shared_library/src/lib.rs4
-rw-r--r--build/rust/tests/test_rust_static_library/BUILD.gn1
-rw-r--r--build/rust/tests/test_rust_static_library/src/lib.rs4
-rw-r--r--build/rust/tests/test_serde_json_lenient/BUILD.gn1
-rw-r--r--build/rust/tests/test_simple_rust_exe/BUILD.gn1
-rw-r--r--build/sanitizers/lsan_suppressions.cc5
-rw-r--r--build/sanitizers/tsan_suppressions.cc54
-rw-r--r--build/shim_headers.gni6
-rw-r--r--build/skia_gold_common/PRESUBMIT.py34
-rw-r--r--build/symlink.gni3
-rwxr-xr-xbuild/symlink.py3
-rw-r--r--build/toolchain/apple/filter_libtool.py1
-rw-r--r--build/toolchain/apple/get_tool_mtime.py1
-rwxr-xr-xbuild/toolchain/apple/linker_driver.py63
-rw-r--r--build/toolchain/apple/toolchain.gni222
-rwxr-xr-xbuild/toolchain/clang_code_coverage_wrapper.py3
-rwxr-xr-xbuild/toolchain/gcc_link_wrapper.py2
-rwxr-xr-xbuild/toolchain/gcc_solink_wrapper.py2
-rw-r--r--build/toolchain/gcc_toolchain.gni234
-rw-r--r--build/toolchain/get_cpu_count.py1
-rw-r--r--build/toolchain/ios/BUILD.gn34
-rw-r--r--build/toolchain/ios/swiftc.py9
-rw-r--r--build/toolchain/linux/BUILD.gn8
-rw-r--r--build/toolchain/nacl_toolchain.gni4
-rw-r--r--build/toolchain/rbe.gni12
-rw-r--r--build/toolchain/toolchain.gni13
-rw-r--r--build/toolchain/whole_archive.py51
-rw-r--r--build/toolchain/win/BUILD.gn23
-rw-r--r--build/toolchain/win/midl.py1
-rwxr-xr-xbuild/toolchain/win/ml.py2
-rwxr-xr-xbuild/toolchain/win/rc/rc.py3
-rw-r--r--build/toolchain/win/setup_toolchain.py5
-rw-r--r--build/toolchain/win/tool_wrapper.py1
-rw-r--r--build/toolchain/win/toolchain.gni246
-rw-r--r--build/toolchain/win/win_toolchain_data.gni43
-rw-r--r--build/toolchain/wrapper_utils.py6
-rw-r--r--build/util/LASTCHANGE4
-rw-r--r--build/util/LASTCHANGE.committime2
-rwxr-xr-xbuild/util/action_remote.py34
-rwxr-xr-xbuild/util/android_chrome_version.py13
-rw-r--r--build/util/android_chrome_version_test.py38
-rw-r--r--build/util/generate_wrapper.gni61
-rwxr-xr-xbuild/util/java_action.py2
-rwxr-xr-xbuild/util/lastchange.py3
-rw-r--r--build/util/lib/common/perf_tests_results_helper.py1
-rwxr-xr-xbuild/util/lib/common/unittest_util_test.py2
-rw-r--r--build/util/lib/results/DIR_METADATA11
-rw-r--r--build/util/lib/results/result_sink.py7
-rwxr-xr-xbuild/util/lib/results/result_sink_test.py18
-rwxr-xr-xbuild/util/version.py1
-rwxr-xr-xbuild/vs_toolchain.py123
-rw-r--r--build/win/BUILD.gn104
-rwxr-xr-xbuild/win/copy_cdb_to_output.py14
-rw-r--r--build/win/gn_meta_sln.py1
-rw-r--r--build/win/message_compiler.py1
-rwxr-xr-xbuild/win/reorder-imports.py2
-rwxr-xr-xbuild/win/use_ansi_codes.py3
-rwxr-xr-xbuild/write_build_date_header.py37
-rwxr-xr-xbuild/write_buildflag_header.py2
-rw-r--r--build/zip_helpers.py222
463 files changed, 10556 insertions, 11495 deletions
diff --git a/build/BUILD.gn b/build/BUILD.gn
index 3d0551349..58f5f20fb 100644
--- a/build/BUILD.gn
+++ b/build/BUILD.gn
@@ -7,6 +7,7 @@ import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/args.gni")
import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/features.gni")
import("//build/util/process_version.gni")
import("//build_overrides/build.gni")
@@ -30,6 +31,11 @@ buildflag_header("branding_buildflags") {
}
}
+buildflag_header("blink_buildflags") {
+ header = "blink_buildflags.h"
+ flags = [ "USE_BLINK=$use_blink" ]
+}
+
buildflag_header("chromecast_buildflags") {
header = "chromecast_buildflags.h"
diff --git a/build/OWNERS.setnoparent b/build/OWNERS.setnoparent
index 260254e43..52755b513 100644
--- a/build/OWNERS.setnoparent
+++ b/build/OWNERS.setnoparent
@@ -10,7 +10,7 @@ file://third_party/OWNERS
# Security reviews
file://build/fuchsia/SECURITY_OWNERS
file://chromeos/SECURITY_OWNERS
-file://content/browser/SITE_ISOLATION_OWNERS
+file://content/browser/CHILD_PROCESS_SECURITY_POLICY_OWNERS
file://ipc/SECURITY_OWNERS
file://net/base/SECURITY_OWNERS
file://sandbox/linux/OWNERS
diff --git a/build/action_helpers.py b/build/action_helpers.py
new file mode 100644
index 000000000..046a292ba
--- /dev/null
+++ b/build/action_helpers.py
@@ -0,0 +1,126 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper functions useful when writing scripts used by action() targets."""
+
+import contextlib
+import filecmp
+import os
+import pathlib
+import posixpath
+import shutil
+import tempfile
+
+import gn_helpers
+
+
+@contextlib.contextmanager
+def atomic_output(path, mode='w+b', only_if_changed=True):
+ """Prevent half-written files and dirty mtimes for unchanged files.
+
+ Args:
+ path: Path to the final output file, which will be written atomically.
+ mode: The mode to open the file in (str).
+ only_if_changed: Whether to maintain the mtime if the file has not changed.
+ Returns:
+ A Context Manager that yields a NamedTemporaryFile instance. On exit, the
+ manager will check if the file contents is different from the destination
+ and if so, move it into place.
+
+ Example:
+ with action_helpers.atomic_output(output_path) as tmp_file:
+ subprocess.check_call(['prog', '--output', tmp_file.name])
+ """
+ # Create in same directory to ensure same filesystem when moving.
+ dirname = os.path.dirname(path) or '.'
+ os.makedirs(dirname, exist_ok=True)
+ with tempfile.NamedTemporaryFile(mode,
+ suffix=os.path.basename(path),
+ dir=dirname,
+ delete=False) as f:
+ try:
+ yield f
+
+ # File should be closed before comparison/move.
+ f.close()
+ if not (only_if_changed and os.path.exists(path)
+ and filecmp.cmp(f.name, path)):
+ shutil.move(f.name, path)
+ finally:
+ f.close()
+ if os.path.exists(f.name):
+ os.unlink(f.name)
+
+
+def add_depfile_arg(parser):
+ if hasattr(parser, 'add_option'):
+ func = parser.add_option
+ else:
+ func = parser.add_argument
+ func('--depfile', help='Path to depfile (refer to "gn help depfile")')
+
+
+def write_depfile(depfile_path, first_gn_output, inputs=None):
+ """Writes a ninja depfile.
+
+ See notes about how to use depfiles in //build/docs/writing_gn_templates.md.
+
+ Args:
+ depfile_path: Path to file to write.
+ first_gn_output: Path of first entry in action's outputs.
+ inputs: List of inputs to add to depfile.
+ """
+ assert depfile_path != first_gn_output # http://crbug.com/646165
+ assert not isinstance(inputs, str) # Easy mistake to make
+
+ def _process_path(path):
+ assert not os.path.isabs(path), f'Found abs path in depfile: {path}'
+ if os.path.sep != posixpath.sep:
+ path = str(pathlib.Path(path).as_posix())
+ assert '\\' not in path, f'Found \\ in depfile: {path}'
+ return path.replace(' ', '\\ ')
+
+ sb = []
+ sb.append(_process_path(first_gn_output))
+ if inputs:
+ # Sort and uniquify to ensure file is hermetic.
+ # One path per line to keep it human readable.
+ sb.append(': \\\n ')
+ sb.append(' \\\n '.join(sorted(_process_path(p) for p in set(inputs))))
+ else:
+ sb.append(': ')
+ sb.append('\n')
+
+ path = pathlib.Path(depfile_path)
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_text(''.join(sb))
+
+
+def parse_gn_list(value):
+ """Converts a "GN-list" command-line parameter into a list.
+
+ Conversions handled:
+ * None -> []
+ * '' -> []
+ * 'asdf' -> ['asdf']
+ * '["a", "b"]' -> ['a', 'b']
+ * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (action='append')
+
+ This allows passing args like:
+ gn_list = [ "one", "two", "three" ]
+ args = [ "--items=$gn_list" ]
+ """
+ # Convert None to [].
+ if not value:
+ return []
+ # Convert a list of GN lists to a flattened list.
+ if isinstance(value, list):
+ ret = []
+ for arg in value:
+ ret.extend(parse_gn_list(arg))
+ return ret
+ # Convert normal GN list.
+ if value.startswith('['):
+ return gn_helpers.GNValueParser(value).ParseList()
+ # Convert a single string value to a list.
+ return [value]
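
For context, the helpers above are meant to be imported by GN action() scripts. Below is a minimal, hypothetical sketch (not part of this commit) of such a script combining atomic_output(), parse_gn_list(), add_depfile_arg(), and write_depfile(); the flag names and the output format are made up for the example.

# Hypothetical action() script (not part of this commit) illustrating how the
# helpers above are intended to be used together. The flag names and output
# format are illustrative only.
import argparse
import sys

import action_helpers


def main(argv):
  parser = argparse.ArgumentParser()
  parser.add_argument('--output', required=True, help='File to write.')
  parser.add_argument('--inputs', action='append',
                      help='GN-list of input paths (may repeat).')
  action_helpers.add_depfile_arg(parser)
  args = parser.parse_args(argv)

  # Flatten the repeated GN-list arguments into a plain Python list.
  inputs = action_helpers.parse_gn_list(args.inputs)

  # Write atomically so an interrupted build never leaves a half-written file,
  # and keep the old mtime when the contents have not changed.
  with action_helpers.atomic_output(args.output, mode='wt') as f:
    f.write('\n'.join(inputs) + '\n')

  # Record the discovered inputs for ninja (see writing_gn_templates.md).
  if args.depfile:
    action_helpers.write_depfile(args.depfile, args.output, inputs=inputs)


if __name__ == '__main__':
  main(sys.argv[1:])
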
diff --git a/build/action_helpers_unittest.py b/build/action_helpers_unittest.py
new file mode 100755
index 000000000..a377291be
--- /dev/null
+++ b/build/action_helpers_unittest.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import pathlib
+import shutil
+import sys
+import tempfile
+import time
+import unittest
+
+import action_helpers
+
+
+class ActionHelperTest(unittest.TestCase):
+ def test_atomic_output(self):
+ tmp_file = pathlib.Path(tempfile.mktemp())
+ tmp_file.write_text('test')
+ try:
+ # Test that same contents does not change mtime.
+ orig_mtime = os.path.getmtime(tmp_file)
+ with action_helpers.atomic_output(str(tmp_file), 'wt') as af:
+ time.sleep(.01)
+ af.write('test')
+
+ self.assertEqual(os.path.getmtime(tmp_file), orig_mtime)
+
+ # Test that contents is written.
+ with action_helpers.atomic_output(str(tmp_file), 'wt') as af:
+ af.write('test2')
+ self.assertEqual(tmp_file.read_text(), 'test2')
+ self.assertNotEqual(os.path.getmtime(tmp_file), orig_mtime)
+ finally:
+ tmp_file.unlink()
+
+ def test_parse_gn_list(self):
+ def test(value, expected):
+ self.assertEqual(action_helpers.parse_gn_list(value), expected)
+
+ test(None, [])
+ test('', [])
+ test('asdf', ['asdf'])
+ test('["one"]', ['one'])
+ test(['["one"]', '["two"]'], ['one', 'two'])
+ test(['["one", "two"]', '["three"]'], ['one', 'two', 'three'])
+
+ def test_write_depfile(self):
+ tmp_file = pathlib.Path(tempfile.mktemp())
+ try:
+
+ def capture_output(inputs):
+ action_helpers.write_depfile(str(tmp_file), 'output', inputs)
+ return tmp_file.read_text()
+
+ self.assertEqual(capture_output(None), 'output: \n')
+ self.assertEqual(capture_output([]), 'output: \n')
+ self.assertEqual(capture_output(['a']), 'output: \\\n a\n')
+ # Check sorted.
+ self.assertEqual(capture_output(['b', 'a']), 'output: \\\n a \\\n b\n')
+ # Check converts to forward slashes.
+ self.assertEqual(capture_output(['a', os.path.join('b', 'c')]),
+ 'output: \\\n a \\\n b/c\n')
+
+ # Arg should be a list.
+ with self.assertRaises(AssertionError):
+ capture_output('a')
+
+ # Do not use depfile itself as an output.
+ with self.assertRaises(AssertionError):
+ capture_output([str(tmp_file)])
+
+ # Do not use absolute paths.
+ with self.assertRaises(AssertionError):
+ capture_output([os.path.sep + 'foo'])
+
+ # Do not use absolute paths (output path).
+ with self.assertRaises(AssertionError):
+ action_helpers.write_depfile(str(tmp_file), '/output', [])
+
+ finally:
+ tmp_file.unlink()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/build/add_rts_filters.py b/build/add_rts_filters.py
index 1a53474a8..94297c550 100755
--- a/build/add_rts_filters.py
+++ b/build/add_rts_filters.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2021 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn
index 4d224ff57..f9f6b939e 100644
--- a/build/android/BUILD.gn
+++ b/build/android/BUILD.gn
@@ -169,6 +169,9 @@ group("test_runner_device_support") {
if (build_with_chromium) {
data_deps += [ "//tools/android/forwarder2" ]
data += [ "//tools/android/avd/proto/" ]
+ if (enable_chrome_android_internal) {
+ data += [ "//clank/tools/android/avd/proto/" ]
+ }
if (is_asan) {
data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
}
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
index 993e3d386..8348558c1 100644
--- a/build/android/PRESUBMIT.py
+++ b/build/android/PRESUBMIT.py
@@ -94,7 +94,6 @@ def CommonChecks(input_api, output_api):
input_api,
output_api,
unit_tests=[
- J('.', 'emma_coverage_stats_test.py'),
J('.', 'list_class_verification_failures_test.py'),
J('pylib', 'constants', 'host_paths_unittest.py'),
J('pylib', 'gtest', 'gtest_test_instance_test.py'),
diff --git a/build/android/adb_command_line.py b/build/android/adb_command_line.py
index 97cb8833b..8557085d5 100755
--- a/build/android/adb_command_line.py
+++ b/build/android/adb_command_line.py
@@ -5,7 +5,6 @@
"""Utility for reading / writing command-line flag files on device(s)."""
-from __future__ import print_function
import argparse
import logging
diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py
index 7d6f619fa..0b52997a2 100755
--- a/build/android/adb_logcat_monitor.py
+++ b/build/android/adb_logcat_monitor.py
@@ -16,7 +16,6 @@ resilient across phone disconnects and reconnects and start the logcat
early enough to not miss anything.
"""
-from __future__ import print_function
import logging
import os
diff --git a/build/android/adb_system_webengine_command_line b/build/android/adb_system_webengine_command_line
new file mode 100755
index 000000000..2dce6d25f
--- /dev/null
+++ b/build/android/adb_system_webengine_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+# adb_system_webengine_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+# adb_system_webengine_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name weblayer-command-line "$@"
diff --git a/build/android/apk_operations.py b/build/android/apk_operations.py
index d8dce6087..6014b6789 100755
--- a/build/android/apk_operations.py
+++ b/build/android/apk_operations.py
@@ -6,7 +6,6 @@
# Using colorama.Fore/Back/Style members
# pylint: disable=no-member
-from __future__ import print_function
import argparse
import collections
@@ -188,24 +187,95 @@ def _NormalizeProcessName(debug_process_name, package_name):
return debug_process_name
-def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
- url=None, apk=None, wait_for_java_debugger=False,
- debug_process_name=None, nokill=None):
+def _ResolveActivity(device, package_name, category, action):
+ # E.g.:
+ # Activity Resolver Table:
+ # Schemes:
+ # http:
+  #        67e97c0 org.chromium.pkg/.MainActivity filter c91d43e
+ # Action: "android.intent.action.VIEW"
+ # Category: "android.intent.category.DEFAULT"
+ # Category: "android.intent.category.BROWSABLE"
+ # Scheme: "http"
+ # Scheme: "https"
+ #
+ # Non-Data Actions:
+ # android.intent.action.MAIN:
+ # 67e97c0 org.chromium.pkg/.MainActivity filter 4a34cf9
+ # Action: "android.intent.action.MAIN"
+ # Category: "android.intent.category.LAUNCHER"
+ lines = device.RunShellCommand(['dumpsys', 'package', package_name],
+ check_return=True)
+
+ # Extract the Activity Resolver Table: section.
+ start_idx = next((i for i, l in enumerate(lines)
+ if l.startswith('Activity Resolver Table:')), None)
+ if start_idx is None:
+ if not device.IsApplicationInstalled(package_name):
+ raise Exception('Package not installed: ' + package_name)
+ raise Exception('No Activity Resolver Table in:\n' + '\n'.join(lines))
+ line_count = next(i for i, l in enumerate(lines[start_idx + 1:])
+ if l and not l[0].isspace())
+ data = '\n'.join(lines[start_idx:start_idx + line_count])
+
+ # Split on each Activity entry.
+ entries = re.split(r'^ [0-9a-f]+ ', data, flags=re.MULTILINE)
+
+ def activity_name_from_entry(entry):
+ assert entry.startswith(package_name), 'Got: ' + entry
+ activity_name = entry[len(package_name) + 1:].split(' ', 1)[0]
+ if activity_name[0] == '.':
+ activity_name = package_name + activity_name
+ return activity_name
+
+ # Find the one with the text we want.
+ category_text = f'Category: "{category}"'
+ action_text = f'Action: "{action}"'
+ matched_entries = [
+ e for e in entries[1:] if category_text in e and action_text in e
+ ]
+
+ if not matched_entries:
+ raise Exception(f'Did not find {category_text}, {action_text} in\n{data}')
+ if len(matched_entries) > 1:
+ # When there are multiple matches, look for the one marked as default.
+ # Necessary for Monochrome, which also has MonochromeLauncherActivity.
+ default_entries = [
+ e for e in matched_entries if 'android.intent.category.DEFAULT' in e
+ ]
+ matched_entries = default_entries or matched_entries
+
+ # See if all matches point to the same activity.
+ activity_names = {activity_name_from_entry(e) for e in matched_entries}
+
+ if len(activity_names) > 1:
+ raise Exception('Found multiple launcher activities:\n * ' +
+ '\n * '.join(sorted(activity_names)))
+ return next(iter(activity_names))
+
+
+def _LaunchUrl(devices,
+ package_name,
+ argv=None,
+ command_line_flags_file=None,
+ url=None,
+ wait_for_java_debugger=False,
+ debug_process_name=None,
+ nokill=None):
if argv and command_line_flags_file is None:
raise Exception('This apk does not support any flags.')
- if url:
- # TODO(agrieve): Launch could be changed to require only package name by
- # parsing "dumpsys package" rather than relying on the apk.
- if not apk:
- raise Exception('Launching with URL is not supported when using '
- '--package-name. Use --apk-path instead.')
- view_activity = apk.GetViewActivityName()
- if not view_activity:
- raise Exception('APK does not support launching with URLs.')
debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+ if url is None:
+ category = 'android.intent.category.LAUNCHER'
+ action = 'android.intent.action.MAIN'
+ else:
+ category = 'android.intent.category.BROWSABLE'
+ action = 'android.intent.action.VIEW'
+
def launch(device):
+ activity = _ResolveActivity(device, package_name, category, action)
# --persistent is required to have Settings.Global.DEBUG_APP be set, which
# we currently use to allow reading of flags. https://crbug.com/784947
if not nokill:
@@ -228,18 +298,13 @@ def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
except device_errors.AdbShellCommandFailedError:
logging.exception('Failed to set flags')
- if url is None:
- # Simulate app icon click if no url is present.
- cmd = [
- 'am', 'start', '-p', package_name, '-c',
- 'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
- ]
- device.RunShellCommand(cmd, check_return=True)
- else:
- launch_intent = intent.Intent(action='android.intent.action.VIEW',
- activity=view_activity, data=url,
- package=package_name)
- device.StartActivity(launch_intent)
+ launch_intent = intent.Intent(action=action,
+ activity=activity,
+ data=url,
+ package=package_name)
+ logging.info('Sending launch intent for %s', activity)
+ device.StartActivity(launch_intent)
+
device_utils.DeviceUtils.parallel(devices).pMap(launch)
if wait_for_java_debugger:
print('Waiting for debugger to attach to process: ' +
@@ -649,7 +714,7 @@ class _LogcatProcessor:
# START u0 {act=android.intent.action.MAIN \
# cat=[android.intent.category.LAUNCHER] \
# flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000
- self._start_pattern = re.compile(r'START .*pkg=' + package_name)
+ self._start_pattern = re.compile(r'START .*(?:cmp|pkg)=' + package_name)
self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random())
# Holds lines buffered on start-up, before we find our nonce message.
@@ -1399,13 +1464,11 @@ class _LaunchCommand(_Command):
def Run(self):
if self.is_test_apk:
raise Exception('Use the bin/run_* scripts to run test apks.')
- if self.args.url and self.is_bundle:
- # TODO(digit): Support this, maybe by using 'dumpsys' as described
- # in the _LaunchUrl() comment.
- raise Exception('Launching with URL not supported for bundles yet!')
- _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args,
+ _LaunchUrl(self.devices,
+ self.args.package_name,
+ argv=self.args.args,
command_line_flags_file=self.args.command_line_flags_file,
- url=self.args.url, apk=self.apk_helper,
+ url=self.args.url,
wait_for_java_debugger=self.args.wait_for_java_debugger,
debug_process_name=self.args.debug_process_name,
nokill=self.args.nokill)
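
As a side note, the new _ResolveActivity() above finds the launchable activity by parsing the "Activity Resolver Table" section of `dumpsys package` output instead of reading it from the APK. The following self-contained sketch (not the commit's exact code) shows that parsing idea run against the sample output quoted in its comment; the sample text and the simplified regex are illustrative only.

# Self-contained sketch (not the commit's exact code) of the dumpsys-parsing
# idea behind _ResolveActivity(). The sample text and regex are illustrative.
import re

SAMPLE = '''\
Activity Resolver Table:
  Non-Data Actions:
      android.intent.action.MAIN:
        67e97c0 org.chromium.pkg/.MainActivity filter 4a34cf9
          Action: "android.intent.action.MAIN"
          Category: "android.intent.category.LAUNCHER"
'''


def resolve_activity(dumpsys_text, package_name, category, action):
  # Split on each activity entry, which starts with an indented hex object id.
  entries = re.split(r'^ +[0-9a-f]+ ', dumpsys_text, flags=re.MULTILINE)
  for entry in entries[1:]:
    if f'Category: "{category}"' in entry and f'Action: "{action}"' in entry:
      # The entry begins with "<package>/<activity>"; expand a leading "."
      # into the fully-qualified activity name.
      name = entry[len(package_name) + 1:].split(' ', 1)[0]
      return package_name + name if name.startswith('.') else name
  raise Exception(f'Did not find {category}, {action}')


print(resolve_activity(SAMPLE, 'org.chromium.pkg',
                       'android.intent.category.LAUNCHER',
                       'android.intent.action.MAIN'))
# Prints: org.chromium.pkg.MainActivity
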
diff --git a/build/android/apk_operations.pydeps b/build/android/apk_operations.pydeps
index 1db605904..26f69e89d 100644
--- a/build/android/apk_operations.pydeps
+++ b/build/android/apk_operations.pydeps
@@ -64,8 +64,8 @@
../../third_party/catapult/devil/devil/utils/zip_utils.py
../../third_party/catapult/third_party/six/six.py
../../third_party/jinja2/__init__.py
-../../third_party/jinja2/_compat.py
../../third_party/jinja2/_identifier.py
+../../third_party/jinja2/async_utils.py
../../third_party/jinja2/bccache.py
../../third_party/jinja2/compiler.py
../../third_party/jinja2/defaults.py
@@ -85,8 +85,10 @@
../../third_party/markupsafe/__init__.py
../../third_party/markupsafe/_compat.py
../../third_party/markupsafe/_native.py
+../action_helpers.py
../gn_helpers.py
../print_python_deps.py
+../zip_helpers.py
adb_command_line.py
apk_operations.py
devil_chromium.py
@@ -96,7 +98,6 @@ gyp/util/__init__.py
gyp/util/build_utils.py
gyp/util/md5_check.py
gyp/util/resource_utils.py
-gyp/util/zipalign.py
incremental_install/__init__.py
incremental_install/installer.py
pylib/__init__.py
diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py
index 197e7211c..3274b9504 100755
--- a/build/android/asan_symbolize.py
+++ b/build/android/asan_symbolize.py
@@ -4,7 +4,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import collections
diff --git a/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
index 173d71baf..a997bf05a 100644
--- a/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
+++ b/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -71,6 +71,11 @@ public class ClassPathValidator {
// API.
return;
}
+ if (className.matches("^android\\b.*")) {
+ // OS APIs sometime pop up in prebuilts. Rather than force prebuilt targets to set a
+ // proper alternative_android_sdk_dep, just ignore android.*
+ return;
+ }
try {
classLoader.loadClass(className.replace('/', '.'));
} catch (ClassNotFoundException e) {
diff --git a/build/android/diff_resource_sizes.py b/build/android/diff_resource_sizes.py
index 79c0a3d33..ff21d8180 100755
--- a/build/android/diff_resource_sizes.py
+++ b/build/android/diff_resource_sizes.py
@@ -5,7 +5,6 @@
"""Runs resource_sizes.py on two apks and outputs the diff."""
-from __future__ import print_function
import argparse
import json
diff --git a/build/android/dummy_libgcc/README.md b/build/android/dummy_libgcc/README.md
new file mode 100644
index 000000000..01762db81
--- /dev/null
+++ b/build/android/dummy_libgcc/README.md
@@ -0,0 +1,20 @@
+# Dummy libgcc.a
+
+This directory contains an empty `libgcc.a` file for use with the Rust toolchain
+when targeting Android.
+
+The Rust compiler unconditionally attempts to link libgcc when targeting
+Android, so `-lgcc` appears in the linker command when producing a `.so` file
+for Android. Rustc expects libgcc to provide unwinding support; however, we
+already use libunwind, which we explicitly link ourselves through `ldflags`, to
+provide this. Our Android toolchain has no libgcc present, which means
+Rustc-driven linking targeting Android fails with a missing library that we don't
+actually want to use.
+
+This same issue occurs for other consumers of rustc, when building for targets
+without a libgcc, and the solution is to [give rustc an empty `libgcc.a` file](
+https://www.reddit.com/r/rust/comments/jst1kk/building_rust_without_linking_against_libgcc/).
+
+Therefore this directory contains an empty `libgcc.a` file, and on Android we
+include this directory in the linker paths so that the rustc-driven linking step
+can succeed without needing a real libgcc.
diff --git a/build/android/dummy_libgcc/libgcc.a b/build/android/dummy_libgcc/libgcc.a
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/build/android/dummy_libgcc/libgcc.a
diff --git a/build/android/dump_apk_resource_strings.py b/build/android/dump_apk_resource_strings.py
index cc8c6f39d..962103e01 100755
--- a/build/android/dump_apk_resource_strings.py
+++ b/build/android/dump_apk_resource_strings.py
@@ -6,7 +6,6 @@
"""A script to parse and dump localized strings in resource.arsc files."""
-from __future__ import print_function
import argparse
import collections
diff --git a/build/android/emma_coverage_stats.py b/build/android/emma_coverage_stats.py
deleted file mode 100755
index 9604ba6a1..000000000
--- a/build/android/emma_coverage_stats.py
+++ /dev/null
@@ -1,481 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2015 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Generates incremental code coverage reports for Java code in Chromium.
-
-Usage:
-
- build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
- <EMMA file directory> --lines-for-coverage-file
- <path to file containing lines for coverage>
-
- Creates a JSON representation of the overall and file coverage stats and saves
- this information to the specified output file.
-"""
-
-import argparse
-import collections
-import json
-import logging
-import os
-import re
-import sys
-from xml.etree import ElementTree
-
-import devil_chromium
-from devil.utils import run_tests_helper
-
-NOT_EXECUTABLE = -1
-NOT_COVERED = 0
-COVERED = 1
-PARTIALLY_COVERED = 2
-
-# Coverage information about a single line of code.
-LineCoverage = collections.namedtuple(
- 'LineCoverage',
- ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
-
-
-class _EmmaHtmlParser:
- """Encapsulates HTML file parsing operations.
-
- This class contains all operations related to parsing HTML files that were
- produced using the EMMA code coverage tool.
-
- Example HTML:
-
- Package links:
- <a href="_files/1.html">org.chromium.chrome</a>
- This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
-
- Class links:
- <a href="1e.html">DoActivity.java</a>
- This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
-
- Line coverage data:
- <tr class="p">
- <td class="l" title="78% line coverage (7 out of 9)">108</td>
- <td title="78% line coverage (7 out of 9 instructions)">
- if (index < 0 || index = mSelectors.size()) index = 0;</td>
- </tr>
- <tr>
- <td class="l">109</td>
- <td> </td>
- </tr>
- <tr class="c">
- <td class="l">110</td>
- <td> if (mSelectors.get(index) != null) {</td>
- </tr>
- <tr class="z">
- <td class="l">111</td>
- <td> for (int i = 0; i < mSelectors.size(); i++) {</td>
- </tr>
- Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.
-
- We can parse this to get:
- 1. Line number
- 2. Line of source code
- 3. Coverage status (c, z, or p)
- 4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
- """
- # Selector to match all <a> elements within the rows that are in the table
- # that displays all of the different packages.
- _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
-
- # Selector to match all <a> elements within the rows that are in the table
- # that displays all of the different classes within a package.
- _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
-
- # Selector to match all <tr> elements within the table containing Java source
- # code in an EMMA HTML file.
- _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
-
- # Children of HTML elements are represented as a list in ElementTree. These
- # constants represent list indices corresponding to relevant child elements.
-
- # Child 1 contains percentage covered for a line.
- _ELEMENT_PERCENT_COVERED = 1
-
- # Child 1 contains the original line of source code.
- _ELEMENT_CONTAINING_SOURCE_CODE = 1
-
- # Child 0 contains the line number.
- _ELEMENT_CONTAINING_LINENO = 0
-
- # Maps CSS class names to corresponding coverage constants.
- _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}
-
- # UTF-8 no break space.
- _NO_BREAK_SPACE = '\xc2\xa0'
-
- def __init__(self, emma_file_base_dir):
- """Initializes _EmmaHtmlParser.
-
- Args:
- emma_file_base_dir: Path to the location where EMMA report files are
- stored. Should be where index.html is stored.
- """
- self._base_dir = emma_file_base_dir
- self._emma_files_path = os.path.join(self._base_dir, '_files')
- self._index_path = os.path.join(self._base_dir, 'index.html')
-
- def GetLineCoverage(self, emma_file_path):
- """Returns a list of LineCoverage objects for the given EMMA HTML file.
-
- Args:
- emma_file_path: String representing the path to the EMMA HTML file.
-
- Returns:
- A list of LineCoverage objects.
- """
- line_tr_elements = self._FindElements(
- emma_file_path, self._XPATH_SELECT_LOC)
- line_coverage = []
- for tr in line_tr_elements:
- # Get the coverage status.
- coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
- # Get the fractional coverage value.
- if coverage_status == PARTIALLY_COVERED:
- title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
- # Parse string that contains percent covered: "83% line coverage ...".
- percent_covered = title_attribute.split('%')[0]
- fractional_coverage = int(percent_covered) / 100.0
- else:
- fractional_coverage = 1.0
-
- # Get the line number.
- lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
- # Handles oddly formatted HTML (where there is an extra <a> tag).
- lineno = int(lineno_element.text or
- lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
- # Get the original line of Java source code.
- raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
- source = raw_source.replace(self._NO_BREAK_SPACE, ' ')
-
- line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
- line_coverage.append(line)
-
- return line_coverage
-
- def GetPackageNameToEmmaFileDict(self):
- """Returns a dict mapping Java packages to EMMA HTML coverage files.
-
- Parses the EMMA index.html file to get a list of packages, then parses each
- package HTML file to get a list of classes for that package, and creates
- a dict with this info.
-
- Returns:
- A dict mapping string representation of Java packages (with class
- names appended) to the corresponding file paths of EMMA HTML files.
- """
- # These <a> elements contain each package name and the path of the file
- # where all classes within said package are listed.
- package_link_elements = self._FindElements(
- self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
- # Maps file path of package directory (EMMA generated) to package name.
- # Example: emma_dir/f.html: org.chromium.chrome.
- package_links = {
- os.path.join(self._base_dir, link.attrib['HREF']): link.text
- for link in package_link_elements if 'HREF' in link.attrib
- }
-
- package_to_emma = {}
- for package_emma_file_path, package_name in package_links.items():
- # These <a> elements contain each class name in the current package and
- # the path of the file where the coverage info is stored for each class.
- coverage_file_link_elements = self._FindElements(
- package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)
-
- for class_name_element in coverage_file_link_elements:
- emma_coverage_file_path = os.path.join(
- self._emma_files_path, class_name_element.attrib['HREF'])
- full_package_name = '%s.%s' % (package_name, class_name_element.text)
- package_to_emma[full_package_name] = emma_coverage_file_path
-
- return package_to_emma
-
- # pylint: disable=no-self-use
- def _FindElements(self, file_path, xpath_selector):
- """Reads a HTML file and performs an XPath match.
-
- Args:
- file_path: String representing the path to the HTML file.
- xpath_selector: String representing xpath search pattern.
-
- Returns:
- A list of ElementTree.Elements matching the given XPath selector.
- Returns an empty list if there is no match.
- """
- with open(file_path) as f:
- file_contents = f.read()
- root = ElementTree.fromstring(file_contents)
- return root.findall(xpath_selector)
-
-
-class _EmmaCoverageStats:
- """Computes code coverage stats for Java code using the coverage tool EMMA.
-
- This class provides an API that allows users to capture absolute code coverage
- and code coverage on a subset of lines for each Java source file. Coverage
- reports are generated in JSON format.
- """
- # Regular expression to get package name from Java package statement.
- RE_PACKAGE_MATCH_GROUP = 'package'
- RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)
-
- def __init__(self, emma_file_base_dir, files_for_coverage):
- """Initialize _EmmaCoverageStats.
-
- Args:
- emma_file_base_dir: String representing the path to the base directory
- where EMMA HTML coverage files are stored, i.e. parent of index.html.
- files_for_coverage: A list of Java source code file paths to get EMMA
- coverage for.
- """
- self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
- self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)
-
- def GetCoverageDict(self, lines_for_coverage):
- """Returns a dict containing detailed coverage information.
-
- Gets detailed coverage stats for each file specified in the
- |lines_for_coverage| dict and the total incremental number of lines covered
- and executable for all files in |lines_for_coverage|.
-
- Args:
- lines_for_coverage: A dict mapping Java source file paths to lists of line
- numbers.
-
- Returns:
- A dict containing coverage stats for the given dict of files and lines.
- Contains absolute coverage stats for each file, coverage stats for each
- file's lines specified in |lines_for_coverage|, line by line coverage
- for each file, and overall coverage stats for the lines specified in
- |lines_for_coverage|.
- """
- file_coverage = {}
- for file_path, line_numbers in lines_for_coverage.items():
- file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
- if file_coverage_dict:
- file_coverage[file_path] = file_coverage_dict
- else:
- logging.warning(
- 'No code coverage data for %s, skipping.', file_path)
-
- covered_statuses = [s['incremental'] for s in file_coverage.values()]
- num_covered_lines = sum(s['covered'] for s in covered_statuses)
- num_total_lines = sum(s['total'] for s in covered_statuses)
- return {
- 'files': file_coverage,
- 'patch': {
- 'incremental': {
- 'covered': num_covered_lines,
- 'total': num_total_lines
- }
- }
- }
-
- def GetCoverageDictForFile(self, file_path, line_numbers):
- """Returns a dict containing detailed coverage info for the given file.
-
- Args:
- file_path: The path to the Java source file that we want to create the
- coverage dict for.
- line_numbers: A list of integer line numbers to retrieve additional stats
- for.
-
- Returns:
- A dict containing absolute, incremental, and line by line coverage for
- a file.
- """
- if file_path not in self._source_to_emma:
- return None
- emma_file = self._source_to_emma[file_path]
- total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
- incremental_line_coverage = [line for line in total_line_coverage
- if line.lineno in line_numbers]
- line_by_line_coverage = [
- {
- 'line': line.source,
- 'coverage': line.covered_status,
- 'changed': line.lineno in line_numbers,
- 'fractional_coverage': line.fractional_line_coverage,
- }
- for line in total_line_coverage
- ]
- total_covered_lines, total_lines = (
- self.GetSummaryStatsForLines(total_line_coverage))
- incremental_covered_lines, incremental_total_lines = (
- self.GetSummaryStatsForLines(incremental_line_coverage))
-
- file_coverage_stats = {
- 'absolute': {
- 'covered': total_covered_lines,
- 'total': total_lines
- },
- 'incremental': {
- 'covered': incremental_covered_lines,
- 'total': incremental_total_lines
- },
- 'source': line_by_line_coverage,
- }
- return file_coverage_stats
-
- # pylint: disable=no-self-use
- def GetSummaryStatsForLines(self, line_coverage):
- """Gets summary stats for a given list of LineCoverage objects.
-
- Args:
- line_coverage: A list of LineCoverage objects.
-
- Returns:
- A tuple containing the number of lines that are covered and the total
- number of lines that are executable, respectively
- """
- partially_covered_sum = 0
- covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
- for line in line_coverage:
- status = line.covered_status
- if status == NOT_EXECUTABLE:
- continue
- covered_status_totals[status] += 1
- if status == PARTIALLY_COVERED:
- partially_covered_sum += line.fractional_line_coverage
-
- total_covered = covered_status_totals[COVERED] + partially_covered_sum
- total_lines = sum(covered_status_totals.values())
- return total_covered, total_lines
-
- def _GetSourceFileToEmmaFileDict(self, files):
- """Gets a dict used to correlate Java source files with EMMA HTML files.
-
- This method gathers the information needed to correlate EMMA HTML
- files with Java source files. EMMA XML and plain text reports do not provide
- line by line coverage data, so HTML reports must be used instead.
- Unfortunately, the HTML files that are created are given garbage names
-    (i.e. 1.html) so we need to manually correlate EMMA HTML files
- with the original Java source files.
-
- Args:
- files: A list of file names for which coverage information is desired.
-
- Returns:
- A dict mapping Java source file paths to EMMA HTML file paths.
- """
- # Maps Java source file paths to package names.
- # Example: /usr/code/file.java -> org.chromium.file.java.
- source_to_package = {}
- for file_path in files:
- package = self.GetPackageNameFromFile(file_path)
- if package:
- source_to_package[file_path] = package
- else:
- logging.warning("Skipping %s because it doesn\'t have a package "
- "statement.", file_path)
-
- # Maps package names to EMMA report HTML files.
- # Example: org.chromium.file.java -> out/coverage/1a.html.
- package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
- # Finally, we have a dict mapping Java file paths to EMMA report files.
- # Example: /usr/code/file.java -> out/coverage/1a.html.
- source_to_emma = {
- source: package_to_emma[package]
- for source, package in source_to_package.items()
- if package in package_to_emma
- }
- return source_to_emma
-
- @staticmethod
- def NeedsCoverage(file_path):
- """Checks to see if the file needs to be analyzed for code coverage.
-
- Args:
- file_path: A string representing path to the file.
-
- Returns:
- True for Java files that exist, False for all others.
- """
- if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
- return True
- logging.info('Skipping file %s, cannot compute code coverage.', file_path)
- return False
-
- @staticmethod
- def GetPackageNameFromFile(file_path):
- """Gets the full package name including the file name for a given file path.
-
- Args:
- file_path: String representing the path to the Java source file.
-
- Returns:
- A string representing the full package name with file name appended or
- None if there is no package statement in the file.
- """
- with open(file_path) as f:
- file_content = f.read()
- package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
- if package_match:
- package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
- file_name = os.path.basename(file_path)
- return '%s.%s' % (package, file_name)
- return None
-
-
-def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
- """Generates a coverage report for a given set of lines.
-
- Writes the results of the coverage analysis to the file specified by
- |out_file_path|.
-
- Args:
- line_coverage_file: The path to a file which contains a dict mapping file
- names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
- that we should compute coverage information on lines 1 - 3 for file1.
- out_file_path: A string representing the location to write the JSON report.
- coverage_dir: A string representing the file path where the EMMA
- HTML coverage files are located (i.e. folder where index.html is located).
- """
- with open(line_coverage_file) as f:
- potential_files_for_coverage = json.load(f)
-
- files_for_coverage = {
- f: lines
- for f, lines in potential_files_for_coverage.items()
- if _EmmaCoverageStats.NeedsCoverage(f)
- }
-
- coverage_results = {}
- if files_for_coverage:
- code_coverage = _EmmaCoverageStats(coverage_dir,
- list(files_for_coverage.keys()))
- coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
- else:
- logging.info('No Java files requiring coverage were included in %s.',
- line_coverage_file)
-
- with open(out_file_path, 'w+') as out_status_file:
- json.dump(coverage_results, out_status_file)
-
-
-def main():
- argparser = argparse.ArgumentParser()
- argparser.add_argument('--out', required=True, type=str,
- help='Report output file path.')
- argparser.add_argument('--emma-dir', required=True, type=str,
- help='EMMA HTML report directory.')
- argparser.add_argument('--lines-for-coverage-file', required=True, type=str,
- help='File containing a JSON object. Should contain a '
- 'dict mapping file names to lists of line numbers of '
- 'code for which coverage information is desired.')
- argparser.add_argument('-v', '--verbose', action='count',
- help='Print verbose log information.')
- args = argparser.parse_args()
- run_tests_helper.SetLogLevel(args.verbose)
- devil_chromium.Initialize()
- GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir)
-
-
-if __name__ == '__main__':
- sys.exit(main())
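
The deleted tool was driven entirely by the flags defined in main() above. A minimal sketch of the expected input and output shapes, with hypothetical paths used purely for illustration:

import json

# Input for --lines-for-coverage-file: Java source paths mapped to the changed
# line numbers that incremental coverage should be computed for.
lines_for_coverage = {'/src/org/chromium/example/Foo.java': [10, 11, 12]}
with open('lines.json', 'w') as f:
    json.dump(lines_for_coverage, f)

# After running (inside a Chromium checkout):
#   emma_coverage_stats.py --emma-dir out/coverage/emma_html \
#       --lines-for-coverage-file lines.json --out report.json
# report.json follows the structure built by GetCoverageDict():
#   {'files': {<path>: {'absolute': {...}, 'incremental': {...}, 'source': [...]}},
#    'patch': {'incremental': {'covered': <number>, 'total': <number>}}}
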
diff --git a/build/android/emma_coverage_stats_test.py b/build/android/emma_coverage_stats_test.py
deleted file mode 100755
index c922050e4..000000000
--- a/build/android/emma_coverage_stats_test.py
+++ /dev/null
@@ -1,593 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2015 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable=protected-access
-
-import sys
-import unittest
-from xml.etree import ElementTree
-
-import emma_coverage_stats
-
-import mock # pylint: disable=import-error
-
-EMPTY_COVERAGE_STATS_DICT = {
- 'files': {},
- 'patch': {
- 'incremental': {
- 'covered': 0, 'total': 0
- }
- }
-}
-
-
-class _EmmaHtmlParserTest(unittest.TestCase):
- """Tests for _EmmaHtmlParser.
-
- Uses modified EMMA report HTML that contains only the subset of tags needed
- for test verification.
- """
-
- def setUp(self):
- self.emma_dir = 'fake/dir/'
- self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
- self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
- self.index_html = (
- '<HTML>'
- '<BODY>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CLASS="it" CELLSPACING="0">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '<TR>'
- '<TH CLASS="f">name</TH>'
- '<TH>class, %</TH>'
- '<TH>method, %</TH>'
- '<TH>block, %</TH>'
- '<TH>line, %</TH>'
- '</TR>'
- '<TR CLASS="o">'
- '<TD><A HREF="_files/0.html"'
- '>org.chromium.chrome.browser</A></TD>'
- '<TD CLASS="h">0% (0/3)</TD>'
- '</TR>'
- '<TR>'
- '<TD><A HREF="_files/1.html"'
- '>org.chromium.chrome.browser.tabmodel</A></TD>'
- '<TD CLASS="h">0% (0/8)</TD>'
- '</TR>'
- '</TABLE>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '</BODY>'
- '</HTML>'
- )
- self.package_1_class_list_html = (
- '<HTML>'
- '<BODY>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '<TR>'
- '<TH CLASS="f">name</TH>'
- '<TH>class, %</TH>'
- '<TH>method, %</TH>'
- '<TH>block, %</TH>'
- '<TH>line, %</TH>'
- '</TR>'
- '<TR CLASS="o">'
- '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
- '<TD CLASS="h">0% (0/3)</TD>'
- '<TD CLASS="h">0% (0/9)</TD>'
- '<TD CLASS="h">0% (0/97)</TD>'
- '<TD CLASS="h">0% (0/26)</TD>'
- '</TR>'
- '</TABLE>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '</BODY>'
- '</HTML>'
- )
- self.package_2_class_list_html = (
- '<HTML>'
- '<BODY>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '<TABLE CELLSPACING="0" WIDTH="100%">'
- '<TR>'
- '<TH CLASS="f">name</TH>'
- '<TH>class, %</TH>'
- '<TH>method, %</TH>'
- '<TH>block, %</TH>'
- '<TH>line, %</TH>'
- '</TR>'
- '<TR CLASS="o">'
- '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
- '<TD CLASS="h">0% (0/1)</TD>'
- '</TR>'
- '<TR>'
- '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
- '</TR>'
- '<TR CLASS="o">'
- '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
- '</TR>'
- '<TR>'
- '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
- '</TR>'
- '<TR CLASS="o">'
- '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
- '</TR>'
- '<TR>'
- '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
- '</TR>'
- '</TABLE>'
- '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
- '</TABLE>'
- '</BODY>'
- '</HTML>'
- )
- self.partially_covered_tr_html = (
- '<TR CLASS="p">'
- '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
- '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
- 'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
- '</TR>'
- )
- self.covered_tr_html = (
- '<TR CLASS="c">'
- '<TD CLASS="l">110</TD>'
- '<TD> if (mSelectors.get(index) != null) {</TD>'
- '</TR>'
- )
- self.not_executable_tr_html = (
- '<TR>'
- '<TD CLASS="l">109</TD>'
- '<TD> </TD>'
- '</TR>'
- )
- self.tr_with_extra_a_tag = (
- '<TR CLASS="z">'
- '<TD CLASS="l">'
- '<A name="1f">54</A>'
- '</TD>'
- '<TD> }</TD>'
- '</TR>'
- )
-
- def testInit(self):
- emma_dir = self.emma_dir
- parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
- self.assertEqual(parser._base_dir, emma_dir)
- self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
- self.assertEqual(parser._index_path, 'fake/dir/index.html')
-
- def testFindElements_basic(self):
- read_values = [self.simple_html]
- found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
- file_path='fake', xpath_selector='.//TD')
- self.assertIs(type(found), list)
- self.assertIs(type(found[0]), ElementTree.Element)
- self.assertEqual(found[0].text, 'Test HTML')
-
- def testFindElements_multipleElements(self):
- multiple_trs = self.not_executable_tr_html + self.covered_tr_html
- read_values = ['<div>' + multiple_trs + '</div>']
- found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
- file_path='fake', xpath_selector='.//TR')
- self.assertEqual(2, len(found))
-
- def testFindElements_noMatch(self):
- read_values = [self.simple_html]
- found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
- file_path='fake', xpath_selector='.//TR')
- self.assertEqual(found, [])
-
- def testFindElements_badFilePath(self):
- with self.assertRaises(IOError):
- with mock.patch('os.path.exists', return_value=False):
- self.parser._FindElements('fake', xpath_selector='//tr')
-
- def testGetPackageNameToEmmaFileDict_basic(self):
- if sys.version_info.major < 3:
- expected_dict = {
- 'org.chromium.chrome.browser.AccessibilityUtil.java':
- 'fake/dir/_files/23.html',
- 'org.chromium.chrome.browser.ContextualMenuBar.java':
- 'fake/dir/_files/22.html',
- 'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
- 'fake/dir/_files/1e.html',
- 'org.chromium.chrome.browser.ContentSetting.java':
- 'fake/dir/_files/1f.html',
- 'org.chromium.chrome.browser.DevToolsServer.java':
- 'fake/dir/_files/20.html',
- 'org.chromium.chrome.browser.NavigationPopup.java':
- 'fake/dir/_files/24.html',
- 'org.chromium.chrome.browser.FileProviderHelper.java':
- 'fake/dir/_files/21.html'
- }
- else:
- expected_dict = {
- 'org.chromium.chrome.browser.IntentHelper.java':
- 'fake/dir/_files/1e.html',
- 'org.chromium.chrome.browser.tabmodel.AccessibilityUtil.java':
- 'fake/dir/_files/23.html',
- 'org.chromium.chrome.browser.tabmodel.ContextualMenuBar.java':
- 'fake/dir/_files/22.html',
- 'org.chromium.chrome.browser.tabmodel.ContentSetting.java':
- 'fake/dir/_files/1f.html',
- 'org.chromium.chrome.browser.tabmodel.DevToolsServer.java':
- 'fake/dir/_files/20.html',
- 'org.chromium.chrome.browser.tabmodel.NavigationPopup.java':
- 'fake/dir/_files/24.html',
- 'org.chromium.chrome.browser.tabmodel.FileProviderHelper.java':
- 'fake/dir/_files/21.html'
- }
- read_values = [self.index_html, self.package_1_class_list_html,
- self.package_2_class_list_html]
- return_dict, mock_open = MockOpenForFunction(
- self.parser.GetPackageNameToEmmaFileDict, read_values)
-
- self.assertDictEqual(return_dict, expected_dict)
- self.assertEqual(mock_open.call_count, 3)
- if sys.version_info.major < 3:
- calls = [
- mock.call('fake/dir/index.html'),
- mock.call('fake/dir/_files/1.html'),
- mock.call('fake/dir/_files/0.html')
- ]
- else:
- calls = [
- mock.call('fake/dir/index.html'),
- mock.call('fake/dir/_files/0.html'),
- mock.call('fake/dir/_files/1.html')
- ]
- mock_open.assert_has_calls(calls)
-
- def testGetPackageNameToEmmaFileDict_noPackageElements(self):
- self.parser._FindElements = mock.Mock(return_value=[])
- return_dict = self.parser.GetPackageNameToEmmaFileDict()
- self.assertDictEqual({}, return_dict)
-
- def testGetLineCoverage_status_basic(self):
- line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
- self.assertEqual(line_coverage[0].covered_status,
- emma_coverage_stats.COVERED)
-
- def testGetLineCoverage_status_statusMissing(self):
- line_coverage = self.GetLineCoverageWithFakeElements(
- [self.not_executable_tr_html])
- self.assertEqual(line_coverage[0].covered_status,
- emma_coverage_stats.NOT_EXECUTABLE)
-
- def testGetLineCoverage_fractionalCoverage_basic(self):
- line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
- self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)
-
- def testGetLineCoverage_fractionalCoverage_partial(self):
- line_coverage = self.GetLineCoverageWithFakeElements(
- [self.partially_covered_tr_html])
- self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)
-
- def testGetLineCoverage_lineno_basic(self):
- line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
- self.assertEqual(line_coverage[0].lineno, 110)
-
- def testGetLineCoverage_lineno_withAlternativeHtml(self):
- line_coverage = self.GetLineCoverageWithFakeElements(
- [self.tr_with_extra_a_tag])
- self.assertEqual(line_coverage[0].lineno, 54)
-
- def testGetLineCoverage_source(self):
- self.parser._FindElements = mock.Mock(
- return_value=[ElementTree.fromstring(self.covered_tr_html)])
- line_coverage = self.parser.GetLineCoverage('fake_path')
- self.assertEqual(line_coverage[0].source,
- ' if (mSelectors.get(index) != null) {')
-
- def testGetLineCoverage_multipleElements(self):
- line_coverage = self.GetLineCoverageWithFakeElements(
- [self.covered_tr_html, self.partially_covered_tr_html,
- self.tr_with_extra_a_tag])
- self.assertEqual(len(line_coverage), 3)
-
- def GetLineCoverageWithFakeElements(self, html_elements):
- """Wraps GetLineCoverage so mock HTML can easily be used.
-
- Args:
- html_elements: List of strings each representing an HTML element.
-
- Returns:
- A list of LineCoverage objects.
- """
- elements = [ElementTree.fromstring(string) for string in html_elements]
- with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
- return_value=elements):
- return self.parser.GetLineCoverage('fake_path')
-
-
-class _EmmaCoverageStatsTest(unittest.TestCase):
- """Tests for _EmmaCoverageStats."""
-
- def setUp(self):
- self.good_source_to_emma = {
- '/path/to/1/File1.java': '/emma/1.html',
- '/path/2/File2.java': '/emma/2.html',
- '/path/2/File3.java': '/emma/3.html'
- }
- self.line_coverage = [
- emma_coverage_stats.LineCoverage(
- 1, '', emma_coverage_stats.COVERED, 1.0),
- emma_coverage_stats.LineCoverage(
- 2, '', emma_coverage_stats.COVERED, 1.0),
- emma_coverage_stats.LineCoverage(
- 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
- emma_coverage_stats.LineCoverage(
- 4, '', emma_coverage_stats.NOT_COVERED, 1.0),
- emma_coverage_stats.LineCoverage(
- 5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
- emma_coverage_stats.LineCoverage(
- 6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
- ]
- self.lines_for_coverage = [1, 3, 5, 6]
- with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
- return_value=[]):
- self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
- 'fake_dir', {})
-
- def testInit(self):
- coverage_stats = self.simple_coverage
- self.assertIsInstance(coverage_stats._emma_parser,
- emma_coverage_stats._EmmaHtmlParser)
- self.assertIsInstance(coverage_stats._source_to_emma, dict)
-
- def testNeedsCoverage_withExistingJavaFile(self):
- test_file = '/path/to/file/File.java'
- with mock.patch('os.path.exists', return_value=True):
- self.assertTrue(
- emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
-
- def testNeedsCoverage_withNonJavaFile(self):
- test_file = '/path/to/file/File.c'
- with mock.patch('os.path.exists', return_value=True):
- self.assertFalse(
- emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
-
- def testNeedsCoverage_fileDoesNotExist(self):
- test_file = '/path/to/file/File.java'
- with mock.patch('os.path.exists', return_value=False):
- self.assertFalse(
- emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
-
- def testGetPackageNameFromFile_basic(self):
- test_file_text = """// Test Copyright
- package org.chromium.chrome.browser;
- import android.graphics.RectF;"""
- result_package, _ = MockOpenForFunction(
- emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
- [test_file_text], file_path='/path/to/file/File.java')
- self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
-
- def testGetPackageNameFromFile_noPackageStatement(self):
- result_package, _ = MockOpenForFunction(
- emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
- ['not a package statement'], file_path='/path/to/file/File.java')
- self.assertIsNone(result_package)
-
- def testGetSummaryStatsForLines_basic(self):
- covered, total = self.simple_coverage.GetSummaryStatsForLines(
- self.line_coverage)
- self.assertEqual(covered, 3.05)
- self.assertEqual(total, 5)
-
- def testGetSourceFileToEmmaFileDict(self):
- package_names = {
- '/path/to/1/File1.java': 'org.fake.one.File1.java',
- '/path/2/File2.java': 'org.fake.File2.java',
- '/path/2/File3.java': 'org.fake.File3.java'
- }
- package_to_emma = {
- 'org.fake.one.File1.java': '/emma/1.html',
- 'org.fake.File2.java': '/emma/2.html',
- 'org.fake.File3.java': '/emma/3.html'
- }
- with mock.patch('os.path.exists', return_value=True):
- coverage_stats = self.simple_coverage
- coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
- return_value=package_to_emma)
- coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
- result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
- list(package_names.keys()))
- self.assertDictEqual(result_dict, self.good_source_to_emma)
-
- def testGetCoverageDictForFile(self):
- line_coverage = self.line_coverage
- self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
- self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
- lines = self.lines_for_coverage
- expected_dict = {
- 'absolute': {
- 'covered': 3.05,
- 'total': 5
- },
- 'incremental': {
- 'covered': 2.05,
- 'total': 3
- },
- 'source': [
- {
- 'line': line_coverage[0].source,
- 'coverage': line_coverage[0].covered_status,
- 'changed': True,
- 'fractional_coverage': line_coverage[0].fractional_line_coverage,
- },
- {
- 'line': line_coverage[1].source,
- 'coverage': line_coverage[1].covered_status,
- 'changed': False,
- 'fractional_coverage': line_coverage[1].fractional_line_coverage,
- },
- {
- 'line': line_coverage[2].source,
- 'coverage': line_coverage[2].covered_status,
- 'changed': True,
- 'fractional_coverage': line_coverage[2].fractional_line_coverage,
- },
- {
- 'line': line_coverage[3].source,
- 'coverage': line_coverage[3].covered_status,
- 'changed': False,
- 'fractional_coverage': line_coverage[3].fractional_line_coverage,
- },
- {
- 'line': line_coverage[4].source,
- 'coverage': line_coverage[4].covered_status,
- 'changed': True,
- 'fractional_coverage': line_coverage[4].fractional_line_coverage,
- },
- {
- 'line': line_coverage[5].source,
- 'coverage': line_coverage[5].covered_status,
- 'changed': True,
- 'fractional_coverage': line_coverage[5].fractional_line_coverage,
- }
- ]
- }
- result_dict = self.simple_coverage.GetCoverageDictForFile(
- '/fake/src', lines)
- self.assertDictEqual(result_dict, expected_dict)
-
- def testGetCoverageDictForFile_emptyCoverage(self):
- expected_dict = {
- 'absolute': {'covered': 0, 'total': 0},
- 'incremental': {'covered': 0, 'total': 0},
- 'source': []
- }
- self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
- self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
- result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
- self.assertDictEqual(result_dict, expected_dict)
-
- def testGetCoverageDictForFile_missingCoverage(self):
- self.simple_coverage._source_to_emma = {}
- result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
- self.assertIsNone(result_dict)
-
- def testGetCoverageDict_basic(self):
- files_for_coverage = {
- '/path/to/1/File1.java': [1, 3, 4],
- '/path/2/File2.java': [1, 2]
- }
- self.simple_coverage._source_to_emma = {
- '/path/to/1/File1.java': 'emma_1',
- '/path/2/File2.java': 'emma_2'
- }
- coverage_info = {
- 'emma_1': [
- emma_coverage_stats.LineCoverage(
- 1, '', emma_coverage_stats.COVERED, 1.0),
- emma_coverage_stats.LineCoverage(
- 2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
- emma_coverage_stats.LineCoverage(
- 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
- emma_coverage_stats.LineCoverage(
- 4, '', emma_coverage_stats.COVERED, 1.0)
- ],
- 'emma_2': [
- emma_coverage_stats.LineCoverage(
- 1, '', emma_coverage_stats.NOT_COVERED, 1.0),
- emma_coverage_stats.LineCoverage(
- 2, '', emma_coverage_stats.COVERED, 1.0)
- ]
- }
- expected_dict = {
- 'files': {
- '/path/2/File2.java': {
- 'absolute': {'covered': 1, 'total': 2},
- 'incremental': {'covered': 1, 'total': 2},
- 'source': [{'changed': True, 'coverage': 0,
- 'line': '', 'fractional_coverage': 1.0},
- {'changed': True, 'coverage': 1,
- 'line': '', 'fractional_coverage': 1.0}]
- },
- '/path/to/1/File1.java': {
- 'absolute': {'covered': 2.5, 'total': 3},
- 'incremental': {'covered': 2, 'total': 2},
- 'source': [{'changed': True, 'coverage': 1,
- 'line': '', 'fractional_coverage': 1.0},
- {'changed': False, 'coverage': 2,
- 'line': '', 'fractional_coverage': 0.5},
- {'changed': True, 'coverage': -1,
- 'line': '', 'fractional_coverage': 1.0},
- {'changed': True, 'coverage': 1,
- 'line': '', 'fractional_coverage': 1.0}]
- }
- },
- 'patch': {'incremental': {'covered': 3, 'total': 4}}
- }
- # Return the relevant coverage info for each file.
- self.simple_coverage._emma_parser.GetLineCoverage = (
- lambda x: coverage_info[x])
- result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
- self.assertDictEqual(result_dict, expected_dict)
-
- def testGetCoverageDict_noCoverage(self):
- result_dict = self.simple_coverage.GetCoverageDict({})
- self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
-
-
-class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
- """Tests for GenerateCoverageReport."""
-
- def testGenerateCoverageReport_missingJsonFile(self):
- with self.assertRaises(IOError):
- with mock.patch('os.path.exists', return_value=False):
- emma_coverage_stats.GenerateCoverageReport('', '', '')
-
- def testGenerateCoverageReport_invalidJsonFile(self):
- with self.assertRaises(ValueError):
- with mock.patch('os.path.exists', return_value=True):
- MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
- line_coverage_file='', out_file_path='',
- coverage_dir='')
-
-
-def MockOpenForFunction(func, side_effects, **kwargs):
- """Allows easy mock open and read for callables that open multiple files.
-
- Will mock the python open function in a way such that each time read() is
- called on an open file, the next element in |side_effects| is returned. This
- makes it easier to test functions that call open() multiple times.
-
- Args:
- func: The callable to invoke once mock files are setup.
- side_effects: A list of return values for each file to return once read.
-      Length of list should be equal to the number of calls to open in |func|.
- **kwargs: Keyword arguments to be passed to |func|.
-
- Returns:
- A tuple containing the return value of |func| and the MagicMock object used
- to mock all calls to open respectively.
- """
- mock_open = mock.mock_open()
- mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
- for side_effect in side_effects]
- if sys.version_info.major < 3:
- open_builtin_path = '__builtin__.open'
- else:
- open_builtin_path = 'builtins.open'
- with mock.patch(open_builtin_path, mock_open):
- return func(**kwargs), mock_open
-
-
-if __name__ == '__main__':
- # Suppress logging messages.
- unittest.main(buffer=True)
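
For context, the mock-open sequencing that MockOpenForFunction relies on is worth spelling out: each successive call to open() returns a handle whose read() yields the next element of |side_effects|. A small self-contained sketch (the function and file names are illustrative, not part of the deleted test):

import mock

def read_two_files():
    with open('first') as f1, open('second') as f2:
        return f1.read(), f2.read()

mock_open = mock.mock_open()
# Each call to the patched open() returns the next pre-built file handle.
mock_open.side_effect = [mock.mock_open(read_data=data).return_value
                         for data in ['AAA', 'BBB']]
with mock.patch('builtins.open', mock_open):
    assert read_two_files() == ('AAA', 'BBB')
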
diff --git a/build/android/generate_jacoco_report.py b/build/android/generate_jacoco_report.py
index b022c82bd..44e82acbf 100755
--- a/build/android/generate_jacoco_report.py
+++ b/build/android/generate_jacoco_report.py
@@ -6,7 +6,6 @@
"""Aggregates Jacoco coverage files to produce output."""
-from __future__ import print_function
import argparse
import fnmatch
diff --git a/build/android/gradle/generate_gradle.py b/build/android/gradle/generate_gradle.py
index 91def7263..bc05baf9b 100755
--- a/build/android/gradle/generate_gradle.py
+++ b/build/android/gradle/generate_gradle.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env vpython3
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -12,6 +12,7 @@ import glob
import json
import logging
import os
+import pathlib
import re
import shutil
import subprocess
@@ -32,6 +33,12 @@ from util import resource_utils
sys.path.append(os.path.dirname(_BUILD_ANDROID))
import gn_helpers
+# Typically these should track the versions that work on the slowest release
+# channel, i.e. Android Studio stable.
+_DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION = '7.3.1'
+_DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION = '1.8.0'
+_DEFAULT_GRADLE_WRAPPER_VERSION = '7.4'
+
_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
'depot_tools')
_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
@@ -45,8 +52,6 @@ _GRADLE_BUILD_FILE = 'build.gradle'
_CMAKE_FILE = 'CMakeLists.txt'
# This needs to come first alphabetically among all modules.
_MODULE_ALL = '_all'
-_SRC_INTERNAL = os.path.join(
- os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk'
_DEFAULT_TARGETS = [
@@ -89,11 +94,6 @@ def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
return os.path.abspath(os.path.join(old_cwd, path_or_list))
-def _IsSubpathOf(child, parent):
- """Returns whether |child| is a subpath of |parent|."""
- return not os.path.relpath(child, parent).startswith(os.pardir)
-
-
def _WriteFile(path, data):
"""Writes |data| to |path|, constucting parent directories if necessary."""
logging.info('Writing %s', path)
@@ -225,11 +225,11 @@ class _ProjectEntry:
def JavaFiles(self):
if self._java_files is None:
- java_sources_file = self.DepsInfo().get('java_sources_file')
+ target_sources_file = self.DepsInfo().get('target_sources_file')
java_files = []
- if java_sources_file:
- java_sources_file = _RebasePath(java_sources_file)
- java_files = build_utils.ReadSourcesList(java_sources_file)
+ if target_sources_file:
+ target_sources_file = _RebasePath(target_sources_file)
+ java_files = build_utils.ReadSourcesList(target_sources_file)
self._java_files = java_files
return self._java_files
@@ -257,13 +257,12 @@ class _ProjectEntry:
class _ProjectContextGenerator:
"""Helper class to generate gradle build files"""
def __init__(self, project_dir, build_vars, use_gradle_process_resources,
- jinja_processor, split_projects, channel):
+ jinja_processor, split_projects):
self.project_dir = project_dir
self.build_vars = build_vars
self.use_gradle_process_resources = use_gradle_process_resources
self.jinja_processor = jinja_processor
self.split_projects = split_projects
- self.channel = channel
self.processed_java_dirs = set()
self.processed_prebuilts = set()
self.processed_res_dirs = set()
@@ -416,37 +415,42 @@ def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
return excludes
-def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+def _ComputeJavaSourceDirsAndExcludes(output_dir, source_files):
"""Computes the list of java source directories and exclude patterns.
- 1. Computes the root java source directories from the list of files.
+ This includes both Java and Kotlin files since both are listed in the same
+ "java" section for gradle.
+
+ 1. Computes the root source directories from the list of files.
2. Compute exclude patterns that exclude all extra files only.
- 3. Returns the list of java source directories and exclude patterns.
+ 3. Returns the list of source directories and exclude patterns.
"""
java_dirs = []
excludes = []
- if java_files:
- java_files = _RebasePath(java_files)
- computed_dirs = _ComputeJavaSourceDirs(java_files)
+ if source_files:
+ source_files = _RebasePath(source_files)
+ computed_dirs = _ComputeJavaSourceDirs(source_files)
java_dirs = list(computed_dirs.keys())
- all_found_java_files = set()
+ all_found_source_files = set()
for directory, files in computed_dirs.items():
- found_java_files = build_utils.FindInDirectory(directory, '*.java')
- all_found_java_files.update(found_java_files)
- unwanted_java_files = set(found_java_files) - set(files)
- if unwanted_java_files:
+ found_source_files = (build_utils.FindInDirectory(directory, '*.java') +
+ build_utils.FindInDirectory(directory, '*.kt'))
+ all_found_source_files.update(found_source_files)
+ unwanted_source_files = set(found_source_files) - set(files)
+ if unwanted_source_files:
logging.debug('Directory requires excludes: %s', directory)
excludes.extend(
- _ComputeExcludeFilters(files, unwanted_java_files, directory))
+ _ComputeExcludeFilters(files, unwanted_source_files, directory))
- missing_java_files = set(java_files) - all_found_java_files
+ missing_source_files = set(source_files) - all_found_source_files
# Warn only about non-generated files that are missing.
- missing_java_files = [p for p in missing_java_files
- if not p.startswith(output_dir)]
- if missing_java_files:
- logging.warning(
- 'Some java files were not found: %s', missing_java_files)
+ missing_source_files = [
+ p for p in missing_source_files if not p.startswith(output_dir)
+ ]
+ if missing_source_files:
+ logging.warning('Some source files were not found: %s',
+ missing_source_files)
return java_dirs, excludes
@@ -479,6 +483,19 @@ def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
return []
+def _ParseVersionFromFile(file_path, version_regex_string, default_version):
+ if os.path.exists(file_path):
+ content = pathlib.Path(file_path).read_text()
+ match = re.search(version_regex_string, content)
+ if match:
+ version = match.group(1)
+ logging.info('Using existing version %s in %s.', version, file_path)
+ return version
+ logging.warning('Unable to find %s in %s:\n%s', version_regex_string,
+ file_path, content)
+ return default_version
+
+
def _GenerateLocalProperties(sdk_dir):
"""Returns the data for local.properties as a string."""
return '\n'.join([
@@ -488,12 +505,17 @@ def _GenerateLocalProperties(sdk_dir):
])
-def _GenerateGradleWrapperProperties():
+def _GenerateGradleWrapperProperties(file_path):
"""Returns the data for gradle-wrapper.properties as a string."""
+
+ version = _ParseVersionFromFile(file_path,
+ r'/distributions/gradle-([\d.]+)-all.zip',
+ _DEFAULT_GRADLE_WRAPPER_VERSION)
+
return '\n'.join([
'# Generated by //build/android/gradle/generate_gradle.py',
- ('distributionUrl=https\\://services.gradle.org/distributions/'
- 'gradle-7.3.3-all.zip\n'),
+ ('distributionUrl=https\\://services.gradle.org'
+ f'/distributions/gradle-{version}-all.zip'),
'',
])
@@ -521,7 +543,6 @@ def _GenerateBaseVars(generator, build_vars):
variables['min_sdk_version'] = build_vars['default_min_sdk_version']
variables['use_gradle_process_resources'] = (
generator.use_gradle_process_resources)
- variables['channel'] = generator.channel
return variables
@@ -633,12 +654,12 @@ def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH),
'java_dirs': Relativize(main_java_dirs),
'prebuilts': Relativize(prebuilts),
- 'java_excludes': ['**/*.java'],
+ 'java_excludes': ['**/*.java', '**/*.kt'],
'res_dirs': Relativize(res_dirs),
}
variables['android_test'] = [{
'java_dirs': Relativize(junit_test_java_dirs),
- 'java_excludes': ['**/*.java'],
+ 'java_excludes': ['**/*.java', '**/*.kt'],
}]
if native_targets:
variables['native'] = _GetNative(
@@ -653,9 +674,20 @@ def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data)
-def _GenerateRootGradle(jinja_processor, channel):
+def _GenerateRootGradle(jinja_processor, file_path):
"""Returns the data for the root project's build.gradle."""
- return jinja_processor.Render(_TemplatePath('root'), {'channel': channel})
+ android_gradle_plugin_version = _ParseVersionFromFile(
+ file_path, r'com.android.tools.build:gradle:([\d.]+)',
+ _DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION)
+ kotlin_gradle_plugin_version = _ParseVersionFromFile(
+ file_path, r'org.jetbrains.kotlin:kotlin-gradle-plugin:([\d.]+)',
+ _DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION)
+
+ return jinja_processor.Render(
+ _TemplatePath('root'), {
+ 'android_gradle_plugin_version': android_gradle_plugin_version,
+ 'kotlin_gradle_plugin_version': kotlin_gradle_plugin_version,
+ })
def _GenerateSettingsGradle(project_entries):
@@ -764,15 +796,6 @@ def main():
default=os.path.expanduser('~/Android/Sdk'),
help='The path to use as the SDK root, overrides the '
'default at ~/Android/Sdk.')
- version_group = parser.add_mutually_exclusive_group()
- version_group.add_argument('--beta',
- action='store_true',
- help='Generate a project that is compatible with '
- 'Android Studio Beta.')
- version_group.add_argument('--canary',
- action='store_true',
- help='Generate a project that is compatible with '
- 'Android Studio Canary.')
args = parser.parse_args()
if args.output_directory:
constants.SetOutputDirectory(args.output_directory)
@@ -822,15 +845,9 @@ def main():
build_vars = gn_helpers.ReadBuildVars(output_dir)
jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
- if args.beta:
- channel = 'beta'
- elif args.canary:
- channel = 'canary'
- else:
- channel = 'stable'
generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
- args.use_gradle_process_resources, jinja_processor, args.split_projects,
- channel)
+ args.use_gradle_process_resources,
+ jinja_processor, args.split_projects)
main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]
@@ -870,8 +887,9 @@ def main():
_GenerateModuleAll(_gradle_output_dir, generator, build_vars,
jinja_processor, args.native_targets)
- _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
- _GenerateRootGradle(jinja_processor, channel))
+ root_gradle_path = os.path.join(generator.project_dir, _GRADLE_BUILD_FILE)
+ _WriteFile(root_gradle_path,
+ _GenerateRootGradle(jinja_processor, root_gradle_path))
_WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
_GenerateSettingsGradle(project_entries))
@@ -889,9 +907,8 @@ def main():
wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
'gradle-wrapper.properties')
- if os.path.exists(wrapper_properties):
- os.unlink(wrapper_properties)
- _WriteFile(wrapper_properties, _GenerateGradleWrapperProperties())
+ _WriteFile(wrapper_properties,
+ _GenerateGradleWrapperProperties(wrapper_properties))
generated_inputs = set()
for entry in entries:
@@ -901,13 +918,19 @@ def main():
# Build all paths references by .gradle that exist within output_dir.
generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
if generated_inputs:
- targets = _RebasePath(generated_inputs, output_dir)
+ # Skip targets outside the output_dir since those are not generated.
+ targets = [
+ p for p in _RebasePath(generated_inputs, output_dir)
+ if not p.startswith(os.pardir)
+ ]
_RunNinja(output_dir, targets)
- logging.warning('Generated files will only appear once you\'ve built them.')
- logging.warning('Generated projects for Android Studio %s', channel)
- logging.warning('For more tips: https://chromium.googlesource.com/chromium'
- '/src.git/+/main/docs/android_studio.md')
+ print('Generated projects for Android Studio.')
+ print('** Building using Android Studio / Gradle does not work.')
+ print('** This project is only for IDE editing & tools.')
+ print('Note: Generated files will appear only if they have been built')
+ print('For more tips: https://chromium.googlesource.com/chromium/src.git/'
+ '+/main/docs/android_studio.md')
if __name__ == '__main__':
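
The version re-use added above follows one pattern: _ParseVersionFromFile() takes the first capture group of a regex from the previously generated file and otherwise keeps the hard-coded default. A stand-alone sketch of just that extraction (the file content below is made up for illustration):

import re

_DEFAULT_GRADLE_WRAPPER_VERSION = '7.4'
existing = ('distributionUrl=https\\://services.gradle.org'
            '/distributions/gradle-7.5.1-all.zip')
match = re.search(r'/distributions/gradle-([\d.]+)-all.zip', existing)
# Keep whatever the developer already had; fall back to the default otherwise.
version = match.group(1) if match else _DEFAULT_GRADLE_WRAPPER_VERSION
assert version == '7.5.1'
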
diff --git a/build/android/gradle/gn_to_cmake.py b/build/android/gradle/gn_to_cmake.py
deleted file mode 100755
index bdbd1c009..000000000
--- a/build/android/gradle/gn_to_cmake.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2016 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Usage: gn_to_cmake.py <json_file_name>
-
-gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
-
-or
-
-gn gen out/config --ide=json
-python gn/gn_to_cmake.py out/config/project.json
-
-The first is recommended, as it will auto-update.
-"""
-
-from __future__ import print_function
-
-import functools
-import json
-import posixpath
-import string
-import sys
-
-
-def CMakeStringEscape(a):
- """Escapes the string 'a' for use inside a CMake string.
-
- This means escaping
- '\' otherwise it may be seen as modifying the next character
- '"' otherwise it will end the string
- ';' otherwise the string becomes a list
-
- The following do not need to be escaped
- '#' when the lexer is in string state, this does not start a comment
- """
- return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
-
-
-def CMakeTargetEscape(a):
- """Escapes the string 'a' for use as a CMake target name.
-
- CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
- The ':' is only allowed for imported targets.
- """
- def Escape(c):
- if c in string.ascii_letters or c in string.digits or c in '_.+-':
- return c
- return '__'
-
- return ''.join([Escape(c) for c in a])
-
-
-def SetVariable(out, variable_name, value):
- """Sets a CMake variable."""
- out.write('set("')
- out.write(CMakeStringEscape(variable_name))
- out.write('" "')
- out.write(CMakeStringEscape(value))
- out.write('")\n')
-
-
-def SetVariableList(out, variable_name, values):
- """Sets a CMake variable to a list."""
- if not values:
- SetVariable(out, variable_name, "")
- return
- if len(values) == 1:
- SetVariable(out, variable_name, values[0])
- return
- out.write('list(APPEND "')
- out.write(CMakeStringEscape(variable_name))
- out.write('"\n "')
- out.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
- out.write('")\n')
-
-
-def SetFilesProperty(output, variable, property_name, values, sep):
- """Given a set of source files, sets the given property on them."""
- output.write('set_source_files_properties(')
- WriteVariable(output, variable)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetCurrentTargetProperty(out, property_name, values, sep=''):
- """Given a target, sets the given property."""
- out.write('set_target_properties("${target}" PROPERTIES ')
- out.write(property_name)
- out.write(' "')
- for value in values:
- out.write(CMakeStringEscape(value))
- out.write(sep)
- out.write('")\n')
-
-
-def WriteVariable(output, variable_name, prepend=None):
- if prepend:
- output.write(prepend)
- output.write('${')
- output.write(variable_name)
- output.write('}')
-
-
-# See GetSourceFileType in gn
-source_file_types = {
- '.cc': 'cxx',
- '.cpp': 'cxx',
- '.cxx': 'cxx',
- '.c': 'c',
- '.s': 'asm',
- '.S': 'asm',
- '.asm': 'asm',
- '.o': 'obj',
- '.obj': 'obj',
-}
-
-
-class CMakeTargetType:
- def __init__(self, command, modifier, property_modifier, is_linkable):
- self.command = command
- self.modifier = modifier
- self.property_modifier = property_modifier
- self.is_linkable = is_linkable
-CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
- None, False)
-
-# See GetStringForOutputType in gn
-cmake_target_types = {
- 'unknown': CMakeTargetType.custom,
- 'group': CMakeTargetType.custom,
- 'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
- 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
- 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
- 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
- 'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
- 'copy': CMakeTargetType.custom,
- 'action': CMakeTargetType.custom,
- 'action_foreach': CMakeTargetType.custom,
- 'bundle_data': CMakeTargetType.custom,
- 'create_bundle': CMakeTargetType.custom,
-}
-
-
-def FindFirstOf(s, a):
- return min(s.find(i) for i in a if i in s)
-
-
-def GetCMakeTargetName(gn_target_name):
- # See <chromium>/src/tools/gn/label.cc#Resolve
- # //base/test:test_support(//build/toolchain/win:msvc)
- path_separator = FindFirstOf(gn_target_name, (':', '('))
- location = None
- name = None
- toolchain = None
- if not path_separator:
- location = gn_target_name[2:]
- else:
- location = gn_target_name[2:path_separator]
- toolchain_separator = gn_target_name.find('(', path_separator)
- if toolchain_separator == -1:
- name = gn_target_name[path_separator + 1:]
- else:
- if toolchain_separator > path_separator:
- name = gn_target_name[path_separator + 1:toolchain_separator]
- assert gn_target_name.endswith(')')
- toolchain = gn_target_name[toolchain_separator + 1:-1]
- assert location or name
-
- cmake_target_name = None
- if location.endswith('/' + name):
- cmake_target_name = location
- elif location:
- cmake_target_name = location + '_' + name
- else:
- cmake_target_name = name
- if toolchain:
- cmake_target_name += '--' + toolchain
- return CMakeTargetEscape(cmake_target_name)
-
-
-class Project:
- def __init__(self, project_json):
- self.targets = project_json['targets']
- build_settings = project_json['build_settings']
- self.root_path = build_settings['root_path']
- self.build_path = posixpath.join(self.root_path,
- build_settings['build_dir'][2:])
- self.object_source_deps = {}
-
- def GetAbsolutePath(self, path):
- if path.startswith("//"):
- return self.root_path + "/" + path[2:]
- return path
-
- def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
- """All OBJECT libraries whose sources have not been absorbed."""
- if gn_target_name in self.object_source_deps:
- object_dependencies.update(self.object_source_deps[gn_target_name])
- return
- target_deps = set()
- dependencies = self.targets[gn_target_name].get('deps', [])
- for dependency in dependencies:
- dependency_type = self.targets[dependency].get('type', None)
- if dependency_type == 'source_set':
- target_deps.add(dependency)
- if dependency_type not in gn_target_types_that_absorb_objects:
- self.GetObjectSourceDependencies(dependency, target_deps)
- self.object_source_deps[gn_target_name] = target_deps
- object_dependencies.update(target_deps)
-
- def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
- """All OBJECT libraries whose libraries have not been absorbed."""
- dependencies = self.targets[gn_target_name].get('deps', [])
- for dependency in dependencies:
- dependency_type = self.targets[dependency].get('type', None)
- if dependency_type == 'source_set':
- object_dependencies.add(dependency)
- self.GetObjectLibraryDependencies(dependency, object_dependencies)
-
-
-class Target:
- def __init__(self, gn_target_name, project):
- self.gn_name = gn_target_name
- self.properties = project.targets[self.gn_name]
- self.cmake_name = GetCMakeTargetName(self.gn_name)
- self.gn_type = self.properties.get('type', None)
- self.cmake_type = cmake_target_types.get(self.gn_type, None)
-
-
-def WriteAction(out, target, project, sources, synthetic_dependencies):
- outputs = []
- output_directories = set()
- for output in target.properties.get('outputs', []):
- output_abs_path = project.GetAbsolutePath(output)
- outputs.append(output_abs_path)
- output_directory = posixpath.dirname(output_abs_path)
- if output_directory:
- output_directories.add(output_directory)
- outputs_name = '${target}__output'
- SetVariableList(out, outputs_name, outputs)
-
- out.write('add_custom_command(OUTPUT ')
- WriteVariable(out, outputs_name)
- out.write('\n')
-
- if output_directories:
- out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
- out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
- out.write('"\n')
-
- script = target.properties['script']
- arguments = target.properties['args']
- out.write(' COMMAND python "')
- out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
- out.write('"')
- if arguments:
- out.write('\n "')
- out.write('"\n "'.join([CMakeStringEscape(a) for a in arguments]))
- out.write('"')
- out.write('\n')
-
- out.write(' DEPENDS ')
- for sources_type_name in sources.values():
- WriteVariable(out, sources_type_name, ' ')
- out.write('\n')
-
- #TODO: CMake 3.7 is introducing DEPFILE
-
- out.write(' WORKING_DIRECTORY "')
- out.write(CMakeStringEscape(project.build_path))
- out.write('"\n')
-
- out.write(' COMMENT "Action: ${target}"\n')
-
- out.write(' VERBATIM)\n')
-
- synthetic_dependencies.add(outputs_name)
-
-
-def ExpandPlaceholders(source, a):
- source_dir, source_file_part = posixpath.split(source)
- source_name_part, _ = posixpath.splitext(source_file_part)
- #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
- return a.replace('{{source}}', source) \
- .replace('{{source_file_part}}', source_file_part) \
- .replace('{{source_name_part}}', source_name_part) \
- .replace('{{source_dir}}', source_dir) \
- .replace('{{source_root_relative_dir}}', source_dir)
-
-
-def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
- all_outputs = target.properties.get('outputs', [])
- inputs = target.properties.get('sources', [])
- # TODO: consider expanding 'output_patterns' instead.
-  outputs_per_input = len(all_outputs) // len(inputs)
- for count, source in enumerate(inputs):
- source_abs_path = project.GetAbsolutePath(source)
-
- outputs = []
- output_directories = set()
- for output in all_outputs[outputs_per_input * count:
- outputs_per_input * (count+1)]:
- output_abs_path = project.GetAbsolutePath(output)
- outputs.append(output_abs_path)
- output_directory = posixpath.dirname(output_abs_path)
- if output_directory:
- output_directories.add(output_directory)
- outputs_name = '${target}__output_' + str(count)
- SetVariableList(out, outputs_name, outputs)
-
- out.write('add_custom_command(OUTPUT ')
- WriteVariable(out, outputs_name)
- out.write('\n')
-
- if output_directories:
- out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
- out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
- out.write('"\n')
-
- script = target.properties['script']
- # TODO: need to expand {{xxx}} in arguments
- arguments = target.properties['args']
- out.write(' COMMAND python "')
- out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
- out.write('"')
- if arguments:
- out.write('\n "')
- expand = functools.partial(ExpandPlaceholders, source_abs_path)
- out.write('"\n "'.join(
- [CMakeStringEscape(expand(a)) for a in arguments]))
- out.write('"')
- out.write('\n')
-
- out.write(' DEPENDS')
- if 'input' in sources:
- WriteVariable(out, sources['input'], ' ')
- out.write(' "')
- out.write(CMakeStringEscape(source_abs_path))
- out.write('"\n')
-
- #TODO: CMake 3.7 is introducing DEPFILE
-
- out.write(' WORKING_DIRECTORY "')
- out.write(CMakeStringEscape(project.build_path))
- out.write('"\n')
-
- out.write(' COMMENT "Action ${target} on ')
- out.write(CMakeStringEscape(source_abs_path))
- out.write('"\n')
-
- out.write(' VERBATIM)\n')
-
- synthetic_dependencies.add(outputs_name)
-
-
-def WriteCopy(out, target, project, sources, synthetic_dependencies):
- inputs = target.properties.get('sources', [])
- raw_outputs = target.properties.get('outputs', [])
-
- # TODO: consider expanding 'output_patterns' instead.
- outputs = []
- for output in raw_outputs:
- output_abs_path = project.GetAbsolutePath(output)
- outputs.append(output_abs_path)
- outputs_name = '${target}__output'
- SetVariableList(out, outputs_name, outputs)
-
- out.write('add_custom_command(OUTPUT ')
- WriteVariable(out, outputs_name)
- out.write('\n')
-
- for src, dst in zip(inputs, outputs):
- out.write(' COMMAND ${CMAKE_COMMAND} -E copy "')
- out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
- out.write('" "')
- out.write(CMakeStringEscape(dst))
- out.write('"\n')
-
- out.write(' DEPENDS ')
- for sources_type_name in sources.values():
- WriteVariable(out, sources_type_name, ' ')
- out.write('\n')
-
- out.write(' WORKING_DIRECTORY "')
- out.write(CMakeStringEscape(project.build_path))
- out.write('"\n')
-
- out.write(' COMMENT "Copy ${target}"\n')
-
- out.write(' VERBATIM)\n')
-
- synthetic_dependencies.add(outputs_name)
-
-
-def WriteCompilerFlags(out, target, project, sources):
- # Hack, set linker language to c if no c or cxx files present.
- if not 'c' in sources and not 'cxx' in sources:
- SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])
-
- # Mark uncompiled sources as uncompiled.
- if 'input' in sources:
- SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
- if 'other' in sources:
- SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')
-
- # Mark object sources as linkable.
- if 'obj' in sources:
- SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')
-
- # TODO: 'output_name', 'output_dir', 'output_extension'
- # This includes using 'source_outputs' to direct compiler output.
-
- # Includes
- includes = target.properties.get('include_dirs', [])
- if includes:
- out.write('set_property(TARGET "${target}" ')
- out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
- for include_dir in includes:
- out.write('\n "')
- out.write(project.GetAbsolutePath(include_dir))
- out.write('"')
- out.write(')\n')
-
- # Defines
- defines = target.properties.get('defines', [])
- if defines:
- SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')
-
- # Compile flags
- # "arflags", "asmflags", "cflags",
-  #   "cflags_c", "cflags_cc", "cflags_objc", "cflags_objcc"
- # CMake does not have per target lang compile flags.
- # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
- # http://public.kitware.com/Bug/view.php?id=14857
- flags = []
- flags.extend(target.properties.get('cflags', []))
- cflags_asm = target.properties.get('asmflags', [])
- cflags_c = target.properties.get('cflags_c', [])
- cflags_cxx = target.properties.get('cflags_cc', [])
- if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
- flags.extend(cflags_c)
- elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
- flags.extend(cflags_cxx)
- else:
- # TODO: This is broken, one cannot generally set properties on files,
- # as other targets may require different properties on the same files.
- if 'asm' in sources and cflags_asm:
- SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
- if 'c' in sources and cflags_c:
- SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
- if 'cxx' in sources and cflags_cxx:
- SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
- if flags:
- SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')
-
- # Linker flags
- ldflags = target.properties.get('ldflags', [])
- if ldflags:
- SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
-
-
-gn_target_types_that_absorb_objects = (
- 'executable',
- 'loadable_module',
- 'shared_library',
- 'static_library'
-)
-
-
-def WriteSourceVariables(out, target, project):
- # gn separates the sheep from the goats based on file extensions.
-  # A full separation is done here because of flag handling (see Compile flags).
- source_types = {'cxx':[], 'c':[], 'asm':[],
- 'obj':[], 'obj_target':[], 'input':[], 'other':[]}
-
- # TODO .def files on Windows
- for source in target.properties.get('sources', []):
- _, ext = posixpath.splitext(source)
- source_abs_path = project.GetAbsolutePath(source)
- source_types[source_file_types.get(ext, 'other')].append(source_abs_path)
-
- for input_path in target.properties.get('inputs', []):
- input_abs_path = project.GetAbsolutePath(input_path)
- source_types['input'].append(input_abs_path)
-
- # OBJECT library dependencies need to be listed as sources.
- # Only executables and non-OBJECT libraries may reference an OBJECT library.
- # https://gitlab.kitware.com/cmake/cmake/issues/14778
- if target.gn_type in gn_target_types_that_absorb_objects:
- object_dependencies = set()
- project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
- for dependency in object_dependencies:
- cmake_dependency_name = GetCMakeTargetName(dependency)
- obj_target_sources = '$<TARGET_OBJECTS:' + cmake_dependency_name + '>'
- source_types['obj_target'].append(obj_target_sources)
-
- sources = {}
- for source_type, sources_of_type in source_types.items():
- if sources_of_type:
- sources[source_type] = '${target}__' + source_type + '_srcs'
- SetVariableList(out, sources[source_type], sources_of_type)
- return sources
-
-
-def WriteTarget(out, target, project):
- out.write('\n#')
- out.write(target.gn_name)
- out.write('\n')
-
- if target.cmake_type is None:
- print('Target {} has unknown target type {}, skipping.'.format(
- target.gn_name, target.gn_type))
- return
-
- SetVariable(out, 'target', target.cmake_name)
-
- sources = WriteSourceVariables(out, target, project)
-
- synthetic_dependencies = set()
- if target.gn_type == 'action':
- WriteAction(out, target, project, sources, synthetic_dependencies)
- if target.gn_type == 'action_foreach':
- WriteActionForEach(out, target, project, sources, synthetic_dependencies)
- if target.gn_type == 'copy':
- WriteCopy(out, target, project, sources, synthetic_dependencies)
-
- out.write(target.cmake_type.command)
- out.write('("${target}"')
- if target.cmake_type.modifier is not None:
- out.write(' ')
- out.write(target.cmake_type.modifier)
- for sources_type_name in sources.values():
- WriteVariable(out, sources_type_name, ' ')
- if synthetic_dependencies:
- out.write(' DEPENDS')
-    for synthetic_dependency in synthetic_dependencies:
-      WriteVariable(out, synthetic_dependency, ' ')
- out.write(')\n')
-
- if target.cmake_type.command != 'add_custom_target':
- WriteCompilerFlags(out, target, project, sources)
-
- libraries = set()
- nonlibraries = set()
-
- dependencies = set(target.properties.get('deps', []))
- # Transitive OBJECT libraries are in sources.
- # Those sources are dependent on the OBJECT library dependencies.
- # Those sources cannot bring in library dependencies.
- object_dependencies = set()
- if target.gn_type != 'source_set':
- project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
- for object_dependency in object_dependencies:
- dependencies.update(project.targets.get(object_dependency).get('deps', []))
-
- for dependency in dependencies:
- gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
- cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
- cmake_dependency_name = GetCMakeTargetName(dependency)
- if cmake_dependency_type.command != 'add_library':
- nonlibraries.add(cmake_dependency_name)
- elif cmake_dependency_type.modifier != 'OBJECT':
- if target.cmake_type.is_linkable:
- libraries.add(cmake_dependency_name)
- else:
- nonlibraries.add(cmake_dependency_name)
-
- # Non-library dependencies.
- if nonlibraries:
- out.write('add_dependencies("${target}"')
- for nonlibrary in nonlibraries:
- out.write('\n "')
- out.write(nonlibrary)
- out.write('"')
- out.write(')\n')
-
- # Non-OBJECT library dependencies.
- external_libraries = target.properties.get('libs', [])
- if target.cmake_type.is_linkable and (external_libraries or libraries):
- library_dirs = target.properties.get('lib_dirs', [])
- if library_dirs:
- SetVariableList(out, '${target}__library_directories', library_dirs)
-
- system_libraries = []
- for external_library in external_libraries:
- if '/' in external_library:
- libraries.add(project.GetAbsolutePath(external_library))
- else:
- if external_library.endswith('.framework'):
- external_library = external_library[:-len('.framework')]
- system_library = 'library__' + external_library
- if library_dirs:
- system_library = system_library + '__for_${target}'
- out.write('find_library("')
- out.write(CMakeStringEscape(system_library))
- out.write('" "')
- out.write(CMakeStringEscape(external_library))
- out.write('"')
- if library_dirs:
- out.write(' PATHS "')
- WriteVariable(out, '${target}__library_directories')
- out.write('"')
- out.write(')\n')
- system_libraries.append(system_library)
- out.write('target_link_libraries("${target}"')
- for library in libraries:
- out.write('\n "')
- out.write(CMakeStringEscape(library))
- out.write('"')
- for system_library in system_libraries:
- WriteVariable(out, system_library, '\n "')
- out.write('"')
- out.write(')\n')
-
-
-def WriteProject(project):
- out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+')
- out.write('# Generated by gn_to_cmake.py.\n')
- out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
- out.write('cmake_policy(VERSION 2.8.8)\n\n')
-
- # Update the gn generated ninja build.
- # If a build file has changed, this will update CMakeLists.ext if
- # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
- # style was used to create this config.
- out.write('execute_process(COMMAND ninja -C "')
- out.write(CMakeStringEscape(project.build_path))
- out.write('" build.ninja)\n')
-
- out.write('include(CMakeLists.ext)\n')
- out.close()
-
- out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+')
- out.write('# Generated by gn_to_cmake.py.\n')
- out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
- out.write('cmake_policy(VERSION 2.8.8)\n')
-
- # The following appears to be as-yet undocumented.
- # http://public.kitware.com/Bug/view.php?id=8392
- out.write('enable_language(ASM)\n\n')
- # ASM-ATT does not support .S files.
- # output.write('enable_language(ASM-ATT)\n')
-
- # Current issues with automatic re-generation:
- # The gn generated build.ninja target uses build.ninja.d
- # but build.ninja.d does not contain the ide or gn.
- # Currently the ide is not run if the project.json file is not changed
- # but the ide needs to be run anyway if it has itself changed.
- # This can be worked around by deleting the project.json file.
- out.write('file(READ "')
- gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
- out.write(CMakeStringEscape(gn_deps_file))
- out.write('" "gn_deps_string" OFFSET ')
- out.write(str(len('build.ninja: ')))
- out.write(')\n')
- # One would think this would need to worry about escaped spaces
- # but gn doesn't escape spaces here (it generates invalid .d files).
- out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
- out.write('foreach("gn_dep" ${gn_deps})\n')
- out.write(' configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
- out.write('endforeach("gn_dep")\n')
-
- for target_name in project.targets.keys():
- out.write('\n')
- WriteTarget(out, Target(target_name, project), project)
-
-
-def main():
- if len(sys.argv) != 2:
- print('Usage: ' + sys.argv[0] + ' <json_file_name>')
- sys.exit(1)
-
- json_path = sys.argv[1]
- project = None
- with open(json_path, 'r') as json_file:
- project = json.loads(json_file.read())
-
- WriteProject(Project(project))
-
-
-if __name__ == "__main__":
- main()
diff --git a/build/android/gradle/root.jinja b/build/android/gradle/root.jinja
index 73698647c..8009ebe07 100644
--- a/build/android/gradle/root.jinja
+++ b/build/android/gradle/root.jinja
@@ -3,12 +3,22 @@
{# found in the LICENSE file. #}
// Generated by //build/android/generate_gradle.py
+// This section is used to find the plugins.
buildscript {
repositories {
google()
mavenCentral()
}
dependencies {
- classpath "com.android.tools.build:gradle:7.2.0"
+ classpath "com.android.tools.build:gradle:{{ android_gradle_plugin_version }}"
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:{{ kotlin_gradle_plugin_version }}"
}
}
+
+// This is used by individual modules to find/fetch dependencies.
+allprojects {
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
\ No newline at end of file
diff --git a/build/android/gyp/aar.py b/build/android/gyp/aar.py
index 0217c152d..512d5dbe4 100755
--- a/build/android/gyp/aar.py
+++ b/build/android/gyp/aar.py
@@ -16,9 +16,7 @@ from xml.etree import ElementTree
import zipfile
from util import build_utils
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- os.pardir, os.pardir)))
+import action_helpers # build_utils adds //build to sys.path.
import gn_helpers
@@ -165,7 +163,7 @@ def main():
args = parser.parse_args()
- args.resource_exclusion_globs = build_utils.ParseGnList(
+ args.resource_exclusion_globs = action_helpers.parse_gn_list(
args.resource_exclusion_globs)
if args.ignore_resources:
args.resource_exclusion_globs.append('res/*')
diff --git a/build/android/gyp/aar.pydeps b/build/android/gyp/aar.pydeps
index 7e2924b34..56f860e25 100644
--- a/build/android/gyp/aar.pydeps
+++ b/build/android/gyp/aar.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../action_helpers.py
../../gn_helpers.py
aar.py
util/__init__.py
diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py
index cb1aa8ca2..8eab45dd7 100755
--- a/build/android/gyp/aidl.py
+++ b/build/android/gyp/aidl.py
@@ -14,6 +14,8 @@ import sys
import zipfile
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def main(argv):
@@ -23,10 +25,10 @@ def main(argv):
option_parser.add_option('--includes',
help='Directories to add as import search paths.')
option_parser.add_option('--srcjar', help='Path for srcjar output.')
- build_utils.AddDepfileOption(option_parser)
+ action_helpers.add_depfile_arg(option_parser)
options, args = option_parser.parse_args(argv[1:])
- options.includes = build_utils.ParseGnList(options.includes)
+ options.includes = action_helpers.parse_gn_list(options.includes)
with build_utils.TempDir() as temp_dir:
for f in args:
@@ -34,7 +36,7 @@ def main(argv):
output = os.path.join(temp_dir, classname + '.java')
aidl_cmd = [options.aidl_path]
aidl_cmd += [
- '-p' + s for s in build_utils.ParseGnList(options.imports)
+ '-p' + s for s in action_helpers.parse_gn_list(options.imports)
]
aidl_cmd += ['-I' + s for s in options.includes]
aidl_cmd += [
@@ -43,7 +45,7 @@ def main(argv):
]
build_utils.CheckOutput(aidl_cmd)
- with build_utils.AtomicOutput(options.srcjar) as f:
+ with action_helpers.atomic_output(options.srcjar) as f:
with zipfile.ZipFile(f, 'w') as srcjar:
for path in build_utils.FindInDirectory(temp_dir, '*.java'):
with open(path) as fileobj:
@@ -51,13 +53,13 @@ def main(argv):
pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
arcname = '%s/%s' % (
pkg_name.replace('.', '/'), os.path.basename(path))
- build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+ zip_helpers.add_to_zip_hermetic(srcjar, arcname, data=data)
if options.depfile:
include_files = []
for include_dir in options.includes:
include_files += build_utils.FindInDirectory(include_dir, '*.java')
- build_utils.WriteDepfile(options.depfile, options.srcjar, include_files)
+ action_helpers.write_depfile(options.depfile, options.srcjar, include_files)
if __name__ == '__main__':
diff --git a/build/android/gyp/aidl.pydeps b/build/android/gyp/aidl.pydeps
index 11c55ed4b..d841c9451 100644
--- a/build/android/gyp/aidl.pydeps
+++ b/build/android/gyp/aidl.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
aidl.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/allot_native_libraries.py b/build/android/gyp/allot_native_libraries.py
index 0ac2f3b17..61daac224 100755
--- a/build/android/gyp/allot_native_libraries.py
+++ b/build/android/gyp/allot_native_libraries.py
@@ -46,6 +46,7 @@ import json
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def _ModuleLibrariesPair(arg):
@@ -145,7 +146,7 @@ def main(args):
help='A pair of parent module name and child module name '
'(format: "<parent>:<child>"). Can be specified multiple times.')
options = parser.parse_args(build_utils.ExpandFileArgs(args))
- options.libraries = [(m, build_utils.ParseGnList(l))
+ options.libraries = [(m, action_helpers.parse_gn_list(l))
for m, l in options.libraries]
# Parse input creating libraries and dependency tree.
diff --git a/build/android/gyp/allot_native_libraries.pydeps b/build/android/gyp/allot_native_libraries.pydeps
index d8b10cd3d..aacaafffe 100644
--- a/build/android/gyp/allot_native_libraries.pydeps
+++ b/build/android/gyp/allot_native_libraries.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py
+../../action_helpers.py
../../gn_helpers.py
allot_native_libraries.py
util/__init__.py
diff --git a/build/android/gyp/apkbuilder.py b/build/android/gyp/apkbuilder.py
index 3ed14c283..ac0023e75 100755
--- a/build/android/gyp/apkbuilder.py
+++ b/build/android/gyp/apkbuilder.py
@@ -9,6 +9,7 @@
import argparse
import logging
import os
+import posixpath
import shutil
import sys
import tempfile
@@ -19,7 +20,8 @@ import finalize_apk
from util import build_utils
from util import diff_utils
-from util import zipalign
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
# Taken from aapt's Package.cpp:
@@ -32,11 +34,10 @@ _NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
def _ParseArgs(args):
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
- parser.add_argument(
- '--assets',
- help='GYP-list of files to add as assets in the form '
- '"srcPath:zipPath", where ":zipPath" is optional.')
+ action_helpers.add_depfile_arg(parser)
+ parser.add_argument('--assets',
+ help='GYP-list of files to add as assets in the form '
+ '"srcPath:zipPath", where ":zipPath" is optional.')
parser.add_argument(
'--java-resources', help='GYP-list of java_resources JARs to include.')
parser.add_argument('--write-asset-list',
@@ -114,18 +115,18 @@ def _ParseArgs(args):
help='Treat all warnings as errors.')
diff_utils.AddCommandLineFlags(parser)
options = parser.parse_args(args)
- options.assets = build_utils.ParseGnList(options.assets)
- options.uncompressed_assets = build_utils.ParseGnList(
+ options.assets = action_helpers.parse_gn_list(options.assets)
+ options.uncompressed_assets = action_helpers.parse_gn_list(
options.uncompressed_assets)
- options.native_lib_placeholders = build_utils.ParseGnList(
+ options.native_lib_placeholders = action_helpers.parse_gn_list(
options.native_lib_placeholders)
- options.secondary_native_lib_placeholders = build_utils.ParseGnList(
+ options.secondary_native_lib_placeholders = action_helpers.parse_gn_list(
options.secondary_native_lib_placeholders)
- options.java_resources = build_utils.ParseGnList(options.java_resources)
- options.native_libs = build_utils.ParseGnList(options.native_libs)
- options.secondary_native_libs = build_utils.ParseGnList(
+ options.java_resources = action_helpers.parse_gn_list(options.java_resources)
+ options.native_libs = action_helpers.parse_gn_list(options.native_libs)
+ options.secondary_native_libs = action_helpers.parse_gn_list(
options.secondary_native_libs)
- options.library_always_compress = build_utils.ParseGnList(
+ options.library_always_compress = action_helpers.parse_gn_list(
options.library_always_compress)
if not options.android_abi and (options.native_libs or
@@ -178,7 +179,8 @@ def _ExpandPaths(paths):
def _GetAssetsToAdd(path_tuples,
fast_align,
disable_compression=False,
- allow_reads=True):
+ allow_reads=True,
+ apk_root_dir=''):
"""Returns the list of file_detail tuples for assets in the apk.
Args:
@@ -202,12 +204,16 @@ def _GetAssetsToAdd(path_tuples,
os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
if target_compress == compress:
- # AddToZipHermetic() uses this logic to avoid growing small files.
+ # add_to_zip_hermetic() uses this logic to avoid growing small files.
# We need it here in order to set alignment correctly.
if allow_reads and compress and os.path.getsize(src_path) < 16:
compress = False
- apk_path = 'assets/' + dest_path
+ if dest_path.startswith('../'):
+ # posixpath.join('', 'foo') == 'foo'
+ apk_path = posixpath.join(apk_root_dir, dest_path[3:])
+ else:
+ apk_path = 'assets/' + dest_path
alignment = 0 if compress and not fast_align else 4
assets_to_add.append((apk_path, src_path, compress, alignment))
return assets_to_add
@@ -230,12 +236,11 @@ def _AddFiles(apk, details):
raise Exception(
'Multiple targets specified the asset path: %s' % apk_path)
except KeyError:
- zipalign.AddToZipHermetic(
- apk,
- apk_path,
- src_path=src_path,
- compress=compress,
- alignment=alignment)
+ zip_helpers.add_to_zip_hermetic(apk,
+ apk_path,
+ src_path=src_path,
+ compress=compress,
+ alignment=alignment)
def _GetNativeLibrariesToAdd(native_libs, android_abi, fast_align,
@@ -347,12 +352,14 @@ def main(args):
ret = _GetAssetsToAdd(assets,
fast_align,
disable_compression=False,
- allow_reads=allow_reads)
+ allow_reads=allow_reads,
+ apk_root_dir=apk_root_dir)
ret.extend(
_GetAssetsToAdd(uncompressed_assets,
fast_align,
disable_compression=True,
- allow_reads=allow_reads))
+ allow_reads=allow_reads,
+ apk_root_dir=apk_root_dir))
return ret
libs_to_add = _GetNativeLibrariesToAdd(native_libs, options.android_abi,
@@ -379,9 +386,9 @@ def main(args):
if options.only_verify_expectations:
if options.depfile:
- build_utils.WriteDepfile(options.depfile,
- options.actual_file,
- inputs=depfile_deps)
+ action_helpers.write_depfile(options.depfile,
+ options.actual_file,
+ inputs=depfile_deps)
return
# If we are past this point, we are going to actually create the final apk so
@@ -391,12 +398,13 @@ def main(args):
assets, uncompressed_assets, fast_align, allow_reads=True)
# Targets generally do not depend on apks, so no need for only_if_changed.
- with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
+ with action_helpers.atomic_output(options.output_apk,
+ only_if_changed=False) as f:
with zipfile.ZipFile(options.resource_apk) as resource_apk, \
zipfile.ZipFile(f, 'w') as out_apk:
def add_to_zip(zip_path, data, compress=True, alignment=4):
- zipalign.AddToZipHermetic(
+ zip_helpers.add_to_zip_hermetic(
out_apk,
zip_path,
data=data,
@@ -515,9 +523,9 @@ def main(args):
logging.debug('Moving file into place')
if options.depfile:
- build_utils.WriteDepfile(options.depfile,
- options.output_apk,
- inputs=depfile_deps)
+ action_helpers.write_depfile(options.depfile,
+ options.output_apk,
+ inputs=depfile_deps)
if __name__ == '__main__':
diff --git a/build/android/gyp/apkbuilder.pydeps b/build/android/gyp/apkbuilder.pydeps
index e6122edd2..28dfdb035 100644
--- a/build/android/gyp/apkbuilder.pydeps
+++ b/build/android/gyp/apkbuilder.pydeps
@@ -1,9 +1,10 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
apkbuilder.py
finalize_apk.py
util/__init__.py
util/build_utils.py
util/diff_utils.py
-util/zipalign.py
diff --git a/build/android/gyp/assert_static_initializers.py b/build/android/gyp/assert_static_initializers.py
index 92061b844..e7b8bbd4f 100755
--- a/build/android/gyp/assert_static_initializers.py
+++ b/build/android/gyp/assert_static_initializers.py
@@ -5,7 +5,6 @@
"""Checks the number of static initializers in an APK's library."""
-from __future__ import print_function
import argparse
import os
diff --git a/build/android/gyp/bundletool.py b/build/android/gyp/bundletool.py
index 6dfe1294d..79151335c 100755
--- a/build/android/gyp/bundletool.py
+++ b/build/android/gyp/bundletool.py
@@ -24,12 +24,10 @@ BUNDLETOOL_DIR = os.path.abspath(os.path.join(
BUNDLETOOL_JAR_PATH = os.path.join(BUNDLETOOL_DIR, 'bundletool.jar')
-def RunBundleTool(args, warnings_as_errors=(), print_stdout=False):
- # Use () instead of None because command-line flags are None by default.
- verify = warnings_as_errors == () or warnings_as_errors
+def RunBundleTool(args, print_stdout=False):
# ASAN builds failed with the default of 1GB (crbug.com/1120202).
# Bug for bundletool: https://issuetracker.google.com/issues/165911616
- cmd = build_utils.JavaCmd(verify, xmx='4G')
+ cmd = build_utils.JavaCmd(xmx='4G')
cmd += ['-jar', BUNDLETOOL_JAR_PATH]
cmd += args
logging.debug(' '.join(cmd))
diff --git a/build/android/gyp/bytecode_processor.py b/build/android/gyp/bytecode_processor.py
index ef9916470..f6065dbe7 100755
--- a/build/android/gyp/bytecode_processor.py
+++ b/build/android/gyp/bytecode_processor.py
@@ -8,8 +8,10 @@
import argparse
import sys
+import javac_output_processor
from util import build_utils
from util import server_utils
+import action_helpers # build_utils adds //build to sys.path.
def _AddSwitch(parser, val):
@@ -47,13 +49,15 @@ def main(argv):
force=args.use_build_server):
return
- args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
- args.direct_classpath_jars = build_utils.ParseGnList(
+ args.sdk_classpath_jars = action_helpers.parse_gn_list(
+ args.sdk_classpath_jars)
+ args.direct_classpath_jars = action_helpers.parse_gn_list(
args.direct_classpath_jars)
- args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
- args.full_classpath_gn_targets = build_utils.ParseGnList(
+ args.full_classpath_jars = action_helpers.parse_gn_list(
+ args.full_classpath_jars)
+ args.full_classpath_gn_targets = action_helpers.parse_gn_list(
args.full_classpath_gn_targets)
- args.missing_classes_allowlist = build_utils.ParseGnList(
+ args.missing_classes_allowlist = action_helpers.parse_gn_list(
args.missing_classes_allowlist)
verbose = '--verbose' if args.verbose else '--not-verbose'
@@ -68,11 +72,20 @@ def main(argv):
cmd += [str(len(args.full_classpath_jars))]
cmd += args.full_classpath_jars
cmd += [str(len(args.full_classpath_gn_targets))]
- cmd += args.full_classpath_gn_targets
- build_utils.CheckOutput(cmd,
- print_stdout=True,
- fail_func=None,
- fail_on_output=args.warnings_as_errors)
+ cmd += [
+ javac_output_processor.ReplaceGmsPackageIfNeeded(t)
+ for t in args.full_classpath_gn_targets
+ ]
+ try:
+ build_utils.CheckOutput(cmd,
+ print_stdout=True,
+ fail_func=None,
+ fail_on_output=args.warnings_as_errors)
+ except build_utils.CalledProcessError as e:
+ # Do not output command line because it is massive and makes the actual
+ # error message hard to find.
+ sys.stderr.write(e.output)
+ sys.exit(1)
if args.stamp:
build_utils.Touch(args.stamp)
diff --git a/build/android/gyp/bytecode_processor.pydeps b/build/android/gyp/bytecode_processor.pydeps
index 6105d934d..e7f1d98bd 100644
--- a/build/android/gyp/bytecode_processor.pydeps
+++ b/build/android/gyp/bytecode_processor.pydeps
@@ -1,7 +1,28 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../../tools/android/modularization/convenience/lookup_dep.py
+../../action_helpers.py
../../gn_helpers.py
+../list_java_targets.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
bytecode_processor.py
+javac_output_processor.py
util/__init__.py
util/build_utils.py
util/server_utils.py
diff --git a/build/android/gyp/bytecode_rewriter.py b/build/android/gyp/bytecode_rewriter.py
index add5558a6..d16fee523 100755
--- a/build/android/gyp/bytecode_rewriter.py
+++ b/build/android/gyp/bytecode_rewriter.py
@@ -8,12 +8,13 @@ import argparse
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def main(argv):
argv = build_utils.ExpandFileArgs(argv[1:])
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--script',
required=True,
help='Path to the java binary wrapper script.')
@@ -22,8 +23,8 @@ def main(argv):
parser.add_argument('--output-jar', required=True)
args = parser.parse_args(argv)
- classpath = build_utils.ParseGnList(args.classpath)
- build_utils.WriteDepfile(args.depfile, args.output_jar, inputs=classpath)
+ classpath = action_helpers.parse_gn_list(args.classpath)
+ action_helpers.write_depfile(args.depfile, args.output_jar, inputs=classpath)
classpath.append(args.input_jar)
cmd = [
diff --git a/build/android/gyp/bytecode_rewriter.pydeps b/build/android/gyp/bytecode_rewriter.pydeps
index b8f304a78..b0a656036 100644
--- a/build/android/gyp/bytecode_rewriter.pydeps
+++ b/build/android/gyp/bytecode_rewriter.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py
+../../action_helpers.py
../../gn_helpers.py
bytecode_rewriter.py
util/__init__.py
diff --git a/build/android/gyp/check_flag_expectations.pydeps b/build/android/gyp/check_flag_expectations.pydeps
index d8c394a04..6bade9490 100644
--- a/build/android/gyp/check_flag_expectations.pydeps
+++ b/build/android/gyp/check_flag_expectations.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py
+../../action_helpers.py
../../gn_helpers.py
check_flag_expectations.py
util/__init__.py
diff --git a/build/android/gyp/compile_java.py b/build/android/gyp/compile_java.py
index 7953731c7..007e8b2f7 100755
--- a/build/android/gyp/compile_java.py
+++ b/build/android/gyp/compile_java.py
@@ -14,12 +14,15 @@ import shutil
import sys
import time
import zipfile
+import pathlib
import javac_output_processor
from util import build_utils
from util import md5_check
from util import jar_info_utils
from util import server_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
_JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party',
'android_prebuilts', 'build_tools', 'common',
@@ -244,10 +247,39 @@ def ProcessJavacOutput(output, target_name):
return '\n'.join(lines)
-def _ParsePackageAndClassNames(java_file):
+def CreateJarFile(jar_path,
+ classes_dir,
+ service_provider_configuration_dir=None,
+ additional_jar_files=None,
+ extra_classes_jar=None):
+ """Zips files from compilation into a single jar."""
+ logging.info('Start creating jar file: %s', jar_path)
+ with action_helpers.atomic_output(jar_path) as f:
+ with zipfile.ZipFile(f.name, 'w') as z:
+ zip_helpers.zip_directory(z, classes_dir)
+ if service_provider_configuration_dir:
+ config_files = build_utils.FindInDirectory(
+ service_provider_configuration_dir)
+ for config_file in config_files:
+ zip_path = os.path.relpath(config_file,
+ service_provider_configuration_dir)
+ zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=config_file)
+
+ if additional_jar_files:
+ for src_path, zip_path in additional_jar_files:
+ zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=src_path)
+ if extra_classes_jar:
+ path_transform = lambda p: p if p.endswith('.class') else None
+ zip_helpers.merge_zips(z, [extra_classes_jar],
+ path_transform=path_transform)
+ logging.info('Completed jar file: %s', jar_path)
+
+
+def _ParsePackageAndClassNames(source_file):
+ """This should support both Java and Kotlin files."""
package_name = ''
class_names = []
- with open(java_file) as f:
+ with open(source_file) as f:
for l in f:
# Strip unindented comments.
# Considers a leading * as a continuation of a multi-line comment (our
@@ -258,7 +290,8 @@ def _ParsePackageAndClassNames(java_file):
# (with escaped quotes) but covers a very large percentage of cases.
l = re.sub('(?:".*?")', '', l)
- m = re.match(r'package\s+(.*?);', l)
+ # Java lines end in semicolon, whereas Kotlin lines do not.
+ m = re.match(r'package\s+(.*?)(;|\s*$)', l)
if m and not package_name:
package_name = m.group(1)
@@ -270,9 +303,9 @@ def _ParsePackageAndClassNames(java_file):
return package_name, class_names
-def _ProcessJavaFileForInfo(java_file):
- package_name, class_names = _ParsePackageAndClassNames(java_file)
- return java_file, package_name, class_names
+def _ProcessSourceFileForInfo(source_file):
+ package_name, class_names = _ParsePackageAndClassNames(source_file)
+ return source_file, package_name, class_names
class _InfoFileContext:
@@ -295,23 +328,29 @@ class _InfoFileContext:
self._srcjar_files[path] = '{}/{}'.format(
srcjar_path, os.path.relpath(path, parent_dir))
- def SubmitFiles(self, java_files):
+ def SubmitFiles(self, source_files):
+ if not source_files:
+ return
if self._pool is None:
# Restrict to just one process to not slow down compiling. Compiling
# is always slower.
self._pool = multiprocessing.Pool(1)
- logging.info('Submitting %d files for info', len(java_files))
+ logging.info('Submitting %d files for info', len(source_files))
self._results.append(
- self._pool.imap_unordered(
- _ProcessJavaFileForInfo, java_files, chunksize=1000))
-
- def _CheckPathMatchesClassName(self, java_file, package_name, class_name):
- parts = package_name.split('.') + [class_name + '.java']
- expected_path_suffix = os.path.sep.join(parts)
- if not java_file.endswith(expected_path_suffix):
- raise Exception(('Java package+class name do not match its path.\n'
+ self._pool.imap_unordered(_ProcessSourceFileForInfo,
+ source_files,
+ chunksize=1000))
+
+ def _CheckPathMatchesClassName(self, source_file, package_name, class_name):
+ if source_file.endswith('.java'):
+ parts = package_name.split('.') + [class_name + '.java']
+ else:
+ parts = package_name.split('.') + [class_name + '.kt']
+ expected_suffix = os.path.sep.join(parts)
+ if not source_file.endswith(expected_suffix):
+ raise Exception(('Source package+class name do not match its path.\n'
'Actual path: %s\nExpected path: %s') %
- (java_file, expected_path_suffix))
+ (source_file, expected_suffix))
def _ProcessInfo(self, java_file, package_name, class_names, source):
for class_name in class_names:
@@ -361,32 +400,12 @@ class _InfoFileContext:
entries = self._Collect()
logging.info('Writing info file: %s', output_path)
- with build_utils.AtomicOutput(output_path, mode='wb') as f:
+ with action_helpers.atomic_output(output_path, mode='wb') as f:
jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files)
logging.info('Completed info file: %s', output_path)
-def _CreateJarFile(jar_path, service_provider_configuration_dir,
- additional_jar_files, classes_dir):
- logging.info('Start creating jar file: %s', jar_path)
- with build_utils.AtomicOutput(jar_path) as f:
- with zipfile.ZipFile(f.name, 'w') as z:
- build_utils.ZipDir(z, classes_dir)
- if service_provider_configuration_dir:
- config_files = build_utils.FindInDirectory(
- service_provider_configuration_dir)
- for config_file in config_files:
- zip_path = os.path.relpath(config_file,
- service_provider_configuration_dir)
- build_utils.AddToZipHermetic(z, zip_path, src_path=config_file)
-
- if additional_jar_files:
- for src_path, zip_path in additional_jar_files:
- build_utils.AddToZipHermetic(z, zip_path, src_path=src_path)
- logging.info('Completed jar file: %s', jar_path)
-
-
-def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files):
+def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files, kt_files):
logging.info('Starting _OnStaleMd5')
if options.enable_kythe_annotations:
# Kythe requires those env variables to be set and compile_java.py does the
@@ -397,6 +416,11 @@ def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files):
'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY '
'environment variables to be set.')
javac_extractor_cmd = build_utils.JavaCmd() + [
+ '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
+ '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
+ '--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
+ '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
+ '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
'-jar',
_JAVAC_EXTRACTOR,
]
@@ -431,6 +455,7 @@ def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files):
javac_cmd + javac_args,
java_files,
options.jar_path,
+ kt_files=kt_files,
jar_info_path=jar_info_path,
intermediates_out_dir=intermediates_out_dir,
enable_partial_javac=True)
@@ -442,6 +467,7 @@ def _RunCompiler(changes,
javac_cmd,
java_files,
jar_path,
+ kt_files=None,
jar_info_path=None,
intermediates_out_dir=None,
enable_partial_javac=False):
@@ -453,6 +479,7 @@ def _RunCompiler(changes,
javac_cmd: Command to execute.
java_files: List of java files passed from command line.
jar_path: Path of output jar file.
+ kt_files: List of Kotlin files passed from command line if any.
jar_info_path: Path of the .info file to generate.
If None, .info file will not be generated.
intermediates_out_dir: Directory for saving intermediate outputs.
@@ -509,9 +536,9 @@ def _RunCompiler(changes,
options.jar_info_exclude_globs)
if intermediates_out_dir is None:
- input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
- else:
- input_srcjars_dir = os.path.join(intermediates_out_dir, 'input_srcjars')
+ intermediates_out_dir = temp_dir
+
+ input_srcjars_dir = os.path.join(intermediates_out_dir, 'input_srcjars')
if java_srcjars:
logging.info('Extracting srcjars to %s', input_srcjars_dir)
@@ -537,6 +564,7 @@ def _RunCompiler(changes,
if save_info_file and java_files:
info_file_context.SubmitFiles(java_files)
+ info_file_context.SubmitFiles(kt_files)
if java_files:
# Don't include the output directory in the initial set of args since it
@@ -567,8 +595,8 @@ def _RunCompiler(changes,
end = time.time() - start
logging.info('Java compilation took %ss', end)
- _CreateJarFile(jar_path, service_provider_configuration,
- options.additional_jar_files, classes_dir)
+ CreateJarFile(jar_path, classes_dir, service_provider_configuration,
+ options.additional_jar_files, options.kotlin_jar_path)
if save_info_file:
info_file_context.Commit(jar_info_path)
@@ -582,7 +610,7 @@ def _RunCompiler(changes,
def _ParseOptions(argv):
parser = optparse.OptionParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_option('--target-name', help='Fully qualified GN target name.')
parser.add_option('--skip-build-server',
@@ -653,14 +681,18 @@ def _ParseOptions(argv):
'--header-jar',
help='This is the header jar for the current target that contains '
'META-INF/services/* files to be included in the output jar.')
+ parser.add_option(
+ '--kotlin-jar-path',
+ help='Kotlin jar to be merged into the output jar. This contains the '
+ ".class files from this target's .kt files.")
options, args = parser.parse_args(argv)
build_utils.CheckOptions(options, parser, required=('jar_path', ))
- options.classpath = build_utils.ParseGnList(options.classpath)
- options.processorpath = build_utils.ParseGnList(options.processorpath)
- options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
- options.jar_info_exclude_globs = build_utils.ParseGnList(
+ options.classpath = action_helpers.parse_gn_list(options.classpath)
+ options.processorpath = action_helpers.parse_gn_list(options.processorpath)
+ options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars)
+ options.jar_info_exclude_globs = action_helpers.parse_gn_list(
options.jar_info_exclude_globs)
additional_jar_files = []
@@ -669,21 +701,29 @@ def _ParseOptions(argv):
additional_jar_files.append((filepath, jar_filepath))
options.additional_jar_files = additional_jar_files
- java_files = []
+ files = []
for arg in args:
# Interpret a path prefixed with @ as a file containing a list of sources.
if arg.startswith('@'):
- java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+ files.extend(build_utils.ReadSourcesList(arg[1:]))
else:
- java_files.append(arg)
+ files.append(arg)
- return options, java_files
+ # The target's .sources file contains both Java and Kotlin files. We use
+ # compile_kt.py to compile the Kotlin files to .class and header jars. Javac
+ # is run only on .java files.
+ java_files = [f for f in files if f.endswith('.java')]
+ # Kotlin files are needed to populate the info file and attribute size in
+ # supersize back to the appropriate Kotlin file.
+ kt_files = [f for f in files if f.endswith('.kt')]
+
+ return options, java_files, kt_files
def main(argv):
build_utils.InitLogging('JAVAC_DEBUG')
argv = build_utils.ExpandFileArgs(argv)
- options, java_files = _ParseOptions(argv)
+ options, java_files, kt_files = _ParseOptions(argv)
# Only use the build server for errorprone runs.
if (options.enable_errorprone and not options.skip_build_server
@@ -700,7 +740,8 @@ def main(argv):
javac_args = [
'-g',
- # We currently target JDK 11 everywhere.
+ # We currently target JDK 11 everywhere, since Mockito is broken by JDK17.
+ # See crbug.com/1409661 for more details.
'--release',
'11',
# Chromium only allows UTF8 source files. Being explicit avoids
@@ -735,6 +776,22 @@ def main(argv):
'-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
]
+ # These are required to use JDK 16, and are taken directly from
+ # https://errorprone.info/docs/installation
+ javac_args += [
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.processing='
+ 'ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
+ '-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
+ '-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
+ '-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
+ ]
+
javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]
# This flag quits errorprone after checks and before code generation, since
@@ -761,7 +818,7 @@ def main(argv):
depfile_deps = classpath_inputs
# Files that are already inputs in GN should go in input_paths.
- input_paths = depfile_deps + options.java_srcjars + java_files
+ input_paths = depfile_deps + options.java_srcjars + java_files + kt_files
if options.header_jar:
input_paths.append(options.header_jar)
input_paths += [x[0] for x in options.additional_jar_files]
@@ -770,13 +827,13 @@ def main(argv):
if not options.enable_errorprone:
output_paths += [options.jar_path + '.info']
- input_strings = javac_cmd + javac_args + options.classpath + java_files + [
- options.warnings_as_errors, options.jar_info_exclude_globs
- ]
+ input_strings = (javac_cmd + javac_args + options.classpath + java_files +
+ kt_files +
+ [options.warnings_as_errors, options.jar_info_exclude_globs])
# Use md5_check for |pass_changes| feature.
md5_check.CallAndWriteDepfileIfStale(lambda changes: _OnStaleMd5(
- changes, options, javac_cmd, javac_args, java_files),
+ changes, options, javac_cmd, javac_args, java_files, kt_files),
options,
depfile_deps=depfile_deps,
input_paths=input_paths,
diff --git a/build/android/gyp/compile_java.pydeps b/build/android/gyp/compile_java.pydeps
index c1c7d5fd5..45617b150 100644
--- a/build/android/gyp/compile_java.pydeps
+++ b/build/android/gyp/compile_java.pydeps
@@ -16,8 +16,10 @@
../../../third_party/colorama/src/colorama/win32.py
../../../third_party/colorama/src/colorama/winterm.py
../../../tools/android/modularization/convenience/lookup_dep.py
+../../action_helpers.py
../../gn_helpers.py
../../print_python_deps.py
+../../zip_helpers.py
../list_java_targets.py
../pylib/__init__.py
../pylib/constants/__init__.py
diff --git a/build/android/gyp/compile_kt.py b/build/android/gyp/compile_kt.py
new file mode 100755
index 000000000..4c7eb6ff8
--- /dev/null
+++ b/build/android/gyp/compile_kt.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import time
+
+import compile_java
+
+from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+
+
+def _RunCompiler(args,
+ kotlinc_cmd,
+ source_files,
+ jar_path,
+ intermediates_out_dir=None):
+ """Runs the Kotlin compiler."""
+ logging.info('Starting _RunCompiler')
+
+ source_files = source_files.copy()
+ kt_files = [f for f in source_files if f.endswith('.kt')]
+ assert len(kt_files) > 0, 'At least one .kt file must be passed in.'
+
+ java_srcjars = args.java_srcjars
+
+ # Use jar_path's directory to ensure paths are relative (needed for goma).
+ temp_dir = jar_path + '.staging'
+ build_utils.DeleteDirectory(temp_dir)
+ os.makedirs(temp_dir)
+ try:
+ classes_dir = os.path.join(temp_dir, 'classes')
+ os.makedirs(classes_dir)
+
+ input_srcjars_dir = os.path.join(intermediates_out_dir or temp_dir,
+ 'input_srcjars')
+
+ if java_srcjars:
+ logging.info('Extracting srcjars to %s', input_srcjars_dir)
+ build_utils.MakeDirectory(input_srcjars_dir)
+ for srcjar in args.java_srcjars:
+ source_files += build_utils.ExtractAll(srcjar,
+ no_clobber=True,
+ path=input_srcjars_dir,
+ pattern='*.java')
+ logging.info('Done extracting srcjars')
+
+ # Don't include the output directory in the initial set of args since it
+ # being in a temp dir makes it unstable (breaks md5 stamping).
+ cmd = list(kotlinc_cmd)
+ cmd += ['-d', classes_dir]
+
+ if args.classpath:
+ cmd += ['-classpath', ':'.join(args.classpath)]
+
+    # This is a kotlinc plugin to generate header files for .kt files, similar
+    # to turbine for .java files.
+ jvm_abi_path = os.path.join(build_utils.KOTLIN_HOME, 'lib',
+ 'jvm-abi-gen.jar')
+ cmd += [
+ f'-Xplugin={jvm_abi_path}', '-P',
+ 'plugin:org.jetbrains.kotlin.jvm.abi:outputDir=' +
+ args.interface_jar_path
+ ]
+
+ # Pass source paths as response files to avoid extremely long command
+    # lines that are tedious to debug.
+ source_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+ with open(source_files_rsp_path, 'w') as f:
+ f.write(' '.join(source_files))
+ cmd += ['@' + source_files_rsp_path]
+
+ # Explicitly set JAVA_HOME since some bots do not have this already set.
+ env = os.environ.copy()
+ env['JAVA_HOME'] = build_utils.JAVA_HOME
+
+ logging.debug('Build command %s', cmd)
+ start = time.time()
+ build_utils.CheckOutput(cmd,
+ env=env,
+ print_stdout=args.chromium_code,
+ fail_on_output=args.warnings_as_errors)
+ logging.info('Kotlin compilation took %ss', time.time() - start)
+
+ compile_java.CreateJarFile(jar_path, classes_dir)
+
+ logging.info('Completed all steps in _RunCompiler')
+ finally:
+ shutil.rmtree(temp_dir)
+
+
+def _ParseOptions(argv):
+ parser = argparse.ArgumentParser()
+ action_helpers.add_depfile_arg(parser)
+
+ parser.add_argument('--java-srcjars',
+ action='append',
+ default=[],
+ help='List of srcjars to include in compilation.')
+ parser.add_argument(
+ '--generated-dir',
+ help='Subdirectory within target_gen_dir to place extracted srcjars and '
+ 'annotation processor output for codesearch to find.')
+ parser.add_argument('--classpath', action='append', help='Classpath to use.')
+ parser.add_argument(
+ '--chromium-code',
+ action='store_true',
+ help='Whether code being compiled should be built with stricter '
+ 'warnings for chromium code.')
+ parser.add_argument('--gomacc-path',
+ help='When set, prefix kotlinc command with gomacc')
+ parser.add_argument('--warnings-as-errors',
+ action='store_true',
+ help='Treat all warnings as errors.')
+ parser.add_argument('--jar-path', help='Jar output path.', required=True)
+ parser.add_argument('--interface-jar-path',
+ help='Interface jar output path.',
+ required=True)
+
+ args, extra_args = parser.parse_known_args(argv)
+
+ args.classpath = action_helpers.parse_gn_list(args.classpath)
+ args.java_srcjars = action_helpers.parse_gn_list(args.java_srcjars)
+
+ source_files = []
+ for arg in extra_args:
+ # Interpret a path prefixed with @ as a file containing a list of sources.
+ if arg.startswith('@'):
+ source_files.extend(build_utils.ReadSourcesList(arg[1:]))
+ else:
+ assert not arg.startswith('--'), f'Undefined option {arg}'
+ source_files.append(arg)
+
+ return args, source_files
+
+
+def main(argv):
+ build_utils.InitLogging('KOTLINC_DEBUG')
+ argv = build_utils.ExpandFileArgs(argv)
+ args, source_files = _ParseOptions(argv)
+
+ kotlinc_cmd = []
+ if args.gomacc_path:
+ kotlinc_cmd.append(args.gomacc_path)
+ kotlinc_cmd.append(build_utils.KOTLINC_PATH)
+
+ kotlinc_cmd += [
+ '-no-jdk', # Avoid depending on the bundled JDK.
+ # Avoid depending on the bundled Kotlin stdlib. This may have a version
+ # skew with the one in //third_party/android_deps (which is the one we
+ # prefer to use).
+ '-no-stdlib',
+ # Avoid depending on the bundled Kotlin reflect libs.
+ '-no-reflect',
+ ]
+
+ if args.generated_dir:
+ # Delete any stale files in the generated directory. The purpose of
+ # args.generated_dir is for codesearch.
+ shutil.rmtree(args.generated_dir, True)
+
+ _RunCompiler(args,
+ kotlinc_cmd,
+ source_files,
+ args.jar_path,
+ intermediates_out_dir=args.generated_dir)
+
+ if args.depfile:
+ # GN already knows of the source files, so avoid listing individual files
+ # in the depfile.
+ action_helpers.write_depfile(args.depfile, args.jar_path, args.classpath)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/compile_kt.pydeps b/build/android/gyp/compile_kt.pydeps
new file mode 100644
index 000000000..818bca802
--- /dev/null
+++ b/build/android/gyp/compile_kt.pydeps
@@ -0,0 +1,33 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_kt.pydeps build/android/gyp/compile_kt.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../../tools/android/modularization/convenience/lookup_dep.py
+../../action_helpers.py
+../../gn_helpers.py
+../../print_python_deps.py
+../../zip_helpers.py
+../list_java_targets.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+compile_java.py
+compile_kt.py
+javac_output_processor.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/server_utils.py
diff --git a/build/android/gyp/compile_resources.py b/build/android/gyp/compile_resources.py
index 8c444ee49..d3791af78 100755
--- a/build/android/gyp/compile_resources.py
+++ b/build/android/gyp/compile_resources.py
@@ -33,6 +33,8 @@ from util import manifest_utils
from util import parallel
from util import protoresources
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
@@ -62,17 +64,14 @@ def _ParseArgs(args):
required=True,
help='Paths to arsc resource files used to link '
'against. Can be specified multiple times.')
-
input_opts.add_argument(
'--dependencies-res-zips',
default=[],
help='Resources zip archives from dependents. Required to '
'resolve @type/foo references into dependent libraries.')
-
input_opts.add_argument(
'--extra-res-packages',
help='Additional package names to generate R.java files for.')
-
input_opts.add_argument(
'--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
input_opts.add_argument(
@@ -94,31 +93,25 @@ def _ParseArgs(args):
action='store_true',
help='Same as --shared-resources, but also ensures all resource IDs are '
'directly usable from the APK loaded as an application.')
-
input_opts.add_argument(
'--package-id',
type=int,
help='Decimal integer representing custom package ID for resources '
'(instead of 127==0x7f). Cannot be used with --shared-resources.')
-
input_opts.add_argument(
'--package-name',
help='Package name that will be used to create R class.')
-
input_opts.add_argument(
'--rename-manifest-package', help='Package name to force AAPT to use.')
-
input_opts.add_argument(
'--arsc-package-name',
help='Package name to set in manifest of resources.arsc file. This is '
'only used for apks under test.')
-
input_opts.add_argument(
'--shared-resources-allowlist',
help='An R.txt file acting as a allowlist for resources that should be '
'non-final and have their package ID changed at runtime in R.java. '
'Implies and overrides --shared-resources.')
-
input_opts.add_argument(
'--shared-resources-allowlist-locales',
default='[]',
@@ -126,16 +119,13 @@ def _ParseArgs(args):
' to this locale list will be kept in the final output for the '
'resources identified through --shared-resources-allowlist, even '
'if --locale-allowlist is being used.')
-
input_opts.add_argument(
'--use-resource-ids-path',
help='Use resource IDs generated by aapt --emit-ids.')
-
input_opts.add_argument(
'--debuggable',
action='store_true',
help='Whether to add android:debuggable="true".')
-
input_opts.add_argument('--version-code', help='Version code for apk.')
input_opts.add_argument('--version-name', help='Version name for apk.')
input_opts.add_argument(
@@ -149,7 +139,6 @@ def _ParseArgs(args):
help="android:maxSdkVersion expected in AndroidManifest.xml.")
input_opts.add_argument(
'--manifest-package', help='Package name of the AndroidManifest.xml.')
-
input_opts.add_argument(
'--locale-allowlist',
default='[]',
@@ -165,17 +154,14 @@ def _ParseArgs(args):
default='[]',
help='GN list of globs that say which files to include even '
'when --resource-exclusion-regex is set.')
-
input_opts.add_argument(
'--dependencies-res-zip-overlays',
help='GN list with subset of --dependencies-res-zips to use overlay '
'semantics for.')
-
input_opts.add_argument(
'--values-filter-rules',
help='GN list of source_glob:regex for filtering resources after they '
'are compiled. Use this to filter out entries within values/ files.')
-
input_opts.add_argument('--png-to-webp', action='store_true',
help='Convert png files to webp format.')
@@ -183,66 +169,60 @@ def _ParseArgs(args):
help='Path to the cwebp binary.')
input_opts.add_argument(
'--webp-cache-dir', help='The directory to store webp image cache.')
-
input_opts.add_argument(
'--no-xml-namespaces',
action='store_true',
help='Whether to strip xml namespaces from processed xml resources.')
-
input_opts.add_argument(
'--is-bundle-module',
action='store_true',
help='Whether resources are being generated for a bundle module.')
-
input_opts.add_argument(
'--uses-split',
help='Value to set uses-split to in the AndroidManifest.xml.')
-
input_opts.add_argument(
- '--extra-verification-manifest',
- help='Path to AndroidManifest.xml which should be merged into base '
- 'manifest when performing verification.')
+ '--verification-version-code-offset',
+ help='Subtract this from versionCode for expectation files')
+ input_opts.add_argument(
+ '--verification-library-version-offset',
+ help='Subtract this from static-library version for expectation files')
- build_utils.AddDepfileOption(output_opts)
+ action_helpers.add_depfile_arg(output_opts)
output_opts.add_argument('--arsc-path', help='Apk output for arsc format.')
output_opts.add_argument('--proto-path', help='Apk output for proto format.')
-
output_opts.add_argument(
'--info-path', help='Path to output info file for the partial apk.')
-
output_opts.add_argument(
'--srcjar-out',
help='Path to srcjar to contain generated R.java.')
-
output_opts.add_argument('--r-text-out',
help='Path to store the generated R.txt file.')
-
output_opts.add_argument(
'--proguard-file', help='Path to proguard.txt generated file.')
-
output_opts.add_argument(
'--proguard-file-main-dex',
help='Path to proguard.txt generated file for main dex.')
-
output_opts.add_argument(
'--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.')
diff_utils.AddCommandLineFlags(parser)
options = parser.parse_args(args)
- options.include_resources = build_utils.ParseGnList(options.include_resources)
- options.dependencies_res_zips = build_utils.ParseGnList(
+ options.include_resources = action_helpers.parse_gn_list(
+ options.include_resources)
+ options.dependencies_res_zips = action_helpers.parse_gn_list(
options.dependencies_res_zips)
- options.extra_res_packages = build_utils.ParseGnList(
+ options.extra_res_packages = action_helpers.parse_gn_list(
options.extra_res_packages)
- options.locale_allowlist = build_utils.ParseGnList(options.locale_allowlist)
- options.shared_resources_allowlist_locales = build_utils.ParseGnList(
+ options.locale_allowlist = action_helpers.parse_gn_list(
+ options.locale_allowlist)
+ options.shared_resources_allowlist_locales = action_helpers.parse_gn_list(
options.shared_resources_allowlist_locales)
- options.resource_exclusion_exceptions = build_utils.ParseGnList(
+ options.resource_exclusion_exceptions = action_helpers.parse_gn_list(
options.resource_exclusion_exceptions)
- options.dependencies_res_zip_overlays = build_utils.ParseGnList(
+ options.dependencies_res_zip_overlays = action_helpers.parse_gn_list(
options.dependencies_res_zip_overlays)
- options.values_filter_rules = build_utils.ParseGnList(
+ options.values_filter_rules = action_helpers.parse_gn_list(
options.values_filter_rules)
if not options.arsc_path and not options.proto_path:
@@ -376,27 +356,10 @@ def _MoveImagesToNonMdpiFolders(res_root, path_info):
os.path.relpath(dst_file, res_root))
-def _FixManifest(options, temp_dir, extra_manifest=None):
- """Fix the APK's AndroidManifest.xml.
-
- This adds any missing namespaces for 'android' and 'tools', and
- sets certains elements like 'platformBuildVersionCode' or
- 'android:debuggable' depending on the content of |options|.
-
- Args:
- options: The command-line arguments tuple.
- temp_dir: A temporary directory where the fixed manifest will be written to.
- extra_manifest: Path to an AndroidManifest.xml file which will get merged
- into the application node of the base manifest.
- Returns:
- Tuple of:
- * Manifest path within |temp_dir|.
- * Original package_name.
- * Manifest package name.
- """
+def _DeterminePlatformVersion(aapt2_path, jar_candidates):
def maybe_extract_version(j):
try:
- return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
+ return resource_utils.ExtractBinaryManifestValues(aapt2_path, j)
except build_utils.CalledProcessError:
return None
@@ -407,62 +370,66 @@ def _FixManifest(options, temp_dir, extra_manifest=None):
return 'android-all' in jar_name and 'robolectric' in jar_name
android_sdk_jars = [
- j for j in options.include_resources if is_sdk_jar(os.path.basename(j))
+ j for j in jar_candidates if is_sdk_jar(os.path.basename(j))
]
extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
- successful_extractions = [x for x in extract_all if x]
- if len(successful_extractions) == 0:
+ extract_all = [x for x in extract_all if x]
+ if len(extract_all) == 0:
raise Exception(
'Unable to find android SDK jar among candidates: %s'
% ', '.join(android_sdk_jars))
- if len(successful_extractions) > 1:
+ if len(extract_all) > 1:
raise Exception(
'Found multiple android SDK jars among candidates: %s'
% ', '.join(android_sdk_jars))
- version_code, version_name = successful_extractions.pop()[:2]
+ platform_version_code, platform_version_name = extract_all.pop()[:2]
+ return platform_version_code, platform_version_name
- debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+
+def _FixManifest(options, temp_dir):
+ """Fix the APK's AndroidManifest.xml.
+
+ This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+ 'android:debuggable' depending on the content of |options|.
+
+ Args:
+ options: The command-line arguments tuple.
+ temp_dir: A temporary directory where the fixed manifest will be written to.
+ Returns:
+ Tuple of:
+ * Manifest path within |temp_dir|.
+ * Original package_name.
+ * Manifest package name.
+ """
doc, manifest_node, app_node = manifest_utils.ParseManifest(
options.android_manifest)
- if extra_manifest:
- _, extra_manifest_node, extra_app_node = manifest_utils.ParseManifest(
- extra_manifest)
- for node in extra_app_node:
- app_node.append(node)
- for node in extra_manifest_node:
- # DFM manifests have a bunch of tags we don't care about inside
- # <manifest>, so only take <queries>.
- if node.tag == 'queries':
- manifest_node.append(node)
-
- manifest_utils.AssertUsesSdk(manifest_node, options.min_sdk_version,
- options.target_sdk_version)
- # We explicitly check that maxSdkVersion is set in the manifest since we don't
- # add it later like minSdkVersion and targetSdkVersion.
- manifest_utils.AssertUsesSdk(
- manifest_node,
- max_sdk_version=options.max_sdk_version,
- fail_if_not_exist=True)
- manifest_utils.AssertPackage(manifest_node, options.manifest_package)
-
- manifest_node.set('platformBuildVersionCode', version_code)
- manifest_node.set('platformBuildVersionName', version_name)
-
- orig_package = manifest_node.get('package')
- fixed_package = orig_package
- if options.arsc_package_name:
- manifest_node.set('package', options.arsc_package_name)
- fixed_package = options.arsc_package_name
-
+ # merge_manifest.py also sets package & <uses-sdk>. We may want to ensure
+ # manifest merger is always enabled and remove these command-line arguments.
+ manifest_utils.SetUsesSdk(manifest_node, options.target_sdk_version,
+ options.min_sdk_version, options.max_sdk_version)
+ orig_package = manifest_node.get('package') or options.manifest_package
+ fixed_package = (options.arsc_package_name or options.manifest_package
+ or orig_package)
+ manifest_node.set('package', fixed_package)
+
+ platform_version_code, platform_version_name = _DeterminePlatformVersion(
+ options.aapt2_path, options.include_resources)
+ manifest_node.set('platformBuildVersionCode', platform_version_code)
+ manifest_node.set('platformBuildVersionName', platform_version_name)
+ if options.version_code:
+ manifest_utils.NamespacedSet(manifest_node, 'versionCode',
+ options.version_code)
+ if options.version_name:
+ manifest_utils.NamespacedSet(manifest_node, 'versionName',
+ options.version_name)
if options.debuggable:
- app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'),
- 'true')
+ manifest_utils.NamespacedSet(app_node, 'debuggable', 'true')
if options.uses_split:
uses_split = ElementTree.SubElement(manifest_node, 'uses-split')
- uses_split.set('{%s}name' % manifest_utils.ANDROID_NAMESPACE,
- options.uses_split)
+ manifest_utils.NamespacedSet(uses_split, 'name', options.uses_split)
# Make sure the min-sdk condition is not less than the min-sdk of the bundle.
for min_sdk_node in manifest_node.iter('{%s}min-sdk' %
@@ -471,6 +438,7 @@ def _FixManifest(options, temp_dir, extra_manifest=None):
if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version):
min_sdk_node.set(dist_value, options.min_sdk_version)
+ debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
manifest_utils.SaveManifest(doc, debug_manifest_path)
return debug_manifest_path, orig_package, fixed_package
@@ -782,21 +750,12 @@ def _PackageApk(options, build):
'link',
'--auto-add-overlay',
'--no-version-vectors',
- # Set SDK versions in case they are not set in the Android manifest.
- '--min-sdk-version',
- options.min_sdk_version,
- '--target-sdk-version',
- options.target_sdk_version,
'--output-text-symbols',
build.r_txt_path,
]
for j in options.include_resources:
link_command += ['-I', j]
- if options.version_code:
- link_command += ['--version-code', options.version_code]
- if options.version_name:
- link_command += ['--version-name', options.version_name]
if options.proguard_file:
link_command += ['--proguard', build.proguard_path]
link_command += ['--proguard-minimal-keep-rules']
@@ -954,10 +913,11 @@ def _WriteOutputs(options, build):
def _CreateNormalizedManifestForVerification(options):
with build_utils.TempDir() as tempdir:
- fixed_manifest, _, _ = _FixManifest(
- options, tempdir, extra_manifest=options.extra_verification_manifest)
+ fixed_manifest, _, _ = _FixManifest(options, tempdir)
with open(fixed_manifest) as f:
- return manifest_utils.NormalizeManifest(f.read())
+ return manifest_utils.NormalizeManifest(
+ f.read(), options.verification_version_code_offset,
+ options.verification_library_version_offset)
def main(args):
@@ -1039,7 +999,8 @@ def main(args):
rjava_build_options, options.srcjar_out,
custom_root_package_name,
grandparent_custom_package_name)
- build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
+ with action_helpers.atomic_output(build.srcjar_path) as f:
+ zip_helpers.zip_directory(f, build.srcjar_dir)
logging.debug('Copying outputs')
_WriteOutputs(options, build)
@@ -1049,7 +1010,8 @@ def main(args):
depfile_deps = (options.dependencies_res_zips +
options.dependencies_res_zip_overlays +
options.include_resources)
- build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps)
+ action_helpers.write_depfile(options.depfile, options.srcjar_out,
+ depfile_deps)
if __name__ == '__main__':
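
The _FixManifest hunk above swaps the hand-written '{namespace}attribute' calls for a manifest_utils.NamespacedSet helper. As a rough, self-contained sketch of the ElementTree pattern that helper appears to wrap (the namespace URI is the standard Android one; the tiny manifest and everything else below are illustrative, not the real manifest_utils module):

# Minimal sketch: namespaced attributes in ElementTree use Clark notation,
# '{uri}localname', exactly as the removed inline code spelled out by hand.
from xml.etree import ElementTree

ANDROID_NS = 'http://schemas.android.com/apk/res/android'
ElementTree.register_namespace('android', ANDROID_NS)

manifest_xml = (
    '<manifest xmlns:android="%s" package="org.example.app">'
    '<application/></manifest>' % ANDROID_NS)
manifest_node = ElementTree.fromstring(manifest_xml)
app_node = manifest_node.find('application')

# Un-namespaced attributes go directly on the element.
manifest_node.set('platformBuildVersionCode', '33')
# Namespaced attributes use the '{uri}name' form.
app_node.set('{%s}%s' % (ANDROID_NS, 'debuggable'), 'true')

print(ElementTree.tostring(manifest_node, encoding='unicode'))

The removed lines in the hunk show this pattern written out inline; the new helper simply centralizes it.
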
diff --git a/build/android/gyp/compile_resources.pydeps b/build/android/gyp/compile_resources.pydeps
index 12c473b65..458a772c3 100644
--- a/build/android/gyp/compile_resources.pydeps
+++ b/build/android/gyp/compile_resources.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,7 +22,9 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
compile_resources.py
proto/Configuration_pb2.py
proto/Resources_pb2.py
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
index c9a149d62..542a08ca1 100755
--- a/build/android/gyp/copy_ex.py
+++ b/build/android/gyp/copy_ex.py
@@ -6,7 +6,6 @@
"""Copies files to a directory."""
-from __future__ import print_function
import filecmp
import itertools
@@ -16,6 +15,7 @@ import shutil
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def _get_all_files(base):
@@ -50,8 +50,9 @@ def CopyFile(f, dest, deps):
def DoCopy(options, deps):
"""Copy files or directories given in options.files and update deps."""
- files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
- for f in options.files))
+ files = list(
+ itertools.chain.from_iterable(
+ action_helpers.parse_gn_list(f) for f in options.files))
for f in files:
if os.path.isdir(f) and not options.clear:
@@ -62,13 +63,14 @@ def DoCopy(options, deps):
def DoRenaming(options, deps):
"""Copy and rename files given in options.renaming_sources and update deps."""
- src_files = list(itertools.chain.from_iterable(
- build_utils.ParseGnList(f)
- for f in options.renaming_sources))
+ src_files = list(
+ itertools.chain.from_iterable(
+ action_helpers.parse_gn_list(f) for f in options.renaming_sources))
- dest_files = list(itertools.chain.from_iterable(
- build_utils.ParseGnList(f)
- for f in options.renaming_destinations))
+ dest_files = list(
+ itertools.chain.from_iterable(
+ action_helpers.parse_gn_list(f)
+ for f in options.renaming_destinations))
if (len(src_files) != len(dest_files)):
print('Renaming source and destination file lists do not match.')
@@ -85,7 +87,7 @@ def main(args):
args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_option('--dest', help='Directory to copy files to.')
parser.add_option('--files', action='append',
@@ -119,7 +121,7 @@ def main(args):
DoRenaming(options, deps)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.stamp, deps)
+ action_helpers.write_depfile(options.depfile, options.stamp, deps)
if options.stamp:
build_utils.Touch(options.stamp)
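
Throughout these scripts, build_utils.ParseGnList calls become action_helpers.parse_gn_list. For readers unfamiliar with the convention, GN typically forwards list-valued arguments to actions as a single bracketed string; a minimal approximation of the parsing is sketched below (the real helper lives in //build, builds on gn_helpers.py, and handles the full GN syntax, so this is only an illustration):

# Rough stand-in for a parse_gn_list-style helper.
import ast

def parse_gn_list_sketch(arg):
  if not arg:
    return []
  if arg.startswith('['):
    # GN prints lists as '["a", "b"]', which literal_eval can read.
    return [str(x) for x in ast.literal_eval(arg)]
  # Fall back to treating a bare value as a one-element list.
  return [arg]

assert parse_gn_list_sketch('["a.jar", "b.jar"]') == ['a.jar', 'b.jar']
assert parse_gn_list_sketch('single/path.txt') == ['single/path.txt']
assert parse_gn_list_sketch('') == []
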
diff --git a/build/android/gyp/copy_ex.pydeps b/build/android/gyp/copy_ex.pydeps
index 37352512b..5d75f9a39 100644
--- a/build/android/gyp/copy_ex.pydeps
+++ b/build/android/gyp/copy_ex.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../action_helpers.py
../../gn_helpers.py
copy_ex.py
util/__init__.py
diff --git a/build/android/gyp/create_app_bundle.py b/build/android/gyp/create_app_bundle.py
index ba6786107..b07a0df2c 100755
--- a/build/android/gyp/create_app_bundle.py
+++ b/build/android/gyp/create_app_bundle.py
@@ -11,20 +11,23 @@ import concurrent.futures
import json
import logging
import os
+import posixpath
import shutil
import sys
+from xml.etree import ElementTree
import zipfile
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
from pylib.utils import dexdump
+import bundletool
from util import build_utils
from util import manifest_utils
from util import resource_utils
-from xml.etree import ElementTree
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
-import bundletool
# Location of language-based assets in bundle modules.
_LOCALES_SUBDIR = 'assets/locales/'
@@ -122,30 +125,30 @@ def _ParseArgs(args):
help='Check if services are in base module if isolatedSplits is enabled.')
options = parser.parse_args(args)
- options.module_zips = build_utils.ParseGnList(options.module_zips)
- options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths)
- options.pathmap_in_paths = build_utils.ParseGnList(options.pathmap_in_paths)
+ options.module_zips = action_helpers.parse_gn_list(options.module_zips)
+ options.rtxt_in_paths = action_helpers.parse_gn_list(options.rtxt_in_paths)
+ options.pathmap_in_paths = action_helpers.parse_gn_list(
+ options.pathmap_in_paths)
if len(options.module_zips) == 0:
raise Exception('The module zip list cannot be empty.')
# Merge all uncompressed assets into a set.
uncompressed_list = []
- if options.uncompressed_assets:
- for l in options.uncompressed_assets:
- for entry in build_utils.ParseGnList(l):
- # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
- pos = entry.find(':')
- if pos >= 0:
- uncompressed_list.append(entry[pos + 1:])
- else:
- uncompressed_list.append(entry)
+ for entry in action_helpers.parse_gn_list(options.uncompressed_assets):
+ # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+ pos = entry.find(':')
+ if pos >= 0:
+ uncompressed_list.append(entry[pos + 1:])
+ else:
+ uncompressed_list.append(entry)
options.uncompressed_assets = set(uncompressed_list)
# Check that all split dimensions are valid
if options.split_dimensions:
- options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
+ options.split_dimensions = action_helpers.parse_gn_list(
+ options.split_dimensions)
for dim in options.split_dimensions:
if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
@@ -203,7 +206,9 @@ def _GenerateBundleConfigJson(uncompressed_assets, compress_dex,
uncompressed_globs = [
'assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'
]
- uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
+ # normpath to allow for ../ prefix.
+ uncompressed_globs.extend(
+ posixpath.normpath('assets/' + x) for x in uncompressed_assets)
# NOTE: Use '**' instead of '*' to work through directories!
uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
if not compress_dex:
@@ -316,11 +321,10 @@ def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
if src_path in language_files:
dst_path = _RewriteLanguageAssetPath(src_path)
- build_utils.AddToZipHermetic(
- dst_zip,
- dst_path,
- data=src_zip.read(src_path),
- compress=is_compressed)
+ zip_helpers.add_to_zip_hermetic(dst_zip,
+ dst_path,
+ data=src_zip.read(src_path),
+ compress=is_compressed)
return tmp_zip
@@ -531,7 +535,7 @@ def main(args):
f.write(bundle_config)
logging.info('Running bundletool')
- cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [
+ cmd_args = build_utils.JavaCmd() + [
'-jar',
bundletool.BUNDLETOOL_JAR_PATH,
'build-bundle',
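
Two of the changes to create_app_bundle.py are easy to miss: uncompressed asset entries come in either a 'zipPath' or a 'srcPath:zipPath' form, and the new posixpath.normpath call is what makes '../'-prefixed entries resolve to sane glob paths. A small, self-contained illustration of both (the file names are hypothetical):

import posixpath

def zip_paths(entries):
  # Keep only the zip-relative half of each 'srcPath:zipPath' entry.
  result = []
  for entry in entries:
    pos = entry.find(':')
    result.append(entry[pos + 1:] if pos >= 0 else entry)
  return result

entries = [
    'locales/en-US.pak',
    'gen/out/snapshot.bin:snapshot.bin',
    '../locales/am.pak',
]
uncompressed = set(zip_paths(entries))
# normpath collapses 'assets/../locales/am.pak' to 'locales/am.pak'.
globs = [posixpath.normpath('assets/' + p) for p in uncompressed]
print(sorted(globs))
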
diff --git a/build/android/gyp/create_app_bundle.pydeps b/build/android/gyp/create_app_bundle.pydeps
index 6a5b13e5b..5e7a79f63 100644
--- a/build/android/gyp/create_app_bundle.pydeps
+++ b/build/android/gyp/create_app_bundle.pydeps
@@ -13,8 +13,8 @@
../../../third_party/catapult/devil/devil/utils/__init__.py
../../../third_party/catapult/devil/devil/utils/cmd_helper.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -34,7 +34,9 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
../pylib/__init__.py
../pylib/constants/__init__.py
../pylib/utils/__init__.py
diff --git a/build/android/gyp/create_app_bundle_apks.pydeps b/build/android/gyp/create_app_bundle_apks.pydeps
index bcbc5ff62..65810c3eb 100644
--- a/build/android/gyp/create_app_bundle_apks.pydeps
+++ b/build/android/gyp/create_app_bundle_apks.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
../../print_python_deps.py
../pylib/__init__.py
diff --git a/build/android/gyp/create_bundle_wrapper_script.py b/build/android/gyp/create_bundle_wrapper_script.py
index fcd9b7ea6..a3870bf89 100755
--- a/build/android/gyp/create_bundle_wrapper_script.py
+++ b/build/android/gyp/create_bundle_wrapper_script.py
@@ -11,6 +11,7 @@ import string
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
SCRIPT_TEMPLATE = string.Template("""\
#!/usr/bin/env python3
@@ -109,7 +110,7 @@ def main(args):
'TARGET_CPU':
repr(args.target_cpu),
'SYSTEM_IMAGE_LOCALES':
- repr(build_utils.ParseGnList(args.system_image_locales)),
+ repr(action_helpers.parse_gn_list(args.system_image_locales)),
'DEFAULT_MODULES':
repr(args.default_modules),
}
diff --git a/build/android/gyp/create_bundle_wrapper_script.pydeps b/build/android/gyp/create_bundle_wrapper_script.pydeps
index 7758ed627..51d912c83 100644
--- a/build/android/gyp/create_bundle_wrapper_script.pydeps
+++ b/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../action_helpers.py
../../gn_helpers.py
create_bundle_wrapper_script.py
util/__init__.py
diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py
index 909c008c3..f9e665f4e 100755
--- a/build/android/gyp/create_java_binary_script.py
+++ b/build/android/gyp/create_java_binary_script.py
@@ -15,6 +15,7 @@ import os
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
# The java command must be executed in the current directory because there may
# be user-supplied paths in the args. The script receives the classpath relative
@@ -91,12 +92,12 @@ def main(argv):
action='append',
default=[],
help='Classpath for running the jar.')
- parser.add_argument('--noverify',
- action='store_true',
- help='JVM flag: noverify.')
parser.add_argument('--tiered-stop-at-level-one',
action='store_true',
help='JVM flag: -XX:TieredStopAtLevel=1.')
+ parser.add_argument('--use-jdk-11',
+ action='store_true',
+ help='Use older JDK11 instead of modern JDK.')
parser.add_argument('extra_program_args',
nargs='*',
help='This captures all '
@@ -105,21 +106,23 @@ def main(argv):
args = parser.parse_args(argv)
extra_flags = [f'java_cmd.append("-Xmx{args.max_heap_size}")']
- if args.noverify:
- extra_flags.append('java_cmd.append("-noverify")')
if args.tiered_stop_at_level_one:
extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")')
classpath = []
for cp_arg in args.classpath:
- classpath += build_utils.ParseGnList(cp_arg)
+ classpath += action_helpers.parse_gn_list(cp_arg)
run_dir = os.path.dirname(args.output)
classpath = [os.path.relpath(p, run_dir) for p in classpath]
- java_path = os.path.relpath(
- os.path.join(build_utils.JAVA_HOME, 'bin', 'java'), run_dir)
- with build_utils.AtomicOutput(args.output, mode='w') as script:
+ if args.use_jdk_11:
+ java_home = build_utils.JAVA_11_HOME_DEPRECATED
+ else:
+ java_home = build_utils.JAVA_HOME
+ java_path = os.path.relpath(os.path.join(java_home, 'bin', 'java'), run_dir)
+
+ with action_helpers.atomic_output(args.output, mode='w') as script:
script.write(
script_template.format(classpath=('"%s"' % '", "'.join(classpath)),
java_path=repr(java_path),
diff --git a/build/android/gyp/create_java_binary_script.pydeps b/build/android/gyp/create_java_binary_script.pydeps
index 6bc21fa7e..a0a740dec 100644
--- a/build/android/gyp/create_java_binary_script.pydeps
+++ b/build/android/gyp/create_java_binary_script.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../action_helpers.py
../../gn_helpers.py
create_java_binary_script.py
util/__init__.py
diff --git a/build/android/gyp/create_r_java.py b/build/android/gyp/create_r_java.py
index 45d99b063..b662a3969 100755
--- a/build/android/gyp/create_r_java.py
+++ b/build/android/gyp/create_r_java.py
@@ -9,6 +9,8 @@ import sys
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def _ConcatRTxts(rtxt_in_paths, combined_out_path):
@@ -34,12 +36,13 @@ def _CreateRJava(rtxts, package_name, srcjar_out):
rjava_build_options=rjava_build_options,
srcjar_out=srcjar_out,
ignore_mismatched_values=True)
- build_utils.ZipDir(srcjar_out, build.srcjar_dir)
+ with action_helpers.atomic_output(srcjar_out) as f:
+ zip_helpers.zip_directory(f, build.srcjar_dir)
def main(args):
parser = argparse.ArgumentParser(description='Create an R.java srcjar.')
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--srcjar-out',
required=True,
help='Path to output srcjar.')
@@ -50,12 +53,12 @@ def main(args):
required=True,
help='R.java package to use.')
options = parser.parse_args(build_utils.ExpandFileArgs(args))
- options.deps_rtxts = build_utils.ParseGnList(options.deps_rtxts)
+ options.deps_rtxts = action_helpers.parse_gn_list(options.deps_rtxts)
_CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out)
- build_utils.WriteDepfile(options.depfile,
- options.srcjar_out,
- inputs=options.deps_rtxts)
+ action_helpers.write_depfile(options.depfile,
+ options.srcjar_out,
+ inputs=options.deps_rtxts)
if __name__ == "__main__":
diff --git a/build/android/gyp/create_r_java.pydeps b/build/android/gyp/create_r_java.pydeps
index 76de786fd..20fd1f8bd 100644
--- a/build/android/gyp/create_r_java.pydeps
+++ b/build/android/gyp/create_r_java.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,7 +22,9 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
create_r_java.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/create_r_txt.pydeps b/build/android/gyp/create_r_txt.pydeps
index 122915127..65378f038 100644
--- a/build/android/gyp/create_r_txt.pydeps
+++ b/build/android/gyp/create_r_txt.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
create_r_txt.py
util/__init__.py
diff --git a/build/android/gyp/create_size_info_files.py b/build/android/gyp/create_size_info_files.py
index aa8a48a79..24fcf8dc8 100755
--- a/build/android/gyp/create_size_info_files.py
+++ b/build/android/gyp/create_size_info_files.py
@@ -15,6 +15,7 @@ import zipfile
from util import build_utils
from util import jar_info_utils
+import action_helpers # build_utils adds //build to sys.path.
_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)')
@@ -40,9 +41,7 @@ def _TransformAarPaths(path):
def _MergeResInfoFiles(res_info_path, info_paths):
# Concatenate them all.
- # only_if_changed=False since no build rules depend on this as an input.
- with build_utils.AtomicOutput(res_info_path, only_if_changed=False,
- mode='w+') as dst:
+ with action_helpers.atomic_output(res_info_path, 'w+') as dst:
for p in info_paths:
with open(p) as src:
dst.writelines(_TransformAarPaths(l) for l in src)
@@ -58,8 +57,9 @@ def _MergePakInfoFiles(merged_path, pak_infos):
with open(pak_info_path, 'r') as src_info_file:
info_lines.update(_TransformAarPaths(x) for x in src_info_file)
# only_if_changed=False since no build rules depend on this as an input.
- with build_utils.AtomicOutput(merged_path, only_if_changed=False,
- mode='w+') as f:
+ with action_helpers.atomic_output(merged_path,
+ only_if_changed=False,
+ mode='w+') as f:
f.writelines(sorted(info_lines))
@@ -121,7 +121,7 @@ def _MergeJarInfoFiles(output, inputs):
attributed_path, name))
# only_if_changed=False since no build rules depend on this as an input.
- with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+ with action_helpers.atomic_output(output, only_if_changed=False) as f:
jar_info_utils.WriteJarInfoFile(f, info_data)
@@ -139,7 +139,7 @@ def _FindJarInputs(jar_paths):
def main(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser(description=__doc__)
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument(
'--jar-info-path', required=True, help='Output .jar.info file')
parser.add_argument(
@@ -170,9 +170,9 @@ def main(args):
options = parser.parse_args(args)
- options.jar_files = build_utils.ParseGnList(options.jar_files)
- options.assets = build_utils.ParseGnList(options.assets)
- options.uncompressed_assets = build_utils.ParseGnList(
+ options.jar_files = action_helpers.parse_gn_list(options.jar_files)
+ options.assets = action_helpers.parse_gn_list(options.assets)
+ options.uncompressed_assets = action_helpers.parse_gn_list(
options.uncompressed_assets)
jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files))
@@ -186,9 +186,9 @@ def main(args):
_MergeResInfoFiles(options.res_info_path, res_inputs)
all_inputs = jar_inputs + pak_inputs + res_inputs
- build_utils.WriteDepfile(options.depfile,
- options.jar_info_path,
- inputs=all_inputs)
+ action_helpers.write_depfile(options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs)
if __name__ == '__main__':
diff --git a/build/android/gyp/create_size_info_files.pydeps b/build/android/gyp/create_size_info_files.pydeps
index 1a69c553d..0dd61cbb3 100644
--- a/build/android/gyp/create_size_info_files.pydeps
+++ b/build/android/gyp/create_size_info_files.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../action_helpers.py
../../gn_helpers.py
create_size_info_files.py
util/__init__.py
diff --git a/build/android/gyp/create_ui_locale_resources.py b/build/android/gyp/create_ui_locale_resources.py
index f63f49de4..c767bc501 100755
--- a/build/android/gyp/create_ui_locale_resources.py
+++ b/build/android/gyp/create_ui_locale_resources.py
@@ -27,6 +27,9 @@ sys.path.insert(
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
+
# A small string template for the content of each strings.xml file.
# NOTE: The name is chosen to avoid any conflicts with other string defined
@@ -52,8 +55,10 @@ def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
zip_path = 'values-%s/strings.xml' % android_locale
else:
zip_path = 'values/strings.xml'
- build_utils.AddToZipHermetic(
- out_zip, zip_path, data=locale_data, compress=False)
+ zip_helpers.add_to_zip_hermetic(out_zip,
+ zip_path,
+ data=locale_data,
+ compress=False)
def main():
@@ -69,11 +74,11 @@ def main():
args = parser.parse_args()
- locale_list = build_utils.ParseGnList(args.locale_list)
+ locale_list = action_helpers.parse_gn_list(args.locale_list)
if not locale_list:
raise Exception('Locale list cannot be empty!')
- with build_utils.AtomicOutput(args.output_zip) as tmp_file:
+ with action_helpers.atomic_output(args.output_zip) as tmp_file:
with zipfile.ZipFile(tmp_file, 'w') as out_zip:
# First, write the default value, since aapt requires one.
_AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
diff --git a/build/android/gyp/create_ui_locale_resources.pydeps b/build/android/gyp/create_ui_locale_resources.pydeps
index 2e6f20f47..5cffc7906 100644
--- a/build/android/gyp/create_ui_locale_resources.pydeps
+++ b/build/android/gyp/create_ui_locale_resources.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,7 +22,9 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
create_ui_locale_resources.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/create_unwind_table.py b/build/android/gyp/create_unwind_table.py
index 3f835db14..83cd73d65 100755
--- a/build/android/gyp/create_unwind_table.py
+++ b/build/android/gyp/create_unwind_table.py
@@ -1027,6 +1027,12 @@ def ReadTextSectionStartAddress(readobj_path: str, libchrome_path: str) -> int:
Returns:
The text section start address as a number.
"""
+ def GetSectionName(section) -> str:
+ # See crbug.com/1426287 for context on different JSON names.
+ if 'Name' in section['Section']['Name']:
+ return section['Section']['Name']['Name']
+ return section['Section']['Name']['Value']
+
proc = subprocess.Popen(
[readobj_path, '--sections', '--elf-output-style=JSON', libchrome_path],
stdout=subprocess.PIPE,
@@ -1036,7 +1042,7 @@ def ReadTextSectionStartAddress(readobj_path: str, libchrome_path: str) -> int:
sections = elfs['Sections']
return next(s['Section']['Address'] for s in sections
- if s['Section']['Name']['Value'] == '.text')
+ if GetSectionName(s) == '.text')
def main():
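
The GetSectionName helper added above papers over two JSON shapes produced by different llvm-readobj versions: newer builds emit each section's 'Name' as an object with both 'Name' and 'Value' keys, older ones only with 'Value' (crbug.com/1426287). A compact sketch of the whole lookup follows; the readobj flags come from the diff, while the top-level JSON shape handling is an assumption:

import json
import subprocess

def read_text_section_address(readobj_path, lib_path):
  out = subprocess.check_output(
      [readobj_path, '--sections', '--elf-output-style=JSON', lib_path])
  data = json.loads(out)
  # llvm-readobj may wrap per-file results in a list.
  elf = data[0] if isinstance(data, list) else data

  def section_name(section):
    name = section['Section']['Name']
    return name['Name'] if 'Name' in name else name['Value']

  return next(s['Section']['Address'] for s in elf['Sections']
              if section_name(s) == '.text')
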
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
index 0b2577090..a7f024a27 100755
--- a/build/android/gyp/dex.py
+++ b/build/android/gyp/dex.py
@@ -17,12 +17,19 @@ import zipfile
from util import build_utils
from util import md5_check
-from util import zipalign
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
_DEX_XMX = '2G' # Increase this when __final_dex OOMs.
_IGNORE_WARNINGS = (
+ # Warning: Running R8 version main (build engineering), which cannot be
+ # represented as a semantic version. Using an artificial version newer than
+ # any known version for selecting Proguard configurations embedded under
+ # META-INF/. This means that all rules with a '-upto-' qualifier will be
+ # excluded and all rules with a -from- qualifier will be included.
+ r'Running R8 version main',
# E.g. Triggers for weblayer_instrumentation_test_apk since both it and its
# apk_under_test have no shared_libraries.
# https://crbug.com/1364192 << To fix this in a better way.
@@ -49,6 +56,8 @@ _IGNORE_WARNINGS = (
r'Proguard configuration rule does not match anything:.*class android\.',
# TODO(crbug.com/1303951): Don't ignore all such warnings.
r'Proguard configuration rule does not match anything:',
+ # TODO(agrieve): Remove once we update to U SDK.
+ r'OnBackAnimationCallback',
)
_SKIPPED_CLASS_FILE_NAMES = (
@@ -60,7 +69,7 @@ def _ParseArgs(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--output', required=True, help='Dex output path.')
parser.add_argument(
'--class-inputs',
@@ -141,13 +150,13 @@ def _ParseArgs(args):
parser.error('Cannot use both --force-enable-assertions and '
'--assertion-handler')
- options.class_inputs = build_utils.ParseGnList(options.class_inputs)
- options.class_inputs_filearg = build_utils.ParseGnList(
+ options.class_inputs = action_helpers.parse_gn_list(options.class_inputs)
+ options.class_inputs_filearg = action_helpers.parse_gn_list(
options.class_inputs_filearg)
- options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
- options.classpath = build_utils.ParseGnList(options.classpath)
- options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
- options.dex_inputs_filearg = build_utils.ParseGnList(
+ options.bootclasspath = action_helpers.parse_gn_list(options.bootclasspath)
+ options.classpath = action_helpers.parse_gn_list(options.classpath)
+ options.dex_inputs = action_helpers.parse_gn_list(options.dex_inputs)
+ options.dex_inputs_filearg = action_helpers.parse_gn_list(
options.dex_inputs_filearg)
return options
@@ -159,7 +168,7 @@ def CreateStderrFilter(show_desugar_default_interface_warnings):
if os.environ.get('R8_SHOW_ALL_OUTPUT', '0') != '0':
return output
- warnings = re.split(r'^(?=Warning)', output, flags=re.MULTILINE)
+ warnings = re.split(r'^(?=Warning|Error)', output, flags=re.MULTILINE)
preamble, *warnings = warnings
patterns = list(_IGNORE_WARNINGS)
@@ -227,7 +236,7 @@ def _ZipAligned(dex_files, output_path):
with zipfile.ZipFile(output_path, 'w') as z:
for i, dex_file in enumerate(dex_files):
name = 'classes{}.dex'.format(i + 1 if i > 0 else '')
- zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4)
+ zip_helpers.add_to_zip_hermetic(z, name, src_path=dex_file, alignment=4)
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
@@ -422,7 +431,7 @@ def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar,
min_api):
- dex_cmd = build_utils.JavaCmd(verify=False, xmx=_DEX_XMX) + [
+ dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX) + [
'-cp',
r8_jar_path,
'com.android.tools.r8.D8',
@@ -460,7 +469,7 @@ def main(args):
final_dex_inputs = list(options.class_inputs)
final_dex_inputs += options.dex_inputs
- dex_cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx=_DEX_XMX)
+ dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX)
if options.dump_inputs:
dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip']
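dex.py's stderr filter now splits on Error blocks as well as Warning blocks and gained two new ignore patterns. A self-contained sketch of how that filtering works (the patterns and sample text below are illustrative, not the full _IGNORE_WARNINGS list):

import re

IGNORE_PATTERNS = (
    r'Running R8 version main',
    r'OnBackAnimationCallback',
)

def filter_output(output):
  # Split into blocks that start with 'Warning' or 'Error' at a line start,
  # then drop any block matching an ignore pattern.
  preamble, *blocks = re.split(r'^(?=Warning|Error)', output,
                               flags=re.MULTILINE)
  kept = [b for b in blocks
          if not any(re.search(p, b) for p in IGNORE_PATTERNS)]
  return preamble + ''.join(kept)

sample = ('Warning: Running R8 version main (build engineering)\n'
          'Warning: something actually interesting\n')
print(filter_output(sample))
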
diff --git a/build/android/gyp/dex.pydeps b/build/android/gyp/dex.pydeps
index f41626430..d920e2461 100644
--- a/build/android/gyp/dex.pydeps
+++ b/build/android/gyp/dex.pydeps
@@ -1,9 +1,10 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../action_helpers.py
../../gn_helpers.py
../../print_python_deps.py
+../../zip_helpers.py
dex.py
util/__init__.py
util/build_utils.py
util/md5_check.py
-util/zipalign.py
diff --git a/build/android/gyp/dist_aar.py b/build/android/gyp/dist_aar.py
index 72758f50a..507d0c3d8 100755
--- a/build/android/gyp/dist_aar.py
+++ b/build/android/gyp/dist_aar.py
@@ -16,6 +16,8 @@ import zipfile
import filter_zip
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
@@ -65,7 +67,7 @@ def _AddResources(aar_zip, resource_zips, include_globs):
def main(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--output', required=True, help='Path to output aar.')
parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
parser.add_argument('--dependencies-res-zips', required=True,
@@ -98,52 +100,60 @@ def main(args):
if options.native_libraries and not options.abi:
parser.error('You must provide --abi if you have native libs')
- options.jars = build_utils.ParseGnList(options.jars)
- options.dependencies_res_zips = build_utils.ParseGnList(
+ options.jars = action_helpers.parse_gn_list(options.jars)
+ options.dependencies_res_zips = action_helpers.parse_gn_list(
options.dependencies_res_zips)
- options.r_text_files = build_utils.ParseGnList(options.r_text_files)
- options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
- options.native_libraries = build_utils.ParseGnList(options.native_libraries)
- options.jar_excluded_globs = build_utils.ParseGnList(
+ options.r_text_files = action_helpers.parse_gn_list(options.r_text_files)
+ options.proguard_configs = action_helpers.parse_gn_list(
+ options.proguard_configs)
+ options.native_libraries = action_helpers.parse_gn_list(
+ options.native_libraries)
+ options.jar_excluded_globs = action_helpers.parse_gn_list(
options.jar_excluded_globs)
- options.jar_included_globs = build_utils.ParseGnList(
+ options.jar_included_globs = action_helpers.parse_gn_list(
options.jar_included_globs)
- options.resource_included_globs = build_utils.ParseGnList(
+ options.resource_included_globs = action_helpers.parse_gn_list(
options.resource_included_globs)
with tempfile.NamedTemporaryFile(delete=False) as staging_file:
try:
with zipfile.ZipFile(staging_file.name, 'w') as z:
- build_utils.AddToZipHermetic(
- z, 'AndroidManifest.xml', src_path=options.android_manifest)
+ zip_helpers.add_to_zip_hermetic(z,
+ 'AndroidManifest.xml',
+ src_path=options.android_manifest)
path_transform = filter_zip.CreatePathTransform(
options.jar_excluded_globs, options.jar_included_globs)
with tempfile.NamedTemporaryFile() as jar_file:
- build_utils.MergeZips(
- jar_file.name, options.jars, path_transform=path_transform)
- build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
-
- build_utils.AddToZipHermetic(
- z,
- 'R.txt',
- data=_MergeRTxt(options.r_text_files,
- options.resource_included_globs))
- build_utils.AddToZipHermetic(z, 'public.txt', data='')
+ zip_helpers.merge_zips(jar_file.name,
+ options.jars,
+ path_transform=path_transform)
+ zip_helpers.add_to_zip_hermetic(z,
+ 'classes.jar',
+ src_path=jar_file.name)
+
+ zip_helpers.add_to_zip_hermetic(z,
+ 'R.txt',
+ data=_MergeRTxt(
+ options.r_text_files,
+ options.resource_included_globs))
+ zip_helpers.add_to_zip_hermetic(z, 'public.txt', data='')
if options.proguard_configs:
- build_utils.AddToZipHermetic(
- z, 'proguard.txt',
- data=_MergeProguardConfigs(options.proguard_configs))
+ zip_helpers.add_to_zip_hermetic(z,
+ 'proguard.txt',
+ data=_MergeProguardConfigs(
+ options.proguard_configs))
_AddResources(z, options.dependencies_res_zips,
options.resource_included_globs)
for native_library in options.native_libraries:
libname = os.path.basename(native_library)
- build_utils.AddToZipHermetic(
- z, os.path.join('jni', options.abi, libname),
- src_path=native_library)
+ zip_helpers.add_to_zip_hermetic(z,
+ os.path.join('jni', options.abi,
+ libname),
+ src_path=native_library)
except:
os.unlink(staging_file.name)
raise
@@ -152,7 +162,7 @@ def main(args):
if options.depfile:
all_inputs = (options.jars + options.dependencies_res_zips +
options.r_text_files + options.proguard_configs)
- build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
+ action_helpers.write_depfile(options.depfile, options.output, all_inputs)
if __name__ == '__main__':
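
dist_aar.py now routes every entry through zip_helpers.add_to_zip_hermetic. "Hermetic" here means deterministic archive metadata, so repeated builds produce byte-identical aars. A minimal sketch of the idea follows; the constant timestamp and file mode are assumptions about the approach, not copies of the real helper:

import io
import zipfile

_FIXED_TIMESTAMP = (2001, 1, 1, 0, 0, 0)  # Any constant date works.

def add_hermetic(zip_file, zip_path, data, compress=False):
  info = zipfile.ZipInfo(filename=zip_path, date_time=_FIXED_TIMESTAMP)
  info.compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
  # external_attr carries unix permissions; pin it so the building machine's
  # umask does not leak into the output.
  info.external_attr = (0o644 << 16)
  zip_file.writestr(info, data)

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as z:
  add_hermetic(z, 'R.txt', data='')
  add_hermetic(z, 'proguard.txt', data='# no rules\n', compress=True)
print(len(buf.getvalue()), 'bytes, reproducible across runs')
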
diff --git a/build/android/gyp/dist_aar.pydeps b/build/android/gyp/dist_aar.pydeps
index 3182580af..ba0dd5259 100644
--- a/build/android/gyp/dist_aar.pydeps
+++ b/build/android/gyp/dist_aar.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
dist_aar.py
filter_zip.py
util/__init__.py
diff --git a/build/android/gyp/filter_zip.py b/build/android/gyp/filter_zip.py
index 5f1181815..0382651bf 100755
--- a/build/android/gyp/filter_zip.py
+++ b/build/android/gyp/filter_zip.py
@@ -9,6 +9,8 @@ import shutil
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def CreatePathTransform(exclude_globs, include_globs):
@@ -50,13 +52,14 @@ def main():
argv = build_utils.ExpandFileArgs(sys.argv[1:])
args = parser.parse_args(argv)
- args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
- args.include_globs = build_utils.ParseGnList(args.include_globs)
+ args.exclude_globs = action_helpers.parse_gn_list(args.exclude_globs)
+ args.include_globs = action_helpers.parse_gn_list(args.include_globs)
path_transform = CreatePathTransform(args.exclude_globs, args.include_globs)
- with build_utils.AtomicOutput(args.output) as f:
+ with action_helpers.atomic_output(args.output) as f:
if path_transform:
- build_utils.MergeZips(f.name, [args.input], path_transform=path_transform)
+ zip_helpers.merge_zips(f.name, [args.input],
+ path_transform=path_transform)
else:
shutil.copy(args.input, f.name)
diff --git a/build/android/gyp/filter_zip.pydeps b/build/android/gyp/filter_zip.pydeps
index f561e05c4..4905fd5d7 100644
--- a/build/android/gyp/filter_zip.pydeps
+++ b/build/android/gyp/filter_zip.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
filter_zip.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py
index f7581c7e9..aaf66c2af 100644
--- a/build/android/gyp/finalize_apk.py
+++ b/build/android/gyp/finalize_apk.py
@@ -38,7 +38,7 @@ def FinalizeApk(apksigner_path,
else:
signer_input_path = unsigned_apk_path
- sign_cmd = build_utils.JavaCmd(warnings_as_errors) + [
+ sign_cmd = build_utils.JavaCmd() + [
'-jar',
apksigner_path,
'sign',
diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py
index df0206f82..617efef3f 100755
--- a/build/android/gyp/find.py
+++ b/build/android/gyp/find.py
@@ -7,7 +7,6 @@
"""Finds files in directories.
"""
-from __future__ import print_function
import fnmatch
import optparse
diff --git a/build/android/gyp/flatc_java.py b/build/android/gyp/flatc_java.py
index fc0d4cd42..003f8201f 100755
--- a/build/android/gyp/flatc_java.py
+++ b/build/android/gyp/flatc_java.py
@@ -11,6 +11,8 @@ import argparse
import sys
from util import build_utils
+import action_helpers
+import zip_helpers
def main(argv):
@@ -32,7 +34,8 @@ def main(argv):
build_utils.CheckOutput([options.flatc, '-j', '-o', temp_dir] +
import_args + options.flatbuffers)
- build_utils.ZipDir(options.srcjar, temp_dir)
+ with action_helpers.atomic_output(options.srcjar) as f:
+ zip_helpers.zip_directory(f, temp_dir)
if __name__ == '__main__':
diff --git a/build/android/gyp/flatc_java.pydeps b/build/android/gyp/flatc_java.pydeps
index 960e34fc3..8c0c4f01f 100644
--- a/build/android/gyp/flatc_java.pydeps
+++ b/build/android/gyp/flatc_java.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/flatc_java.pydeps build/android/gyp/flatc_java.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
flatc_java.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
index 384f944ca..2e5b3b3b5 100755
--- a/build/android/gyp/gcc_preprocess.py
+++ b/build/android/gyp/gcc_preprocess.py
@@ -12,6 +12,8 @@ import sys
import zipfile
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def _ParsePackageName(data):
@@ -32,8 +34,8 @@ def main(args):
parser.add_argument('templates', nargs='+', help='Template files.')
options = parser.parse_args(args)
- options.defines = build_utils.ParseGnList(options.defines)
- options.include_dirs = build_utils.ParseGnList(options.include_dirs)
+ options.defines = action_helpers.parse_gn_list(options.defines)
+ options.include_dirs = action_helpers.parse_gn_list(options.include_dirs)
gcc_cmd = [
'gcc',
@@ -46,7 +48,7 @@ def main(args):
gcc_cmd.extend('-D' + x for x in options.defines)
gcc_cmd.extend('-I' + x for x in options.include_dirs)
- with build_utils.AtomicOutput(options.output) as f:
+ with action_helpers.atomic_output(options.output) as f:
with zipfile.ZipFile(f, 'w') as z:
for template in options.templates:
data = build_utils.CheckOutput(gcc_cmd + [template])
@@ -56,7 +58,7 @@ def main(args):
zip_path = posixpath.join(
package_name.replace('.', '/'),
os.path.splitext(os.path.basename(template))[0]) + '.java'
- build_utils.AddToZipHermetic(z, zip_path, data=data)
+ zip_helpers.add_to_zip_hermetic(z, zip_path, data=data)
if __name__ == '__main__':
diff --git a/build/android/gyp/gcc_preprocess.pydeps b/build/android/gyp/gcc_preprocess.pydeps
index 39e56f700..b57d40042 100644
--- a/build/android/gyp/gcc_preprocess.pydeps
+++ b/build/android/gyp/gcc_preprocess.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
gcc_preprocess.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/generate_android_wrapper.py b/build/android/gyp/generate_android_wrapper.py
index 187ac525e..46c7afeab 100755
--- a/build/android/gyp/generate_android_wrapper.py
+++ b/build/android/gyp/generate_android_wrapper.py
@@ -8,6 +8,7 @@ import re
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
sys.path.append(
os.path.abspath(
@@ -23,7 +24,7 @@ def ExpandWrappedPathLists(args):
for arg in args:
m = _WRAPPED_PATH_LIST_RE.match(arg)
if m:
- for p in build_utils.ParseGnList(m.group(2)):
+ for p in action_helpers.parse_gn_list(m.group(2)):
expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p])
else:
expanded_args.append(arg)
diff --git a/build/android/gyp/generate_linker_version_script.py b/build/android/gyp/generate_linker_version_script.py
index dd5d604a3..456ddd1f6 100755
--- a/build/android/gyp/generate_linker_version_script.py
+++ b/build/android/gyp/generate_linker_version_script.py
@@ -8,6 +8,7 @@ import argparse
import os
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
_SCRIPT_HEADER = """\
# AUTO-GENERATED FILE. DO NOT MODIFY.
@@ -35,6 +36,10 @@ def main():
'--export-java-symbols',
action='store_true',
help='Export Java_* JNI methods')
+ parser.add_argument(
+ '--jni-multiplexing',
+ action='store_true',
+ help='Export only the JNI methods generated by multiplexing')
parser.add_argument('--export-fortesting-java-symbols',
action='store_true',
help='Export Java_*_ForTesting JNI methods')
@@ -61,7 +66,9 @@ def main():
symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad']
if options.export_java_symbols:
- if options.export_fortesting_java_symbols:
+ if options.jni_multiplexing:
+ symbol_list.append('Java_*_resolve_1for_*')
+ elif options.export_fortesting_java_symbols:
symbol_list.append('Java_*')
else:
# The linker uses unix shell globbing patterns, not regex. So, we have to
@@ -102,7 +109,7 @@ def main():
script = ''.join(script_content)
- with build_utils.AtomicOutput(options.output, mode='w') as f:
+ with action_helpers.atomic_output(options.output, mode='w') as f:
f.write(script)
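
The new --jni-multiplexing flag narrows the exported JNI symbols to the multiplexed resolver pattern instead of the usual Java_* glob. A short sketch of how such a linker version script is assembled; only the glob patterns come from this diff, the surrounding structure is reconstructed for illustration:

def build_version_script(jni_multiplexing=False):
  # Exported names are unix shell globs, as the script header notes.
  symbols = ['CrashpadHandlerMain', 'JNI_OnLoad']
  if jni_multiplexing:
    symbols.append('Java_*_resolve_1for_*')
  else:
    symbols.append('Java_*')
  lines = ['{', 'global:']
  lines += ['  %s;' % s for s in symbols]
  lines += ['local:', '  *;', '};', '']
  return '\n'.join(lines)

print(build_version_script(jni_multiplexing=True))
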
diff --git a/build/android/gyp/generate_linker_version_script.pydeps b/build/android/gyp/generate_linker_version_script.pydeps
index de9fa56a9..03ac25d5c 100644
--- a/build/android/gyp/generate_linker_version_script.pydeps
+++ b/build/android/gyp/generate_linker_version_script.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../action_helpers.py
../../gn_helpers.py
generate_linker_version_script.py
util/__init__.py
diff --git a/build/android/gyp/ijar.py b/build/android/gyp/ijar.py
index c2da8b60b..ec12cecaa 100755
--- a/build/android/gyp/ijar.py
+++ b/build/android/gyp/ijar.py
@@ -10,6 +10,7 @@ import subprocess
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
# python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')"
@@ -23,7 +24,7 @@ def main():
if len(sys.argv) != 4:
raise ValueError('unexpected arguments were given. %s' % sys.argv)
ijar_bin, in_jar, out_jar = sys.argv[1], sys.argv[2], sys.argv[3]
- with build_utils.AtomicOutput(out_jar) as f:
+ with action_helpers.atomic_output(out_jar) as f:
# ijar fails on empty jars: https://github.com/bazelbuild/bazel/issues/10162
if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE:
with open(in_jar, 'rb') as in_f:
diff --git a/build/android/gyp/ijar.pydeps b/build/android/gyp/ijar.pydeps
index e9ecb6636..530aabe8e 100644
--- a/build/android/gyp/ijar.pydeps
+++ b/build/android/gyp/ijar.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../action_helpers.py
../../gn_helpers.py
ijar.py
util/__init__.py
diff --git a/build/android/gyp/jacoco_instr.py b/build/android/gyp/jacoco_instr.py
index 88a604762..f32d6e87d 100755
--- a/build/android/gyp/jacoco_instr.py
+++ b/build/android/gyp/jacoco_instr.py
@@ -3,7 +3,6 @@
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Instruments classes and jar files.
This script corresponds to the 'jacoco_instr' action in the Java build process.
@@ -13,17 +12,20 @@ jacococli.jar.
"""
-from __future__ import print_function
-
import argparse
import json
import os
import shutil
import sys
-import tempfile
import zipfile
from util import build_utils
+import action_helpers
+import zip_helpers
+
+
+# This should be the same as the recipe-side token. See bit.ly/3STSPcE.
+INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN = 'INSTRUMENT_ALL_JACOCO'
def _AddArguments(parser):
@@ -49,9 +51,9 @@ def _AddArguments(parser):
help='File to create with the list of source directories '
'and input path.')
parser.add_argument(
- '--java-sources-file',
+ '--target-sources-file',
required=True,
- help='File containing newline-separated .java paths')
+ help='File containing newline-separated .java and .kt paths')
parser.add_argument(
'--jacococli-jar', required=True, help='Path to jacococli.jar.')
parser.add_argument(
@@ -135,7 +137,8 @@ def _GetAffectedClasses(jar_file, source_files):
if index == -1:
index = member.find('.class')
for source_file in source_files:
- if source_file.endswith(member[:index] + '.java'):
+ if source_file.endswith(
+ (member[:index] + '.java', member[:index] + '.kt')):
affected_classes.append(member)
is_affected = True
break
@@ -182,7 +185,8 @@ def _InstrumentClassFiles(instrument_cmd,
f.extractall(instrumented_dir, unaffected_members)
# Zip all files to output_path
- build_utils.ZipDir(output_path, instrumented_dir)
+ with action_helpers.atomic_output(output_path) as f:
+ zip_helpers.zip_directory(f, instrumented_dir)
def _RunInstrumentCommand(parser):
@@ -197,8 +201,8 @@ def _RunInstrumentCommand(parser):
args = parser.parse_args()
source_files = []
- if args.java_sources_file:
- source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
+ if args.target_sources_file:
+ source_files.extend(build_utils.ReadSourcesList(args.target_sources_file))
with build_utils.TempDir() as temp_dir:
instrument_cmd = build_utils.JavaCmd() + [
@@ -206,22 +210,32 @@ def _RunInstrumentCommand(parser):
]
if not args.files_to_instrument:
- _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
- temp_dir)
+ affected_source_files = None
else:
affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
- source_set = set(source_files)
- affected_source_files = [f for f in affected_files if f in source_set]
-
- # Copy input_path to output_path and return if no source file affected.
- if not affected_source_files:
- shutil.copyfile(args.input_path, args.output_path)
- # Create a dummy sources_json_file.
- _CreateSourcesJsonFile([], None, args.sources_json_file,
- build_utils.DIR_SOURCE_ROOT)
- return 0
- _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
- temp_dir, affected_source_files)
+ # Check if coverage recipe decided to instrument everything by overriding
+ # the try builder default setting(selective instrumentation). This can
+ # happen in cases like a DEPS roll of jacoco library
+
+ # Note: This token is preceded by ../../ because the paths to be
+ # instrumented are expected to be relative to the build directory.
+ # See _rebase_paths() at https://bit.ly/40oiixX
+ token = '../../' + INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN
+ if token in affected_files:
+ affected_source_files = None
+ else:
+ source_set = set(source_files)
+ affected_source_files = [f for f in affected_files if f in source_set]
+
+ # Copy input_path to output_path and return if no source file affected.
+ if not affected_source_files:
+ shutil.copyfile(args.input_path, args.output_path)
+ # Create a dummy sources_json_file.
+ _CreateSourcesJsonFile([], None, args.sources_json_file,
+ build_utils.DIR_SOURCE_ROOT)
+ return 0
+ _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+ temp_dir, affected_source_files)
source_dirs = _GetSourceDirsFromSourceFiles(source_files)
# TODO(GYP): In GN, we are passed the list of sources, detecting source
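
The instrumentation path above now distinguishes three cases: no files-to-instrument list (instrument every class), a list containing the recipe override token (also instrument every class), and a normal list (instrument only the affected files that are sources of this target). A condensed sketch of that decision, with made-up paths:

INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN = 'INSTRUMENT_ALL_JACOCO'

def pick_files_to_instrument(affected_files, target_sources):
  # Paths arrive relative to the build directory, hence the ../../ prefix.
  token = '../../' + INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN
  if token in affected_files:
    return None  # None means "instrument every class in the jar".
  source_set = set(target_sources)
  return [f for f in affected_files if f in source_set]

sources = ['../../chrome/java/src/Foo.java', '../../chrome/java/src/Bar.kt']
print(pick_files_to_instrument(['../../chrome/java/src/Bar.kt'], sources))
print(pick_files_to_instrument(['../../INSTRUMENT_ALL_JACOCO'], sources))
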
diff --git a/build/android/gyp/jacoco_instr.pydeps b/build/android/gyp/jacoco_instr.pydeps
index d7fec19fd..9c763fc62 100644
--- a/build/android/gyp/jacoco_instr.pydeps
+++ b/build/android/gyp/jacoco_instr.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
jacoco_instr.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
index b664021f1..9098cfc82 100755
--- a/build/android/gyp/java_cpp_enum.py
+++ b/build/android/gyp/java_cpp_enum.py
@@ -16,6 +16,9 @@ import zipfile
from util import build_utils
from util import java_cpp_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
+
# List of C++ types that are compatible with the Java code generated by this
# script.
@@ -427,10 +430,10 @@ def DoMain(argv):
parser.error('Need to specify at least one input file')
input_paths = args
- with build_utils.AtomicOutput(options.srcjar) as f:
+ with action_helpers.atomic_output(options.srcjar) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
for output_path, data in DoGenerate(input_paths):
- build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+ zip_helpers.add_to_zip_hermetic(srcjar, output_path, data=data)
if __name__ == '__main__':
diff --git a/build/android/gyp/java_cpp_enum.pydeps b/build/android/gyp/java_cpp_enum.pydeps
index e6aaeb7b1..3e63ff861 100644
--- a/build/android/gyp/java_cpp_enum.pydeps
+++ b/build/android/gyp/java_cpp_enum.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
java_cpp_enum.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/java_cpp_features.py b/build/android/gyp/java_cpp_features.py
index 451acb642..10639a548 100755
--- a/build/android/gyp/java_cpp_features.py
+++ b/build/android/gyp/java_cpp_features.py
@@ -12,6 +12,8 @@ import zipfile
from util import build_utils
from util import java_cpp_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
@@ -100,10 +102,10 @@ def _Main(argv):
metavar='INPUTFILE')
args = parser.parse_args(argv)
- with build_utils.AtomicOutput(args.srcjar) as f:
+ with action_helpers.atomic_output(args.srcjar) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
data, path = _Generate(args.inputs, args.template)
- build_utils.AddToZipHermetic(srcjar, path, data=data)
+ zip_helpers.add_to_zip_hermetic(srcjar, path, data=data)
if __name__ == '__main__':
diff --git a/build/android/gyp/java_cpp_features.pydeps b/build/android/gyp/java_cpp_features.pydeps
index acffae2bb..4faa9033e 100644
--- a/build/android/gyp/java_cpp_features.pydeps
+++ b/build/android/gyp/java_cpp_features.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
java_cpp_features.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/java_cpp_strings.py b/build/android/gyp/java_cpp_strings.py
index d6ce81e18..c3d05de6c 100755
--- a/build/android/gyp/java_cpp_strings.py
+++ b/build/android/gyp/java_cpp_strings.py
@@ -12,6 +12,8 @@ import zipfile
from util import build_utils
from util import java_cpp_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
@@ -93,10 +95,10 @@ def _Main(argv):
'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
args = parser.parse_args(argv)
- with build_utils.AtomicOutput(args.srcjar) as f:
+ with action_helpers.atomic_output(args.srcjar) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
data, path = _Generate(args.inputs, args.template)
- build_utils.AddToZipHermetic(srcjar, path, data=data)
+ zip_helpers.add_to_zip_hermetic(srcjar, path, data=data)
if __name__ == '__main__':
diff --git a/build/android/gyp/java_cpp_strings.pydeps b/build/android/gyp/java_cpp_strings.pydeps
index 0a821f446..39b299e4b 100644
--- a/build/android/gyp/java_cpp_strings.pydeps
+++ b/build/android/gyp/java_cpp_strings.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
java_cpp_strings.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/java_google_api_keys.py b/build/android/gyp/java_google_api_keys.py
index 4223e5d29..4e4fa1998 100755
--- a/build/android/gyp/java_google_api_keys.py
+++ b/build/android/gyp/java_google_api_keys.py
@@ -13,6 +13,7 @@ import sys
import zipfile
from util import build_utils
+import zip_helpers
sys.path.append(
os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
@@ -81,7 +82,7 @@ def _DoWriteJarOutput(output_path, constant_definition):
with zipfile.ZipFile(output_path, 'w') as srcjar:
path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
data = GenerateOutput(constant_definition)
- build_utils.AddToZipHermetic(srcjar, path, data=data)
+ zip_helpers.add_to_zip_hermetic(srcjar, path, data=data)
def _DoMain(argv):
diff --git a/build/android/gyp/java_google_api_keys.pydeps b/build/android/gyp/java_google_api_keys.pydeps
index ebb717273..6c027a19d 100644
--- a/build/android/gyp/java_google_api_keys.pydeps
+++ b/build/android/gyp/java_google_api_keys.pydeps
@@ -2,6 +2,7 @@
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py
../../../google_apis/google_api_keys.py
../../gn_helpers.py
+../../zip_helpers.py
java_google_api_keys.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/javac_output_processor.py b/build/android/gyp/javac_output_processor.py
index c89980c2c..6faf5de5a 100755
--- a/build/android/gyp/javac_output_processor.py
+++ b/build/android/gyp/javac_output_processor.py
@@ -5,11 +5,13 @@
# found in the LICENSE file.
"""Contains helper class for processing javac output."""
+import dataclasses
import os
import pathlib
import re
import sys
import traceback
+from typing import List
from util import build_utils
@@ -24,6 +26,43 @@ sys.path.insert(
import lookup_dep
+def ReplaceGmsPackageIfNeeded(target_name: str) -> str:
+ if target_name.startswith(
+ ('//third_party/android_deps:google_play_services_',
+ '//clank/third_party/google3:google_play_services_')):
+ return f'$google_play_services_package:{target_name.split(":")[1]}'
+ return target_name
+
+
+def _DisambiguateDeps(class_entries: List[lookup_dep.ClassEntry]):
+ def filter_if_not_empty(entries, filter_func):
+ filtered_entries = [e for e in entries if filter_func(e)]
+ return filtered_entries or entries
+
+ # When some deps are preferred, ignore all other potential deps.
+ class_entries = filter_if_not_empty(class_entries, lambda e: e.preferred_dep)
+
+ # E.g. javax_annotation_jsr250_api_java.
+ class_entries = filter_if_not_empty(class_entries,
+ lambda e: 'jsr' in e.target)
+
+ # Avoid suggesting subtargets when regular targets exist.
+ class_entries = filter_if_not_empty(class_entries,
+ lambda e: '__' not in e.target)
+
+ # Swap out GMS package names if needed.
+ class_entries = [
+ dataclasses.replace(e, target=ReplaceGmsPackageIfNeeded(e.target))
+ for e in class_entries
+ ]
+
+ # Convert to dict and then use list to get the keys back to remove dups and
+ # keep order the same as before.
+ class_entries = list({e: True for e in class_entries})
+
+ return class_entries
+
+
class JavacOutputProcessor:
def __init__(self, target_name):
self._target_name = self._RemoveSuffixesIfPresent(
@@ -153,7 +192,7 @@ class JavacOutputProcessor:
if not suggested_deps:
return
- suggested_deps = self._DisambiguateDeps(suggested_deps)
+ suggested_deps = _DisambiguateDeps(suggested_deps)
suggested_deps_str = ', '.join(s.target for s in suggested_deps)
if len(suggested_deps) > 1:
@@ -162,25 +201,6 @@ class JavacOutputProcessor:
self._suggested_deps.add(suggested_deps_str)
@staticmethod
- def _DisambiguateDeps(class_entries):
- if len(class_entries) == 1:
- return class_entries
-
- # android_library_factory() targets set low_classpath_priority=true, and any
- # target that is the "impl" side of a target that uses jar_excluded_patterns
- # should use this as well.
- # We should generally always suggest depending on the non-impl library
- # target.
- # TODO(crbug.com/1296711): Also use "visibility" a hint here.
- low_entries = [x for x in class_entries if x.low_classpath_priority]
- class_entries = low_entries or class_entries
-
- # E.g. javax_annotation_jsr250_api_java.
- jsr_entries = [x for x in class_entries if 'jsr' in x.target]
- class_entries = jsr_entries or class_entries
- return class_entries
-
- @staticmethod
def _RemoveSuffixesIfPresent(suffixes, text):
for suffix in suffixes:
if text.endswith(suffix):
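The module-level _DisambiguateDeps() added above relies on a small "filter unless it would empty the list" helper. A self-contained sketch of that idea, using plain strings in place of lookup_dep.ClassEntry objects:

def filter_if_not_empty(entries, filter_func):
  filtered_entries = [e for e in entries if filter_func(e)]
  # Fall back to the unfiltered list when the filter would remove everything.
  return filtered_entries or entries


targets = ['//a:foo__impl', '//a:foo', '//b:bar']
# Subtargets are dropped because non-subtarget alternatives exist.
print(filter_if_not_empty(targets, lambda t: '__' not in t))
# ['//a:foo', '//b:bar']

# With only a subtarget available, the filter becomes a no-op.
print(filter_if_not_empty(['//a:foo__impl'], lambda t: '__' not in t))
# ['//a:foo__impl']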
diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py
index 265f0081a..4a242683a 100755
--- a/build/android/gyp/jinja_template.py
+++ b/build/android/gyp/jinja_template.py
@@ -13,6 +13,8 @@ import sys
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib.constants import host_paths
@@ -90,12 +92,13 @@ def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
path_info.AddMapping(relpath, input_filename)
path_info.Write(outputs_zip + '.info')
- build_utils.ZipDir(outputs_zip, temp_dir)
+ with action_helpers.atomic_output(outputs_zip) as f:
+ zip_helpers.zip_directory(f, temp_dir)
def _ParseVariables(variables_arg, error_func):
variables = {}
- for v in build_utils.ParseGnList(variables_arg):
+ for v in action_helpers.parse_gn_list(variables_arg):
if '=' not in v:
error_func('--variables argument must contain "=": ' + v)
name, _, value = v.partition('=')
@@ -128,8 +131,8 @@ def main():
help='Enable inputs and includes checks.')
options = parser.parse_args()
- inputs = build_utils.ParseGnList(options.inputs)
- includes = build_utils.ParseGnList(options.includes)
+ inputs = action_helpers.parse_gn_list(options.inputs)
+ includes = action_helpers.parse_gn_list(options.includes)
if (options.output is None) == (options.outputs_zip is None):
parser.error('Exactly one of --output and --output-zip must be given')
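Several flags above switch to action_helpers.parse_gn_list(). A sketch of the expected conversions, assuming the new helper keeps the behavior documented for the removed build_utils.ParseGnList() (see the build_utils.py hunks later in this patch):

import action_helpers  # Assumes //build is on sys.path.

assert action_helpers.parse_gn_list(None) == []
assert action_helpers.parse_gn_list('') == []
assert action_helpers.parse_gn_list('foo.jinja2') == ['foo.jinja2']
assert action_helpers.parse_gn_list('["a.jinja2", "b.jinja2"]') == [
    'a.jinja2', 'b.jinja2'
]
# Lists of GN lists are flattened, which is how @FileArg expansions arrive.
assert action_helpers.parse_gn_list(['["a", "b"]', 'c']) == ['a', 'b', 'c']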
diff --git a/build/android/gyp/jinja_template.pydeps b/build/android/gyp/jinja_template.pydeps
index 6990e6afb..1eafd884a 100644
--- a/build/android/gyp/jinja_template.pydeps
+++ b/build/android/gyp/jinja_template.pydeps
@@ -10,8 +10,8 @@
../../../third_party/catapult/devil/devil/constants/__init__.py
../../../third_party/catapult/devil/devil/constants/exit_codes.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -31,7 +31,9 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
../pylib/__init__.py
../pylib/constants/__init__.py
../pylib/constants/host_paths.py
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
index 9666fa52c..ae26a1808 100755
--- a/build/android/gyp/lint.py
+++ b/build/android/gyp/lint.py
@@ -5,8 +5,6 @@
# found in the LICENSE file.
"""Runs Android's lint tool."""
-from __future__ import print_function
-
import argparse
import logging
import os
@@ -19,6 +17,7 @@ from xml.etree import ElementTree
from util import build_utils
from util import manifest_utils
from util import server_utils
+import action_helpers # build_utils adds //build to sys.path.
_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/main/build/android/docs/lint.md' # pylint: disable=line-too-long
@@ -180,7 +179,7 @@ def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths,
def _WriteXmlFile(root, path):
logging.info('Writing xml file %s', path)
build_utils.MakeDirectory(os.path.dirname(path))
- with build_utils.AtomicOutput(path) as f:
+ with action_helpers.atomic_output(path) as f:
# Although we can write it just with ElementTree.tostring, using minidom
# makes it a lot easier to read as a human (also on code search).
f.write(
@@ -379,7 +378,7 @@ def _RunLint(create_cache,
def _ParseArgs(argv):
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--target-name', help='Fully qualified GN target name.')
parser.add_argument('--skip-build-server',
action='store_true',
@@ -424,8 +423,9 @@ def _ParseArgs(argv):
parser.add_argument('--warnings-as-errors',
action='store_true',
help='Treat all warnings as errors.')
- parser.add_argument('--java-sources',
- help='File containing a list of java sources files.')
+ parser.add_argument('--sources',
+ help='A list of files containing java and kotlin source '
+ 'files.')
parser.add_argument('--aars', help='GN list of included aars.')
parser.add_argument('--srcjars', help='GN list of included srcjars.')
parser.add_argument('--manifest-path',
@@ -451,13 +451,14 @@ def _ParseArgs(argv):
'on new errors.')
args = parser.parse_args(build_utils.ExpandFileArgs(argv))
- args.java_sources = build_utils.ParseGnList(args.java_sources)
- args.aars = build_utils.ParseGnList(args.aars)
- args.srcjars = build_utils.ParseGnList(args.srcjars)
- args.resource_sources = build_utils.ParseGnList(args.resource_sources)
- args.extra_manifest_paths = build_utils.ParseGnList(args.extra_manifest_paths)
- args.resource_zips = build_utils.ParseGnList(args.resource_zips)
- args.classpath = build_utils.ParseGnList(args.classpath)
+ args.sources = action_helpers.parse_gn_list(args.sources)
+ args.aars = action_helpers.parse_gn_list(args.aars)
+ args.srcjars = action_helpers.parse_gn_list(args.srcjars)
+ args.resource_sources = action_helpers.parse_gn_list(args.resource_sources)
+ args.extra_manifest_paths = action_helpers.parse_gn_list(
+ args.extra_manifest_paths)
+ args.resource_zips = action_helpers.parse_gn_list(args.resource_zips)
+ args.classpath = action_helpers.parse_gn_list(args.classpath)
if args.baseline:
assert os.path.basename(args.baseline) == 'lint-baseline.xml', (
@@ -484,8 +485,8 @@ def main():
return
sources = []
- for java_sources_file in args.java_sources:
- sources.extend(build_utils.ReadSourcesList(java_sources_file))
+ for sources_file in args.sources:
+ sources.extend(build_utils.ReadSourcesList(sources_file))
resource_sources = []
for resource_sources_file in args.resource_sources:
resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
@@ -522,7 +523,7 @@ def main():
build_utils.Touch(args.stamp)
if args.depfile:
- build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
+ action_helpers.write_depfile(args.depfile, args.stamp, depfile_deps)
if __name__ == '__main__':
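lint.py now writes its depfile through action_helpers.write_depfile(). A sketch of the expected output, assuming the helper emits the same Ninja-style depfile format as the removed build_utils.WriteDepfile() (one output, backslash-continued inputs, escaped spaces):

import action_helpers  # Assumes //build is on sys.path.

action_helpers.write_depfile('lint.d', 'lint.stamp',
                             ['AndroidManifest.xml', 'res dir/values.xml'])
# lint.d should now contain roughly:
#   lint.stamp: \
#    AndroidManifest.xml \
#    res\ dir/values.xml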
diff --git a/build/android/gyp/lint.pydeps b/build/android/gyp/lint.pydeps
index 0994e19a4..84bafde47 100644
--- a/build/android/gyp/lint.pydeps
+++ b/build/android/gyp/lint.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../action_helpers.py
../../gn_helpers.py
lint.py
util/__init__.py
diff --git a/build/android/gyp/merge_manifest.py b/build/android/gyp/merge_manifest.py
index 1389e67a1..a9c2535ae 100755
--- a/build/android/gyp/merge_manifest.py
+++ b/build/android/gyp/merge_manifest.py
@@ -7,6 +7,7 @@
"""Merges dependency Android manifests into a root manifest."""
import argparse
+import collections
import contextlib
import os
import sys
@@ -15,22 +16,20 @@ import xml.etree.ElementTree as ElementTree
from util import build_utils
from util import manifest_utils
+import action_helpers # build_utils adds //build to sys.path.
_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
@contextlib.contextmanager
-def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version,
- max_sdk_version, manifest_package):
- """Patches an Android manifest's package and performs assertions to ensure
- correctness for the manifest.
- """
+def _ProcessMainManifest(manifest_path, min_sdk_version, target_sdk_version,
+ max_sdk_version, manifest_package):
+ """Patches the main Android manifest"""
doc, manifest, _ = manifest_utils.ParseManifest(manifest_path)
- manifest_utils.AssertUsesSdk(manifest, min_sdk_version, target_sdk_version,
- max_sdk_version)
+ manifest_utils.SetUsesSdk(manifest, target_sdk_version, min_sdk_version,
+ max_sdk_version)
assert manifest_utils.GetPackage(manifest) or manifest_package, \
'Must set manifest package in GN or in AndroidManifest.xml'
- manifest_utils.AssertPackage(manifest, manifest_package)
if manifest_package:
manifest.set('package', manifest_package)
tmp_prefix = manifest_path.replace(os.path.sep, '-')
@@ -40,25 +39,36 @@ def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version,
@contextlib.contextmanager
-def _SetTargetApi(manifest_path, target_sdk_version):
- """Patches an Android manifest's TargetApi if not set.
-
- We do this to avoid the manifest merger assuming we have a targetSdkVersion
- of 1 and inserting unnecessary permission requests into our merged manifests.
- See b/222331337 for more details.
- """
+def _ProcessOtherManifest(manifest_path, target_sdk_version,
+ seen_package_names):
+ """Patches non-main AndroidManifest.xml if necessary."""
+ # 1. Ensure targetSdkVersion is set to the expected value to avoid
+ # spurious permissions being added (b/222331337).
+  # 2. Ensure all manifests have a unique package name so that the merger
+  #    does not fail when two of them collide.
doc, manifest, _ = manifest_utils.ParseManifest(manifest_path)
- manifest_utils.SetTargetApiIfUnset(manifest, target_sdk_version)
- tmp_prefix = manifest_path.replace(os.path.sep, '-')
- with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
- manifest_utils.SaveManifest(doc, patched_manifest.name)
- yield patched_manifest.name
+
+ changed_api = manifest_utils.SetTargetApiIfUnset(manifest, target_sdk_version)
+
+ package_name = manifest_utils.GetPackage(manifest)
+ package_count = seen_package_names[package_name]
+ seen_package_names[package_name] += 1
+ if package_count > 0:
+ manifest.set('package', f'{package_name}_{package_count}')
+
+ if package_count > 0 or changed_api:
+ tmp_prefix = manifest_path.replace(os.path.sep, '-')
+ with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
+ manifest_utils.SaveManifest(doc, patched_manifest.name)
+ yield patched_manifest.name
+ else:
+ yield manifest_path
def main(argv):
argv = build_utils.ExpandFileArgs(argv)
parser = argparse.ArgumentParser(description=__doc__)
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--manifest-merger-jar',
help='Path to SDK\'s manifest merger jar.',
required=True)
@@ -86,8 +96,8 @@ def main(argv):
help='Treat all warnings as errors.')
args = parser.parse_args(argv)
- with build_utils.AtomicOutput(args.output) as output:
- cmd = build_utils.JavaCmd(args.warnings_as_errors) + [
+ with action_helpers.atomic_output(args.output) as output:
+ cmd = build_utils.JavaCmd() + [
'-cp',
args.manifest_merger_jar,
_MANIFEST_MERGER_MAIN_CLASS,
@@ -105,17 +115,19 @@ def main(argv):
'MAX_SDK_VERSION=' + args.max_sdk_version,
]
- extras = build_utils.ParseGnList(args.extras)
+ extras = action_helpers.parse_gn_list(args.extras)
with contextlib.ExitStack() as stack:
root_manifest, package = stack.enter_context(
- _ProcessManifest(args.root_manifest, args.min_sdk_version,
- args.target_sdk_version, args.max_sdk_version,
- args.manifest_package))
+ _ProcessMainManifest(args.root_manifest, args.min_sdk_version,
+ args.target_sdk_version, args.max_sdk_version,
+ args.manifest_package))
if extras:
+ seen_package_names = collections.Counter()
extras_processed = [
- stack.enter_context(_SetTargetApi(e, args.target_sdk_version))
- for e in extras
+ stack.enter_context(
+ _ProcessOtherManifest(e, args.target_sdk_version,
+ seen_package_names)) for e in extras
]
cmd += ['--libs', ':'.join(extras_processed)]
cmd += [
@@ -133,14 +145,8 @@ def main(argv):
IsTimeStale(output.name, [root_manifest] + extras),
fail_on_output=args.warnings_as_errors)
- # Check for correct output.
- _, manifest, _ = manifest_utils.ParseManifest(output.name)
- manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version,
- args.target_sdk_version)
- manifest_utils.AssertPackage(manifest, package)
-
if args.depfile:
- build_utils.WriteDepfile(args.depfile, args.output, inputs=extras)
+ action_helpers.write_depfile(args.depfile, args.output, inputs=extras)
if __name__ == '__main__':
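The package-name handling added to _ProcessOtherManifest() above suffixes repeated package names so the manifest merger accepts them. A standalone sketch of that counting logic:

import collections

seen_package_names = collections.Counter()
for package_name in ('com.example.lib', 'com.example.lib', 'com.other'):
  package_count = seen_package_names[package_name]
  seen_package_names[package_name] += 1
  if package_count > 0:
    # Same rewrite as _ProcessOtherManifest(): suffix repeated packages.
    package_name = f'{package_name}_{package_count}'
  print(package_name)
# com.example.lib
# com.example.lib_1
# com.other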
diff --git a/build/android/gyp/merge_manifest.pydeps b/build/android/gyp/merge_manifest.pydeps
index ef9bb3404..003690ff3 100644
--- a/build/android/gyp/merge_manifest.pydeps
+++ b/build/android/gyp/merge_manifest.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../action_helpers.py
../../gn_helpers.py
merge_manifest.py
util/__init__.py
diff --git a/build/android/gyp/optimize_resources.py b/build/android/gyp/optimize_resources.py
index 867e80b10..f1be4ccf1 100755
--- a/build/android/gyp/optimize_resources.py
+++ b/build/android/gyp/optimize_resources.py
@@ -10,6 +10,7 @@ import os
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def _ParseArgs(args):
@@ -49,7 +50,7 @@ def _ParseArgs(args):
help='Output for `aapt2 optimize`.')
options = parser.parse_args(args)
- options.resources_config_paths = build_utils.ParseGnList(
+ options.resources_config_paths = action_helpers.parse_gn_list(
options.resources_config_paths)
if options.resources_path_map_out_path and not options.short_resource_paths:
diff --git a/build/android/gyp/optimize_resources.pydeps b/build/android/gyp/optimize_resources.pydeps
index 57a99b8c7..be3e8e713 100644
--- a/build/android/gyp/optimize_resources.pydeps
+++ b/build/android/gyp/optimize_resources.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/optimize_resources.pydeps build/android/gyp/optimize_resources.py
+../../action_helpers.py
../../gn_helpers.py
optimize_resources.py
util/__init__.py
diff --git a/build/android/gyp/prepare_resources.py b/build/android/gyp/prepare_resources.py
index 609064bd0..e86711c3b 100755
--- a/build/android/gyp/prepare_resources.py
+++ b/build/android/gyp/prepare_resources.py
@@ -18,6 +18,8 @@ from util import jar_info_utils
from util import md5_check
from util import resources_parser
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def _ParseArgs(args):
@@ -27,7 +29,7 @@ def _ParseArgs(args):
An options object as from argparse.ArgumentParser.parse_args()
"""
parser = argparse.ArgumentParser(description=__doc__)
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--res-sources-path',
required=True,
@@ -116,7 +118,7 @@ def _ZipResources(resource_dirs, zip_path, ignore_pattern):
# the contents of possibly multiple res/ dirs each within an encapsulating
# directory within the zip.
z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
- build_utils.DoZip(files_to_zip, z)
+ zip_helpers.add_files_to_zip(files_to_zip, z)
def _GenerateRTxt(options, r_txt_path):
diff --git a/build/android/gyp/prepare_resources.pydeps b/build/android/gyp/prepare_resources.pydeps
index de22addd7..5c7c4410a 100644
--- a/build/android/gyp/prepare_resources.pydeps
+++ b/build/android/gyp/prepare_resources.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,8 +22,10 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
../../print_python_deps.py
+../../zip_helpers.py
prepare_resources.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/process_native_prebuilt.py b/build/android/gyp/process_native_prebuilt.py
index 5749eb60e..060adae81 100755
--- a/build/android/gyp/process_native_prebuilt.py
+++ b/build/android/gyp/process_native_prebuilt.py
@@ -10,6 +10,7 @@ import shutil
import sys
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def main(args):
@@ -23,7 +24,7 @@ def main(args):
# eu-strip's output keeps mode from source file which might not be writable
# thus it fails to override its output on the next run. AtomicOutput fixes
# the issue.
- with build_utils.AtomicOutput(options.stripped_output_path) as out:
+ with action_helpers.atomic_output(options.stripped_output_path) as out:
cmd = [
options.strip_path,
options.input_path,
diff --git a/build/android/gyp/process_native_prebuilt.pydeps b/build/android/gyp/process_native_prebuilt.pydeps
index 8e2012ace..baf9eff7a 100644
--- a/build/android/gyp/process_native_prebuilt.pydeps
+++ b/build/android/gyp/process_native_prebuilt.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py
+../../action_helpers.py
../../gn_helpers.py
process_native_prebuilt.py
util/__init__.py
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
index 8c3922099..579501c7d 100755
--- a/build/android/gyp/proguard.py
+++ b/build/android/gyp/proguard.py
@@ -5,9 +5,9 @@
# found in the LICENSE file.
import argparse
-from collections import defaultdict
import logging
import os
+import pathlib
import re
import shutil
import sys
@@ -16,20 +16,21 @@ import zipfile
import dex
from util import build_utils
from util import diff_utils
-
-sys.path.insert(1, os.path.dirname(os.path.dirname(__file__)))
-from pylib.dex import dex_parser
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
_BLOCKLISTED_EXPECTATION_PATHS = [
# A separate expectation file is created for these files.
- 'clank/third_party/google3/pg_confs/'
+ 'clank/third_party/google3/pg_confs/',
]
+_DUMP_DIR_NAME = 'r8inputs_dir'
+
def _ParseOptions():
args = build_utils.ExpandFileArgs(sys.argv[1:])
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--r8-path',
required=True,
help='Path to the R8.jar to use.')
@@ -72,7 +73,9 @@ def _ParseOptions():
'--disable-checks',
action='store_true',
help='Disable -checkdiscard directives and missing symbols check')
- parser.add_argument('--sourcefile', help='Value for source file attribute')
+ parser.add_argument('--source-file', help='Value for source file attribute.')
+ parser.add_argument('--package-name',
+ help='Goes into a comment in the mapping file.')
parser.add_argument(
'--force-enable-assertions',
action='store_true',
@@ -147,10 +150,11 @@ def _ParseOptions():
parser.error('Cannot use both --force-enable-assertions and '
'--assertion-handler')
- options.classpath = build_utils.ParseGnList(options.classpath)
- options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
- options.input_paths = build_utils.ParseGnList(options.input_paths)
- options.extra_mapping_output_paths = build_utils.ParseGnList(
+ options.classpath = action_helpers.parse_gn_list(options.classpath)
+ options.proguard_configs = action_helpers.parse_gn_list(
+ options.proguard_configs)
+ options.input_paths = action_helpers.parse_gn_list(options.input_paths)
+ options.extra_mapping_output_paths = action_helpers.parse_gn_list(
options.extra_mapping_output_paths)
if options.feature_names:
@@ -161,7 +165,7 @@ def _ParseOptions():
parser.error('Invalid feature argument lengths.')
options.feature_jars = [
- build_utils.ParseGnList(x) for x in options.feature_jars
+ action_helpers.parse_gn_list(x) for x in options.feature_jars
]
split_map = {}
@@ -201,52 +205,12 @@ class _SplitContext:
# Add to .jar using Python rather than having R8 output to a .zip directly
# in order to disable compression of the .jar, saving ~500ms.
tmp_jar_output = self.staging_dir + '.jar'
- build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+ zip_helpers.add_files_to_zip(found_files,
+ tmp_jar_output,
+ base_dir=self.staging_dir)
shutil.move(tmp_jar_output, self.final_output_path)
-def _DeDupeInputJars(split_contexts_by_name):
- """Moves jars used by multiple splits into common ancestors.
-
- Updates |input_jars| for each _SplitContext.
- """
-
- def count_ancestors(split_context):
- ret = 0
- if split_context.parent_name:
- ret += 1
- ret += count_ancestors(split_contexts_by_name[split_context.parent_name])
- return ret
-
- base_context = split_contexts_by_name['base']
- # Sort by tree depth so that ensure children are visited before their parents.
- sorted_contexts = list(split_contexts_by_name.values())
- sorted_contexts.remove(base_context)
- sorted_contexts.sort(key=count_ancestors, reverse=True)
-
- # If a jar is present in multiple siblings, promote it to their parent.
- seen_jars_by_parent = defaultdict(set)
- for split_context in sorted_contexts:
- seen_jars = seen_jars_by_parent[split_context.parent_name]
- new_dupes = seen_jars.intersection(split_context.input_jars)
- parent_context = split_contexts_by_name[split_context.parent_name]
- parent_context.input_jars.update(new_dupes)
- seen_jars.update(split_context.input_jars)
-
- def ancestor_jars(parent_name, dest=None):
- dest = dest or set()
- if not parent_name:
- return dest
- parent_context = split_contexts_by_name[parent_name]
- dest.update(parent_context.input_jars)
- return ancestor_jars(parent_context.parent_name, dest)
-
- # Now that jars have been moved up the tree, remove those that appear in
- # ancestors.
- for split_context in sorted_contexts:
- split_context.input_jars -= ancestor_jars(split_context.parent_name)
-
-
def _OptimizeWithR8(options,
config_paths,
libraries,
@@ -289,19 +253,16 @@ def _OptimizeWithR8(options,
base_context = split_contexts_by_name['base']
# R8 OOMs with the default xmx=1G.
- cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
+ cmd = build_utils.JavaCmd(xmx='2G') + [
# Allows -whyareyounotinlining, which we don't have by default, but
# which is useful for one-off queries.
'-Dcom.android.tools.r8.experimental.enablewhyareyounotinlining=1',
# Restricts horizontal class merging to apply only to classes that
# share a .java file (nested classes). https://crbug.com/1363709
'-Dcom.android.tools.r8.enableSameFilePolicy=1',
- # Enables API modelling for all classes that need it. Breaks reflection
- # on SDK versions that we no longer support. http://b/259076765
- '-Dcom.android.tools.r8.stubNonThrowableClasses=1',
]
if options.dump_inputs:
- cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
+ cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}']
if options.dump_unknown_refs:
cmd += ['-Dcom.android.tools.r8.reportUnknownApiReferences=1']
cmd += [
@@ -309,6 +270,10 @@ def _OptimizeWithR8(options,
options.r8_path,
'com.android.tools.r8.R8',
'--no-data-resources',
+ '--map-id-template',
+ f'{options.source_file} ({options.package_name})',
+ '--source-file-template',
+ options.source_file,
'--output',
base_context.staging_dir,
'--pg-map-output',
@@ -316,12 +281,11 @@ def _OptimizeWithR8(options,
]
if options.disable_checks:
- # Info level priority logs are not printed by default.
- cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info']
- else:
- cmd += ['--map-diagnostics', 'info', 'warning']
- if not options.warnings_as_errors:
- cmd += ['--map-diagnostics', 'error', 'warning']
+ cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'none']
+ cmd += ['--map-diagnostics', 'info', 'warning']
+ # An "error" level diagnostic causes r8 to return an error exit code. Doing
+ # this allows our filter to decide what should/shouldn't break our build.
+ cmd += ['--map-diagnostics', 'error', 'warning']
if options.min_api:
cmd += ['--min-api', options.min_api]
@@ -341,8 +305,6 @@ def _OptimizeWithR8(options,
for main_dex_rule in options.main_dex_rules_path:
cmd += ['--main-dex-rules', main_dex_rule]
- _DeDupeInputJars(split_contexts_by_name)
-
# Add any extra inputs to the base context (e.g. desugar runtime).
extra_jars = set(options.input_paths)
for split_context in split_contexts_by_name.values():
@@ -366,11 +328,10 @@ def _OptimizeWithR8(options,
stderr_filter=stderr_filter,
fail_on_output=options.warnings_as_errors)
except build_utils.CalledProcessError as e:
- # Python will print the original exception as well.
- raise Exception(
- 'R8 failed. Please see '
- 'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
- 'android/docs/java_optimization.md#Debugging-common-failures') from e
+ # Do not output command line because it is massive and makes the actual
+ # error message hard to find.
+ sys.stderr.write(e.output)
+ sys.exit(1)
logging.debug('Collecting ouputs')
base_context.CreateOutput()
@@ -384,7 +345,8 @@ def _OptimizeWithR8(options,
def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
keep_rules_output):
- cmd = build_utils.JavaCmd(False) + [
+
+ cmd = build_utils.JavaCmd() + [
'-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
'--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
'--keep-rules', '--output', keep_rules_output
@@ -402,8 +364,13 @@ def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
- error_title):
- cmd = build_utils.JavaCmd(warnings_as_errors) + [
+ dump_inputs, error_title):
+ cmd = build_utils.JavaCmd()
+
+ if dump_inputs:
+ cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}']
+
+ cmd += [
'-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
'--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
'--check'
@@ -429,6 +396,8 @@ def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
# Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
'android.text.StaticLayout.<init>',
+ # TODO(crbug/1426964): Remove once chrome builds with Android U SDK.
+ 'android.adservices.measurement',
# Explicictly guarded by try (NoClassDefFoundError) in Flogger's
# PlatformProvider.
@@ -448,6 +417,10 @@ def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
# Explicitly guarded by try (NoClassDefFoundError) in Firebase's
# KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
'kotlin.KotlinVersion',
+
+ # TODO(agrieve): Remove once we move to Android U SDK.
+ 'android.window.BackEvent',
+ 'android.window.OnBackAnimationCallback',
]
had_unfiltered_items = ' ' in stderr
@@ -479,10 +452,16 @@ https://chromium.googlesource.com/chromium/src.git/+/main/docs/ui/android/byteco
stderr = ''
return stderr
- build_utils.CheckOutput(cmd,
- print_stdout=True,
- stderr_filter=stderr_filter,
- fail_on_output=warnings_as_errors)
+ try:
+ build_utils.CheckOutput(cmd,
+ print_stdout=True,
+ stderr_filter=stderr_filter,
+ fail_on_output=warnings_as_errors)
+ except build_utils.CalledProcessError as e:
+ # Do not output command line because it is massive and makes the actual
+ # error message hard to find.
+ sys.stderr.write(e.output)
+ sys.exit(1)
return failed_holder[0]
@@ -536,15 +515,7 @@ def _CombineConfigs(configs,
def _CreateDynamicConfig(options):
- # Our scripts already fail on output. Adding -ignorewarnings makes R8 output
- # warnings rather than throw exceptions so we can selectively ignore them via
- # dex.py's ignore list. Context: https://crbug.com/1180222
- ret = ["-ignorewarnings"]
-
- if options.sourcefile:
- ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
- options.sourcefile)
-
+ ret = []
if options.enable_obfuscation:
ret.append("-repackageclasses ''")
else:
@@ -590,7 +561,7 @@ def _MaybeWriteStampAndDepFile(options, inputs):
build_utils.Touch(options.stamp)
output = options.stamp
if options.depfile:
- build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
+ action_helpers.write_depfile(options.depfile, output, inputs=inputs)
def _IterParentContexts(context_name, split_contexts_by_name):
@@ -618,7 +589,8 @@ def _DoTraceReferencesChecks(options, split_contexts_by_name):
dex_files = sorted(c.final_output_path
for c in split_contexts_by_name.values())
if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
- options.warnings_as_errors, error_title):
+ options.warnings_as_errors, options.dump_inputs,
+ error_title):
# Failed but didn't raise due to warnings_as_errors=False
return
@@ -633,29 +605,20 @@ def _DoTraceReferencesChecks(options, split_contexts_by_name):
# We could run them concurrently, to shave off 5-6 seconds, but would need
# to make sure that the order is maintained.
if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
- options.warnings_as_errors, error_title):
+ options.warnings_as_errors, options.dump_inputs,
+ error_title):
# Failed but didn't raise due to warnings_as_errors=False
return
-def main():
- build_utils.InitLogging('PROGUARD_DEBUG')
- options = _ParseOptions()
-
+def _Run(options):
# ProGuard configs that are derived from flags.
logging.debug('Preparing configs')
dynamic_config_data = _CreateDynamicConfig(options)
logging.debug('Looking for embedded configs')
- libraries = []
- for p in options.classpath:
- # TODO(bjoyce): Remove filter once old android support libraries are gone.
- # Fix for having Library class extend program class dependency problem.
- if 'com_android_support' in p or 'android_support_test' in p:
- continue
- # If a jar is part of input no need to include it as library jar.
- if p not in libraries and p not in options.input_paths:
- libraries.append(p)
+ # If a jar is part of input no need to include it as library jar.
+ libraries = [p for p in options.classpath if p not in options.input_paths]
embedded_configs = {}
for jar_path in options.input_paths + libraries:
@@ -672,9 +635,9 @@ def main():
if options.expected_file:
diff_utils.CheckExpectations(merged_configs, options)
if options.only_verify_expectations:
- build_utils.WriteDepfile(options.depfile,
- options.actual_file,
- inputs=depfile_inputs)
+ action_helpers.write_depfile(options.depfile,
+ options.actual_file,
+ inputs=depfile_inputs)
return
if options.keep_rules_output_path:
@@ -683,15 +646,6 @@ def main():
options.keep_rules_output_path)
return
- # TODO(agrieve): Stop appending to dynamic_config_data once R8 natively
- # supports finding configs the "tools" directory.
- # https://issuetracker.google.com/227983179
- tools_configs = {
- k: v
- for k, v in embedded_configs.items() if 'com.android.tools' in k
- }
- dynamic_config_data += '\n' + _CombineConfigs([], None, tools_configs)
-
split_contexts_by_name = _OptimizeWithR8(options, options.proguard_configs,
libraries, dynamic_config_data,
print_stdout)
@@ -709,5 +663,29 @@ def main():
_MaybeWriteStampAndDepFile(options, depfile_inputs)
+def main():
+ build_utils.InitLogging('PROGUARD_DEBUG')
+ options = _ParseOptions()
+
+ if options.dump_inputs:
+    # Dumping inputs causes output to be emitted; avoid failing due to stdout.
+ options.warnings_as_errors = False
+ # Use dumpinputtodirectory instead of dumpinputtofile to avoid failing the
+ # build and keep running tracereferences.
+ dump_dir_name = _DUMP_DIR_NAME
+ dump_dir_path = pathlib.Path(dump_dir_name)
+ if dump_dir_path.exists():
+ shutil.rmtree(dump_dir_path)
+ # The directory needs to exist before r8 adds the zip files in it.
+ dump_dir_path.mkdir()
+
+  # This ensures that the final outputs are zipped and easily uploaded to a bug.
+ try:
+ _Run(options)
+ finally:
+ if options.dump_inputs:
+ zip_helpers.zip_directory('r8inputs.zip', _DUMP_DIR_NAME)
+
+
if __name__ == '__main__':
main()
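proguard.py's new --dump-inputs handling prepares an empty dump directory, lets R8 and TraceReferences write dumps into it, and zips it afterwards for bug reports. A minimal sketch of that flow, with _run() as a hypothetical stand-in for _Run(options):

import pathlib
import shutil

import zip_helpers  # Assumes //build is on sys.path.

_DUMP_DIR_NAME = 'r8inputs_dir'


def _run():
  pass  # Hypothetical stand-in for _Run(options) in proguard.py.


dump_dir = pathlib.Path(_DUMP_DIR_NAME)
if dump_dir.exists():
  shutil.rmtree(dump_dir)
# The directory must exist before R8 writes its dump zips into it.
dump_dir.mkdir()
try:
  _run()
finally:
  # Bundle whatever was dumped so it is easy to attach to a bug.
  zip_helpers.zip_directory('r8inputs.zip', _DUMP_DIR_NAME)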
diff --git a/build/android/gyp/proguard.pydeps b/build/android/gyp/proguard.pydeps
index de1010623..7ee251b8e 100644
--- a/build/android/gyp/proguard.pydeps
+++ b/build/android/gyp/proguard.pydeps
@@ -1,14 +1,12 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../action_helpers.py
../../gn_helpers.py
../../print_python_deps.py
-../pylib/__init__.py
-../pylib/dex/__init__.py
-../pylib/dex/dex_parser.py
+../../zip_helpers.py
dex.py
proguard.py
util/__init__.py
util/build_utils.py
util/diff_utils.py
util/md5_check.py
-util/zipalign.py
diff --git a/build/android/gyp/trace_event_bytecode_rewriter.py b/build/android/gyp/trace_event_bytecode_rewriter.py
index ec39e5f83..3e0e696f5 100755
--- a/build/android/gyp/trace_event_bytecode_rewriter.py
+++ b/build/android/gyp/trace_event_bytecode_rewriter.py
@@ -9,12 +9,13 @@ import sys
import os
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def main(argv):
argv = build_utils.ExpandFileArgs(argv[1:])
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--script',
required=True,
help='Path to the java binary wrapper script.')
@@ -24,9 +25,9 @@ def main(argv):
parser.add_argument('--output-jars', action='append', nargs='+')
args = parser.parse_args(argv)
- args.classpath = build_utils.ParseGnList(args.classpath)
- args.input_jars = build_utils.ParseGnList(args.input_jars)
- args.output_jars = build_utils.ParseGnList(args.output_jars)
+ args.classpath = action_helpers.parse_gn_list(args.classpath)
+ args.input_jars = action_helpers.parse_gn_list(args.input_jars)
+ args.output_jars = action_helpers.parse_gn_list(args.output_jars)
for output_jar in args.output_jars:
jar_dir = os.path.dirname(output_jar)
@@ -42,7 +43,7 @@ def main(argv):
build_utils.Touch(args.stamp)
- build_utils.WriteDepfile(args.depfile, args.stamp, inputs=all_input_jars)
+ action_helpers.write_depfile(args.depfile, args.stamp, inputs=all_input_jars)
if __name__ == '__main__':
diff --git a/build/android/gyp/trace_event_bytecode_rewriter.pydeps b/build/android/gyp/trace_event_bytecode_rewriter.pydeps
index 9f3337d7d..e03fc0c23 100644
--- a/build/android/gyp/trace_event_bytecode_rewriter.pydeps
+++ b/build/android/gyp/trace_event_bytecode_rewriter.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/trace_event_bytecode_rewriter.pydeps build/android/gyp/trace_event_bytecode_rewriter.py
+../../action_helpers.py
../../gn_helpers.py
trace_event_bytecode_rewriter.py
util/__init__.py
diff --git a/build/android/gyp/turbine.py b/build/android/gyp/turbine.py
index 61935a6de..00aa3106b 100755
--- a/build/android/gyp/turbine.py
+++ b/build/android/gyp/turbine.py
@@ -9,9 +9,13 @@ import functools
import logging
import sys
import time
+import zipfile
+import compile_java
import javac_output_processor
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def ProcessJavacOutput(output, target_name):
@@ -24,7 +28,7 @@ def main(argv):
build_utils.InitLogging('TURBINE_DEBUG')
argv = build_utils.ExpandFileArgs(argv[1:])
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--target-name', help='Fully qualified GN target name.')
parser.add_argument(
'--turbine-jar-path', required=True, help='Path to the turbine jar file.')
@@ -55,12 +59,14 @@ def main(argv):
parser.add_argument('--warnings-as-errors',
action='store_true',
help='Treat all warnings as errors.')
+ parser.add_argument('--kotlin-jar-path',
+ help='Kotlin jar to be merged into the output jar.')
options, unknown_args = parser.parse_known_args(argv)
- options.classpath = build_utils.ParseGnList(options.classpath)
- options.processorpath = build_utils.ParseGnList(options.processorpath)
- options.processors = build_utils.ParseGnList(options.processors)
- options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+ options.classpath = action_helpers.parse_gn_list(options.classpath)
+ options.processorpath = action_helpers.parse_gn_list(options.processorpath)
+ options.processors = action_helpers.parse_gn_list(options.processors)
+ options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars)
files = []
for arg in unknown_args:
@@ -68,7 +74,12 @@ def main(argv):
if arg.startswith('@'):
files.extend(build_utils.ReadSourcesList(arg[1:]))
- cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+ # The target's .sources file contains both Java and Kotlin files. We use
+ # compile_kt.py to compile the Kotlin files to .class and header jars.
+ # Turbine is run only on .java files.
+ java_files = [f for f in files if f.endswith('.java')]
+
+ cmd = build_utils.JavaCmd() + [
'-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
]
javac_cmd = [
@@ -100,13 +111,13 @@ def main(argv):
cmd += ['--source_jars']
cmd += options.java_srcjars
- if files:
+ if java_files:
# Use jar_path to ensure paths are relative (needed for goma).
- files_rsp_path = options.jar_path + '.files_list.txt'
+ files_rsp_path = options.jar_path + '.java_files_list.txt'
with open(files_rsp_path, 'w') as f:
- f.write(' '.join(files))
- # Pass source paths as response files to avoid extremely long command lines
- # that are tedius to debug.
+ f.write(' '.join(java_files))
+ # Pass source paths as response files to avoid extremely long command
+      # lines that are tedious to debug.
cmd += ['--sources']
cmd += ['@' + files_rsp_path]
@@ -116,10 +127,9 @@ def main(argv):
# Use AtomicOutput so that output timestamps are not updated when outputs
# are not changed.
- with build_utils.AtomicOutput(options.jar_path) as output_jar, \
- build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
- cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
-
+ with action_helpers.atomic_output(options.jar_path) as output_jar, \
+ action_helpers.atomic_output(options.generated_jar_path) as gensrc_jar:
+ cmd += ['--output', output_jar.name, '--gensrc_output', gensrc_jar.name]
process_javac_output_partial = functools.partial(
ProcessJavacOutput, target_name=options.target_name)
@@ -132,13 +142,19 @@ def main(argv):
fail_on_output=options.warnings_as_errors)
end = time.time() - start
logging.info('Header compilation took %ss', end)
+ if options.kotlin_jar_path:
+ with zipfile.ZipFile(output_jar.name, 'a') as out_zip:
+ path_transform = lambda p: p if p.endswith('.class') else None
+ zip_helpers.merge_zips(out_zip, [options.kotlin_jar_path],
+ path_transform=path_transform)
if options.depfile:
# GN already knows of the java files, so avoid listing individual java files
# in the depfile.
depfile_deps = (options.classpath + options.processorpath +
options.java_srcjars)
- build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
+ action_helpers.write_depfile(options.depfile, options.jar_path,
+ depfile_deps)
if __name__ == '__main__':
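When --kotlin-jar-path is given, turbine.py now appends the kotlinc header jar's class files to its output jar. A sketch of that merge, assuming zip_helpers.merge_zips() skips entries for which path_transform returns None, as build_utils.MergeZips() did:

import zipfile

import zip_helpers  # Assumes //build is on sys.path.


def merge_kotlin_header_jar(turbine_jar_path, kotlin_jar_path):
  # Keep only .class entries; returning None for other paths skips them.
  path_transform = lambda p: p if p.endswith('.class') else None
  with zipfile.ZipFile(turbine_jar_path, 'a') as out_zip:
    zip_helpers.merge_zips(out_zip, [kotlin_jar_path],
                           path_transform=path_transform)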
diff --git a/build/android/gyp/turbine.pydeps b/build/android/gyp/turbine.pydeps
index 8f20e8b78..3d20f2ef4 100644
--- a/build/android/gyp/turbine.pydeps
+++ b/build/android/gyp/turbine.pydeps
@@ -16,11 +16,18 @@
../../../third_party/colorama/src/colorama/win32.py
../../../third_party/colorama/src/colorama/winterm.py
../../../tools/android/modularization/convenience/lookup_dep.py
+../../action_helpers.py
../../gn_helpers.py
+../../print_python_deps.py
+../../zip_helpers.py
../list_java_targets.py
../pylib/__init__.py
../pylib/constants/__init__.py
+compile_java.py
javac_output_processor.py
turbine.py
util/__init__.py
util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/server_utils.py
diff --git a/build/android/gyp/unused_resources.py b/build/android/gyp/unused_resources.py
index 6e49560e0..d7578ce70 100755
--- a/build/android/gyp/unused_resources.py
+++ b/build/android/gyp/unused_resources.py
@@ -8,10 +8,9 @@ import argparse
import os
import sys
-sys.path.insert(
- 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
def _FilterUnusedResources(r_text_in, r_text_out, unused_resources_config):
@@ -37,7 +36,7 @@ def _FilterUnusedResources(r_text_in, r_text_out, unused_resources_config):
def main(args):
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--script',
required=True,
help='Path to the unused resources detector script.')
@@ -66,7 +65,7 @@ def main(args):
help='Path to output the aapt2 config to.')
args = build_utils.ExpandFileArgs(args)
options = parser.parse_args(args)
- options.dependencies_res_zips = (build_utils.ParseGnList(
+ options.dependencies_res_zips = (action_helpers.parse_gn_list(
options.dependencies_res_zips))
# in case of no resources, short circuit early.
@@ -108,8 +107,8 @@ def main(args):
options.dexes) + [options.r_text_in]
if options.proguard_mapping:
depfile_deps.append(options.proguard_mapping)
- build_utils.WriteDepfile(options.depfile, options.output_config,
- depfile_deps)
+ action_helpers.write_depfile(options.depfile, options.output_config,
+ depfile_deps)
if __name__ == '__main__':
diff --git a/build/android/gyp/unused_resources.pydeps b/build/android/gyp/unused_resources.pydeps
index c244b4439..b4da89a95 100644
--- a/build/android/gyp/unused_resources.pydeps
+++ b/build/android/gyp/unused_resources.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
unused_resources.py
util/__init__.py
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
index af19f14fb..f88518210 100644
--- a/build/android/gyp/util/build_utils.py
+++ b/build/android/gyp/util/build_utils.py
@@ -20,6 +20,7 @@ import stat
import subprocess
import sys
import tempfile
+import textwrap
import time
import zipfile
@@ -37,24 +38,19 @@ DIR_SOURCE_ROOT = os.path.relpath(
JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
-
-try:
- string_types = basestring
-except NameError:
- string_types = (str, bytes)
-
-
-def JavaCmd(verify=True, xmx='1G'):
+KOTLIN_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'kotlinc', 'current')
+KOTLINC_PATH = os.path.join(KOTLIN_HOME, 'bin', 'kotlinc')
+# Please avoid using this. Our JAVA_HOME is using a newer and actively patched
+# JDK.
+JAVA_11_HOME_DEPRECATED = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk11',
+ 'current')
+
+def JavaCmd(xmx='1G'):
ret = [os.path.join(JAVA_HOME, 'bin', 'java')]
# Limit heap to avoid Java not GC'ing when it should, and causing
# bots to OOM when many java commands are runnig at the same time
# https://crbug.com/1098333
ret += ['-Xmx' + xmx]
-
- # Disable bytecode verification for local builds gives a ~2% speed-up.
- if not verify:
- ret += ['-noverify']
-
return ret
@@ -96,35 +92,6 @@ def FindInDirectory(directory, filename_filter='*'):
return files
-def ParseGnList(value):
- """Converts a "GN-list" command-line parameter into a list.
-
- Conversions handled:
- * None -> []
- * '' -> []
- * 'asdf' -> ['asdf']
- * '["a", "b"]' -> ['a', 'b']
- * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list)
-
- The common use for this behavior is in the Android build where things can
- take lists of @FileArg references that are expanded via ExpandFileArgs.
- """
- # Convert None to [].
- if not value:
- return []
- # Convert a list of GN lists to a flattened list.
- if isinstance(value, list):
- ret = []
- for arg in value:
- ret.extend(ParseGnList(arg))
- return ret
- # Convert normal GN list.
- if value.startswith('['):
- return gn_helpers.GNValueParser(value).ParseList()
- # Convert a single string value to a list.
- return [value]
-
-
def CheckOptions(options, parser, required=None):
if not required:
return
@@ -147,24 +114,7 @@ def WriteJson(obj, path, only_if_changed=False):
@contextlib.contextmanager
-def AtomicOutput(path, only_if_changed=True, mode='w+b'):
- """Helper to prevent half-written outputs.
-
- Args:
- path: Path to the final output file, which will be written atomically.
- only_if_changed: If True (the default), do not touch the filesystem
- if the content has not changed.
- mode: The mode to open the file in (str).
- Returns:
- A python context manager that yelds a NamedTemporaryFile instance
- that must be used by clients to write the data to. On exit, the
- manager will try to replace the final output file with the
- temporary one if necessary. The temporary file is always destroyed
- on exit.
- Example:
- with build_utils.AtomicOutput(output_path) as tmp_file:
- subprocess.check_call(['prog', '--output', tmp_file.name])
- """
+def _AtomicOutput(path, only_if_changed=True, mode='w+b'):
# Create in same directory to ensure same filesystem when moving.
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
@@ -196,9 +146,14 @@ class CalledProcessError(Exception):
def __str__(self):
# A user should be able to simply copy and paste the command that failed
- # into their shell.
+ # into their shell (unless it is more than 200 chars).
+ # User can set PRINT_FULL_COMMAND=1 to always print the full command.
+ print_full = os.environ.get('PRINT_FULL_COMMAND', '0') != '0'
+ full_cmd = shlex.join(self.args)
+ short_cmd = textwrap.shorten(full_cmd, width=200)
+ printed_cmd = full_cmd if print_full else short_cmd
copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
- ' '.join(map(pipes.quote, self.args)))
+ printed_cmd)
return 'Command failed: {}\n{}'.format(copyable_command, self.output)
@@ -281,29 +236,23 @@ def CheckOutput(args,
has_stderr = print_stderr and stderr
if has_stdout or has_stderr:
if has_stdout and has_stderr:
- stream_string = 'stdout and stderr'
+ stream_name = 'stdout and stderr'
elif has_stdout:
- stream_string = 'stdout'
+ stream_name = 'stdout'
else:
- stream_string = 'stderr'
+ stream_name = 'stderr'
if fail_on_output:
MSG = """
Command failed because it wrote to {}.
You can often set treat_warnings_as_errors=false to not treat output as \
-failure (useful when developing locally)."""
- raise CalledProcessError(cwd, args, MSG.format(stream_string))
-
- MSG = """
-The above {} output was from:
-{}
+failure (useful when developing locally).
"""
- if sys.version_info.major == 2:
- joined_args = ' '.join(args)
- else:
- joined_args = shlex.join(args)
+ raise CalledProcessError(cwd, args, MSG.format(stream_name))
- sys.stderr.write(MSG.format(stream_string, joined_args))
+ short_cmd = textwrap.shorten(shlex.join(args), width=200)
+ sys.stderr.write(
+ f'\nThe above {stream_name} output was from: {short_cmd}\n')
return stdout
@@ -380,181 +329,6 @@ def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
return extracted
-def HermeticDateTime(timestamp=None):
- """Returns a constant ZipInfo.date_time tuple.
-
- Args:
- timestamp: Unix timestamp to use for files in the archive.
-
- Returns:
- A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp.
- """
- if not timestamp:
- return (2001, 1, 1, 0, 0, 0)
- utc_time = time.gmtime(timestamp)
- return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour,
- utc_time.tm_min, utc_time.tm_sec)
-
-
-def HermeticZipInfo(*args, **kwargs):
- """Creates a zipfile.ZipInfo with a constant timestamp and external_attr.
-
- If a date_time value is not provided in the positional or keyword arguments,
- the default value from HermeticDateTime is used.
-
- Args:
- See zipfile.ZipInfo.
-
- Returns:
- A zipfile.ZipInfo.
- """
- # The caller may have provided a date_time either as a positional parameter
- # (args[1]) or as a keyword parameter. Use the default hermetic date_time if
- # none was provided. Note that even if date_time is set, it can be None.
- date_time = kwargs.get('date_time')
- if len(args) >= 2:
- date_time = args[1]
- if not date_time:
- kwargs['date_time'] = HermeticDateTime()
- ret = zipfile.ZipInfo(*args, **kwargs)
- ret.external_attr = (0o644 << 16)
- return ret
-
-
-def AddToZipHermetic(zip_file,
- zip_path,
- src_path=None,
- data=None,
- compress=None,
- date_time=None):
- """Adds a file to the given ZipFile with a hard-coded modified time.
-
- Args:
- zip_file: ZipFile instance to add the file to.
- zip_path: Destination path within the zip file (or ZipInfo instance).
- src_path: Path of the source file. Mutually exclusive with |data|.
- data: File data as a string.
- compress: Whether to enable compression. Default is taken from ZipFile
- constructor.
- date_time: The last modification date and time for the archive member.
- """
- assert (src_path is None) != (data is None), (
- '|src_path| and |data| are mutually exclusive.')
- if isinstance(zip_path, zipfile.ZipInfo):
- zipinfo = zip_path
- zip_path = zipinfo.filename
- else:
- zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time)
-
- _CheckZipPath(zip_path)
-
- if src_path and os.path.islink(src_path):
- zipinfo.filename = zip_path
- zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink
- zip_file.writestr(zipinfo, os.readlink(src_path))
- return
-
- # zipfile.write() does
- # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
- # but we want to use _HERMETIC_FILE_ATTR, so manually set
- # the few attr bits we care about.
- if src_path:
- st = os.stat(src_path)
- for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
- if st.st_mode & mode:
- zipinfo.external_attr |= mode << 16
-
- if src_path:
- with open(src_path, 'rb') as f:
- data = f.read()
-
- # zipfile will deflate even when it makes the file bigger. To avoid
- # growing files, disable compression at an arbitrary cut off point.
- if len(data) < 16:
- compress = False
-
- # None converts to ZIP_STORED, when passed explicitly rather than the
- # default passed to the ZipFile constructor.
- compress_type = zip_file.compression
- if compress is not None:
- compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
- zip_file.writestr(zipinfo, data, compress_type)
-
-
-def DoZip(inputs,
- output,
- base_dir=None,
- compress_fn=None,
- zip_prefix_path=None,
- timestamp=None):
- """Creates a zip file from a list of files.
-
- Args:
- inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
- output: Path, fileobj, or ZipFile instance to add files to.
- base_dir: Prefix to strip from inputs.
- compress_fn: Applied to each input to determine whether or not to compress.
- By default, items will be |zipfile.ZIP_STORED|.
- zip_prefix_path: Path prepended to file path in zip file.
- timestamp: Unix timestamp to use for files in the archive.
- """
- if base_dir is None:
- base_dir = '.'
- input_tuples = []
- for tup in inputs:
- if isinstance(tup, string_types):
- tup = (os.path.relpath(tup, base_dir), tup)
- if tup[0].startswith('..'):
- raise Exception('Invalid zip_path: ' + tup[0])
- input_tuples.append(tup)
-
- # Sort by zip path to ensure stable zip ordering.
- input_tuples.sort(key=lambda tup: tup[0])
-
- out_zip = output
- if not isinstance(output, zipfile.ZipFile):
- out_zip = zipfile.ZipFile(output, 'w')
-
- date_time = HermeticDateTime(timestamp)
- try:
- for zip_path, fs_path in input_tuples:
- if zip_prefix_path:
- zip_path = os.path.join(zip_prefix_path, zip_path)
- compress = compress_fn(zip_path) if compress_fn else None
- AddToZipHermetic(out_zip,
- zip_path,
- src_path=fs_path,
- compress=compress,
- date_time=date_time)
- finally:
- if output is not out_zip:
- out_zip.close()
-
-
-def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
- """Creates a zip file from a directory."""
- inputs = []
- for root, _, files in os.walk(base_dir):
- for f in files:
- inputs.append(os.path.join(root, f))
-
- if isinstance(output, zipfile.ZipFile):
- DoZip(
- inputs,
- output,
- base_dir,
- compress_fn=compress_fn,
- zip_prefix_path=zip_prefix_path)
- else:
- with AtomicOutput(output) as f:
- DoZip(
- inputs,
- f,
- base_dir,
- compress_fn=compress_fn,
- zip_prefix_path=zip_prefix_path)
-
-
def MatchesGlob(path, filters):
"""Returns whether the given path matches any of the given glob patterns."""
return filters and any(fnmatch.fnmatch(path, f) for f in filters)
@@ -571,12 +345,14 @@ def MergeZips(output, input_zips, path_transform=None, compress=None):
compress: Overrides compression setting from origin zip entries.
"""
path_transform = path_transform or (lambda p: p)
- added_names = set()
out_zip = output
if not isinstance(output, zipfile.ZipFile):
out_zip = zipfile.ZipFile(output, 'w')
+ # Include paths in the existing zip here to avoid adding duplicate files.
+ added_names = set(out_zip.namelist())
+
try:
for in_file in input_zips:
with zipfile.ZipFile(in_file, 'r') as in_zip:
@@ -648,29 +424,6 @@ def InitLogging(enabling_env):
atexit.register(log_exit)
-def AddDepfileOption(parser):
- # TODO(agrieve): Get rid of this once we've moved to argparse.
- if hasattr(parser, 'add_option'):
- func = parser.add_option
- else:
- func = parser.add_argument
- func('--depfile',
- help='Path to depfile (refer to `gn help depfile`)')
-
-
-def WriteDepfile(depfile_path, first_gn_output, inputs=None):
- assert depfile_path != first_gn_output # http://crbug.com/646165
- assert not isinstance(inputs, string_types) # Easy mistake to make
- inputs = inputs or []
- MakeDirectory(os.path.dirname(depfile_path))
- # Ninja does not support multiple outputs in depfiles.
- with open(depfile_path, 'w') as depfile:
- depfile.write(first_gn_output.replace(' ', '\\ '))
- depfile.write(': \\\n ')
- depfile.write(' \\\n '.join(i.replace(' ', '\\ ') for i in inputs))
- depfile.write('\n')
-
-
def ExpandFileArgs(args):
"""Replaces file-arg placeholders in args.
@@ -715,7 +468,7 @@ def ExpandFileArgs(args):
raise Exception('Expected single item list but got %s' % expansion)
expansion = expansion[0]
- # This should match ParseGnList. The output is either a GN-formatted list
+ # This should match parse_gn_list. The output is either a GN-formatted list
# or a literal (with no quotes).
if isinstance(expansion, list):
new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
diff --git a/build/android/gyp/util/diff_utils.py b/build/android/gyp/util/diff_utils.py
index 0bc811c03..445bbe3d2 100644
--- a/build/android/gyp/util/diff_utils.py
+++ b/build/android/gyp/util/diff_utils.py
@@ -2,11 +2,12 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import difflib
import os
import sys
-import difflib
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def _SkipOmitted(line):
@@ -96,7 +97,7 @@ def AddCommandLineFlags(parser):
def CheckExpectations(actual_data, options, custom_msg=''):
if options.actual_file:
- with build_utils.AtomicOutput(options.actual_file) as f:
+ with action_helpers.atomic_output(options.actual_file) as f:
f.write(actual_data.encode('utf8'))
if options.expected_file_base:
actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base,
diff --git a/build/android/gyp/util/manifest_utils.py b/build/android/gyp/util/manifest_utils.py
index ddeab957b..3202058b6 100644
--- a/build/android/gyp/util/manifest_utils.py
+++ b/build/android/gyp/util/manifest_utils.py
@@ -10,9 +10,10 @@ import re
import shlex
import sys
import xml.dom.minidom as minidom
+from xml.etree import ElementTree
from util import build_utils
-from xml.etree import ElementTree
+import action_helpers # build_utils adds //build to sys.path.
ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
@@ -45,6 +46,14 @@ def _RegisterElementTreeNamespaces():
ElementTree.register_namespace('dist', DIST_NAMESPACE)
+def NamespacedGet(node, key):
+ return node.get('{%s}%s' % (ANDROID_NAMESPACE, key))
+
+
+def NamespacedSet(node, key, value):
+ node.set('{%s}%s' % (ANDROID_NAMESPACE, key), value)
+
+
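These helpers simply wrap ElementTree's Clark-notation attribute names; a small illustration (not part of the patch, element contents made up):

from xml.etree import ElementTree

ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
node = ElementTree.fromstring(
    '<uses-sdk xmlns:android="%s" android:minSdkVersion="24"/>' %
    ANDROID_NAMESPACE)
# ElementTree keys namespaced attributes as '{uri}name'.
assert node.get('{%s}minSdkVersion' % ANDROID_NAMESPACE) == '24'
node.set('{%s}targetSdkVersion' % ANDROID_NAMESPACE, '33')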
def ParseManifest(path):
"""Parses an AndroidManifest.xml using ElementTree.
@@ -73,7 +82,7 @@ def ParseManifest(path):
def SaveManifest(doc, path):
- with build_utils.AtomicOutput(path) as f:
+ with action_helpers.atomic_output(path) as f:
f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))
@@ -81,62 +90,27 @@ def GetPackage(manifest_node):
return manifest_node.get('package')
-def AssertUsesSdk(manifest_node,
- min_sdk_version=None,
- target_sdk_version=None,
- max_sdk_version=None,
- fail_if_not_exist=False):
- """Asserts values of attributes of <uses-sdk> element.
-
- Unless |fail_if_not_exist| is true, will only assert if both the passed value
- is not None and the value of attribute exist. If |fail_if_not_exist| is true
- will fail if passed value is not None but attribute does not exist.
- """
- uses_sdk_node = _FindUsesSdkNode(manifest_node)
+def SetUsesSdk(manifest_node,
+ target_sdk_version,
+ min_sdk_version,
+ max_sdk_version=None):
+ uses_sdk_node = manifest_node.find('./uses-sdk')
if uses_sdk_node is None:
- return
- for prefix, sdk_version in (('min', min_sdk_version), ('target',
- target_sdk_version),
- ('max', max_sdk_version)):
- value = uses_sdk_node.get('{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix))
- if fail_if_not_exist and not value and sdk_version:
- assert False, (
- '%sSdkVersion in Android manifest does not exist but we expect %s' %
- (prefix, sdk_version))
- if not value or not sdk_version:
- continue
- assert value == sdk_version, (
- '%sSdkVersion in Android manifest is %s but we expect %s' %
- (prefix, value, sdk_version))
+ uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk')
+ NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version)
+ NamespacedSet(uses_sdk_node, 'minSdkVersion', min_sdk_version)
+ if max_sdk_version:
+ NamespacedSet(uses_sdk_node, 'maxSdkVersion', max_sdk_version)
def SetTargetApiIfUnset(manifest_node, target_sdk_version):
- uses_sdk_node = _FindUsesSdkNode(manifest_node)
+ uses_sdk_node = manifest_node.find('./uses-sdk')
if uses_sdk_node is None:
- # Right now it seems like only some random test-only manifests don't have
- # any uses-sdk. If we start seeing some libraries which need their target
- # api to be set, but don't have a uses-sdk node, we may have to insert the
- # node here.
- return
- target_sdk_attribute_name = '{%s}targetSdkVersion' % ANDROID_NAMESPACE
- curr_target_sdk_version = uses_sdk_node.get(target_sdk_attribute_name)
+ uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk')
+ curr_target_sdk_version = NamespacedGet(uses_sdk_node, 'targetSdkVersion')
if curr_target_sdk_version is None:
- uses_sdk_node.set(target_sdk_attribute_name, target_sdk_version)
-
-
-def AssertPackage(manifest_node, package):
- """Asserts that manifest package has desired value.
-
- Will only assert if both |package| is not None and the package is set in the
- manifest.
- """
- package_value = GetPackage(manifest_node)
- if package_value is None or package is None or (
- package_value == 'no.manifest.configured'):
- return
- assert package_value == package, (
- 'Package in Android manifest is %s but we expect %s' % (package_value,
- package))
+ NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version)
+ return curr_target_sdk_version is None
def _SortAndStripElementTree(root):
@@ -195,10 +169,6 @@ def _SplitElement(line):
return start_tag, [restore_quotes(x) for x in attrs], end_tag
-def _FindUsesSdkNode(manifest_node):
- return manifest_node.find('./uses-sdk')
-
-
def _CreateNodeHash(lines):
"""Computes a hash (md5) for the first XML node found in |lines|.
@@ -271,7 +241,8 @@ def _AddDiffTags(lines):
assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
-def NormalizeManifest(manifest_contents):
+def NormalizeManifest(manifest_contents, version_code_offset,
+ library_version_offset):
_RegisterElementTreeNamespaces()
# This also strips comments and sorts node attributes alphabetically.
root = ElementTree.fromstring(manifest_contents)
@@ -286,14 +257,24 @@ def NormalizeManifest(manifest_contents):
if debuggable_name in app_node.attrib:
del app_node.attrib[debuggable_name]
+ version_code = NamespacedGet(root, 'versionCode')
+ if version_code and version_code_offset:
+ version_code = int(version_code) - int(version_code_offset)
+ NamespacedSet(root, 'versionCode', f'OFFSET={version_code}')
+ version_name = NamespacedGet(root, 'versionName')
+ if version_name:
+ version_name = re.sub(r'\d+', '#', version_name)
+ NamespacedSet(root, 'versionName', version_name)
+
# Trichrome's static library version number is updated daily. To avoid
# frequent manifest check failures, we remove the exact version number
# during normalization.
for node in app_node:
- if (node.tag in ['uses-static-library', 'static-library']
- and '{%s}version' % ANDROID_NAMESPACE in node.keys()
- and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
- node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')
+ if node.tag in ['uses-static-library', 'static-library']:
+ version = NamespacedGet(node, 'version')
+ if version and library_version_offset:
+ version = int(version) - int(library_version_offset)
+ NamespacedSet(node, 'version', f'OFFSET={version}')
# We also remove the exact package name (except the one at the root level)
# to avoid noise during manifest comparison.
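A worked example of the new offset handling, using the values from the updated test below (versionCode 1234 normalized with offset 1230, versionName 1.2.33.4):

import re

version_code, version_code_offset = '1234', 1230
print(f'OFFSET={int(version_code) - int(version_code_offset)}')  # OFFSET=4

version_name = '1.2.33.4'
print(re.sub(r'\d+', '#', version_name))  # #.#.#.#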
diff --git a/build/android/gyp/util/manifest_utils_test.py b/build/android/gyp/util/manifest_utils_test.py
index 8d8a3d6d6..165df4c55 100755
--- a/build/android/gyp/util/manifest_utils_test.py
+++ b/build/android/gyp/util/manifest_utils_test.py
@@ -14,6 +14,8 @@ from util import manifest_utils
_TEST_MANIFEST = """\
<?xml version="1.0" ?>
<manifest package="test.pkg"
+ android:versionCode="1234"
+ android:versionName="1.2.33.4"
tools:ignore="MissingVersion"
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
@@ -52,6 +54,8 @@ _TEST_MANIFEST_NORMALIZED = """\
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="test.pkg"
+ android:versionCode="OFFSET=4"
+ android:versionName="#.#.#.#"
tools:ignore="MissingVersion">
<uses-feature android:name="android.hardware.vr.headtracking" \
android:required="false" android:version="1"/>
@@ -106,19 +110,19 @@ class ManifestUtilsTest(unittest.TestCase):
def testNormalizeManifest_golden(self):
test_manifest, expected = _CreateTestData()
- actual = manifest_utils.NormalizeManifest(test_manifest)
+ actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
self.assertMultiLineEqual(expected, actual)
def testNormalizeManifest_nameUsedForActivity(self):
test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
- actual = manifest_utils.NormalizeManifest(test_manifest)
+ actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
# Checks that the DIFF-ANCHOR does not change with the added attribute.
self.assertMultiLineEqual(expected, actual)
def testNormalizeManifest_nameNotUsedForIntentFilter(self):
test_manifest, expected = _CreateTestData(
extra_intent_filter_elem='<a/>', intent_filter_diff_anchor='5f5c8a70')
- actual = manifest_utils.NormalizeManifest(test_manifest)
+ actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
# Checks that the DIFF-ANCHOR does change with the added element despite
# having a nested element with an android:name set.
self.assertMultiLineEqual(expected, actual)
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
index 8a59ba5e5..269ae2840 100644
--- a/build/android/gyp/util/md5_check.py
+++ b/build/android/gyp/util/md5_check.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import difflib
import hashlib
@@ -13,8 +12,7 @@ import sys
import zipfile
from util import build_utils
-
-sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import action_helpers # build_utils adds //build to sys.path.
import print_python_deps
# When set and a difference is detected, a diff of what changed is printed.
@@ -67,7 +65,7 @@ def CallAndWriteDepfileIfStale(on_stale_md5,
# on bots that build with & without patch, and the patch changes the depfile
# location.
if hasattr(options, 'depfile') and options.depfile:
- build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
+ action_helpers.write_depfile(options.depfile, output_paths[0], depfile_deps)
def CallAndRecordIfStale(function,
diff --git a/build/android/gyp/util/resources_parser.py b/build/android/gyp/util/resources_parser.py
index 3ce2f2c54..525e13624 100644
--- a/build/android/gyp/util/resources_parser.py
+++ b/build/android/gyp/util/resources_parser.py
@@ -9,6 +9,7 @@ from xml.etree import ElementTree
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
_TextSymbolEntry = collections.namedtuple(
'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))
@@ -141,7 +142,7 @@ class RTxtGenerator:
def WriteRTxtFile(self, rtxt_path):
resources = self._CollectResourcesListFromDirectories()
- with build_utils.AtomicOutput(rtxt_path, mode='w') as f:
+ with action_helpers.atomic_output(rtxt_path, mode='w') as f:
for resource in resources:
line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
resource)
diff --git a/build/android/gyp/util/zipalign.py b/build/android/gyp/util/zipalign.py
deleted file mode 100644
index ac7835561..000000000
--- a/build/android/gyp/util/zipalign.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2019 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from util import build_utils
-
-_FIXED_ZIP_HEADER_LEN = 30
-
-
-def _SetAlignment(zip_obj, zip_info, alignment):
- """Sets a ZipInfo's extra field such that the file will be aligned.
-
- Args:
- zip_obj: The ZipFile object that is being written.
- zip_info: The ZipInfo object about to be written.
- alignment: The amount of alignment (e.g. 4, or 4*1024).
- """
- cur_offset = zip_obj.fp.tell()
- header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename)
- padding_needed = (alignment - (
- (cur_offset + header_size) % alignment)) % alignment
-
-
- # Python writes |extra| to both the local file header and the central
- # directory's file header. Android's zipalign tool writes only to the
- # local file header, so there is more overhead in using python to align.
- zip_info.extra = b'\0' * padding_needed
-
-
-def AddToZipHermetic(zip_file,
- zip_path,
- src_path=None,
- data=None,
- compress=None,
- alignment=None):
- """Same as build_utils.AddToZipHermetic(), but with alignment.
-
- Args:
- alignment: If set, align the data of the entry to this many bytes.
- """
- zipinfo = build_utils.HermeticZipInfo(filename=zip_path)
- if alignment:
- _SetAlignment(zip_file, zipinfo, alignment)
- build_utils.AddToZipHermetic(
- zip_file, zipinfo, src_path=src_path, data=data, compress=compress)
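For reference, the padding the removed _SetAlignment() computed, worked through with made-up numbers:

_FIXED_ZIP_HEADER_LEN = 30
cur_offset = 1003  # hypothetical write position in the output zip
filename = 'lib/armeabi-v7a/libfoo.so'  # hypothetical entry name (25 chars)
alignment = 4
header_size = _FIXED_ZIP_HEADER_LEN + len(filename)
padding_needed = (alignment -
                  ((cur_offset + header_size) % alignment)) % alignment
print(padding_needed)  # (4 - (1058 % 4)) % 4 == 2 bytes of |extra| padding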
diff --git a/build/android/gyp/validate_inputs.py b/build/android/gyp/validate_inputs.py
new file mode 100755
index 000000000..e6435d600
--- /dev/null
+++ b/build/android/gyp/validate_inputs.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Ensures inputs exist and writes a stamp file."""
+
+import argparse
+import pathlib
+import sys
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--stamp', help='Path to touch on success.')
+ parser.add_argument('inputs', nargs='+', help='Files to check.')
+
+ args = parser.parse_args()
+
+ for path in args.inputs:
+ path_obj = pathlib.Path(path)
+ if not path_obj.is_file():
+ if not path_obj.exists():
+ sys.stderr.write(f'File not found: {path}\n')
+ else:
+ sys.stderr.write(f'Not a file: {path}\n')
+ sys.exit(1)
+
+ if args.stamp:
+ pathlib.Path(args.stamp).touch()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/android/gyp/validate_static_library_dex_references.py b/build/android/gyp/validate_static_library_dex_references.py
index 20b8c6dab..419776e16 100755
--- a/build/android/gyp/validate_static_library_dex_references.py
+++ b/build/android/gyp/validate_static_library_dex_references.py
@@ -12,6 +12,7 @@ import zipfile
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib.dex import dex_parser
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
_FLAGS_PATH = (
'//chrome/android/java/static_library_dex_reference_workarounds.flags')
@@ -49,8 +50,7 @@ def _DexFilesFromPath(path):
def main(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser()
- parser.add_argument(
- '--depfile', required=True, help='Path to output depfile.')
+ action_helpers.add_depfile_arg(parser)
parser.add_argument(
'--stamp', required=True, help='Path to file to touch upon success.')
parser.add_argument(
@@ -86,7 +86,7 @@ def main(args):
input_paths = [args.static_library_dex] + args.static_library_dependent_dexes
build_utils.Touch(args.stamp)
- build_utils.WriteDepfile(args.depfile, args.stamp, inputs=input_paths)
+ action_helpers.write_depfile(args.depfile, args.stamp, inputs=input_paths)
if __name__ == '__main__':
diff --git a/build/android/gyp/validate_static_library_dex_references.pydeps b/build/android/gyp/validate_static_library_dex_references.pydeps
index e57172dbd..7fd91c201 100644
--- a/build/android/gyp/validate_static_library_dex_references.pydeps
+++ b/build/android/gyp/validate_static_library_dex_references.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/validate_static_library_dex_references.pydeps build/android/gyp/validate_static_library_dex_references.py
+../../action_helpers.py
../../gn_helpers.py
../pylib/__init__.py
../pylib/dex/__init__.py
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
index 8c64fd75a..e64c62233 100755
--- a/build/android/gyp/write_build_config.py
+++ b/build/android/gyp/write_build_config.py
@@ -114,10 +114,10 @@ Path to an AndroidManifest.xml file related to the current target.
Only seen for the [`android_app_bundle`](#target_android_app_bundle) type.
Path to the base module for the bundle.
-* `deps_info['is_base_module']`:
+* `deps_info['module_name']`:
Only seen for the
[`android_app_bundle_module`](#target_android_app_bundle_module)
-type. Whether or not this module is the base module for some bundle.
+type. The name of the feature module.
* `deps_info['dependency_zips']`:
List of `deps_info['resources_zip']` entries for all `android_resources`
@@ -264,17 +264,18 @@ True to indicate that this target corresponds to a prebuilt `.jar` file.
In this case, `deps_info['unprocessed_jar_path']` will point to the source
`.jar` file. Otherwise, it will point to a build-generated file.
-* `deps_info['java_sources_file']`:
-Path to a single `.sources` file listing all the Java sources that were used
-to generate the library (simple text format, one `.jar` path per line).
+* `deps_info['target_sources_file']`:
+Path to a single `.sources` file listing all the Java and Kotlin sources that
+were used to generate the library (simple text format, one source path per
+line).
* `deps_info['lint_android_manifest']`:
Path to an AndroidManifest.xml file to use for this lint target.
-* `deps_info['lint_java_sources']`:
-The list of all `deps_info['java_sources_file']` entries for all library
+* `deps_info['lint_sources']`:
+The list of all `deps_info['target_sources_file']` entries for all library
dependencies that are chromium code. Note: this is a list of files, where each
-file contains a list of Java source files. This is used for lint.
+file contains a list of Java and Kotlin source files. This is used for lint.
* `deps_info['lint_aars']`:
List of all aars from transitive java dependencies. This allows lint to collect
@@ -329,6 +330,16 @@ The classpath used when running a Java or Android binary. Essentially the
collection of all `deps_info['device_jar_path']` entries for the target and all
its dependencies.
+* `deps_info['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
+
+* `deps_info['preferred_dep']`:
+Whether the target should be the preferred dep. This is usually the case when
+we have a java_group that depends on either the public or the internal dep,
+and it is better to depend on the group rather than on the underlying dep.
+Another case is android_library_factory targets, where the factory target
+should be preferred over the actual implementation.
## <a name="target_robolectric_binary">Target type `robolectric_binary`</a>:
@@ -367,11 +378,7 @@ that will be merged into the final `.jar` file for distribution.
* `deps_info['final_dex']['path']`:
Path to the final classes.dex file (or classes.zip in case of multi-dex)
-for this APK.
-
-* `deps_info['final_dex']['all_dex_files']`:
-The list of paths to all `deps_info['dex_path']` entries for all libraries
-that comprise this APK. Valid only for debug builds.
+for this APK - only used for proguarded builds.
* `native['libraries']`
List of native libraries for the primary ABI to be embedded in this APK.
@@ -430,10 +437,11 @@ into the final APK as-is.
NOTE: This has nothing to do with *Android* resources.
-* `jni['all_source']`
-The list of all `deps_info['java_sources_file']` entries for all library
+* `deps_info['jni_all_source']`
+The list of all `deps_info['target_sources_file']` entries for all library
dependencies for this APK. Note: this is a list of files, where each file
-contains a list of Java source files. This is used for JNI registration.
+contains a list of Java and Kotlin source files. This is used for JNI
+registration.
* `deps_info['proguard_all_configs']`:
The collection of all 'deps_info['proguard_configs']` values from this target
@@ -558,6 +566,7 @@ import xml.dom.minidom
from util import build_utils
from util import resource_utils
+import action_helpers # build_utils adds //build to sys.path.
# Types that should never be used as a dependency of another build config.
@@ -773,10 +782,9 @@ def _MergeAssets(all_assets):
locale_paks.add(dest)
def create_list(asset_map):
- ret = ['%s:%s' % (src, dest) for dest, src in asset_map.items()]
# Sort to ensure deterministic ordering.
- ret.sort()
- return ret
+ items = sorted(asset_map.items())
+ return [f'{src}:{dest}' for dest, src in items]
return create_list(compressed), create_list(uncompressed), locale_paks
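A small check (asset names made up) of the rewritten create_list(): it still emits 'src:dest' strings, now ordered by destination path because the dict items (keyed by dest) are sorted before formatting:

asset_map = {'assets/b.pak': 'gen/z.pak', 'assets/a.pak': 'gen/y.pak'}
items = sorted(asset_map.items())
print([f'{src}:{dest}' for dest, src in items])
# ['gen/y.pak:assets/a.pak', 'gen/z.pak:assets/b.pak']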
@@ -929,6 +937,86 @@ def _CompareClasspathPriority(dep):
return 1 if dep.get('low_classpath_priority') else 0
+def _DedupFeatureModuleSharedCode(uses_split_arg, modules,
+ field_names_to_dedup):
+ child_to_ancestors = collections.defaultdict(list)
+ if uses_split_arg:
+ for split_pair in uses_split_arg:
+ child, parent = split_pair.split(':')
+ assert child in modules
+ assert parent in modules
+ child_to_ancestors[child] = [parent]
+
+ # Create a full list of ancestors for each module.
+ for name in modules:
+ if name == 'base':
+ continue
+ curr_name = name
+ while curr_name in child_to_ancestors:
+ parent = child_to_ancestors[curr_name][0]
+ if parent not in child_to_ancestors[name]:
+ child_to_ancestors[name].append(parent)
+ curr_name = parent
+
+ if curr_name != 'base':
+ child_to_ancestors[name].append('base')
+
+ # Strip out duplicates from ancestors.
+ for name, module in modules.items():
+ if name == 'base':
+ continue
+ # Make sure we get all ancestors, not just direct parent.
+ for ancestor in child_to_ancestors[name]:
+ for f in field_names_to_dedup:
+ if f in module:
+ RemoveObjDups(module, modules[ancestor], f)
+
+ # Strip out duplicates from siblings/cousins.
+ for f in field_names_to_dedup:
+ _PromoteToCommonAncestor(modules, child_to_ancestors, f)
+
+
+def _PromoteToCommonAncestor(modules, child_to_ancestors, field_name):
+ module_to_fields_set = {}
+ for module_name, module in modules.items():
+ if field_name in module:
+ module_to_fields_set[module_name] = set(module[field_name])
+
+ seen = set()
+ dupes = set()
+ for fields in module_to_fields_set.values():
+ new_dupes = seen & fields
+ if new_dupes:
+ dupes |= new_dupes
+ seen |= fields
+
+ for d in dupes:
+ owning_modules = []
+ for module_name, fields in module_to_fields_set.items():
+ if d in fields:
+ owning_modules.append(module_name)
+ assert len(owning_modules) >= 2
+ # Rely on the fact that ancestors are inserted from closest to
+ # farthest, where "base" should always be the last element.
+ # Arbitrarily using the first owning module - any would work.
+ for ancestor in child_to_ancestors[owning_modules[0]]:
+ ancestor_is_shared_with_all = True
+ for o in owning_modules[1:]:
+ if ancestor not in child_to_ancestors[o]:
+ ancestor_is_shared_with_all = False
+ break
+ if ancestor_is_shared_with_all:
+ common_ancestor = ancestor
+ break
+ for o in owning_modules:
+ module_to_fields_set[o].remove(d)
+ module_to_fields_set[common_ancestor].add(d)
+
+ for module_name, module in modules.items():
+ if field_name in module:
+ module[field_name] = sorted(list(module_to_fields_set[module_name]))
+
+
def _CopyBuildConfigsForDebugging(debug_dir):
shutil.rmtree(debug_dir, ignore_errors=True)
os.makedirs(debug_dir)
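To make the intent of the new _DedupFeatureModuleSharedCode() and _PromoteToCommonAncestor() helpers concrete, an illustrative sketch with made-up module and dex names:

modules = {
    'base': {'all_dex_files': ['base.dex', 'shared.dex']},
    'vr':   {'all_dex_files': ['vr.dex', 'shared.dex', 'common.dex']},
    'ar':   {'all_dex_files': ['ar.dex', 'shared.dex', 'common.dex']},
}
# With no --uses-split pairs, 'base' is the only ancestor of 'vr' and 'ar':
# * 'shared.dex' is dropped from 'vr' and 'ar' because their ancestor 'base'
#   already lists it (the RemoveObjDups pass).
# * 'common.dex' is shared by the siblings 'vr' and 'ar' but not by 'base',
#   so _PromoteToCommonAncestor() removes it from both and adds it to 'base'.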
@@ -942,7 +1030,7 @@ def _CopyBuildConfigsForDebugging(debug_dir):
def main(argv):
parser = optparse.OptionParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_option('--build-config', help='Path to build_config output.')
parser.add_option('--store-deps-for-debugging-to',
help='Path to copy all transitive build config files to.')
@@ -992,8 +1080,12 @@ def main(argv):
parser.add_option('--treat-as-locale-paks', action='store_true',
help='Consider the assets as locale paks in BuildConfig.java')
- # java library options
+ # java library and group options
+ parser.add_option('--preferred-dep',
+ action='store_true',
+ help='Whether the target should be preferred as a dep.')
+ # java library options
parser.add_option('--public-deps-configs',
help='GN list of config files of deps which are exposed as '
'part of the target\'s public API.')
@@ -1007,7 +1099,7 @@ def main(argv):
help='Path to the interface .jar to use for javac classpath purposes.')
parser.add_option('--is-prebuilt', action='store_true',
help='Whether the jar was compiled or pre-compiled.')
- parser.add_option('--java-sources-file', help='Path to .sources file')
+ parser.add_option('--target-sources-file', help='Path to .sources file')
parser.add_option('--bundled-srcjars',
help='GYP-list of .srcjars that have been included in this java_library.')
parser.add_option('--supports-android', action='store_true',
@@ -1132,24 +1224,29 @@ def main(argv):
parser.add_option(
'--base-allowlist-rtxt-path',
help='Path to R.txt file for the base resources allowlist.')
- parser.add_option(
- '--is-base-module',
- action='store_true',
- help='Specifies that this module is a base module for some app bundle.')
parser.add_option('--generate-markdown-format-doc', action='store_true',
help='Dump the Markdown .build_config format documentation '
'then exit immediately.')
+ parser.add_option('--module-name', help='The name of this feature module.')
parser.add_option(
'--base-module-build-config',
help='Path to the base module\'s build config '
'if this is a feature module.')
+ parser.add_option('--parent-module-build-config',
+ help='Path to the parent module\'s build config '
+ 'when not using base module as parent.')
parser.add_option(
'--module-build-configs',
help='For bundles, the paths of all non-async module .build_configs '
'for modules that are part of the bundle.')
+ parser.add_option(
+ '--uses-split',
+ action='append',
+ help='List of name pairs separated by : mapping a feature module to a '
+ 'dependent feature module.')
parser.add_option(
'--trace-events-jar-dir',
@@ -1170,15 +1267,14 @@ def main(argv):
return 0
if options.fail:
- parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+ parser.error('\n'.join(action_helpers.parse_gn_list(options.fail)))
lib_options = ['unprocessed_jar_path', 'interface_jar_path']
device_lib_options = ['device_jar_path', 'dex_path']
required_options_map = {
'android_apk': ['build_config'] + lib_options + device_lib_options,
'android_app_bundle_module':
- ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
- device_lib_options,
+ ['build_config', 'res_size_info'] + lib_options + device_lib_options,
'android_assets': ['build_config'],
'android_resources': ['build_config', 'resources_zip'],
'dist_aar': ['build_config'],
@@ -1207,8 +1303,8 @@ def main(argv):
if options.base_allowlist_rtxt_path:
raise Exception('--base-allowlist-rtxt-path can only be used with '
'--type=android_app_bundle_module')
- if options.is_base_module:
- raise Exception('--is-base-module can only be used with '
+ if options.module_name:
+ raise Exception('--module-name can only be used with '
'--type=android_app_bundle_module')
is_apk_or_module_target = options.type in ('android_apk',
@@ -1237,17 +1333,18 @@ def main(argv):
'system_java_library',
'android_app_bundle_module')
- deps_configs_paths = build_utils.ParseGnList(options.deps_configs)
- public_deps_configs_paths = build_utils.ParseGnList(
+ deps_configs_paths = action_helpers.parse_gn_list(options.deps_configs)
+ public_deps_configs_paths = action_helpers.parse_gn_list(
options.public_deps_configs)
deps_configs_paths += public_deps_configs_paths
deps = _DepsFromPaths(deps_configs_paths,
options.type,
recursive_resource_deps=options.recursive_resource_deps)
public_deps = _DepsFromPaths(public_deps_configs_paths, options.type)
- processor_deps = _DepsFromPaths(
- build_utils.ParseGnList(options.annotation_processor_configs or ''),
- options.type, filter_root_targets=False)
+ processor_deps = _DepsFromPaths(action_helpers.parse_gn_list(
+ options.annotation_processor_configs or ''),
+ options.type,
+ filter_root_targets=False)
all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths())
@@ -1285,6 +1382,10 @@ def main(argv):
if options.base_module_build_config:
base_module_build_config = GetDepConfigRoot(
options.base_module_build_config)
+ parent_module_build_config = base_module_build_config
+ if options.parent_module_build_config:
+ parent_module_build_config = GetDepConfigRoot(
+ options.parent_module_build_config)
# Initialize some common config.
# Any value that needs to be queryable by dependents must go within deps_info.
@@ -1326,13 +1427,16 @@ def main(argv):
gradle['apk_under_test'] = tested_apk_config['name']
if options.type == 'android_app_bundle_module':
- deps_info['is_base_module'] = bool(options.is_base_module)
+ deps_info['module_name'] = options.module_name
# Required for generating gradle files.
if options.type == 'java_library':
deps_info['is_prebuilt'] = bool(options.is_prebuilt)
deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+ if options.preferred_dep:
+ deps_info['preferred_dep'] = bool(options.preferred_dep)
+
if options.android_manifest:
deps_info['android_manifest'] = options.android_manifest
@@ -1340,11 +1444,11 @@ def main(argv):
deps_info['merged_android_manifest'] = options.merged_android_manifest
if options.bundled_srcjars:
- deps_info['bundled_srcjars'] = build_utils.ParseGnList(
+ deps_info['bundled_srcjars'] = action_helpers.parse_gn_list(
options.bundled_srcjars)
- if options.java_sources_file:
- deps_info['java_sources_file'] = options.java_sources_file
+ if options.target_sources_file:
+ deps_info['target_sources_file'] = options.target_sources_file
if is_java_target:
if options.main_class:
@@ -1375,11 +1479,12 @@ def main(argv):
if is_apk_or_module_target or options.type in ('group', 'java_library',
'robolectric_binary',
'dist_aar'):
- deps_info['jni'] = {}
- all_java_sources = [c['java_sources_file'] for c in all_library_deps
- if 'java_sources_file' in c]
- if options.java_sources_file:
- all_java_sources.append(options.java_sources_file)
+ all_target_sources = [
+ c['target_sources_file'] for c in all_library_deps
+ if 'target_sources_file' in c
+ ]
+ if options.target_sources_file:
+ all_target_sources.append(options.target_sources_file)
if is_apk_or_module_target or options.type in ('group', 'java_library',
'robolectric_binary'):
@@ -1455,16 +1560,17 @@ def main(argv):
all_asset_sources = []
if options.asset_renaming_sources:
all_asset_sources.extend(
- build_utils.ParseGnList(options.asset_renaming_sources))
+ action_helpers.parse_gn_list(options.asset_renaming_sources))
if options.asset_sources:
- all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+ all_asset_sources.extend(
+ action_helpers.parse_gn_list(options.asset_sources))
deps_info['assets'] = {
'sources': all_asset_sources
}
if options.asset_renaming_destinations:
- deps_info['assets']['outputs'] = (
- build_utils.ParseGnList(options.asset_renaming_destinations))
+ deps_info['assets']['outputs'] = (action_helpers.parse_gn_list(
+ options.asset_renaming_destinations))
if options.disable_asset_compression:
deps_info['assets']['disable_compression'] = True
if options.treat_as_locale_paks:
@@ -1559,19 +1665,20 @@ def main(argv):
config['deps_info']['extra_package_names'] = extra_package_names
# These are .jars to add to javac classpath but not to runtime classpath.
- extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+ extra_classpath_jars = action_helpers.parse_gn_list(
+ options.extra_classpath_jars)
if extra_classpath_jars:
extra_classpath_jars.sort()
deps_info['extra_classpath_jars'] = extra_classpath_jars
- mergeable_android_manifests = build_utils.ParseGnList(
+ mergeable_android_manifests = action_helpers.parse_gn_list(
options.mergeable_android_manifests)
mergeable_android_manifests.sort()
if mergeable_android_manifests:
deps_info['mergeable_android_manifests'] = mergeable_android_manifests
extra_proguard_classpath_jars = []
- proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+ proguard_configs = action_helpers.parse_gn_list(options.proguard_configs)
if proguard_configs:
# Make a copy of |proguard_configs| since it's mutated below.
deps_info['proguard_configs'] = list(proguard_configs)
@@ -1666,18 +1773,18 @@ def main(argv):
# Collect all sources and resources at the apk/bundle_module level.
lint_aars = set()
lint_srcjars = set()
- lint_java_sources = set()
+ lint_sources = set()
lint_resource_sources = set()
lint_resource_zips = set()
- if options.java_sources_file:
- lint_java_sources.add(options.java_sources_file)
+ if options.target_sources_file:
+ lint_sources.add(options.target_sources_file)
if options.bundled_srcjars:
lint_srcjars.update(deps_info['bundled_srcjars'])
for c in all_library_deps:
if c['chromium_code'] and c['requires_android']:
- if 'java_sources_file' in c:
- lint_java_sources.add(c['java_sources_file'])
+ if 'target_sources_file' in c:
+ lint_sources.add(c['target_sources_file'])
lint_srcjars.update(c['bundled_srcjars'])
if 'aar_path' in c:
lint_aars.add(c['aar_path'])
@@ -1697,7 +1804,7 @@ def main(argv):
deps_info['lint_aars'] = sorted(lint_aars)
deps_info['lint_srcjars'] = sorted(lint_srcjars)
- deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_sources'] = sorted(lint_sources)
deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
deps_info['lint_extra_android_manifests'] = []
@@ -1707,19 +1814,25 @@ def main(argv):
deps_info['lint_android_manifest'] = options.android_manifest
if options.type == 'android_app_bundle':
- module_configs = [
- GetDepConfig(c)
- for c in build_utils.ParseGnList(options.module_build_configs)
+ module_config_paths = action_helpers.parse_gn_list(
+ options.module_build_configs)
+ module_configs = [GetDepConfig(c) for c in module_config_paths]
+ module_configs_by_name = {d['module_name']: d for d in module_configs}
+ per_module_fields = [
+ 'device_classpath', 'trace_event_rewritten_device_classpath',
+ 'all_dex_files'
]
jni_all_source = set()
lint_aars = set()
lint_srcjars = set()
- lint_java_sources = set()
+ lint_sources = set()
lint_resource_sources = set()
lint_resource_zips = set()
lint_extra_android_manifests = set()
- for c in module_configs:
- if c['is_base_module']:
+ config['modules'] = {}
+ modules = config['modules']
+ for n, c in module_configs_by_name.items():
+ if n == 'base':
assert 'base_module_config' not in deps_info, (
'Must have exactly 1 base module!')
deps_info['package_name'] = c['package_name']
@@ -1730,25 +1843,32 @@ def main(argv):
deps_info['lint_android_manifest'] = c['android_manifest']
else:
lint_extra_android_manifests.add(c['android_manifest'])
- jni_all_source.update(c['jni']['all_source'])
+ jni_all_source.update(c['jni_all_source'])
lint_aars.update(c['lint_aars'])
lint_srcjars.update(c['lint_srcjars'])
- lint_java_sources.update(c['lint_java_sources'])
+ lint_sources.update(c['lint_sources'])
lint_resource_sources.update(c['lint_resource_sources'])
lint_resource_zips.update(c['lint_resource_zips'])
- deps_info['jni'] = {'all_source': sorted(jni_all_source)}
+ module = modules[n] = {}
+ for f in per_module_fields:
+ if f in c:
+ module[f] = c[f]
+ deps_info['jni_all_source'] = sorted(jni_all_source)
deps_info['lint_aars'] = sorted(lint_aars)
deps_info['lint_srcjars'] = sorted(lint_srcjars)
- deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_sources'] = sorted(lint_sources)
deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
deps_info['lint_extra_android_manifests'] = sorted(
lint_extra_android_manifests)
+ _DedupFeatureModuleSharedCode(options.uses_split, modules,
+ per_module_fields)
+
if is_apk_or_module_target or options.type in ('group', 'java_library',
'robolectric_binary',
'dist_aar'):
- deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+ deps_info['jni_all_source'] = sorted(set(all_target_sources))
system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
@@ -1854,14 +1974,11 @@ def main(argv):
deps_info['proguard_classpath_jars'] = sorted(
set(extra_proguard_classpath_jars))
- # Dependencies for the final dex file of an apk.
- if (is_apk_or_module_target or options.final_dex_path
- or options.type == 'dist_jar'):
- config['final_dex'] = {}
- dex_config = config['final_dex']
- dex_config['path'] = options.final_dex_path
+ if options.final_dex_path:
+ config['final_dex'] = {'path': options.final_dex_path}
if is_apk_or_module_target or options.type == 'dist_jar':
- dex_config['all_dex_files'] = all_dex_files
+ # Dependencies for the final dex file of an apk.
+ deps_info['all_dex_files'] = all_dex_files
if is_java_target:
config['javac']['classpath'] = sorted(javac_classpath)
@@ -1959,17 +2076,17 @@ def main(argv):
all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
- native_library_placeholder_paths = build_utils.ParseGnList(
+ native_library_placeholder_paths = action_helpers.parse_gn_list(
options.native_lib_placeholders)
native_library_placeholder_paths.sort()
- secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+ secondary_native_library_placeholder_paths = action_helpers.parse_gn_list(
options.secondary_native_lib_placeholders)
secondary_native_library_placeholder_paths.sort()
- loadable_modules = build_utils.ParseGnList(options.loadable_modules)
+ loadable_modules = action_helpers.parse_gn_list(options.loadable_modules)
loadable_modules.sort()
- secondary_abi_loadable_modules = build_utils.ParseGnList(
+ secondary_abi_loadable_modules = action_helpers.parse_gn_list(
options.secondary_abi_loadable_modules)
secondary_abi_loadable_modules.sort()
@@ -2016,7 +2133,7 @@ def main(argv):
GetDepConfig(p) for p in GetAllDepsConfigsInOrder(
deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps)
]
- config['extra_android_manifests'] = []
+ config['extra_android_manifests'] = list(mergeable_android_manifests)
for c in extra_manifest_deps:
config['extra_android_manifests'].extend(
c.get('mergeable_android_manifests', []))
@@ -2030,22 +2147,29 @@ def main(argv):
deps_info['java_resources_jar'] = options.java_resources_jar_path
# DYNAMIC FEATURE MODULES:
- # Make sure that dependencies that exist on the base module
- # are not duplicated on the feature module.
+ # There are two approaches to dealing with module dependencies:
+ # 1) Perform steps in android_apk_or_module(), with only the knowledge of
+ # ancestor splits. Our implementation currently allows for only 2 levels:
+ # base -> parent -> leaf
+ # Bundletool normally fails if two leaf nodes merge the same manifest or
+ # resources. The fix is to add the common dep to the chrome or base module
+ # so that our deduplication logic will work.
+ # RemoveObjDups() implements this approach.
+ # 2) Perform steps in android_app_bundle(), with knowledge of full set of
+ # modules. This is required for dex because it can handle the case of two
+ # leaf nodes having the same dep, and promoting that dep to their common
+ # parent.
+ # _DedupFeatureModuleSharedCode() implements this approach.
if base_module_build_config:
- base = base_module_build_config
- RemoveObjDups(config, base, 'deps_info', 'dependency_zips')
- RemoveObjDups(config, base, 'deps_info', 'dependency_zip_overlays')
- RemoveObjDups(config, base, 'deps_info', 'extra_package_names')
- RemoveObjDups(config, base, 'deps_info', 'device_classpath')
- RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
- RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
- RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
- RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
- RemoveObjDups(config, base, 'extra_android_manifests')
- if options.trace_events_jar_dir:
- RemoveObjDups(config, base, 'deps_info',
- 'trace_event_rewritten_device_classpath')
+ ancestors = [base_module_build_config]
+ if parent_module_build_config is not base_module_build_config:
+ ancestors += [parent_module_build_config]
+ for ancestor in ancestors:
+ RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zips')
+ RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zip_overlays')
+ RemoveObjDups(config, ancestor, 'deps_info', 'extra_package_names')
+ RemoveObjDups(config, ancestor, 'deps_info', 'jni_all_source')
+ RemoveObjDups(config, ancestor, 'extra_android_manifests')
if is_java_target:
jar_to_target = {}
@@ -2053,6 +2177,8 @@ def main(argv):
_AddJarMapping(jar_to_target, all_deps)
if base_module_build_config:
_AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+ if parent_module_build_config is not base_module_build_config:
+ _AddJarMapping(jar_to_target, [parent_module_build_config['deps_info']])
if options.tested_apk_config:
_AddJarMapping(jar_to_target, [tested_apk_config])
for jar, target in zip(tested_apk_config['javac_full_classpath'],
@@ -2070,8 +2196,8 @@ def main(argv):
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
- build_utils.WriteDepfile(options.depfile, options.build_config,
- sorted(set(all_inputs)))
+ action_helpers.write_depfile(options.depfile, options.build_config,
+ sorted(set(all_inputs)))
if options.store_deps_for_debugging_to:
GetDepConfig(options.build_config) # Add it to cache.
diff --git a/build/android/gyp/write_build_config.pydeps b/build/android/gyp/write_build_config.pydeps
index 8f4c58e49..fa7209c27 100644
--- a/build/android/gyp/write_build_config.pydeps
+++ b/build/android/gyp/write_build_config.pydeps
@@ -1,8 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
../../../third_party/jinja2/__init__.py
-../../../third_party/jinja2/_compat.py
../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
../../../third_party/jinja2/bccache.py
../../../third_party/jinja2/compiler.py
../../../third_party/jinja2/defaults.py
@@ -22,6 +22,7 @@
../../../third_party/markupsafe/__init__.py
../../../third_party/markupsafe/_compat.py
../../../third_party/markupsafe/_native.py
+../../action_helpers.py
../../gn_helpers.py
util/__init__.py
util/build_utils.py
diff --git a/build/android/gyp/write_native_libraries_java.py b/build/android/gyp/write_native_libraries_java.py
index 07f709e93..fb4d2ad18 100755
--- a/build/android/gyp/write_native_libraries_java.py
+++ b/build/android/gyp/write_native_libraries_java.py
@@ -12,6 +12,8 @@ import sys
import zipfile
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
_NATIVE_LIBRARIES_TEMPLATE = """\
@@ -50,7 +52,7 @@ def _FormatLibraryName(library_name):
def main():
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--final', action='store_true', help='Use final fields.')
parser.add_argument(
'--enable-chromium-linker',
@@ -104,18 +106,18 @@ def main():
'LIBRARIES': ','.join(_FormatLibraryName(n) for n in native_libraries),
'CPU_FAMILY': options.cpu_family,
}
- with build_utils.AtomicOutput(options.output) as f:
+ with action_helpers.atomic_output(options.output) as f:
with zipfile.ZipFile(f.name, 'w') as srcjar_file:
- build_utils.AddToZipHermetic(
+ zip_helpers.add_to_zip_hermetic(
zip_file=srcjar_file,
zip_path='org/chromium/build/NativeLibraries.java',
data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))
if options.depfile:
assert options.native_libraries_list
- build_utils.WriteDepfile(options.depfile,
- options.output,
- inputs=[options.native_libraries_list])
+ action_helpers.write_depfile(options.depfile,
+ options.output,
+ inputs=[options.native_libraries_list])
if __name__ == '__main__':
diff --git a/build/android/gyp/write_native_libraries_java.pydeps b/build/android/gyp/write_native_libraries_java.pydeps
index f5176ef78..c47e1652c 100644
--- a/build/android/gyp/write_native_libraries_java.pydeps
+++ b/build/android/gyp/write_native_libraries_java.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
util/__init__.py
util/build_utils.py
write_native_libraries_java.py
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
index fe4743ebd..f4b4acfb2 100755
--- a/build/android/gyp/zip.py
+++ b/build/android/gyp/zip.py
@@ -12,6 +12,8 @@ import sys
import zipfile
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers
def main(args):
@@ -39,33 +41,32 @@ def main(args):
metavar='KEY=VALUE',
type=lambda x: x.split('=', 1),
help='Entry to store in JSON-encoded archive comment.')
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
options = parser.parse_args(args)
- with build_utils.AtomicOutput(options.output) as f:
+ with action_helpers.atomic_output(options.output) as f:
with zipfile.ZipFile(f.name, 'w') as out_zip:
depfile_deps = None
if options.input_files:
- files = build_utils.ParseGnList(options.input_files)
- build_utils.DoZip(
- files,
- out_zip,
- base_dir=options.input_files_base_dir,
- compress_fn=lambda _: options.compress)
+ files = action_helpers.parse_gn_list(options.input_files)
+ zip_helpers.add_files_to_zip(files,
+ out_zip,
+ base_dir=options.input_files_base_dir,
+ compress=options.compress)
if options.input_zips:
- files = build_utils.ParseGnList(options.input_zips)
+ files = action_helpers.parse_gn_list(options.input_zips)
depfile_deps = files
path_transform = None
if options.input_zips_excluded_globs:
- globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
+ globs = action_helpers.parse_gn_list(
+ options.input_zips_excluded_globs)
path_transform = (
lambda p: None if build_utils.MatchesGlob(p, globs) else p)
- build_utils.MergeZips(
- out_zip,
- files,
- path_transform=path_transform,
- compress=options.compress)
+ zip_helpers.merge_zips(out_zip,
+ files,
+ path_transform=path_transform,
+ compress=options.compress)
if options.comment_json:
out_zip.comment = json.dumps(dict(options.comment_json),
@@ -73,9 +74,9 @@ def main(args):
# Depfile used only by dist_jar().
if options.depfile:
- build_utils.WriteDepfile(options.depfile,
- options.output,
- inputs=depfile_deps)
+ action_helpers.write_depfile(options.depfile,
+ options.output,
+ inputs=depfile_deps)
if __name__ == '__main__':
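A hedged illustration of the path_transform built above: entries matching an excluded glob map to None, which zip_helpers.merge_zips() is expected to skip (globs here are made up):

import fnmatch

def MatchesGlob(path, filters):
  return filters and any(fnmatch.fnmatch(path, f) for f in filters)

globs = ['*.stamp', 'META-INF/*']
path_transform = lambda p: None if MatchesGlob(p, globs) else p
print(path_transform('META-INF/MANIFEST.MF'))  # None -> entry is skipped
print(path_transform('classes.dex'))           # classes.dex -> entry is kept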
diff --git a/build/android/gyp/zip.pydeps b/build/android/gyp/zip.pydeps
index 36affd170..973fe436c 100644
--- a/build/android/gyp/zip.pydeps
+++ b/build/android/gyp/zip.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../action_helpers.py
../../gn_helpers.py
+../../zip_helpers.py
util/__init__.py
util/build_utils.py
zip.py
diff --git a/build/android/incremental_install/generate_android_manifest.py b/build/android/incremental_install/generate_android_manifest.py
index 38da3dc0b..ffa26c20b 100755
--- a/build/android/incremental_install/generate_android_manifest.py
+++ b/build/android/incremental_install/generate_android_manifest.py
@@ -17,6 +17,7 @@ from xml.etree import ElementTree
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp'))
from util import build_utils
from util import manifest_utils
+import action_helpers # build_utils adds //build to sys.path.
_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
_META_DATA_APP_NAME = 'incremental-install-real-app'
@@ -97,7 +98,7 @@ def main(raw_args):
new_manifest_data = _ProcessManifest(options.src_manifest,
options.disable_isolated_processes)
- with build_utils.AtomicOutput(options.dst_manifest) as out_manifest:
+ with action_helpers.atomic_output(options.dst_manifest) as out_manifest:
out_manifest.write(new_manifest_data)
diff --git a/build/android/incremental_install/generate_android_manifest.pydeps b/build/android/incremental_install/generate_android_manifest.pydeps
index e542e2345..68c832bcc 100644
--- a/build/android/incremental_install/generate_android_manifest.pydeps
+++ b/build/android/incremental_install/generate_android_manifest.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
+../../action_helpers.py
../../gn_helpers.py
../gyp/util/__init__.py
../gyp/util/build_utils.py
diff --git a/build/android/incremental_install/write_installer_json.py b/build/android/incremental_install/write_installer_json.py
index 072380195..4825a80e1 100755
--- a/build/android/incremental_install/write_installer_json.py
+++ b/build/android/incremental_install/write_installer_json.py
@@ -14,6 +14,7 @@ import sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
from util import build_utils
+import action_helpers # build_utils adds //build to sys.path.
def _ParseArgs(args):
@@ -44,8 +45,8 @@ def _ParseArgs(args):
help='Print a warning about proguard being disabled')
options = parser.parse_args(args)
- options.dex_files = build_utils.ParseGnList(options.dex_files)
- options.native_libs = build_utils.ParseGnList(options.native_libs)
+ options.dex_files = action_helpers.parse_gn_list(options.dex_files)
+ options.native_libs = action_helpers.parse_gn_list(options.native_libs)
return options
@@ -60,7 +61,7 @@ def main(args):
'split_globs': options.split_globs,
}
- with build_utils.AtomicOutput(options.output_path, mode='w+') as f:
+ with action_helpers.atomic_output(options.output_path, mode='w+') as f:
json.dump(data, f, indent=2, sort_keys=True)
diff --git a/build/android/incremental_install/write_installer_json.pydeps b/build/android/incremental_install/write_installer_json.pydeps
index 11a263f4a..519281fe6 100644
--- a/build/android/incremental_install/write_installer_json.pydeps
+++ b/build/android/incremental_install/write_installer_json.pydeps
@@ -1,5 +1,6 @@
# Generated by running:
# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py
+../../action_helpers.py
../../gn_helpers.py
../gyp/util/__init__.py
../gyp/util/build_utils.py
diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py
index 2be70daad..9950253a6 100755
--- a/build/android/lighttpd_server.py
+++ b/build/android/lighttpd_server.py
@@ -10,7 +10,6 @@ Usage:
lighttpd_server PATH_TO_DOC_ROOT
"""
-from __future__ import print_function
import codecs
import contextlib
diff --git a/build/android/list_java_targets.py b/build/android/list_java_targets.py
index 6f19be44a..b135b0fca 100755
--- a/build/android/list_java_targets.py
+++ b/build/android/list_java_targets.py
@@ -70,9 +70,12 @@ def _resolve_autoninja():
return 'autoninja'
-def _run_ninja(output_dir, args, quiet=False):
- cmd = [
- _resolve_autoninja(),
+def _run_ninja(output_dir, args, j_value=None, quiet=False):
+ if j_value:
+ cmd = [_resolve_ninja(), '-j', j_value]
+ else:
+ cmd = [_resolve_autoninja()]
+ cmd += [
'-C',
output_dir,
]
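A quick sketch of the command this now builds (output directory made up; the real code resolves the ninja/autoninja binaries via helpers): with -j the plain ninja binary is used, otherwise autoninja picks the parallelism itself.

def build_cmd(j_value=None, output_dir='out/Debug'):
  # Mirrors the _run_ninja() change: plain ninja with an explicit -j,
  # else autoninja.
  if j_value:
    cmd = ['ninja', '-j', j_value]
  else:
    cmd = ['autoninja']
  return cmd + ['-C', output_dir]

print(build_cmd('100'))  # ['ninja', '-j', '100', '-C', 'out/Debug']
print(build_cmd())       # ['autoninja', '-C', 'out/Debug']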
@@ -228,6 +231,7 @@ def main():
'--query deps_info.unprocessed_jar_path to show a list '
'of all targets that have a non-empty deps_info dict and '
'non-empty "unprocessed_jar_path" value in that dict.')
+ parser.add_argument('-j', help='Use -j with ninja instead of autoninja.')
parser.add_argument('-v', '--verbose', default=0, action='count')
parser.add_argument('-q', '--quiet', default=0, action='count')
args = parser.parse_args()
@@ -250,6 +254,7 @@ def main():
if args.build:
logging.warning('Building %d .build_config.json files...', len(entries))
_run_ninja(output_dir, [e.ninja_build_config_target for e in entries],
+ j_value=args.j,
quiet=args.quiet)
if args.type:
diff --git a/build/android/method_count.py b/build/android/method_count.py
index e75b15e1e..8556b22c8 100755
--- a/build/android/method_count.py
+++ b/build/android/method_count.py
@@ -3,7 +3,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import os
diff --git a/build/android/print_cipd_version.py b/build/android/print_cipd_version.py
new file mode 100755
index 000000000..581295dcd
--- /dev/null
+++ b/build/android/print_cipd_version.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import pathlib
+import re
+import subprocess
+
+_DIR_SOURCE_ROOT = str(pathlib.Path(__file__).absolute().parents[2])
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ # Hide args set by wrappers so that using --help with the wrappers does not
+ # show them.
+ parser.add_argument('--subdir', required=True, help=argparse.SUPPRESS)
+ parser.add_argument('--cipd-package', required=True, help=argparse.SUPPRESS)
+ parser.add_argument('--git-log-url', help=argparse.SUPPRESS)
+ parser.add_argument('--cipd-instance', help='Uses value from DEPS by default')
+ args = parser.parse_args()
+
+ if not args.cipd_instance:
+ args.cipd_instance = subprocess.check_output(
+ ['gclient', 'getdep', '-r', f'src/{args.subdir}:{args.cipd_package}'],
+ cwd=_DIR_SOURCE_ROOT,
+ text=True)
+
+ cmd = ['cipd', 'describe', args.cipd_package, '-version', args.cipd_instance]
+ print(' '.join(cmd))
+ output = subprocess.check_output(cmd, text=True)
+ print(output, end='')
+ if args.git_log_url:
+ git_hashes = re.findall(r'version:.*?@(\w+)', output)
+ if not git_hashes:
+ print('Could not find git hash from output.')
+ else:
+ # Multiple version tags exist when multiple versions have the same sha1.
+ last_version = git_hashes[-1]
+ print()
+ print('Recent commits:', args.git_log_url.format(last_version))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py
index 8a1610eb7..2c61c383f 100644
--- a/build/android/pylib/base/base_test_result.py
+++ b/build/android/pylib/base/base_test_result.py
@@ -82,6 +82,10 @@ class BaseTestResult:
"""Get the test name."""
return self._name
+ def GetNameForResultSink(self):
+ """Get the test name to be reported to resultsink."""
+ return self._name
+
def SetType(self, test_type):
"""Set the test result type."""
assert test_type in ResultType.GetTypes()
@@ -127,6 +131,10 @@ class BaseTestResult:
"""Get dict containing links to test result data."""
return self._links
+ def GetVariantForResultSink(self): # pylint: disable=no-self-use
+ """Get the variant dict to be reported to result sink."""
+ return None
+
class TestRunResults:
"""Set of results for a test run."""
diff --git a/build/android/pylib/base/output_manager.py b/build/android/pylib/base/output_manager.py
index 89819c6d1..f562be85f 100644
--- a/build/android/pylib/base/output_manager.py
+++ b/build/android/pylib/base/output_manager.py
@@ -51,26 +51,38 @@ class OutputManager:
if not self._allow_upload:
raise Exception('Must run |SetUp| before attempting to upload!')
- f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
+ f = self.CreateArchivedFile(out_filename, out_subdir, datatype)
try:
yield f
finally:
- f.PrepareArchive()
+ self.ArchiveArchivedFile(f, delete=True)
- def archive():
- try:
- f.Archive()
- finally:
- f.Delete()
-
- thread = reraiser_thread.ReraiserThread(func=archive)
- thread.start()
- self._thread_group.Add(thread)
+ def CreateArchivedFile(self, out_filename, out_subdir,
+ datatype=Datatype.TEXT):
+ """Returns an instance of ArchivedFile."""
+ return self._CreateArchivedFile(out_filename, out_subdir, datatype)
def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
- """Returns an instance of ArchivedFile."""
raise NotImplementedError
+ def ArchiveArchivedFile(self, archived_file, delete=False):
+ """Archive an ArchivedFile instance and optionally delete it."""
+ if not isinstance(archived_file, ArchivedFile):
+ raise Exception('Expecting an instance of ArchivedFile, got %s.' %
+ type(archived_file))
+ archived_file.PrepareArchive()
+
+ def archive():
+ try:
+ archived_file.Archive()
+ finally:
+ if delete:
+ archived_file.Delete()
+
+ thread = reraiser_thread.ReraiserThread(func=archive)
+ thread.start()
+ self._thread_group.Add(thread)
+
def SetUp(self):
self._allow_upload = True
self._thread_group = reraiser_thread.ReraiserThreadGroup()
@@ -105,6 +117,12 @@ class ArchivedFile:
def name(self):
return self._f.name
+ def fileno(self, *args, **kwargs):
+ if self._ready_to_archive:
+ raise Exception('Cannot retrieve the integer file descriptor '
+ 'after archiving has begun!')
+ return self._f.fileno(*args, **kwargs)
+
def write(self, *args, **kwargs):
if self._ready_to_archive:
raise Exception('Cannot write to file after archiving has begun!')
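
With CreateArchivedFile() and ArchiveArchivedFile() now public, a caller can keep an archived file open across a long-running operation and upload it afterwards. A rough usage sketch, assuming `manager` is a concrete OutputManager subclass on which SetUp() has already been called; everything besides the method names shown in the patch is illustrative.

def write_and_archive(manager, text):
  """Illustrative only: create, fill, and upload an archived log file."""
  out_file = manager.CreateArchivedFile('example_log', 'logs')
  out_file.write(text)
  # PrepareArchive() runs synchronously inside ArchiveArchivedFile(); the
  # upload (and deletion, when delete=True) happens on a background thread.
  manager.ArchiveArchivedFile(out_file, delete=True)
  return out_file.Link()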
diff --git a/build/android/pylib/constants/__init__.py b/build/android/pylib/constants/__init__.py
index b6542eee3..cf57d9fe9 100644
--- a/build/android/pylib/constants/__init__.py
+++ b/build/android/pylib/constants/__init__.py
@@ -25,6 +25,7 @@ keyevent = devil.android.sdk.keyevent
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, os.pardir)))
+JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
PACKAGE_INFO.update({
@@ -78,6 +79,11 @@ PACKAGE_INFO.update({
chrome.PackageInfo('org.chromium.webview_ui_test',
'org.chromium.webview_ui_test.WebViewUiTestActivity',
'webview-command-line', None),
+ 'weblayer_browsertests':
+ chrome.PackageInfo(
+ 'org.chromium.weblayer_browsertests_apk',
+ 'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity',
+ 'chrome-native-tests-command-line', None),
})
diff --git a/build/android/pylib/gtest/gtest_test_instance.py b/build/android/pylib/gtest/gtest_test_instance.py
index 1ddad2b52..a62e3e4e8 100644
--- a/build/android/pylib/gtest/gtest_test_instance.py
+++ b/build/android/pylib/gtest/gtest_test_instance.py
@@ -31,6 +31,7 @@ BROWSER_TEST_SUITES = [
'android_sync_integration_tests',
'components_browsertests',
'content_browsertests',
+ 'weblayer_browsertests',
]
# The max number of tests to run on a shard during the test run.
diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py
index e7893bfdf..52c04b83a 100644
--- a/build/android/pylib/instrumentation/test_result.py
+++ b/build/android/pylib/instrumentation/test_result.py
@@ -5,6 +5,9 @@
from pylib.base import base_test_result
+# This must match the source adding the suffix: bit.ly/3Zmwwyx
+_MULTIPROCESS_SUFFIX = '__multiprocess_mode'
+
class InstrumentationTestResult(base_test_result.BaseTestResult):
"""Result information for a single instrumentation test."""
@@ -27,6 +30,24 @@ class InstrumentationTestResult(base_test_result.BaseTestResult):
self._class_name = full_name
self._test_name = full_name
+ self._webview_multiprocess_mode = full_name.endswith(_MULTIPROCESS_SUFFIX)
+
def SetDuration(self, duration):
"""Set the test duration."""
self._duration = duration
+
+ def GetNameForResultSink(self):
+ """Get the test name to be reported to resultsink."""
+ raw_name = self.GetName()
+ if self._webview_multiprocess_mode:
+ assert raw_name.endswith(
+ _MULTIPROCESS_SUFFIX
+ ), 'multiprocess mode test raw name should have the corresponding suffix'
+ return raw_name[:-len(_MULTIPROCESS_SUFFIX)]
+ return raw_name
+
+ def GetVariantForResultSink(self):
+ """Get the variant dict to be reported to resultsink."""
+ if self._webview_multiprocess_mode:
+ return {'webview_multiprocess_mode': 'Yes'}
+    return None
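
The suffix handling above reduces to a few lines; the test name below is invented, and only the suffix string matches the patch.

_MULTIPROCESS_SUFFIX = '__multiprocess_mode'

def name_and_variant_for_result_sink(full_name):
  """Strip the multiprocess suffix and report it as a variant instead."""
  if full_name.endswith(_MULTIPROCESS_SUFFIX):
    return (full_name[:-len(_MULTIPROCESS_SUFFIX)],
            {'webview_multiprocess_mode': 'Yes'})
  return full_name, None

print(name_and_variant_for_result_sink(
    'org.chromium.Example#testFoo__multiprocess_mode'))
# ('org.chromium.Example#testFoo', {'webview_multiprocess_mode': 'Yes'})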
diff --git a/build/android/pylib/local/device/local_device_environment.py b/build/android/pylib/local/device/local_device_environment.py
index 4fe1a4fa4..a51f370b2 100644
--- a/build/android/pylib/local/device/local_device_environment.py
+++ b/build/android/pylib/local/device/local_device_environment.py
@@ -34,6 +34,8 @@ LOGCAT_FILTERS = [
'StrictMode:D',
]
+SYSTEM_USER_ID = 0
+
def _DeviceCachePath(device):
file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
@@ -134,6 +136,7 @@ class LocalDeviceEnvironment(environment.Environment):
if hasattr(args, 'trace_all'):
self._trace_all = args.trace_all
self._use_persistent_shell = args.use_persistent_shell
+ self._disable_test_server = args.disable_test_server
devil_chromium.Initialize(
output_directory=constants.GetOutDirectory(),
@@ -182,6 +185,12 @@ class LocalDeviceEnvironment(environment.Environment):
@handle_shard_failures_with(on_failure=self.DenylistDevice)
def prepare_device(d):
d.WaitUntilFullyBooted()
+ if d.GetCurrentUser() != SYSTEM_USER_ID:
+        # Use the system user to run tasks to avoid "/sdcard" access issues
+        # caused by multiple users. For details, see
+ # https://source.android.com/docs/devices/admin/multi-user-testing
+ logging.info('Switching to user with id %s', SYSTEM_USER_ID)
+ d.SwitchUser(SYSTEM_USER_ID)
if self._enable_device_cache:
cache_path = _DeviceCachePath(d)
@@ -256,6 +265,10 @@ class LocalDeviceEnvironment(environment.Environment):
def trace_output(self):
return self._trace_output
+ @property
+ def disable_test_server(self):
+ return self._disable_test_server
+
#override
def TearDown(self):
if self.trace_output and self._trace_all:
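
A condensed sketch of the per-device preparation added in prepare_device() above, assuming `device` is a devil DeviceUtils instance exposing the same GetCurrentUser()/SwitchUser() calls used by the patch.

import logging

SYSTEM_USER_ID = 0

def ensure_system_user(device):
  device.WaitUntilFullyBooted()
  if device.GetCurrentUser() != SYSTEM_USER_ID:
    # A secondary user can break /sdcard access; see
    # https://source.android.com/docs/devices/admin/multi-user-testing
    logging.info('Switching to user with id %s', SYSTEM_USER_ID)
    device.SwitchUser(SYSTEM_USER_ID)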
diff --git a/build/android/pylib/local/device/local_device_gtest_run.py b/build/android/pylib/local/device/local_device_gtest_run.py
index 7f8f50fe8..796f614d7 100644
--- a/build/android/pylib/local/device/local_device_gtest_run.py
+++ b/build/android/pylib/local/device/local_device_gtest_run.py
@@ -529,6 +529,10 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
tool.CopyFiles(dev)
tool.SetupEnvironment()
+ if self._env.disable_test_server:
+ logging.warning('Not starting test server. Some tests may fail.')
+ return
+
try:
# See https://crbug.com/1030827.
# This is a hack that may break in the future. We're relying on the
diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
index e1982d1c2..0bbfbe1b7 100644
--- a/build/android/pylib/local/device/local_device_instrumentation_test_run.py
+++ b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -229,7 +229,7 @@ class LocalDeviceInstrumentationTestRun(
# manually invoke its __enter__ and __exit__ methods in setup and
# teardown.
system_app_context = system_app.ReplaceSystemApp(
- dev, replacement_apk=self._test_instance.replace_system_package)
+ dev, self._test_instance.replace_system_package)
# Pylint is not smart enough to realize that this field has
# an __enter__ method, and will complain loudly.
# pylint: disable=no-member
diff --git a/build/android/pylib/local/emulator/avd.py b/build/android/pylib/local/emulator/avd.py
index 0b588dec4..62db9b591 100644
--- a/build/android/pylib/local/emulator/avd.py
+++ b/build/android/pylib/local/emulator/avd.py
@@ -12,6 +12,7 @@ import socket
import stat
import subprocess
import threading
+import time
from google.protobuf import text_format # pylint: disable=import-error
@@ -31,7 +32,12 @@ from pylib.local.emulator.proto import avd_pb2
# the emulator instance, e.g. emulator binary, system images, AVDs.
COMMON_CIPD_ROOT = os.path.join(constants.DIR_SOURCE_ROOT, '.android_emulator')
-_ALL_PACKAGES = object()
+# Packages that are needed for runtime.
+_PACKAGES_RUNTIME = object()
+# Packages that are needed during AVD creation.
+_PACKAGES_CREATION = object()
+# All the packages that could exist in the AVD config file.
+_PACKAGES_ALL = object()
# These files are used as backing files for corresponding qcow2 images.
_BACKING_FILES = ('system.img', 'vendor.img')
@@ -163,6 +169,8 @@ class _AvdManagerAgent:
self._avd_home,
'AVDMANAGER_OPTS':
'-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir,
+ 'JAVA_HOME':
+ constants.JAVA_HOME,
})
def Create(self, avd_name, system_image, force=False):
@@ -224,6 +232,22 @@ class _AvdManagerAgent:
# pylint: disable=W0707
raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd)
+ def List(self):
+    """List the names of existing AVDs."""
+ list_cmd = [
+ _DEFAULT_AVDMANAGER_PATH,
+ '-v',
+ 'list',
+ 'avd',
+ '-c',
+ ]
+ output = cmd_helper.GetCmdOutput(list_cmd, env=self._env)
+ return output.splitlines()
+
+ def IsAvailable(self, avd_name):
+ """Check if an AVD exists or not."""
+ return avd_name in self.List()
+
class AvdConfig:
"""Represents a particular AVD configuration.
@@ -242,44 +266,110 @@ class AvdConfig:
self.avd_proto_path = avd_proto_path
self._config = _Load(avd_proto_path)
- self._emulator_home = os.path.join(COMMON_CIPD_ROOT,
- self._config.avd_package.dest_path)
- self._emulator_sdk_root = os.path.join(
- COMMON_CIPD_ROOT, self._config.emulator_package.dest_path)
- self._emulator_path = os.path.join(self._emulator_sdk_root, 'emulator',
- 'emulator')
- self._qemu_img_path = os.path.join(self._emulator_sdk_root, 'emulator',
- 'qemu-img')
-
self._initialized = False
self._initializer_lock = threading.Lock()
@property
+ def emulator_home(self):
+ """User-specific emulator configuration directory.
+
+ It corresponds to the environment variable $ANDROID_EMULATOR_HOME.
+ Configs like advancedFeatures.ini are expected to be under this dir.
+ """
+ return os.path.join(COMMON_CIPD_ROOT, self._config.avd_package.dest_path)
+
+ @property
+ def emulator_sdk_root(self):
+ """The path to the SDK installation directory.
+
+ It corresponds to the environment variable $ANDROID_HOME.
+
+    To be a valid sdk root, it must contain the subdirectories "platforms"
+    and "platform-tools". See http://bit.ly/2YAkyFE for context.
+
+    It also needs the subdirectories "emulator" and "system-images".
+ """
+ emulator_sdk_root = os.path.join(COMMON_CIPD_ROOT,
+ self._config.emulator_package.dest_path)
+ # Ensure this is a valid sdk root.
+ required_dirs = [
+ os.path.join(emulator_sdk_root, 'platforms'),
+ os.path.join(emulator_sdk_root, 'platform-tools'),
+ ]
+ for d in required_dirs:
+ if not os.path.exists(d):
+ os.makedirs(d)
+
+ return emulator_sdk_root
+
+ @property
+ def emulator_path(self):
+ """The path to the emulator binary."""
+ return os.path.join(self.emulator_sdk_root, 'emulator', 'emulator')
+
+ @property
+ def qemu_img_path(self):
+ """The path to the qemu-img binary.
+
+ This is used to rebase the paths in qcow2 images.
+ """
+ return os.path.join(self.emulator_sdk_root, 'emulator', 'qemu-img')
+
+ @property
+ def mksdcard_path(self):
+ """The path to the mksdcard binary.
+
+    This is used to create an sdcard image.
+ """
+ return os.path.join(self.emulator_sdk_root, 'emulator', 'mksdcard')
+
+ @property
def avd_settings(self):
+ """The AvdSettings in the avd proto file.
+
+ This defines how to configure the AVD at creation.
+ """
return self._config.avd_settings
@property
def avd_name(self):
+ """The name of the AVD to create or use."""
return self._config.avd_name
@property
- def _avd_home(self):
- return os.path.join(self._emulator_home, 'avd')
+ def avd_home(self):
+ """The path that contains the files of one or multiple AVDs."""
+ avd_home = os.path.join(self.emulator_home, 'avd')
+ if not os.path.exists(avd_home):
+ os.makedirs(avd_home)
+
+ return avd_home
@property
def _avd_dir(self):
- return os.path.join(self._avd_home, '%s.avd' % self._config.avd_name)
+ """The path that contains the files of the given AVD."""
+ return os.path.join(self.avd_home, '%s.avd' % self.avd_name)
@property
def _system_image_dir(self):
+ """The path of the directory that directly contains the system images.
+
+ For example, if the system_image_name is
+ "system-images;android-33;google_apis;x86_64"
+
+ The _system_image_dir will be:
+ <COMMON_CIPD_ROOT>/<dest_path>/system-images/android-33/google_apis/x86_64
+
+ This is used to rebase the paths in qcow2 images.
+ """
return os.path.join(COMMON_CIPD_ROOT,
self._config.system_image_package.dest_path,
*self._config.system_image_name.split(';'))
@property
def _root_ini_path(self):
- """The <avd_name>.ini file."""
- return os.path.join(self._avd_home, '%s.ini' % self._config.avd_name)
+ """The <avd_name>.ini file of the given AVD."""
+ return os.path.join(self.avd_home, '%s.ini' % self.avd_name)
@property
def _config_ini_path(self):
@@ -288,7 +378,35 @@ class AvdConfig:
@property
def _features_ini_path(self):
- return os.path.join(self._emulator_home, 'advancedFeatures.ini')
+ return os.path.join(self.emulator_home, 'advancedFeatures.ini')
+
+ @property
+ def xdg_config_dir(self):
+ """The base directory to store qt config file.
+
+ This dir should be added to the env variable $XDG_CONFIG_DIRS so that
+ _qt_config_path can take effect. See https://bit.ly/3HIQRZ3 for context.
+ """
+ config_dir = os.path.join(self.emulator_home, '.config')
+ if not os.path.exists(config_dir):
+ os.makedirs(config_dir)
+
+ return config_dir
+
+ @property
+ def _qt_config_path(self):
+ """The qt config file for emulator."""
+ qt_config_dir = os.path.join(self.xdg_config_dir,
+ 'Android Open Source Project')
+ if not os.path.exists(qt_config_dir):
+ os.makedirs(qt_config_dir)
+
+ return os.path.join(qt_config_dir, 'Emulator.conf')
+
+ def HasSnapshot(self, snapshot_name):
+ """Check if a given snapshot exists or not."""
+ snapshot_path = os.path.join(self._avd_dir, 'snapshots', snapshot_name)
+ return os.path.exists(snapshot_path)
def Create(self,
force=False,
@@ -328,23 +446,13 @@ class AvdConfig:
after creating the AVD.
"""
logging.info('Installing required packages.')
- self._InstallCipdPackages(packages=[
- self._config.emulator_package,
- self._config.system_image_package,
- *self._config.privileged_apk,
- *self._config.additional_apk,
- ])
-
- android_avd_home = self._avd_home
+ self._InstallCipdPackages(_PACKAGES_CREATION)
- if not os.path.exists(android_avd_home):
- os.makedirs(android_avd_home)
-
- avd_manager = _AvdManagerAgent(avd_home=android_avd_home,
- sdk_root=self._emulator_sdk_root)
+ avd_manager = _AvdManagerAgent(avd_home=self.avd_home,
+ sdk_root=self.emulator_sdk_root)
logging.info('Creating AVD.')
- avd_manager.Create(avd_name=self._config.avd_name,
+ avd_manager.Create(avd_name=self.avd_name,
system_image=self._config.system_image_name,
force=force)
@@ -353,7 +461,7 @@ class AvdConfig:
# Clear out any previous configuration or state from this AVD.
with ini.update_ini_file(self._root_ini_path) as r_ini_contents:
- r_ini_contents['path.rel'] = 'avd/%s.avd' % self._config.avd_name
+ r_ini_contents['path.rel'] = 'avd/%s.avd' % self.avd_name
with ini.update_ini_file(self._features_ini_path) as f_ini_contents:
# features_ini file will not be refreshed by avdmanager during
@@ -386,10 +494,8 @@ class AvdConfig:
config_ini_contents['hw.sdCard'] = 'yes'
if self.avd_settings.sdcard.size:
sdcard_path = os.path.join(self._avd_dir, _SDCARD_NAME)
- mksdcard_path = os.path.join(os.path.dirname(self._emulator_path),
- 'mksdcard')
cmd_helper.RunCmd([
- mksdcard_path,
+ self.mksdcard_path,
self.avd_settings.sdcard.size,
sdcard_path,
])
@@ -420,8 +526,7 @@ class AvdConfig:
# Start & stop the AVD.
self._Initialize()
- instance = _AvdInstance(self._emulator_path, self._emulator_home,
- self._config)
+ instance = _AvdInstance(self)
# Enable debug for snapshot when it is set to True
debug_tags = 'time,init,snapshot' if snapshot else None
# Installing privileged apks requires modifying the system
@@ -483,17 +588,17 @@ class AvdConfig:
'package':
self._config.avd_package.package_name,
'root':
- self._emulator_home,
+ self.emulator_home,
'install_mode':
'copy',
'data': [{
- 'dir': os.path.relpath(self._avd_dir, self._emulator_home)
+ 'dir': os.path.relpath(self._avd_dir, self.emulator_home)
}, {
'file':
- os.path.relpath(self._root_ini_path, self._emulator_home)
+ os.path.relpath(self._root_ini_path, self.emulator_home)
}, {
'file':
- os.path.relpath(self._features_ini_path, self._emulator_home)
+ os.path.relpath(self._features_ini_path, self.emulator_home)
}],
}
@@ -537,14 +642,17 @@ class AvdConfig:
finally:
if not keep:
logging.info('Deleting AVD.')
- avd_manager.Delete(avd_name=self._config.avd_name)
+ avd_manager.Delete(avd_name=self.avd_name)
- def IsAvailable(self, packages=_ALL_PACKAGES):
+ def IsAvailable(self):
"""Returns whether emulator is up-to-date."""
if not os.path.exists(self._config_ini_path):
return False
- for cipd_root, pkgs in self._IterVersionedCipdPackages(packages):
+    # Skip packages with no version to prevent "IsAvailable()" from returning
+    # False for emulators set up using Create() (rather than Install()).
+ for cipd_root, pkgs in self._IterCipdPackages(_PACKAGES_RUNTIME,
+ check_version=False):
stdout = subprocess.run(['cipd', 'installed', '--root', cipd_root],
capture_output=True,
check=False,
@@ -559,7 +667,50 @@ class AvdConfig:
return False
return True
- def Install(self, packages=_ALL_PACKAGES):
+ def Uninstall(self):
+ """Uninstall all the artifacts associated with the given config.
+
+    Artifacts include:
+ - CIPD packages specified in the avd config.
+ - The local AVD created by `Create`, if present.
+
+ """
+    # Delete any existing local AVD. This must happen before deleting CIPD
+    # packages because an AVD needs a system image to be seen by avdmanager.
+ avd_manager = _AvdManagerAgent(avd_home=self.avd_home,
+ sdk_root=self.emulator_sdk_root)
+ if avd_manager.IsAvailable(self.avd_name):
+ logging.info('Deleting local AVD %s', self.avd_name)
+ avd_manager.Delete(self.avd_name)
+
+ # Delete installed CIPD packages.
+ for cipd_root, _ in self._IterCipdPackages(_PACKAGES_ALL,
+ check_version=False):
+ logging.info('Uninstalling packages in %s', cipd_root)
+ if not os.path.exists(cipd_root):
+ continue
+      # Create an empty ensure file to remove any installed CIPD packages.
+ ensure_path = os.path.join(cipd_root, '.ensure')
+ with open(ensure_path, 'w') as ensure_file:
+ ensure_file.write('$ParanoidMode CheckIntegrity\n\n')
+ ensure_cmd = [
+ 'cipd',
+ 'ensure',
+ '-ensure-file',
+ ensure_path,
+ '-root',
+ cipd_root,
+ ]
+ try:
+ for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+ logging.info(' %s', line)
+ except subprocess.CalledProcessError as e:
+ # avd.py is executed with python2.
+ # pylint: disable=W0707
+ raise AvdException('Failed to uninstall CIPD packages: %s' % str(e),
+ command=ensure_cmd)
+
+ def Install(self):
"""Installs the requested CIPD packages and prepares them for use.
This includes making files writeable and revising some of the
@@ -568,7 +719,7 @@ class AvdConfig:
Returns: None
Raises: AvdException on failure to install.
"""
- self._InstallCipdPackages(packages=packages)
+ self._InstallCipdPackages(_PACKAGES_RUNTIME)
self._MakeWriteable()
self._UpdateConfigs()
self._RebaseQcow2Images()
@@ -588,7 +739,7 @@ class AvdConfig:
logging.info('Rebasing the qcow2 image %r with the backing file %r',
qcow2_image_path, backing_file_path)
cmd_helper.RunCmd([
- self._qemu_img_path,
+ self.qemu_img_path,
'rebase',
'-u',
'-f',
@@ -599,9 +750,21 @@ class AvdConfig:
qcow2_image_path,
])
- def _IterVersionedCipdPackages(self, packages):
- pkgs_by_dir = collections.defaultdict(list)
- if packages is _ALL_PACKAGES:
+ def _ListPackages(self, packages):
+ if packages is _PACKAGES_RUNTIME:
+ packages = [
+ self._config.avd_package,
+ self._config.emulator_package,
+ self._config.system_image_package,
+ ]
+ elif packages is _PACKAGES_CREATION:
+ packages = [
+ self._config.emulator_package,
+ self._config.system_image_package,
+ *self._config.privileged_apk,
+ *self._config.additional_apk,
+ ]
+ elif packages is _PACKAGES_ALL:
packages = [
self._config.avd_package,
self._config.emulator_package,
@@ -609,18 +772,30 @@ class AvdConfig:
*self._config.privileged_apk,
*self._config.additional_apk,
]
- for pkg in packages:
- # Skip when no version exists to prevent "IsAvailable()" returning False
- # for emualtors set up using Create() (rather than Install()).
+ return packages
+
+ def _IterCipdPackages(self, packages, check_version=True):
+ """Iterate a list of CIPD packages by their CIPD roots.
+
+ Args:
+ packages: a list of packages from an AVD config.
+ check_version: If set, raise Exception when a package has no version.
+ """
+ pkgs_by_dir = collections.defaultdict(list)
+ for pkg in self._ListPackages(packages):
if pkg.version:
pkgs_by_dir[pkg.dest_path].append(pkg)
+ elif check_version:
+ raise AvdException('Expecting a version for the package %s' %
+ pkg.package_name)
for pkg_dir, pkgs in pkgs_by_dir.items():
cipd_root = os.path.join(COMMON_CIPD_ROOT, pkg_dir)
yield cipd_root, pkgs
- def _InstallCipdPackages(self, packages):
- for cipd_root, pkgs in self._IterVersionedCipdPackages(packages):
+ def _InstallCipdPackages(self, packages, check_version=True):
+ for cipd_root, pkgs in self._IterCipdPackages(packages,
+ check_version=check_version):
logging.info('Installing packages in %s', cipd_root)
if not os.path.exists(cipd_root):
os.makedirs(cipd_root)
@@ -651,7 +826,7 @@ class AvdConfig:
def _MakeWriteable(self):
# The emulator requires that some files are writable.
- for dirname, _, filenames in os.walk(self._emulator_home):
+ for dirname, _, filenames in os.walk(self.emulator_home):
for f in filenames:
path = os.path.join(dirname, f)
mode = os.lstat(path).st_mode
@@ -668,16 +843,17 @@ class AvdConfig:
* Emulator instance can be booted correctly.
* The snapshot can be loaded successfully.
"""
+ logging.info('Updating AVD configurations.')
# Update the absolute avd path in root_ini file
with ini.update_ini_file(self._root_ini_path) as r_ini_contents:
r_ini_contents['path'] = self._avd_dir
# Update hardware settings.
- config_files = [self._config_ini_path]
+ config_paths = [self._config_ini_path]
# The file hardware.ini within each snapshot need to be updated as well.
hw_ini_glob_pattern = os.path.join(self._avd_dir, 'snapshots', '*',
'hardware.ini')
- config_files.extend(glob.glob(hw_ini_glob_pattern))
+ config_paths.extend(glob.glob(hw_ini_glob_pattern))
properties = {}
# Update hw.sdCard.path if applicable
@@ -685,10 +861,14 @@ class AvdConfig:
if os.path.exists(sdcard_path):
properties['hw.sdCard.path'] = sdcard_path
- for config_file in config_files:
- with ini.update_ini_file(config_file) as config_contents:
+ for config_path in config_paths:
+ with ini.update_ini_file(config_path) as config_contents:
config_contents.update(properties)
+ # Create qt config file to disable adb warning when launched in window mode.
+ with ini.update_ini_file(self._qt_config_path) as config_contents:
+ config_contents['set'] = {'autoFindAdb': 'false'}
+
def _Initialize(self):
if self._initialized:
return
@@ -700,25 +880,17 @@ class AvdConfig:
# Emulator start-up looks for the adb daemon. Make sure it's running.
adb_wrapper.AdbWrapper.StartServer()
- # Emulator start-up tries to check for the SDK root by looking for
- # platforms/ and platform-tools/. Ensure they exist.
- # See http://bit.ly/2YAkyFE for context.
- required_dirs = [
- os.path.join(self._emulator_sdk_root, 'platforms'),
- os.path.join(self._emulator_sdk_root, 'platform-tools'),
- ]
- for d in required_dirs:
- if not os.path.exists(d):
- os.makedirs(d)
+ # Emulator start-up requires a valid sdk root.
+ assert self.emulator_sdk_root
- def CreateInstance(self):
+ def CreateInstance(self, output_manager=None):
"""Creates an AVD instance without starting it.
Returns:
An _AvdInstance.
"""
self._Initialize()
- return _AvdInstance(self._emulator_path, self._emulator_home, self._config)
+ return _AvdInstance(self, output_manager=output_manager)
def StartInstance(self):
"""Starts an AVD instance.
@@ -738,24 +910,26 @@ class _AvdInstance:
but its other methods can be freely called.
"""
- def __init__(self, emulator_path, emulator_home, avd_config):
+ def __init__(self, avd_config, output_manager=None):
"""Create an _AvdInstance object.
Args:
- emulator_path: path to the emulator binary.
- emulator_home: path to the emulator home directory.
- avd_config: AVD config proto.
+ avd_config: an AvdConfig instance.
+ output_manager: a pylib.base.output_manager.OutputManager instance.
"""
self._avd_config = avd_config
self._avd_name = avd_config.avd_name
- self._emulator_home = emulator_home
- self._emulator_path = emulator_path
+ self._emulator_home = avd_config.emulator_home
+ self._emulator_path = avd_config.emulator_path
self._emulator_proc = None
self._emulator_serial = None
self._emulator_device = None
- self._sink = None
+
+ self._output_manager = output_manager
+ self._output_file = None
self._writable_system = False
+ self._debug_tags = None
def __str__(self):
return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self)))
@@ -824,14 +998,21 @@ class _AvdInstance:
if gpu_mode:
emulator_cmd.extend(['-gpu', gpu_mode])
if debug_tags:
- emulator_cmd.extend(['-debug', debug_tags])
+ self._debug_tags = set(debug_tags.split(','))
+ # Always print timestamp when debug tags are set.
+ self._debug_tags.add('time')
+ emulator_cmd.extend(['-debug', ','.join(self._debug_tags)])
+ if 'kernel' in self._debug_tags:
+ # TODO(crbug.com/1404176): newer API levels need "-virtio-console"
+ # as well to print kernel log.
+ emulator_cmd.append('-show-kernel')
emulator_env = {
# kill immediately when emulator hang.
'ANDROID_EMULATOR_WAIT_TIME_BEFORE_KILL': '0',
+ # Sets the emulator configuration directory
+ 'ANDROID_EMULATOR_HOME': self._emulator_home,
}
- if self._emulator_home:
- emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home
if 'DISPLAY' in os.environ:
emulator_env['DISPLAY'] = os.environ.get('DISPLAY')
if window:
@@ -840,6 +1021,12 @@ class _AvdInstance:
else:
emulator_cmd.append('-no-window')
+ # Need this for the qt config file to take effect.
+ xdg_config_dirs = [self._avd_config.xdg_config_dir]
+ if 'XDG_CONFIG_DIRS' in os.environ:
+ xdg_config_dirs.append(os.environ.get('XDG_CONFIG_DIRS'))
+ emulator_env['XDG_CONFIG_DIRS'] = ':'.join(xdg_config_dirs)
+
sock.listen(1)
logging.info('Starting emulator...')
@@ -848,14 +1035,18 @@ class _AvdInstance:
' '.join(['%s=%s' % (k, v) for k, v in emulator_env.items()]))
logging.info(' With commands: %s', ' '.join(emulator_cmd))
- # TODO(jbudorick): Add support for logging emulator stdout & stderr at
- # higher logging levels.
# Enable the emulator log when debug_tags is set.
- if not debug_tags:
- self._sink = open('/dev/null', 'w')
+ if self._debug_tags:
+ # Write to an ArchivedFile if output manager is set, otherwise stdout.
+ if self._output_manager:
+ self._output_file = self._output_manager.CreateArchivedFile(
+ 'emulator_%s' % time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ 'emulator')
+ else:
+ self._output_file = open('/dev/null', 'w')
self._emulator_proc = cmd_helper.Popen(emulator_cmd,
- stdout=self._sink,
- stderr=self._sink,
+ stdout=self._output_file,
+ stderr=self._output_file,
env=emulator_env)
# Waits for the emulator to report its serial as requested via
@@ -894,6 +1085,19 @@ class _AvdInstance:
When "force" is True, we will call "terminate" on the emulator process,
which is recommended when emulator is not responding to adb commands.
"""
+ # Close output file first in case emulator process killing goes wrong.
+ if self._output_file:
+ if self._debug_tags:
+ if self._output_manager:
+ self._output_manager.ArchiveArchivedFile(self._output_file,
+ delete=True)
+ link = self._output_file.Link()
+ if link:
+ logging.critical('Emulator logs saved to %s', link)
+ else:
+ self._output_file.close()
+ self._output_file = None
+
if self._emulator_proc:
if self._emulator_proc.poll() is None:
if force or not self.device:
@@ -905,10 +1109,6 @@ class _AvdInstance:
self._emulator_serial = None
self._emulator_device = None
- if self._sink:
- self._sink.close()
- self._sink = None
-
def GetSnapshotName(self):
"""Return the snapshot name to load/save.
@@ -923,10 +1123,7 @@ class _AvdInstance:
def HasSystemSnapshot(self):
"""Check if the instance has the snapshot named _SYSTEM_SNAPSHOT_NAME."""
- snapshot_path = os.path.join(self._emulator_home, 'avd',
- '%s.avd' % self._avd_name, 'snapshots',
- _SYSTEM_SNAPSHOT_NAME)
- return os.path.exists(snapshot_path)
+ return self._avd_config.HasSnapshot(_SYSTEM_SNAPSHOT_NAME)
def SaveSnapshot(self):
snapshot_name = self.GetSnapshotName()
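
The core of _IterCipdPackages() above is grouping packages that share a dest_path into a single CIPD root. A standalone sketch of that grouping; the Pkg tuple and the root path below stand in for the proto message and COMMON_CIPD_ROOT used by the real code.

import collections
import os

Pkg = collections.namedtuple('Pkg', ['package_name', 'version', 'dest_path'])
COMMON_CIPD_ROOT = '/tmp/.android_emulator'  # Illustrative path.

def iter_cipd_packages(packages, check_version=True):
  pkgs_by_dir = collections.defaultdict(list)
  for pkg in packages:
    if pkg.version:
      pkgs_by_dir[pkg.dest_path].append(pkg)
    elif check_version:
      raise ValueError('Expecting a version for the package %s' %
                       pkg.package_name)
  for pkg_dir, pkgs in pkgs_by_dir.items():
    yield os.path.join(COMMON_CIPD_ROOT, pkg_dir), pkgs

packages = [
    Pkg('emulator/linux-amd64', 'abc123', 'emulator'),
    Pkg('system_image/x86_64', 'def456', 'emulator'),
    Pkg('avd/generic', None, 'avd'),
]
for cipd_root, pkgs in iter_cipd_packages(packages, check_version=False):
  print(cipd_root, [p.package_name for p in pkgs])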
diff --git a/build/android/pylib/local/emulator/ini.py b/build/android/pylib/local/emulator/ini.py
index 396874a20..79eb01580 100644
--- a/build/android/pylib/local/emulator/ini.py
+++ b/build/android/pylib/local/emulator/ini.py
@@ -2,22 +2,59 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Basic .ini encoding and decoding."""
+"""Basic .ini encoding and decoding.
+
+The basic element in an ini file is the key. Every key consists of a name and
+a value, delimited by an equals sign (=).
+
+Keys may be grouped into sections. The section name is a line by itself,
+in square brackets ([ and ]). All keys after the section are associated with
+that section until another section occurs.
+
+Keys that are not under any section are considered at the top level.
+
+Section and key names are case sensitive.
+"""
import contextlib
import os
+def add_key(line, config, strict=True):
+ key, val = line.split('=', 1)
+ key = key.strip()
+ val = val.strip()
+ if strict and key in config:
+ raise ValueError('Multiple entries present for key "%s"' % key)
+ config[key] = val
+
+
def loads(ini_str, strict=True):
+  """Deserialize ini_str to a dict (a nested dict when sections are present).
+
+  Duplicated sections will merge their keys.
+
+  When there are multiple entries for a key, either at the top level or under
+  the same section:
+ - If strict is true, ValueError will be raised.
+ - If strict is false, only the last occurrence will be stored.
+ """
ret = {}
+ section = None
for line in ini_str.splitlines():
- key, val = line.split('=', 1)
- key = key.strip()
- val = val.strip()
- if strict and key in ret:
- raise ValueError('Multiple entries present for key "%s"' % key)
- ret[key] = val
+ # Empty line
+ if not line:
+ continue
+ # Section line
+ if line[0] == '[' and line[-1] == ']':
+ section = line[1:-1]
+ if section not in ret:
+ ret[section] = {}
+ # Key line
+ else:
+ config = ret if section is None else ret[section]
+ add_key(line, config, strict=strict)
return ret
@@ -27,10 +64,20 @@ def load(fp):
def dumps(obj):
- ret = ''
+ results = []
+ key_str = ''
+
for k, v in sorted(obj.items()):
- ret += '%s = %s\n' % (k, str(v))
- return ret
+ if isinstance(v, dict):
+ results.append('[%s]\n' % k + dumps(v))
+ else:
+ key_str += '%s = %s\n' % (k, str(v))
+
+ # Insert key_str at the first position, before any sections
+ if key_str:
+ results.insert(0, key_str)
+
+ return '\n'.join(results)
def dump(obj, fp):
@@ -46,11 +93,10 @@ def update_ini_file(ini_file_path):
Yields:
The contents of the file, as a dict
"""
+ ini_contents = {}
if os.path.exists(ini_file_path):
with open(ini_file_path) as ini_file:
ini_contents = load(ini_file)
- else:
- ini_contents = {}
yield ini_contents
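
A quick round trip through the sectioned .ini support added above, assuming pylib is on sys.path so the module imports as pylib.local.emulator.ini; the keys are made up.

from pylib.local.emulator import ini

text = (
    'hw.sdCard = yes\n'
    '\n'
    '[set]\n'
    'autoFindAdb = false\n'
)

parsed = ini.loads(text)
# {'hw.sdCard': 'yes', 'set': {'autoFindAdb': 'false'}}

# dumps() writes top-level keys first, then sections, both sorted by name.
print(ini.dumps(parsed))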
diff --git a/build/android/pylib/local/emulator/ini_test.py b/build/android/pylib/local/emulator/ini_test.py
index 752a8357a..327d6bf72 100755
--- a/build/android/pylib/local/emulator/ini_test.py
+++ b/build/android/pylib/local/emulator/ini_test.py
@@ -5,9 +5,13 @@
"""Tests for ini.py."""
+import os
+import sys
import textwrap
import unittest
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')))
from pylib.local.emulator import ini
@@ -17,16 +21,47 @@ class IniTest(unittest.TestCase):
foo.bar = 1
foo.baz= example
bar.bad =/path/to/thing
+
+ [section_1]
+ foo.bar = 1
+ foo.baz= example
+
+ [section_2]
+ foo.baz= example
+ bar.bad =/path/to/thing
+
+ [section_1]
+ bar.bad =/path/to/thing
""")
expected = {
'foo.bar': '1',
'foo.baz': 'example',
'bar.bad': '/path/to/thing',
+ 'section_1': {
+ 'foo.bar': '1',
+ 'foo.baz': 'example',
+ 'bar.bad': '/path/to/thing',
+ },
+ 'section_2': {
+ 'foo.baz': 'example',
+ 'bar.bad': '/path/to/thing',
+ },
}
self.assertEqual(expected, ini.loads(ini_str))
- def testLoadsStrictFailure(self):
+ def testLoadsDuplicatedKeysStrictFailure(self):
+ ini_str = textwrap.dedent("""\
+ foo.bar = 1
+ foo.baz = example
+ bar.bad = /path/to/thing
+ foo.bar = duplicate
+ """)
+ with self.assertRaises(ValueError):
+ ini.loads(ini_str, strict=True)
+
+ def testLoadsDuplicatedKeysInSectionStrictFailure(self):
ini_str = textwrap.dedent("""\
+ [section_1]
foo.bar = 1
foo.baz = example
bar.bad = /path/to/thing
@@ -41,11 +76,22 @@ class IniTest(unittest.TestCase):
foo.baz = example
bar.bad = /path/to/thing
foo.bar = duplicate
+
+ [section_1]
+ foo.bar = 1
+ foo.baz = example
+ bar.bad = /path/to/thing
+ foo.bar = duplicate
""")
expected = {
'foo.bar': 'duplicate',
'foo.baz': 'example',
'bar.bad': '/path/to/thing',
+ 'section_1': {
+ 'foo.bar': 'duplicate',
+ 'foo.baz': 'example',
+ 'bar.bad': '/path/to/thing',
+ },
}
self.assertEqual(expected, ini.loads(ini_str, strict=False))
@@ -54,13 +100,53 @@ class IniTest(unittest.TestCase):
'foo.bar': '1',
'foo.baz': 'example',
'bar.bad': '/path/to/thing',
+ 'section_2': {
+ 'foo.baz': 'example',
+ 'bar.bad': '/path/to/thing',
+ },
+ 'section_1': {
+ 'foo.bar': '1',
+ 'foo.baz': 'example',
+ },
}
# ini.dumps is expected to dump to string alphabetically
- # by key.
+ # by key and section name.
expected = textwrap.dedent("""\
bar.bad = /path/to/thing
foo.bar = 1
foo.baz = example
+
+ [section_1]
+ foo.bar = 1
+ foo.baz = example
+
+ [section_2]
+ bar.bad = /path/to/thing
+ foo.baz = example
+ """)
+ self.assertEqual(expected, ini.dumps(ini_contents))
+
+ def testDumpsSections(self):
+ ini_contents = {
+ 'section_2': {
+ 'foo.baz': 'example',
+ 'bar.bad': '/path/to/thing',
+ },
+ 'section_1': {
+ 'foo.bar': '1',
+ 'foo.baz': 'example',
+ },
+ }
+ # ini.dumps is expected to dump to string alphabetically
+ # by key first, and then by section and the associated keys
+ expected = textwrap.dedent("""\
+ [section_1]
+ foo.bar = 1
+ foo.baz = example
+
+ [section_2]
+ bar.bad = /path/to/thing
+ foo.baz = example
""")
self.assertEqual(expected, ini.dumps(ini_contents))
diff --git a/build/android/pylib/local/emulator/local_emulator_environment.py b/build/android/pylib/local/emulator/local_emulator_environment.py
index 730be3c93..d71a38277 100644
--- a/build/android/pylib/local/emulator/local_emulator_environment.py
+++ b/build/android/pylib/local/emulator/local_emulator_environment.py
@@ -33,6 +33,7 @@ class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
logging.warning('--emulator-count capped at 16.')
self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
self._emulator_window = args.emulator_window
+ self._emulator_debug_tags = args.emulator_debug_tags
self._writable_system = ((hasattr(args, 'use_webview_provider')
and args.use_webview_provider)
or (hasattr(args, 'replace_system_package')
@@ -48,7 +49,8 @@ class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
self._avd_config.Install()
emulator_instances = [
- self._avd_config.CreateInstance() for _ in range(self._emulator_count)
+ self._avd_config.CreateInstance(output_manager=self.output_manager)
+ for _ in range(self._emulator_count)
]
def start_emulator_instance(inst):
@@ -61,6 +63,7 @@ class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
try:
inst.Start(window=self._emulator_window,
writable_system=self._writable_system,
+ debug_tags=self._emulator_debug_tags,
require_fast_start=True)
except avd.AvdException:
logging.exception('Failed to start emulator instance.')
diff --git a/build/android/pylib/local/machine/local_machine_junit_test_run.py b/build/android/pylib/local/machine/local_machine_junit_test_run.py
index b33ae9ba1..5336e6fe0 100644
--- a/build/android/pylib/local/machine/local_machine_junit_test_run.py
+++ b/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -112,10 +112,12 @@ class LocalMachineJunitTestRun(test_run.TestRun):
os.makedirs(self._test_instance.coverage_dir)
elif not os.path.isdir(self._test_instance.coverage_dir):
raise Exception('--coverage-dir takes a directory, not file path.')
+ # Jacoco supports concurrent processes using the same output file:
+ # https://github.com/jacoco/jacoco/blob/6cd3f0bd8e348f8fba7bffec5225407151f1cc91/org.jacoco.agent.rt/src/org/jacoco/agent/rt/internal/output/FileOutput.java#L67
+ # So no need to vary the output based on shard number.
+ jacoco_coverage_file = os.path.join(self._test_instance.coverage_dir,
+ '%s.exec' % self._test_instance.suite)
if self._test_instance.coverage_on_the_fly:
- jacoco_coverage_file = os.path.join(
- self._test_instance.coverage_dir,
- '%s.exec' % self._test_instance.suite)
jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
'third_party', 'jacoco', 'lib',
'jacocoagent.jar')
@@ -125,9 +127,7 @@ class LocalMachineJunitTestRun(test_run.TestRun):
jvm_args.append(
jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
else:
- jvm_args.append('-Djacoco-agent.destfile=%s' %
- os.path.join(self._test_instance.coverage_dir,
- '%s.exec' % self._test_instance.suite))
+ jvm_args.append('-Djacoco-agent.destfile=%s' % jacoco_coverage_file)
return jvm_args
@@ -287,6 +287,18 @@ def GroupTestsForShard(num_of_shards, test_classes):
return test_dict
+def _DumpJavaStacks(pid):
+ jcmd = os.path.join(constants.JAVA_HOME, 'bin', 'jcmd')
+ cmd = [jcmd, str(pid), 'Thread.print']
+ result = subprocess.run(cmd,
+ check=False,
+ stdout=subprocess.PIPE,
+ encoding='utf8')
+ if result.returncode:
+ return 'Failed to dump stacks\n' + result.stdout
+ return result.stdout
+
+
def _RunCommandsAndSerializeOutput(cmd_list):
"""Runs multiple commands in parallel and yields serialized output lines.
@@ -321,8 +333,7 @@ def _RunCommandsAndSerializeOutput(cmd_list):
stderr=temp_file,
))
- timeout_time = time.time() + _SHARD_TIMEOUT
- timed_out = False
+ deadline = time.time() + _SHARD_TIMEOUT
yield '\n'
yield 'Shard 0 output:\n'
@@ -330,39 +341,41 @@ def _RunCommandsAndSerializeOutput(cmd_list):
# The following will be run from a thread to pump Shard 0 results, allowing
# live output while allowing timeout.
def pump_stream_to_queue(f, q):
- try:
- for line in iter(f.readline, ''):
- q.put(line)
- except ValueError: # Triggered if |f.close()| gets called.
- pass
+ for line in f:
+ q.put(line)
+ q.put(None)
shard_0_q = queue.Queue()
shard_0_pump = threading.Thread(target=pump_stream_to_queue,
args=(procs[0].stdout, shard_0_q))
shard_0_pump.start()
- # Wait for processes to finish, while forwarding Shard 0 results.
- shard_to_check = 0
- while shard_to_check < num_shards:
- if shard_0_pump.is_alive():
- while not shard_0_q.empty():
- yield shard_0_q.get_nowait()
- if procs[shard_to_check].poll() is not None:
- shard_to_check += 1
- else:
- time.sleep(.1)
- if time.time() > timeout_time:
- timed_out = True
- break
-
- # Handle Shard 0 timeout.
- if shard_0_pump.is_alive():
- procs[0].stdout.close()
- shard_0_pump.join()
+ timeout_dumps = {}
+
+ # Print the first process until timeout or completion.
+ while shard_0_pump.is_alive():
+ try:
+ line = shard_0_q.get(timeout=deadline - time.time())
+ if line is None:
+ break
+ yield line
+ except queue.Empty:
+ if time.time() > deadline:
+ break
+
+ # Wait for remaining processes to finish.
+ for i, proc in enumerate(procs):
+ try:
+ proc.wait(timeout=deadline - time.time())
+ except subprocess.TimeoutExpired:
+ timeout_dumps[i] = _DumpJavaStacks(proc.pid)
+ proc.kill()
- # Emit all output (possibly incomplete due to |time_out|) in shard order.
+ # Output any remaining output from a timed-out first shard.
+ shard_0_pump.join()
while not shard_0_q.empty():
- yield shard_0_q.get_nowait()
+ yield shard_0_q.get()
+
for i in range(1, num_shards):
f = temp_files[i]
yield '\n'
@@ -372,15 +385,18 @@ def _RunCommandsAndSerializeOutput(cmd_list):
yield line
f.close()
- # Handle Shard 1+ timeout.
- if timed_out:
- for i, p in enumerate(procs):
- if p.poll() is None:
- p.kill()
- yield 'Index of timed out shard: %d\n' % i
-
- yield 'Output in shards may be cutoff due to timeout.\n'
+ # Output stacks
+ if timeout_dumps:
yield '\n'
+ yield ('=' * 80) + '\n'
+    yield '\nOne or more shards timed out.\n'
+ yield ('=' * 80) + '\n'
+ for i, dump in timeout_dumps.items():
+ yield 'Index of timed out shard: %d\n' % i
+ yield 'Thread dump:\n'
+ yield dump
+ yield '\n'
+
raise cmd_helper.TimeoutError('Junit shards timed out.')
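
The shard-0 streaming above boils down to one thread pumping a process's stdout into a queue (with None as an end-of-stream sentinel) while the consumer reads against a deadline, so a hung shard cannot block output forever. A stripped-down, runnable sketch of that pattern; the command and timeout are placeholders.

import queue
import subprocess
import threading
import time

def stream_with_deadline(cmd, timeout_s):
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, text=True)
  out_q = queue.Queue()

  def pump(stream):
    for line in stream:
      out_q.put(line)
    out_q.put(None)  # Sentinel: the stream has ended.

  threading.Thread(target=pump, args=(proc.stdout,), daemon=True).start()

  deadline = time.time() + timeout_s
  while True:
    try:
      line = out_q.get(timeout=max(0, deadline - time.time()))
    except queue.Empty:
      break  # Deadline reached with no pending output.
    if line is None:
      break
    yield line

  if proc.poll() is None:
    proc.kill()
  proc.wait()

for line in stream_with_deadline(['echo', 'hello'], timeout_s=5):
  print(line, end='')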
diff --git a/build/android/pylib/results/presentation/standard_gtest_merge.py b/build/android/pylib/results/presentation/standard_gtest_merge.py
index 7412b54d3..ab1074e26 100755
--- a/build/android/pylib/results/presentation/standard_gtest_merge.py
+++ b/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -4,7 +4,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import json
diff --git a/build/android/pylib/results/presentation/test_results_presentation.pydeps b/build/android/pylib/results/presentation/test_results_presentation.pydeps
index 994096491..031e1793b 100644
--- a/build/android/pylib/results/presentation/test_results_presentation.pydeps
+++ b/build/android/pylib/results/presentation/test_results_presentation.pydeps
@@ -13,8 +13,8 @@
../../../../../third_party/catapult/devil/devil/utils/__init__.py
../../../../../third_party/catapult/devil/devil/utils/cmd_helper.py
../../../../../third_party/jinja2/__init__.py
-../../../../../third_party/jinja2/_compat.py
../../../../../third_party/jinja2/_identifier.py
+../../../../../third_party/jinja2/async_utils.py
../../../../../third_party/jinja2/bccache.py
../../../../../third_party/jinja2/compiler.py
../../../../../third_party/jinja2/defaults.py
diff --git a/build/android/pylib/results/report_results.py b/build/android/pylib/results/report_results.py
index c152f6df6..de19860bd 100644
--- a/build/android/pylib/results/report_results.py
+++ b/build/android/pylib/results/report_results.py
@@ -4,7 +4,6 @@
"""Module containing utility functions for reporting results."""
-from __future__ import print_function
import logging
import os
diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
index 88b8e9829..431f387f8 100755
--- a/build/android/pylib/symbols/mock_addr2line/mock_addr2line
+++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -9,7 +9,6 @@ Outputs mock symbol information, with each symbol being a function of the
original address (so it is easy to double-check consistency in unittests).
"""
-from __future__ import print_function
import optparse
import os
diff --git a/build/android/pylib/utils/device_dependencies.py b/build/android/pylib/utils/device_dependencies.py
index d370f600e..8625d0e6d 100644
--- a/build/android/pylib/utils/device_dependencies.py
+++ b/build/android/pylib/utils/device_dependencies.py
@@ -7,16 +7,15 @@ import re
from pylib import constants
-
_EXCLUSIONS = [
re.compile(r'.*OWNERS'), # Should never be included.
re.compile(r'.*\.crx'), # Chrome extension zip files.
- re.compile(os.path.join('.*',
- r'\.git.*')), # Any '.git*' directories/files.
+ re.compile(r'.*/\.git.*'), # Any '.git*' directories/files.
re.compile(r'.*\.so'), # Libraries packed into .apk.
re.compile(r'.*Mojo.*manifest\.json'), # Some source_set()s pull these in.
re.compile(r'.*\.py'), # Some test_support targets include python deps.
re.compile(r'.*\.apk'), # Should be installed separately.
+ re.compile(r'.*\.jar'), # Never need java intermediates.
re.compile(r'.*lib.java/.*'), # Never need java intermediates.
# Test filter files:
@@ -30,21 +29,27 @@ _EXCLUSIONS = [
# v8's blobs and icu data get packaged into APKs.
re.compile(r'.*snapshot_blob.*\.bin'),
- re.compile(r'.*icudtl.bin'),
+ re.compile(r'.*icudtl\.bin'),
# Scripts that are needed by swarming, but not on devices:
re.compile(r'.*llvm-symbolizer'),
- re.compile(r'.*md5sum_bin'),
- re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+ re.compile(r'.*md5sum_(?:bin|dist)'),
+ re.compile(r'.*/development/scripts/stack'),
+ re.compile(r'.*/build/android/pylib/symbols'),
+ re.compile(r'.*/build/android/stacktrace'),
# Required for java deobfuscation on the host:
re.compile(r'.*build/android/stacktrace/.*'),
re.compile(r'.*third_party/jdk/.*'),
re.compile(r'.*third_party/proguard/.*'),
+ # Our tests don't need these.
+ re.compile(r'.*/devtools-frontend/src/front_end/.*'),
+
# Build artifacts:
re.compile(r'.*\.stamp'),
- re.compile(r'.*.pak\.info'),
+ re.compile(r'.*\.pak\.info'),
+ re.compile(r'.*\.build_config.json'),
re.compile(r'.*\.incremental\.json'),
]
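
As a small illustration of how these patterns are meant to be used, any dependency path matched by one of the compiled regexes is dropped from the files pushed to the device. The matching call and the paths below are illustrative, not copied from the filtering code.

import re

_EXCLUSIONS = [
    re.compile(r'.*\.so'),      # Libraries packed into .apk.
    re.compile(r'.*\.jar'),     # Never need java intermediates.
    re.compile(r'.*/\.git.*'),  # Any '.git*' directories/files.
]

paths = [
    'out/Debug/lib.java/base.jar',
    'out/Debug/libbase.so',
    'src/.git/config',
    'out/Debug/test_data/input.json',
]

kept = [p for p in paths if not any(r.match(p) for r in _EXCLUSIONS)]
print(kept)  # ['out/Debug/test_data/input.json']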
diff --git a/build/android/resource_sizes.py b/build/android/resource_sizes.py
index 53b6e33dd..22303a9a0 100755
--- a/build/android/resource_sizes.py
+++ b/build/android/resource_sizes.py
@@ -8,7 +8,6 @@
More information at //docs/speed/binary_size/metrics.md.
"""
-from __future__ import print_function
import argparse
import collections
diff --git a/build/android/stacktrace/stackwalker.py b/build/android/stacktrace/stackwalker.py
index d1d9f6eaf..ad60e9982 100755
--- a/build/android/stacktrace/stackwalker.py
+++ b/build/android/stacktrace/stackwalker.py
@@ -4,7 +4,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import os
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
index 0584535c0..34b8debd4 100755
--- a/build/android/test_runner.py
+++ b/build/android/test_runner.py
@@ -199,6 +199,7 @@ def AddCommonOptions(parser):
namespace.local_output = True
namespace.num_retries = 0
namespace.skip_clear_data = True
+ namespace.use_persistent_shell = True
parser.add_argument(
'--fast-local-dev',
@@ -207,7 +208,7 @@ def AddCommonOptions(parser):
action=FastLocalDevAction,
help='Alias for: --num-retries=0 --enable-device-cache '
'--enable-concurrent-adb --skip-clear-data '
- '--extract-test-list-from-filter --local-output')
+ '--extract-test-list-from-filter --use-persistent-shell --local-output')
# TODO(jbudorick): Remove this once downstream bots have switched to
# api.test_results.
@@ -236,6 +237,18 @@ def AddCommonOptions(parser):
dest='repeat', type=int, default=0,
help='Number of times to repeat the specified set of tests.')
+ # Not useful for junit tests.
+ parser.add_argument(
+ '--use-persistent-shell',
+ action='store_true',
+ help='Uses a persistent shell connection for the adb connection.')
+
+ parser.add_argument('--disable-test-server',
+ action='store_true',
+                      help='Disables SpawnedTestServer, which does not work '
+                      'with remote adb. '
+ 'WARNING: Will break tests which require the server.')
+
# This is currently only implemented for gtests and instrumentation tests.
parser.add_argument(
'--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
@@ -315,10 +328,6 @@ def AddDeviceOptions(parser):
action='store_true',
dest='upload_logcats_file',
help='Whether to upload logcat file to logdog.')
- parser.add_argument(
- '--use-persistent-shell',
- action='store_true',
- help='Uses a persistent shell connection for the adb connection.')
logcat_output_group = parser.add_mutually_exclusive_group()
logcat_output_group.add_argument(
@@ -351,6 +360,12 @@ def AddEmulatorOptions(parser):
action='store_true',
default=False,
help='Enable graphical window display on the emulator.')
+ parser.add_argument(
+ '--emulator-debug-tags',
+ help='Comma-separated list of debug tags. This can be used to enable or '
+ 'disable debug messages from specific parts of the emulator, e.g. '
+ 'init,snapshot. See "emulator -help-debug-tags" '
+ 'for a full list of tags.')
def AddGTestOptions(parser):
@@ -912,11 +927,12 @@ def _SinkTestResult(test_result, test_file_name, result_sink_client):
link_url, test_result.GetName())
if https_artifacts:
html_artifact += '<ul>%s</ul>' % '\n'.join(https_artifacts)
- result_sink_client.Post(test_result.GetName(),
+ result_sink_client.Post(test_result.GetNameForResultSink(),
test_result.GetType(),
test_result.GetDuration(),
log_decoded.encode('utf-8'),
test_file_name,
+ variant=test_result.GetVariantForResultSink(),
failure_reason=test_result.GetFailureReason(),
html_artifact=html_artifact)
diff --git a/build/android/test_runner.pydeps b/build/android/test_runner.pydeps
index 572b3de48..f81b0d497 100644
--- a/build/android/test_runner.pydeps
+++ b/build/android/test_runner.pydeps
@@ -104,8 +104,8 @@
../../third_party/colorama/src/colorama/win32.py
../../third_party/colorama/src/colorama/winterm.py
../../third_party/jinja2/__init__.py
-../../third_party/jinja2/_compat.py
../../third_party/jinja2/_identifier.py
+../../third_party/jinja2/async_utils.py
../../third_party/jinja2/bccache.py
../../third_party/jinja2/compiler.py
../../third_party/jinja2/defaults.py
@@ -130,6 +130,7 @@
../../third_party/markupsafe/__init__.py
../../third_party/markupsafe/_compat.py
../../third_party/markupsafe/_native.py
+../action_helpers.py
../gn_helpers.py
../print_python_deps.py
../skia_gold_common/__init__.py
@@ -142,12 +143,12 @@
../util/lib/results/__init__.py
../util/lib/results/result_sink.py
../util/lib/results/result_types.py
+../zip_helpers.py
devil_chromium.py
gyp/dex.py
gyp/util/__init__.py
gyp/util/build_utils.py
gyp/util/md5_check.py
-gyp/util/zipalign.py
incremental_install/__init__.py
incremental_install/installer.py
pylib/__init__.py
diff --git a/build/apple/tweak_info_plist.py b/build/apple/tweak_info_plist.py
index 136fff6f3..8aa28b002 100755
--- a/build/apple/tweak_info_plist.py
+++ b/build/apple/tweak_info_plist.py
@@ -20,7 +20,6 @@
# by the time the app target is done, the info.plist is correct.
#
-from __future__ import print_function
import optparse
import os
diff --git a/build/args/README.txt b/build/args/README.txt
index 825bf64c6..b82fb04ab 100644
--- a/build/args/README.txt
+++ b/build/args/README.txt
@@ -1,10 +1,6 @@
This directory is here to hold .gni files that contain sets of GN build
arguments for given configurations.
-(Currently this directory is empty because we removed the only thing here, but
-this has come up several times so I'm confident we'll need this again. If this
-directory is still empty by 2017, feel free to delete it. --Brett)
-
Some projects or bots may have build configurations with specific combinations
of flags. Rather than making a new global flag for your specific project and
adding it all over the build to each arg it should affect, you can add a .gni
diff --git a/build/args/headless.gn b/build/args/headless.gn
index fa52b43bf..8834eb1bb 100644
--- a/build/args/headless.gn
+++ b/build/args/headless.gn
@@ -17,6 +17,9 @@ angle_enable_swiftshader = true
# Embed resource.pak into binary to simplify deployment.
headless_use_embedded_resources = true
+# Disable headless commands support.
+headless_enable_commands = false
+
# Don't use Prefs component, disabling access to Local State prefs.
headless_use_prefs = false
diff --git a/build/build_config.h b/build/build_config.h
index 6811872a2..6db5d9bca 100644
--- a/build/build_config.h
+++ b/build/build_config.h
@@ -26,6 +26,9 @@
// This file also adds defines specific to the platform, architecture etc.
//
+// Platform:
+// IS_OZONE
+//
// Compiler:
// COMPILER_MSVC / COMPILER_GCC
//
diff --git a/build/check_gn_headers_whitelist.txt b/build/check_gn_headers_whitelist.txt
index e46a59814..dfefd7d24 100644
--- a/build/check_gn_headers_whitelist.txt
+++ b/build/check_gn_headers_whitelist.txt
@@ -20,7 +20,6 @@ cc/layers/performance_properties.h
chrome/browser/android/android_theme_resources.h
chrome/browser/android/resource_id.h
chrome/browser/ash/login/signin/oauth2_login_manager.h
-chrome/browser/ash/login/signin/oauth2_login_verifier.h
chrome/browser/ash/login/signin/oauth2_token_fetcher.h
chrome/browser/ash/profiles/profile_helper.h
chrome/browser/ash/settings/cros_settings.h
@@ -54,8 +53,6 @@ chrome/install_static/install_modes.h
chrome/install_static/install_util.h
chrome/install_static/test/scoped_install_details.h
chrome/installer/util/google_update_settings.h
-components/browser_watcher/features.h
-components/browser_watcher/stability_paths.h
components/cdm/browser/cdm_message_filter_android.h
components/device_event_log/device_event_log_export.h
components/login/login_export.h
diff --git a/build/check_return_value.py b/build/check_return_value.py
index 56b5fe7ee..2337e962d 100755
--- a/build/check_return_value.py
+++ b/build/check_return_value.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -6,7 +6,6 @@
"""This program wraps an arbitrary command and prints "1" if the command ran
successfully."""
-from __future__ import print_function
import os
import subprocess
diff --git a/build/chromeos/.style.yapf b/build/chromeos/.style.yapf
index de0c6a70f..fdd07237c 100644
--- a/build/chromeos/.style.yapf
+++ b/build/chromeos/.style.yapf
@@ -1,2 +1,2 @@
[style]
-based_on_style = chromium
+based_on_style = yapf
diff --git a/build/chromeos/test_runner.py b/build/chromeos/test_runner.py
index 89e369162..14c31e1c4 100755
--- a/build/chromeos/test_runner.py
+++ b/build/chromeos/test_runner.py
@@ -33,7 +33,8 @@ from pylib.base import base_test_result # pylint: disable=import-error
from pylib.results import json_results # pylint: disable=import-error
sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'util'))
-from lib.results import result_sink # pylint: disable=import-error
+# TODO(crbug.com/1421441): Re-enable the 'no-name-in-module' check.
+from lib.results import result_sink # pylint: disable=import-error,no-name-in-module
assert not six.PY2, 'Py2 not supported for this file.'
@@ -246,6 +247,7 @@ class TastTest(RemoteTest):
self._suite_name = args.suite_name
self._tast_vars = args.tast_vars
+ self._tast_retries = args.tast_retries
self._tests = args.tests
# The CQ passes in '--gtest_filter' when specifying tests to skip. Store it
# here and parse it later to integrate it into Tast executions.
@@ -338,6 +340,9 @@ class TastTest(RemoteTest):
for v in self._tast_vars or []:
self._test_cmd.extend(['--tast-var', v])
+ if self._tast_retries:
+ self._test_cmd.append('--tast-retries=%d' % self._tast_retries)
+
# Mounting ash-chrome gives it enough disk space to not need stripping,
# but only for one not instrumented with code coverage.
# Lacros uses --nostrip by default, so there is no need to specify.
@@ -568,7 +573,7 @@ class GTestTest(RemoteTest):
if self._trace_dir:
device_test_script_contents.extend([
'rm -rf %s' % device_trace_dir,
- 'su chronos -c -- "mkdir -p %s"' % device_trace_dir,
+ 'sudo -E -u chronos -- /bin/bash -c "mkdir -p %s"' % device_trace_dir,
])
test_invocation += ' --trace-dir=%s' % device_trace_dir
@@ -582,7 +587,8 @@ class GTestTest(RemoteTest):
# The UI service on the device owns the chronos user session, so shutting
# it down as chronos kills the entire execution of the test. So we'll have
# to run as root up until the test invocation.
- test_invocation = 'su chronos -c -- "%s"' % test_invocation
+ test_invocation = (
+ 'sudo -E -u chronos -- /bin/bash -c "%s"' % test_invocation)
# And we'll need to chown everything since cros_run_test's "--as-chronos"
# option normally does that for us.
device_test_script_contents.append('chown -R chronos: ../..')
@@ -918,6 +924,11 @@ def main():
help='Runtime variables for Tast tests, and the format are expected to '
'be "key=value" pairs.')
tast_test_parser.add_argument(
+ '--tast-retries',
+ type=int,
+ dest='tast_retries',
+ help='Number of retries for failed Tast tests on the same DUT.')
+ tast_test_parser.add_argument(
'--test',
'-t',
action='append',
@@ -932,6 +943,13 @@ def main():
add_common_args(gtest_parser, tast_test_parser, host_cmd_parser)
args, unknown_args = parser.parse_known_args()
+ # Re-add N-1 -v/--verbose flags to the args we'll pass to whatever we are
+  # running. The assumption is that only one verbosity increase would be meant
+ # for this script since it's a boolean value instead of increasing verbosity
+ # with more instances.
+ verbose_flags = [a for a in sys.argv if a in ('-v', '--verbose')]
+ if verbose_flags:
+ unknown_args += verbose_flags[1:]
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
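
The verbosity pass-through added above keeps the first -v/--verbose for this wrapper's own logging and forwards any additional ones to the wrapped command. A tiny illustration with a made-up argv:

argv = ['test_runner.py', 'gtest', '-v', '--verbose', '-v']

verbose_flags = [a for a in argv if a in ('-v', '--verbose')]
forwarded = verbose_flags[1:]  # N-1 of the N verbosity flags.
print(forwarded)  # ['--verbose', '-v']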
diff --git a/build/chromeos/test_runner_test.py b/build/chromeos/test_runner_test.py
index bb454eac6..c61c7a443 100755
--- a/build/chromeos/test_runner_test.py
+++ b/build/chromeos/test_runner_test.py
@@ -196,6 +196,29 @@ class TastTests(TestRunnerTest):
[True],
[False],
])
+ def test_tast_retries(self, use_vm):
+ """Tests running a tast tests with retries."""
+ with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+ json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+ args = self.get_common_tast_args(use_vm) + [
+ '-t=login.Chrome',
+ '--tast-retries=1',
+ ]
+ with mock.patch.object(sys, 'argv', args),\
+ mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+ mock_popen.return_value.returncode = 0
+ test_runner.main()
+ expected_cmd = self.get_common_tast_expectations(use_vm) + [
+ '--tast', 'login.Chrome', '--tast-retries=1'
+ ]
+
+ self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+ @parameterized.expand([
+ [True],
+ [False],
+ ])
def test_tast(self, use_vm):
"""Tests running a tast tests."""
with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
diff --git a/build/clobber.py b/build/clobber.py
index 411bb8dc8..e886737dc 100755
--- a/build/clobber.py
+++ b/build/clobber.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -22,36 +22,43 @@ def extract_gn_build_commands(build_ninja_file):
On error, returns the empty string."""
result = ""
with open(build_ninja_file, 'r') as f:
- # Read until the third blank line. The first thing GN writes to the file
- # is "ninja_required_version = x.y.z", then the "rule gn" and the third
- # is the section for "build build.ninja", separated by blank lines.
- num_blank_lines = 0
- while num_blank_lines < 3:
- line = f.readline()
- if len(line) == 0:
- return '' # Unexpected EOF.
+ # Reads until the first empty line after the "build build.ninja:" target.
+    # We assume everything before it is necessary as well (e.g. the
+ # "ninja_required_version" line).
+ found_build_dot_ninja_target = False
+ for line in f.readlines():
result += line
- if line[0] == '\n':
- num_blank_lines = num_blank_lines + 1
- return result
+ if line.startswith('build build.ninja:'):
+ found_build_dot_ninja_target = True
+ if found_build_dot_ninja_target and line[0] == '\n':
+ return result
+ return '' # We got to EOF and didn't find what we were looking for.
-def delete_dir(build_dir):
- if os.path.islink(build_dir):
- return
+def _rmtree(d):
# For unknown reasons (anti-virus?) rmtree of Chromium build directories
# often fails on Windows.
if sys.platform.startswith('win'):
- subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+ subprocess.check_call(['rmdir', '/s', '/q', d], shell=True)
else:
- shutil.rmtree(build_dir)
+ shutil.rmtree(d)
+
+
+def _clean_dir(build_dir):
+  # Remove files/subdirectories individually instead of recreating the build
+ # dir because it fails when the build dir is symlinked or mounted.
+ for e in os.scandir(build_dir):
+ if e.is_dir():
+ _rmtree(e.path)
+ else:
+ os.remove(e.path)
def delete_build_dir(build_dir):
# GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
if not os.path.exists(build_ninja_d_file):
- delete_dir(build_dir)
+ _clean_dir(build_dir)
return
# GN builds aren't automatically regenerated when you sync. To avoid
@@ -68,15 +75,16 @@ def delete_build_dir(build_dir):
except IOError:
args_contents = ''
- e = None
+ exception_during_rm = None
try:
- # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+ # _clean_dir() may fail, such as when chrome.exe is running,
# and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
# the exception and rethrow it later.
- delete_dir(build_dir)
- os.mkdir(build_dir)
+ # We manually rm files inside the build dir rather than using "gn clean/gen"
+ # since we may not have run all necessary DEPS hooks yet at this point.
+ _clean_dir(build_dir)
except Exception as e:
- pass
+ exception_during_rm = e
# Put back the args file (if any).
if args_contents != '':
@@ -105,9 +113,10 @@ build build.ninja: gn
with open(build_ninja_d_file, 'w') as f:
f.write('build.ninja: nonexistant_file.gn\n')
- if e:
+ if exception_during_rm:
# Rethrow the exception we caught earlier.
- raise e
+ raise exception_during_rm
+
def clobber(out_dir):
"""Clobber contents of build directory.
diff --git a/build/clobber_unittest.py b/build/clobber_unittest.py
new file mode 100755
index 000000000..d38c447b1
--- /dev/null
+++ b/build/clobber_unittest.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import pathlib
+import shutil
+import sys
+import tempfile
+import textwrap
+import unittest
+from unittest import mock
+
+import clobber
+
+
+class TestExtractBuildCommand(unittest.TestCase):
+ def setUp(self):
+ self.build_ninja_file, self.build_ninja_path = tempfile.mkstemp(text=True)
+
+ def tearDown(self):
+ os.close(self.build_ninja_file)
+ os.remove(self.build_ninja_path)
+
+ def test_normal_extraction(self):
+ build_ninja_file_contents = textwrap.dedent("""
+ ninja_required_version = 1.7.2
+
+ rule gn
+ command = ../../buildtools/gn --root=../.. -q --regeneration gen .
+ pool = console
+ description = Regenerating ninja files
+
+ build build.ninja.stamp: gn
+ generator = 1
+ depfile = build.ninja.d
+
+ build build.ninja: phony build.ninja.stamp
+ generator = 1
+
+ pool build_toolchain_action_pool
+ depth = 72
+
+ pool build_toolchain_link_pool
+ depth = 23
+
+ subninja toolchain.ninja
+ subninja clang_newlib_x64/toolchain.ninja
+ subninja glibc_x64/toolchain.ninja
+ subninja irt_x64/toolchain.ninja
+ subninja nacl_bootstrap_x64/toolchain.ninja
+ subninja newlib_pnacl/toolchain.ninja
+
+ build blink_python_tests: phony obj/blink_python_tests.stamp
+ build blink_tests: phony obj/blink_tests.stamp
+
+ default all
+ """) # Based off of a standard linux build dir.
+ with open(self.build_ninja_path, 'w') as f:
+ f.write(build_ninja_file_contents)
+
+ expected_build_ninja_file_contents = textwrap.dedent("""
+ ninja_required_version = 1.7.2
+
+ rule gn
+ command = ../../buildtools/gn --root=../.. -q --regeneration gen .
+ pool = console
+ description = Regenerating ninja files
+
+ build build.ninja.stamp: gn
+ generator = 1
+ depfile = build.ninja.d
+
+ build build.ninja: phony build.ninja.stamp
+ generator = 1
+
+ """)
+
+ self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path),
+ expected_build_ninja_file_contents)
+
+ def test_unexpected_format(self):
+ # No "build build.ninja:" line should make it return an empty string.
+ build_ninja_file_contents = textwrap.dedent("""
+ ninja_required_version = 1.7.2
+
+ rule gn
+ command = ../../buildtools/gn --root=../.. -q --regeneration gen .
+ pool = console
+ description = Regenerating ninja files
+
+ subninja toolchain.ninja
+
+ build blink_python_tests: phony obj/blink_python_tests.stamp
+ build blink_tests: phony obj/blink_tests.stamp
+
+ """)
+ with open(self.build_ninja_path, 'w') as f:
+ f.write(build_ninja_file_contents)
+
+ self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path),
+ '')
+
+
+class TestDelete(unittest.TestCase):
+ def setUp(self):
+ self.build_dir = tempfile.mkdtemp()
+
+ pathlib.Path(os.path.join(self.build_dir, 'build.ninja')).touch()
+ pathlib.Path(os.path.join(self.build_dir, 'build.ninja.d')).touch()
+
+ def tearDown(self):
+ shutil.rmtree(self.build_dir)
+
+ def test_delete_build_dir_full(self):
+ # Create a dummy file in the build dir and ensure it gets removed.
+ dummy_file = os.path.join(self.build_dir, 'dummy')
+ pathlib.Path(dummy_file).touch()
+
+ clobber.delete_build_dir(self.build_dir)
+
+ self.assertFalse(os.path.exists(dummy_file))
+
+ def test_delete_build_dir_fail(self):
+    # Make _clean_dir() throw to ensure it's handled gracefully.
+
+ with mock.patch('clobber._clean_dir', side_effect=OSError):
+ with self.assertRaises(OSError):
+ clobber.delete_build_dir(self.build_dir)
+
+  @unittest.skipIf(sys.platform == 'win32',
+                   'Symlinks are not allowed on Windows by default')
+ def test_delete_build_dir_link(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ # create a symlink.
+ build_dir = os.path.join(tmpdir, 'link')
+ os.symlink(self.build_dir, build_dir)
+
+ # create a dummy file.
+ dummy_file = os.path.join(build_dir, 'dummy')
+ pathlib.Path(dummy_file).touch()
+ clobber.delete_build_dir(build_dir)
+
+ self.assertFalse(os.path.exists(dummy_file))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/build/compute_build_timestamp.py b/build/compute_build_timestamp.py
index ca1cc44da..befe84456 100755
--- a/build/compute_build_timestamp.py
+++ b/build/compute_build_timestamp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -27,7 +27,6 @@ Either way, it is guaranteed to be in the past and always in UTC.
# the symbol server, so rarely changing timestamps can cause conflicts there
# as well. We only upload symbols for official builds to the symbol server.
-from __future__ import print_function
import argparse
import calendar
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
index fceb89892..980b88965 100644
--- a/build/config/BUILD.gn
+++ b/build/config/BUILD.gn
@@ -60,6 +60,9 @@ config("feature_flags") {
defines += [ "USE_GLIB=1" ]
}
if (use_ozone && !is_android) {
+ # Chrome code should check BUILDFLAG(IS_OZONE) instead of
+ # defined(USE_OZONE).
+ #
# Note that some Chrome OS builds unconditionally set |use_ozone| to true,
# but they also build some targets with the Android toolchain. This ensures
# that Android targets still build with USE_OZONE=0 in such cases.
@@ -228,6 +231,9 @@ group("common_deps") {
visibility = [
":executable_deps",
":loadable_module_deps",
+ ":rust_bin_deps",
+ ":rust_cdylib_deps",
+ ":rust_dylib_deps",
":shared_library_deps",
]
@@ -269,6 +275,15 @@ group("executable_deps") {
public_configs = [ "//build/config/sanitizers:link_executable" ]
}
+# Only the rust_bin template in BUILDCONFIG.gn should reference this.
+group("rust_bin_deps") {
+ public_deps = [ ":common_deps" ]
+ if (export_libcxxabi_from_executables) {
+ public_deps += [ "//buildtools/third_party/libc++abi" ]
+ }
+ public_configs = [ "//build/config/sanitizers:link_executable" ]
+}
+
# Only the loadable_module template in BUILDCONFIG.gn should reference this.
group("loadable_module_deps") {
public_deps = [ ":common_deps" ]
@@ -283,6 +298,20 @@ group("shared_library_deps") {
public_configs = [ "//build/config/sanitizers:link_shared_library" ]
}
+# Only the rust_dylib template in BUILDCONFIG.gn should reference this.
+group("rust_dylib_deps") {
+ public_deps = [ ":common_deps" ]
+
+ public_configs = [ "//build/config/sanitizers:link_shared_library" ]
+}
+
+# Only the rust_cdylib template in BUILDCONFIG.gn should reference this.
+group("rust_cdylib_deps") {
+ public_deps = [ ":common_deps" ]
+
+ public_configs = [ "//build/config/sanitizers:link_shared_library" ]
+}
+
# Executable configs -----------------------------------------------------------
# Windows linker setup for EXEs and DLLs.
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
index cc7e92ddb..ca7debdbb 100644
--- a/build/config/BUILDCONFIG.gn
+++ b/build/config/BUILDCONFIG.gn
@@ -398,6 +398,11 @@ if (is_debug) {
# Static libraries and source sets use only the compiler ones.
set_defaults("static_library") {
configs = default_compiler_configs
+
+ # For Rust, a static library involves linking in all dependencies, and it
+ # performs LTO. But since we will perform LTO in the C++ linker which
+ # consumes the library, we defer LTO from Rust into the linker.
+ configs += [ "//build/config/compiler:rust_defer_lto_to_linker" ]
}
set_defaults("source_set") {
configs = default_compiler_configs
@@ -405,9 +410,6 @@ set_defaults("source_set") {
set_defaults("rust_library") {
configs = default_compiler_configs
}
-set_defaults("rust_proc_macro") {
- configs = default_compiler_configs
-}
# Compute the set of configs common to all linked targets (shared libraries,
# loadable modules, executables) to avoid duplication below.
@@ -421,8 +423,8 @@ if (is_win) {
# that shouldn't use the windows subsystem.
"//build/config/win:console",
]
-} else if (is_mac) {
- _linker_configs = [ "//build/config/mac:strip_all" ]
+} else if (is_apple) {
+ _linker_configs = [ "//build/config/apple:strip_all" ]
} else {
_linker_configs = []
}
@@ -472,6 +474,22 @@ set_defaults("loadable_module") {
}
}
+default_rust_proc_macro_configs =
+ default_shared_library_configs + [ "//build/rust:proc_macro_extern" ] +
+ # Rust proc macros don't support (Thin)LTO, so always remove it.
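+  # (Adding the configs before subtracting them ensures the subtraction never
+  # fails when a config was not already present in the default list.)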
+ [
+ "//build/config/compiler:thinlto_optimize_default",
+ "//build/config/compiler:thinlto_optimize_max",
+ ] -
+ [
+ "//build/config/compiler:thinlto_optimize_default",
+ "//build/config/compiler:thinlto_optimize_max",
+ ]
+
+set_defaults("rust_proc_macro") {
+ configs = default_rust_proc_macro_configs
+}
+
# A helper for forwarding testonly and visibility.
# Forwarding "*" does not include variables from outer scopes (to avoid copying
# all globals into each template invocation), so it will not pick up
@@ -496,6 +514,14 @@ foreach(_target_type,
"executable",
"loadable_module",
"shared_library",
+
+ # TODO(crbug.com/1426886): When bindgen is not built in the Chromium
+ # build it will be safe to add rust_bin here. Right now it would make
+ # a conflict between the Chromium libc++ that this selects and the
+ # clang libraries that are shipped with the Rust toolchain.
+ # "rust_bin",
+ "rust_dylib",
+ "rust_cdylib",
]) {
template(_target_type) {
# Alias "target_name" because it is clobbered by forward_variables_from().
@@ -572,6 +598,16 @@ foreach(_target_type,
template("component") {
if (is_component_build) {
_component_mode = "shared_library"
+
+ # Generate a unique output_name for a shared library if not set by invoker.
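+      # For example, "//foo/bar:baz" becomes "foo_bar_baz", and "//base:base"
+      # becomes simply "base".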
+ if (!defined(invoker.output_name)) {
+ _output_name = get_label_info(":$target_name", "label_no_toolchain")
+ _output_name =
+ string_replace(_output_name, "$target_name:$target_name", target_name)
+ _output_name = string_replace(_output_name, "//", "")
+ _output_name = string_replace(_output_name, "/", "_")
+ _output_name = string_replace(_output_name, ":", "_")
+ }
} else if (defined(invoker.static_component_type)) {
assert(invoker.static_component_type == "static_library" ||
invoker.static_component_type == "source_set")
@@ -584,6 +620,9 @@ template("component") {
_component_mode = "static_library"
}
target(_component_mode, target_name) {
+ if (defined(_output_name)) {
+ output_name = _output_name
+ }
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
}
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
index d893ebc34..bd3a75e86 100644
--- a/build/config/android/config.gni
+++ b/build/config/android/config.gni
@@ -61,6 +61,10 @@ if (is_android || is_chromeos) {
} else {
android_static_analysis = "on"
}
+
+ # Build incremental targets whenever possible.
+ # See //build/android/incremental_install/README.md for more details.
+ incremental_install = false
}
# Avoid typos when setting android_static_analysis in args.gn.
@@ -71,18 +75,6 @@ if (is_android || is_chromeos) {
# Warn against it.
assert(!(enable_chrome_android_internal && skip_secondary_abi_for_cq))
- declare_args() {
- # Android API level for 32 bits platforms
- android32_ndk_api_level = default_min_sdk_version
-
- # Android API level for 64 bits platforms
- if (default_min_sdk_version < 21) {
- android64_ndk_api_level = 21
- } else {
- android64_ndk_api_level = default_min_sdk_version
- }
- }
-
if (enable_chrome_android_internal) {
import("//clank/config.gni")
} else {
@@ -113,6 +105,13 @@ if (is_android || is_chromeos) {
public_android_sdk = true
}
+ if (android_sdk_release == "tprivacysandbox") {
+ default_android_sdk_root = public_android_sdk_root
+ default_android_sdk_version = "TiramisuPrivacySandbox"
+ default_android_sdk_build_tools_version = "33.0.0"
+ public_android_sdk = true
+ }
+
# For use downstream when we are building with preview Android SDK
if (!defined(final_android_sdk)) {
final_android_sdk = public_android_sdk
@@ -144,26 +143,6 @@ if (is_android || is_chromeos) {
google_play_services_package = "//third_party/android_deps"
}
- if (!defined(dagger_java_target)) {
- dagger_java_target =
- "//third_party/android_deps:com_google_dagger_dagger_java"
- }
-
- if (!defined(dagger_annotation_processor_target)) {
- dagger_annotation_processor_target =
- "//third_party/android_deps:com_google_dagger_dagger_compiler_java"
- }
-
- if (!defined(guava_android_target)) {
- guava_android_target =
- "//third_party/android_deps:com_google_guava_guava_android_java"
- }
-
- if (!defined(material_design_target)) {
- material_design_target =
- "//third_party/android_deps:com_google_android_material_material_java"
- }
-
if (!defined(android_protoc_bin)) {
android_protoc_bin = "//third_party/android_protoc/protoc"
android_proto_runtime =
@@ -184,6 +163,17 @@ if (is_android || is_chromeos) {
android_ndk_version = default_android_ndk_version
android_ndk_major_version = default_android_ndk_major_version
+  # Android API level for 32-bit platforms
+ android32_ndk_api_level = default_min_sdk_version
+
+  # Android API level for 64-bit platforms
+ android64_ndk_api_level = default_min_sdk_version
+
+ if (default_min_sdk_version < 21) {
+    # Android did not support 64-bit before API 21.
+ android64_ndk_api_level = 21
+ }
+
android_sdk_root = default_android_sdk_root
android_sdk_version = default_android_sdk_version
android_sdk_build_tools_version = default_android_sdk_build_tools_version
@@ -217,7 +207,7 @@ if (is_android || is_chromeos) {
# Java debug on Android. Having this on enables multidexing, and turning it
# off will enable proguard.
- is_java_debug = is_debug
+ is_java_debug = is_debug || incremental_install
# Mark APKs as android:debuggable="true".
debuggable_apks = !is_official_build
@@ -225,10 +215,6 @@ if (is_android || is_chromeos) {
# Set to false to disable the Errorprone compiler.
use_errorprone_java_compiler = android_static_analysis != "off"
- # Build incremental targets whenever possible.
- # See //build/android/incremental_install/README.md for more details.
- incremental_install = false
-
# When true, updates all android_aar_prebuilt() .info files during gn gen.
# Refer to android_aar_prebuilt() for more details.
update_android_aar_prebuilts = false
@@ -254,6 +240,11 @@ if (is_android || is_chromeos) {
# support mapping these names.
enable_arsc_obfuscation = true
+ # Controls whether |strip_unused_resources| is respected. Useful when trying
+ # to analyze APKs using tools that do not support missing resources from
+ # resources.arsc.
+ enable_unused_resource_stripping = true
+
# The target to use as the system WebView implementation.
if (android_64bit_target_cpu && skip_secondary_abi_for_cq) {
system_webview_apk_target = "//android_webview:system_webview_64_apk"
@@ -284,7 +275,7 @@ if (is_android || is_chromeos) {
use_hashed_jni_names = !is_java_debug
# Enables JNI multiplexing to reduce JNI native methods overhead.
- enable_jni_multiplexing = false
+ allow_jni_multiplexing = false
# Enables trace event injection on Android views with bytecode rewriting.
# This adds an additional step on android_app_bundle_module targets that
@@ -293,9 +284,8 @@ if (is_android || is_chromeos) {
!is_java_debug && android_channel != "stable"
}
- if (enable_jni_multiplexing) {
- use_hashed_jni_names = false
- }
+ assert(!incremental_install || is_java_debug,
+ "incremental_install=true && is_java_debug=false is not supported.")
# Host stuff -----------------------------------------------------------------
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
index 31c20f12a..4643970d8 100644
--- a/build/config/android/internal_rules.gni
+++ b/build/config/android/internal_rules.gni
@@ -4,6 +4,7 @@
# Do not add any imports to non-//build directories here.
# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build/config/android/channel.gni")
import("//build/config/android/config.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/compute_inputs_for_analyze.gni")
@@ -21,7 +22,8 @@ assert(is_android)
default_android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
_jacoco_dep = "//third_party/jacoco:jacocoagent_java"
-_jacoco_host_jar = "$root_build_dir/lib.java/third_party/jacoco/jacocoagent.jar"
+_jacoco_host_jar =
+ "$root_build_dir/lib.java/third_party/jacoco/jacocoagent_java.jar"
_robolectric_libs_dir =
rebase_path(
get_label_info("//:foo($robolectric_toolchain)", "root_out_dir"),
@@ -201,6 +203,8 @@ template("write_build_config") {
_public_deps_configs += [ _dep_config ]
}
}
+ inputs += _deps_configs
+ inputs += _public_deps_configs
_rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
_rebased_public_deps_configs =
rebase_path(_public_deps_configs, root_build_dir)
@@ -217,6 +221,10 @@ template("write_build_config") {
_target_label,
]
+ if (defined(invoker.preferred_dep) && invoker.preferred_dep) {
+ args += [ "--preferred-dep" ]
+ }
+
if (defined(invoker.aar_path)) {
args += [
"--aar-path",
@@ -253,6 +261,12 @@ template("write_build_config") {
rebase_path(invoker.ijar_path, root_build_dir),
]
}
+ if (defined(invoker.kotlinc_jar_path)) {
+ args += [
+ "--kotlinc-jar-path",
+ rebase_path(invoker.kotlinc_jar_path, root_build_dir),
+ ]
+ }
if (defined(invoker.java_resources_jar)) {
args += [
"--java-resources-jar-path",
@@ -262,15 +276,16 @@ template("write_build_config") {
if (defined(invoker.annotation_processor_deps) &&
invoker.annotation_processor_deps != []) {
_processor_configs = []
- foreach(_processor_dep, invoker.annotation_processor_deps) {
- _dep_label = get_label_info(_processor_dep, "label_no_toolchain")
- _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir")
- _dep_name = get_label_info(_processor_dep, "name")
+ foreach(_dep_label, invoker.annotation_processor_deps) {
deps += [ "$_dep_label$build_config_target_suffix" ]
- _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config.json" ]
+ _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir")
+ _dep_name = get_label_info(_dep_label, "name")
+ _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json"
+ _processor_configs += [ _dep_config ]
}
_rebased_processor_configs =
rebase_path(_processor_configs, root_build_dir)
+ inputs += _processor_configs
args += [ "--annotation-processor-configs=$_rebased_processor_configs" ]
}
@@ -307,15 +322,15 @@ template("write_build_config") {
}
if (defined(invoker.apk_under_test)) {
- deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ]
- apk_under_test_gen_dir =
- get_label_info(invoker.apk_under_test, "target_gen_dir")
- apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
- apk_under_test_config =
- "$apk_under_test_gen_dir/$apk_under_test_name.build_config.json"
+ _dep_label = invoker.apk_under_test
+ _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir")
+ _dep_name = get_label_info(_dep_label, "name")
+ _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json"
+ inputs += [ _dep_config ]
+ deps += [ "$_dep_label$build_config_target_suffix" ]
args += [
"--tested-apk-config",
- rebase_path(apk_under_test_config, root_build_dir),
+ rebase_path(_dep_config, root_build_dir),
]
}
@@ -419,10 +434,6 @@ template("write_build_config") {
]
}
- if (defined(invoker.is_base_module) && invoker.is_base_module) {
- args += [ "--is-base-module" ]
- }
-
if (defined(invoker.loadable_modules)) {
_rebased_loadable_modules =
rebase_path(invoker.loadable_modules, root_build_dir)
@@ -478,10 +489,10 @@ template("write_build_config") {
}
}
- if (defined(invoker.java_sources_file)) {
+ if (defined(invoker.target_sources_file)) {
args += [
- "--java-sources-file",
- rebase_path(invoker.java_sources_file, root_build_dir),
+ "--target-sources-file",
+ rebase_path(invoker.target_sources_file, root_build_dir),
]
}
if (defined(invoker.srcjar)) {
@@ -535,18 +546,42 @@ template("write_build_config") {
]
}
if (defined(invoker.base_module_target)) {
- _base_label =
- get_label_info(invoker.base_module_target, "label_no_toolchain")
- _dep_gen_dir = get_label_info(_base_label, "target_gen_dir")
- _dep_name = get_label_info(_base_label, "name")
- deps += [ "$_base_label$build_config_target_suffix" ]
- _base_module_build_config = "$_dep_gen_dir/$_dep_name.build_config.json"
- inputs += [ _base_module_build_config ]
+ _dep_label = invoker.base_module_target
+ _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir")
+ _dep_name = get_label_info(_dep_label, "name")
+ _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json"
+ deps += [ "$_dep_label$build_config_target_suffix" ]
+ inputs += [ _dep_config ]
args += [
"--base-module-build-config",
- rebase_path(_base_module_build_config, root_build_dir),
+ rebase_path(_dep_config, root_build_dir),
+ ]
+ }
+ if (defined(invoker.parent_module_target)) {
+ _dep_label = invoker.parent_module_target
+ _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir")
+ _dep_name = get_label_info(_dep_label, "name")
+ _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json"
+ deps += [ "$_dep_label$build_config_target_suffix" ]
+ inputs += [ _dep_config ]
+ args += [
+ "--parent-module-build-config",
+ rebase_path(_dep_config, root_build_dir),
+ ]
+ }
+ if (defined(invoker.module_name)) {
+ args += [
+ "--module-name",
+ invoker.module_name,
]
}
+ if (defined(invoker.modules)) {
+ foreach(_module, invoker.modules) {
+ if (defined(_module.uses_split)) {
+ args += [ "--uses-split=${_module.name}:${_module.uses_split}" ]
+ }
+ }
+ }
if (defined(invoker.module_build_configs)) {
inputs += invoker.module_build_configs
_rebased_configs =
@@ -1123,7 +1158,7 @@ if (enable_java_templates) {
# Lint requires all source and all resource files to be passed in the
# same invocation for checks like UnusedResources.
- "--java-sources=@FileArg($_rebased_build_config:deps_info:lint_java_sources)",
+ "--sources=@FileArg($_rebased_build_config:deps_info:lint_sources)",
"--aars=@FileArg($_rebased_build_config:deps_info:lint_aars)",
"--srcjars=@FileArg($_rebased_build_config:deps_info:lint_srcjars)",
"--resource-sources=@FileArg($_rebased_build_config:deps_info:lint_resource_sources)",
@@ -1161,6 +1196,24 @@ if (enable_java_templates) {
}
_rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+ # This is generally the apk name, and serves to identify the mapping
+ # file that would be required to deobfuscate a stacktrace.
+ _mapping_basename = get_path_info(_mapping_path, "name")
+ _version_code = "@FileArg($_rebased_build_config:deps_info:version_code)"
+ _package_name = "@FileArg($_rebased_build_config:deps_info:package_name)"
+ if (defined(invoker.package_name)) {
+ _package_name = invoker.package_name
+ }
+ if (defined(invoker.version_code)) {
+ _version_code = invoker.version_code
+ }
+
+  # The Mapping ID is parsed from this value when uploading mapping files.
+ # See: https://crbug.com/1417308
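+  # The resulting value has the form
+  # "chromium-<mapping basename>-<channel>-<version code>".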
+ _source_file_template =
+ "chromium-$_mapping_basename-$android_channel-$_version_code"
+
_args = [
"--mapping-output",
rebase_path(_mapping_path, root_build_dir),
@@ -1170,33 +1223,17 @@ if (enable_java_templates) {
"@FileArg($_rebased_build_config:android:sdk_jars)",
"--r8-path",
rebase_path(_r8_path, root_build_dir),
+ "--package-name=$_package_name",
+ "--source-file",
+ _source_file_template,
]
if (treat_warnings_as_errors) {
_args += [ "--warnings-as-errors" ]
}
- if (defined(invoker.desugar_jars_paths)) {
- _rebased_desugar_jars_paths =
- rebase_path(invoker.desugar_jars_paths, root_build_dir)
- args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
- }
if ((!defined(invoker.proguard_enable_obfuscation) ||
invoker.proguard_enable_obfuscation) && enable_proguard_obfuscation) {
- _proguard_sourcefile_suffix = ""
- if (defined(invoker.proguard_sourcefile_suffix)) {
- _proguard_sourcefile_suffix = "-${invoker.proguard_sourcefile_suffix}"
- }
-
- # This is generally the apk name, and serves to identify the mapping
- # file that would be required to deobfuscate a stacktrace.
- _mapping_id = get_path_info(_mapping_path, "name")
- _args += [
- "--enable-obfuscation",
- "--sourcefile",
- "chromium-${_mapping_id}${_proguard_sourcefile_suffix}",
- ]
- } else if (defined(invoker.proguard_sourcefile_suffix)) {
- not_needed(invoker, [ "proguard_sourcefile_suffix" ])
+ _args += [ "--enable-obfuscation" ]
}
if (defined(invoker.modules)) {
@@ -1208,11 +1245,13 @@ if (enable_java_templates) {
"--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)",
]
+ # The bundle's build config has the correct classpaths - the individual
+ # modules' build configs may double-use some jars.
if (defined(invoker.add_view_trace_events) &&
invoker.add_view_trace_events) {
- _args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:trace_event_rewritten_device_classpath)" ]
+ _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:trace_event_rewritten_device_classpath)" ]
} else {
- _args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)" ]
+ _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:device_classpath)" ]
}
if (defined(_feature_module.uses_split)) {
@@ -1381,9 +1420,6 @@ if (enable_java_templates) {
]
data += [ _jacoco_host_jar ]
}
- if (use_jacoco_coverage || !treat_warnings_as_errors) {
- args += [ "--noverify" ]
- }
if (defined(invoker.tiered_stop_at_level_one) &&
invoker.tiered_stop_at_level_one) {
args += [ "--tiered-stop-at-level-one" ]
@@ -1397,6 +1433,12 @@ if (enable_java_templates) {
if (defined(invoker.wrapper_script_args)) {
args += [ "--" ] + invoker.wrapper_script_args
}
+ if (defined(invoker.use_jdk_11) && invoker.use_jdk_11) {
+ args += [ "--use-jdk-11" ]
+ deps += [ "//third_party/jdk11:java_data" ]
+ } else {
+ deps += [ "//third_party/jdk:java_data" ]
+ }
}
}
@@ -1465,16 +1507,17 @@ if (enable_java_templates) {
"data",
"data_deps",
"deps",
- "desugar_jars_paths",
"enable_proguard_checks",
"expected_proguard_config",
"expected_proguard_config_base",
"ignore_desugar_missing_deps",
"modules",
+ "package_name",
"proguard_enable_obfuscation",
"proguard_mapping_path",
"proguard_sourcefile_suffix",
"top_target_name",
+ "version_code",
])
inputs = []
if (defined(invoker.inputs)) {
@@ -1530,6 +1573,8 @@ if (enable_java_templates) {
}
} else { # !_proguard_enabled
_is_library = defined(invoker.is_library) && invoker.is_library
+ assert(!(defined(invoker.input_classes_filearg) && _is_library))
+ assert(_is_library == defined(invoker.unprocessed_jar_path))
_input_class_jars = []
if (defined(invoker.input_class_jars)) {
_input_class_jars = invoker.input_class_jars
@@ -1575,7 +1620,7 @@ if (enable_java_templates) {
rebase_path(outputs[0], root_build_dir),
"--min-api=$_min_sdk_version",
"--r8-jar-path",
- rebase_path(_r8_path, root_build_dir),
+ rebase_path(_d8_path, root_build_dir),
"--custom-d8-jar-path",
rebase_path(_custom_d8_path, root_build_dir),
@@ -1623,6 +1668,10 @@ if (enable_java_templates) {
if (defined(invoker.input_classes_filearg)) {
inputs += [ invoker.build_config ]
args += [ "--class-inputs-filearg=${invoker.input_classes_filearg}" ]
+
+ # Required for the same reason as unprocessed_jar_path is added to
+ # classpath (see note below).
+ args += [ "--classpath=${invoker.input_classes_filearg}" ]
}
if (_input_class_jars != []) {
inputs += _input_class_jars
@@ -1654,15 +1703,14 @@ if (enable_java_templates) {
# filtered out via jar_excluded_patterns.
# Must come first in classpath in order to take precedence over
# deps that defined the same classes (via jar_excluded_patterns).
- if (defined(invoker.unprocessed_jar_path_filearg)) {
- args += [
- "--classpath",
- invoker.unprocessed_jar_path_filearg,
- ]
- } else if (defined(invoker.unprocessed_jar_path)) {
+ if (defined(invoker.unprocessed_jar_path)) {
args += [
"--classpath",
rebase_path(invoker.unprocessed_jar_path, root_build_dir),
+
+ # Pass the full classpath to find new dependencies that are not in
+ # the .desugardeps file.
+ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
]
inputs += [ invoker.unprocessed_jar_path ]
}
@@ -1672,18 +1720,7 @@ if (enable_java_templates) {
"--desugar-dependencies",
rebase_path(_desugar_dependencies_path, root_build_dir),
"--bootclasspath=@FileArg($_rebased_build_config:android:sdk_jars)",
-
- # Pass the full classpath to find new dependencies that are not in
- # the .desugardeps file.
- "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
]
- if (defined(invoker.desugar_jars_paths)) {
- _rebased_desugar_jars_paths =
- rebase_path(invoker.desugar_jars_paths, root_build_dir)
- args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
- }
- } else {
- not_needed(invoker, [ "desugar_jars_paths" ])
}
if (defined(invoker.custom_assertion_handler)) {
@@ -1714,7 +1751,7 @@ if (enable_java_templates) {
_jacococli_jar = "//third_party/jacoco/lib/jacococli.jar"
script = "//build/android/gyp/jacoco_instr.py"
- inputs = invoker.java_files + [
+ inputs = invoker.source_files + [
_jacococli_jar,
invoker.input_jar_path,
]
@@ -1729,8 +1766,8 @@ if (enable_java_templates) {
rebase_path(invoker.output_jar_path, root_build_dir),
"--sources-json-file",
rebase_path(_sources_json_file, root_build_dir),
- "--java-sources-file",
- rebase_path(invoker.java_sources_file, root_build_dir),
+ "--target-sources-file",
+ rebase_path(invoker.target_sources_file, root_build_dir),
"--jacococli-jar",
rebase_path(_jacococli_jar, root_build_dir),
]
@@ -1814,8 +1851,8 @@ if (enable_java_templates) {
deps = [ ":$_filter_jar_target_name" ] + invoker.deps
forward_variables_from(invoker,
[
- "java_files",
- "java_sources_file",
+ "source_files",
+ "target_sources_file",
])
input_jar_path = _filter_jar_output_jar
@@ -2381,18 +2418,20 @@ if (enable_java_templates) {
]
inputs += [ invoker.expected_android_manifest_base ]
}
- if (fail_on_android_expectations) {
- args += [ "--fail-on-expectations" ]
+ if (defined(invoker.expected_android_manifest_version_code_offset)) {
+ args += [
+ "--verification-version-code-offset",
+ invoker.expected_android_manifest_version_code_offset,
+ ]
}
- if (defined(invoker.extra_verification_manifest)) {
- inputs += [ invoker.extra_verification_manifest ]
+ if (defined(invoker.expected_android_manifest_library_version_offset)) {
args += [
- "--extra-verification-manifest",
- rebase_path(invoker.extra_verification_manifest, root_build_dir),
+ "--verification-library-version-offset",
+ invoker.expected_android_manifest_library_version_offset,
]
- if (defined(invoker.extra_verification_manifest_dep)) {
- deps += [ invoker.extra_verification_manifest_dep ]
- }
+ }
+ if (fail_on_android_expectations) {
+ args += [ "--fail-on-expectations" ]
}
}
_deps += [ ":$_expectations_target" ]
@@ -2700,7 +2739,14 @@ if (enable_java_templates) {
_args += [ "--best-compression" ]
}
}
- if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) {
+ if (defined(invoker.uncompress_dex)) {
+ _uncompress_dex = invoker.uncompress_dex
+ } else {
+ # Uncompressed dex support started on Android P.
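+    # (Android P corresponds to min_sdk_version / API level 28.)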
+ _uncompress_dex = invoker.min_sdk_version >= 28
+ }
+
+ if (_uncompress_dex) {
_args += [ "--uncompress-dex" ]
}
if (defined(invoker.library_always_compress)) {
@@ -2808,29 +2854,26 @@ if (enable_java_templates) {
}
# Compile Java source files into a .jar file, potentially using an
- # annotation processor, and/or the errorprone compiler.
+ # annotation processor, and/or the errorprone compiler. Also includes Kotlin
+ # source files in the resulting info file.
#
# Note that the only way to specify custom annotation processors is
# by using build_config to point to a file that corresponds to a java-related
# target that includes javac:processor_classes entries (i.e. there is no
# variable here that can be used for this purpose).
#
- # Note also the peculiar use of java_files / java_sources_file. The content
- # of the java_files list and the java_sources_file file must match exactly.
- # This rule uses java_files only to list the inputs to the action that
- # calls compile_java.py, but will pass the list of Java source files
- # with the '@${java_sources_file}" command-line syntax. Not a problem in
- # practice since this is only called from java_library_impl() that sets up
- # the variables properly.
+  # Note also the peculiar use of source_files / target_sources_file. The
+  # content of the source_files list and the source files listed in
+  # target_sources_file must match exactly.
#
# Variables:
# main_target_name: Used when extracting srcjars for codesearch.
- # java_files: Optional list of Java source file paths.
+ # source_files: Optional list of Java and Kotlin source file paths.
# srcjar_deps: Optional list of .srcjar dependencies (not file paths).
# The corresponding source files they contain will be compiled too.
- # java_sources_file: Optional path to file containing list of Java source
- # file paths. This must always be provided if java_files is not empty
- # and must match it exactly.
+ # target_sources_file: Optional path to file containing list of source file
+  #     paths. This must always be provided if source_files is not empty, and
+  #     the files listed in it must match the source_files list exactly.
# build_config: Path to the .build_config.json file of the corresponding
# java_library_impl() target. The following entries will be used by this
# template: javac:srcjars, deps_info:javac_full_classpath,
@@ -2875,7 +2918,7 @@ if (enable_java_templates) {
_srcjar_deps = []
if (defined(invoker.srcjar_deps)) {
- _srcjar_deps += invoker.srcjar_deps
+ _srcjar_deps = invoker.srcjar_deps
}
_java_srcjars = []
@@ -2917,9 +2960,9 @@ if (enable_java_templates) {
if (!invoker.enable_errorprone && !invoker.use_turbine) {
outputs += [ invoker.output_jar_path + ".info" ]
}
- inputs = invoker.java_files + _java_srcjars + [ _build_config ]
- if (invoker.java_files != []) {
- inputs += [ invoker.java_sources_file ]
+ inputs = invoker.source_files + _java_srcjars + [ _build_config ]
+ if (invoker.source_files != []) {
+ inputs += [ invoker.target_sources_file ]
}
_rebased_build_config = rebase_path(_build_config, root_build_dir)
@@ -2955,6 +2998,16 @@ if (enable_java_templates) {
args += [ "--classpath=$_header_jar_classpath" ]
}
+ if (defined(invoker.kotlin_jar_path)) {
+ inputs += [ invoker.kotlin_jar_path ]
+ _rebased_kotlin_jar_path =
+ rebase_path(invoker.kotlin_jar_path, root_build_dir)
+ args += [
+ "--kotlin-jar-path=$_rebased_kotlin_jar_path",
+ "--classpath=$_rebased_kotlin_jar_path",
+ ]
+ }
+
if (invoker.use_turbine) {
# Prefer direct deps for turbine as much as possible.
args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
@@ -3035,8 +3088,9 @@ if (enable_java_templates) {
[ "--additional-jar-file=" +
rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ]
}
- if (invoker.java_files != []) {
- args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
+ if (invoker.source_files != []) {
+ args +=
+ [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ]
}
foreach(e, _javac_args) {
args += [ "--javac-arg=" + e ]
@@ -3044,6 +3098,95 @@ if (enable_java_templates) {
}
}
+ # Compile Kotlin source files into .class files and store them in a .jar.
+ # This explicitly does not run annotation processing on the Kotlin files.
+ # Java files and srcjars are also passed to kotlinc for reference, although
+ # no .class files will be generated for any Java files. A subsequent call to
+ # javac will be required to actually compile Java files into .class files.
+ #
+ # This action also creates a "header" .jar file for the Kotlin source files.
+ # It is similar to using turbine to create headers for Java files, but since
+ # turbine does not support Kotlin files, this is done via a plugin for
+ # kotlinc instead, at the same time as compilation (whereas turbine is run as
+ # a separate action before javac compilation).
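+  #
+  # A hypothetical invocation (target and file names are illustrative; in
+  # practice this template is driven by the java library rules in this file):
+  #
+  #   compile_kt("foo_java__compile_kt") {
+  #     main_target_name = "foo_java"
+  #     build_config = "$target_gen_dir/foo_java.build_config.json"
+  #     source_files = [ "java/src/org/chromium/foo/Bar.kt" ]
+  #     target_sources_file = "$target_gen_dir/foo_java.sources"
+  #     output_jar_path = "$target_out_dir/foo_java.kotlinc.jar"
+  #     output_interface_jar_path = "$target_out_dir/foo_java.kt-jvm-abi.jar"
+  #     chromium_code = true
+  #     include_android_sdk = true
+  #   }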
+ template("compile_kt") {
+ forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+ _build_config = invoker.build_config
+ _chromium_code = invoker.chromium_code
+
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps = invoker.srcjar_deps
+ }
+
+ _java_srcjars = []
+ foreach(dep, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+ _dep_name = get_label_info(dep, "name")
+ _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+ }
+
+ action_with_pydeps(target_name) {
+ script = "//build/android/gyp/compile_kt.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = _srcjar_deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ outputs = [
+ invoker.output_jar_path,
+ invoker.output_interface_jar_path,
+ ]
+ inputs = invoker.source_files + _java_srcjars + [
+ _build_config,
+ invoker.target_sources_file,
+ ]
+
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _rebased_output_jar_path =
+ rebase_path(invoker.output_jar_path, root_build_dir)
+ _rebased_output_interface_jar_path =
+ rebase_path(invoker.output_interface_jar_path, root_build_dir)
+ _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+ _rebased_depfile = rebase_path(depfile, root_build_dir)
+ _rebased_generated_dir = rebase_path(
+ "$target_gen_dir/${invoker.main_target_name}/generated_java",
+ root_build_dir)
+ args = [
+ "--depfile=$_rebased_depfile",
+ "--generated-dir=$_rebased_generated_dir",
+ "--jar-path=$_rebased_output_jar_path",
+ "--interface-jar-path=$_rebased_output_interface_jar_path",
+ "--java-srcjars=$_rebased_java_srcjars",
+ ]
+
+ # SDK jar must be first on classpath.
+ if (invoker.include_android_sdk) {
+ args += [ "--classpath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ]
+ }
+
+ args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ]
+
+ if (use_java_goma) {
+ args += [ "--gomacc-path=$goma_dir/gomacc" ]
+
+ # Override the default action_pool when goma is enabled.
+ pool = "//build/config/android:goma_javac_pool"
+ }
+
+ if (_chromium_code) {
+ args += [ "--chromium-code" ]
+ if (treat_warnings_as_errors) {
+ args += [ "--warnings-as-errors" ]
+ }
+ }
+
+ args += [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ]
+ }
+ }
+
# Create an interface jar from a normal jar.
#
# Variables
@@ -3103,15 +3246,15 @@ if (enable_java_templates) {
# be the name of the main APK target.
# supports_android: Optional. True if target can run on Android.
# requires_android: Optional. True if target can only run on Android.
- # java_files: Optional list of Java source file paths for this target.
+ # source_files: Optional list of Java source file paths for this target.
# javac_args: Optional list of extra arguments to pass to javac.
# errorprone_args: Optional list of extra arguments to pass to.
# srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
# source files they contain will also be compiled for this target.
- # java_sources_file: Optional path to a file which will be written with
- # the content of java_files. If not provided, the file will be written
+ # target_sources_file: Optional path to a file which will be written with
+ # the content of source_files. If not provided, the file will be written
# under $target_gen_dir/$main_target_name.sources. Ignored if
- # java_files is empty. If not
+ # sources_files is empty. If not
# jar_path: Optional path to a prebuilt .jar file for this target.
# Mutually exclusive with java_files and srcjar_deps.
# output_name: Optional output name for the final jar path. Used to
@@ -3142,9 +3285,6 @@ if (enable_java_templates) {
# input_jars_paths: Optional list of additional .jar file paths, which will
# be added to the compile-time classpath when building this target (but
# not to the runtime classpath).
- # desugar_jars_paths: Optional list of additional .jar file paths, which will
- # be added to the desugar classpath when building this target (but not to
- # any other classpath). This is only used to break dependency cycles.
# gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this
# library via its built .jar rather than including its .java sources.
# proguard_enabled: Optional. True to enable ProGuard obfuscation.
@@ -3240,15 +3380,16 @@ if (enable_java_templates) {
_main_target_name = invoker.main_target_name
}
- _java_files = []
+ _source_files = []
if (defined(invoker.sources)) {
- _java_files = invoker.sources
+ _source_files = invoker.sources
}
+
_srcjar_deps = []
if (defined(invoker.srcjar_deps)) {
_srcjar_deps = invoker.srcjar_deps
}
- _has_sources = _java_files != [] || _srcjar_deps != []
+ _has_sources = _source_files != [] || _srcjar_deps != []
if (_is_prebuilt) {
assert(!_has_sources)
@@ -3275,10 +3416,10 @@ if (enable_java_templates) {
_chromium_code =
filter_exclude([ get_label_info(":$_main_target_name", "dir") ],
[ "*\bthird_party\b*" ]) != []
- if (!_chromium_code && !_is_prebuilt && _java_files != []) {
+ if (!_chromium_code && !_is_prebuilt && _source_files != []) {
# Unless third_party code has an org.chromium file in it.
_chromium_code =
- filter_exclude(_java_files, [ "*\bchromium\b*" ]) != _java_files
+ filter_exclude(_source_files, [ "*\bchromium\b*" ]) != _source_files
}
}
@@ -3294,8 +3435,6 @@ if (enable_java_templates) {
if (_is_prebuilt || _has_sources) {
if (defined(invoker.output_name)) {
_output_name = invoker.output_name
- } else if (_is_prebuilt) {
- _output_name = get_path_info(invoker.jar_path, "name")
} else {
_output_name = _main_target_name
}
@@ -3305,7 +3444,7 @@ if (enable_java_templates) {
_build_device_jar = _type != "system_java_library" && _supports_android
_jacoco_instrument =
- use_jacoco_coverage && _chromium_code && _java_files != [] &&
+ use_jacoco_coverage && _chromium_code && _source_files != [] &&
_build_device_jar && (!defined(invoker.testonly) || !invoker.testonly)
if (defined(invoker.jacoco_never_instrument)) {
_jacoco_instrument =
@@ -3450,12 +3589,10 @@ if (enable_java_templates) {
"_javac_classpath_deps",
])
- if (_java_files != []) {
- _java_sources_file = "$target_gen_dir/$_main_target_name.sources"
- if (defined(invoker.java_sources_file)) {
- _java_sources_file = invoker.java_sources_file
- }
- write_file(_java_sources_file, rebase_path(_java_files, root_build_dir))
+ if (_source_files != []) {
+ _target_sources_file = "$target_gen_dir/$_main_target_name.sources"
+ write_file(_target_sources_file,
+ rebase_path(_source_files, root_build_dir))
}
write_build_config(_build_config_target_name) {
@@ -3466,9 +3603,12 @@ if (enable_java_templates) {
"base_allowlist_rtxt_path",
"gradle_treat_as_prebuilt",
"input_jars_paths",
+ "preferred_dep",
"low_classpath_priority",
"main_class",
"mergeable_android_manifests",
+ "module_name",
+ "parent_module_target",
"proguard_configs",
"proguard_enabled",
"proguard_mapping_path",
@@ -3510,7 +3650,6 @@ if (enable_java_templates) {
[
"add_view_trace_events",
"base_module_target",
- "is_base_module",
"module_pathmap_path",
"proto_resources_path",
])
@@ -3557,8 +3696,8 @@ if (enable_java_templates) {
device_jar_path = _device_processed_jar_path
dex_path = _dex_path
}
- if (_java_files != []) {
- java_sources_file = _java_sources_file
+ if (_source_files != []) {
+ target_sources_file = _target_sources_file
}
bundled_srcjars = []
@@ -3584,6 +3723,9 @@ if (enable_java_templates) {
}
if (_has_sources) {
+ _kt_files = filter_include(_source_files, [ "*.kt" ])
+ _java_files = filter_exclude(_source_files, [ "*.kt" ])
+
if (defined(invoker.enable_errorprone)) {
_enable_errorprone = invoker.enable_errorprone
} else {
@@ -3615,6 +3757,29 @@ if (enable_java_templates) {
_srcjar_deps += [ ":$_fake_rjava_target" ]
}
+ if (_kt_files != []) {
+ assert(
+ _kt_files == [ "android/java/src/org/chromium/chrome/browser/tabmodel/AsyncTabParamsManagerImpl.kt" ],
+ "Only a single Kotlin file is allowed for now. Feel free to " +
+ "remove this assert when experimenting locally.")
+ _compile_kt_target_name = "${_main_target_name}__compile_kt"
+ _kotlinc_jar_path = "$target_out_dir/$_output_name.kotlinc.jar"
+ _kotlin_interface_jar_path =
+ "$target_out_dir/$_output_name.kt-jvm-abi.jar"
+ compile_kt(_compile_kt_target_name) {
+ deps = _header_classpath_deps
+ output_jar_path = _kotlinc_jar_path
+ output_interface_jar_path = _kotlin_interface_jar_path
+ main_target_name = _main_target_name
+ build_config = _build_config
+ srcjar_deps = _srcjar_deps
+ source_files = _source_files
+ target_sources_file = _target_sources_file
+ chromium_code = _chromium_code
+ include_android_sdk = _is_robolectric || _requires_android
+ }
+ }
+
template("compile_java_helper") {
_enable_errorprone =
defined(invoker.enable_errorprone) && invoker.enable_errorprone
@@ -3640,6 +3805,10 @@ if (enable_java_templates) {
deps += invoker.deps
}
output_jar_path = invoker.output_jar_path
+ if (defined(invoker.kotlin_jar_path)) {
+ deps += [ ":$_compile_kt_target_name" ]
+ kotlin_jar_path = invoker.kotlin_jar_path
+ }
enable_errorprone = _enable_errorprone
use_turbine = defined(invoker.use_turbine) && invoker.use_turbine
@@ -3647,14 +3816,14 @@ if (enable_java_templates) {
build_config = _build_config
if (_enable_errorprone) {
- java_files = _filtered_java_files
+ source_files = _filtered_java_files
} else {
- java_files = _java_files
+ source_files = _source_files
srcjar_deps = _srcjar_deps
}
- if (java_files != []) {
- java_sources_file = _java_sources_file
+ if (source_files != []) {
+ target_sources_file = _target_sources_file
}
chromium_code = _chromium_code
include_android_sdk = _is_robolectric || _requires_android
@@ -3680,6 +3849,9 @@ if (enable_java_templates) {
output_jar_path = _final_ijar_path
generated_jar_path = _generated_jar_path
deps = _annotation_processor_deps
+ if (_kt_files != []) {
+ kotlin_jar_path = _kotlin_interface_jar_path
+ }
}
_compile_java_target = "${_main_target_name}__compile_java"
@@ -3689,6 +3861,9 @@ if (enable_java_templates) {
deps = [ ":$_header_target_name" ]
header_jar_path = _final_ijar_path
generated_jar_path = _generated_jar_path
+ if (_kt_files != []) {
+ kotlin_jar_path = _kotlinc_jar_path
+ }
}
if (_enable_errorprone) {
_compile_java_errorprone_target = "${_main_target_name}__errorprone"
@@ -3702,6 +3877,9 @@ if (enable_java_templates) {
javac_args += invoker.errorprone_args
}
deps = [ ":$_header_target_name" ]
+ if (_kt_files != []) {
+ kotlin_jar_path = _kotlinc_jar_path
+ }
header_jar_path = _final_ijar_path
generated_jar_path = _generated_jar_path
output_jar_path = "$target_out_dir/$target_name.errorprone.stamp"
@@ -3816,8 +3994,8 @@ if (enable_java_templates) {
output_jar_path = _host_processed_jar_path
jacoco_instrument = _jacoco_instrument
if (_jacoco_instrument) {
- java_files = _java_files
- java_sources_file = _java_sources_file
+ source_files = _source_files
+ target_sources_file = _target_sources_file
}
# _java_host_deps isn't necessary for process_java_library(), but is
@@ -3851,8 +4029,8 @@ if (enable_java_templates) {
output_jar_path = _device_processed_jar_path
jacoco_instrument = _jacoco_instrument
if (_jacoco_instrument) {
- java_files = _java_files
- java_sources_file = _java_sources_file
+ source_files = _source_files
+ target_sources_file = _target_sources_file
}
}
_process_device_jar_deps = [ ":${_process_device_jar_target_name}" ]
@@ -3863,11 +4041,7 @@ if (enable_java_templates) {
_dex_target_name = "${target_name}__dex"
dex(_dex_target_name) {
- forward_variables_from(invoker,
- [
- "desugar_jars_paths",
- "proguard_enable_obfuscation",
- ])
+ forward_variables_from(invoker, [ "proguard_enable_obfuscation" ])
input_class_jars = [ _device_processed_jar_path ]
enable_desugar = _enable_desugar
ignore_desugar_missing_deps = !_enable_bytecode_checks
@@ -3920,10 +4094,7 @@ if (enable_java_templates) {
if (defined(invoker.wrapper_script_name)) {
script_name = invoker.wrapper_script_name
}
- deps = [
- ":$_build_config_target_name",
- "//third_party/jdk:java_data",
- ]
+ deps = [ ":$_build_config_target_name" ]
if (_is_robolectric) {
# For robolectric tests, we also add the normal sdk jar to the
# classpath since whenever we start using a new Android SDK,
@@ -3932,6 +4103,10 @@ if (enable_java_templates) {
# new SDK classes, so providing our normal SDK will allow these
# classes to resolve. For an example, see crbug.com/1350963.
extra_classpath_jars = [ android_sdk_jar ]
+
+ # Mockito bug with JDK17 requires us to use JDK11 until we find a fix
+ # for crbug.com/1409661.
+ use_jdk_11 = true
}
}
}
diff --git a/build/config/android/linker_version_script.gni b/build/config/android/linker_version_script.gni
index 5d14f7e16..7dab72227 100644
--- a/build/config/android/linker_version_script.gni
+++ b/build/config/android/linker_version_script.gni
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/android/config.gni")
import("//build/config/python.gni")
# Generate a custom linker version script that can later be used with
@@ -27,6 +28,9 @@ template("generate_linker_version_script") {
if (defined(invoker.testonly) && invoker.testonly) {
args += [ "--export-fortesting-java-symbols" ]
}
+ if (allow_jni_multiplexing) {
+ args += [ "--jni-multiplexing" ]
+ }
}
if (defined(invoker.export_feature_registrations) &&
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
index 7ddc160f9..300a3935e 100644
--- a/build/config/android/rules.gni
+++ b/build/config/android/rules.gni
@@ -4,8 +4,6 @@
# Do not add any imports to non-//build directories here.
# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
-
-import("//build/config/android/channel.gni")
import("//build/config/android/config.gni")
import("//build/config/android/copy_ex.gni")
import("//build/config/clang/clang.gni")
@@ -16,13 +14,8 @@ import("//build/config/rts.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/zip.gni")
import("//build/toolchain/toolchain.gni")
-
assert(is_android || is_robolectric)
-declare_args() {
- enable_jni_tracing = false
-}
-
# Use a dedicated include dir so that files can #include headers from other
# toolchains without affecting non-JNI #includes.
if (target_os == "android") {
@@ -222,7 +215,9 @@ if (enable_java_templates) {
if (!is_robolectric && use_hashed_jni_names) {
args += [ "--use_proxy_hash" ]
}
- if (!is_robolectric && enable_jni_multiplexing) {
+
+ if (!is_robolectric && defined(invoker.enable_jni_multiplexing) &&
+ invoker.enable_jni_multiplexing) {
args += [ "--enable_jni_multiplexing" ]
}
if (defined(invoker.namespace)) {
@@ -252,9 +247,6 @@ if (enable_java_templates) {
if (enable_profiling) {
args += [ "--enable_profiling" ]
}
- if (enable_jni_tracing) {
- args += [ "--enable_tracing" ]
- }
if (current_toolchain != default_toolchain && target_os == "android") {
# Rather than regenerating .h files in secondary toolchains, re-use the
# ones from the primary toolchain by depending on it and adding the
@@ -371,7 +363,7 @@ if (enable_java_templates && is_android) {
# targets: List of .build_config.json supported targets to provide java sources.
# manual_jni_registration: Manually do JNI registration - required for feature
# splits which provide their own native library. (optional)
- # sources_exclusions: List of .java files that should be skipped. (optional)
+ # file_exclusions: List of .java files that should be skipped. (optional)
# namespace: Registration functions will be wrapped into this. (optional)
# require_native_mocks: Enforce that any native calls using
# org.chromium.base.annotations.NativeMethods must have a mock set
@@ -387,7 +379,7 @@ if (enable_java_templates && is_android) {
# generate_jni_registration("chrome_jni_registration") {
# targets = [ ":chrome_public_apk" ]
# manual_jni_registration = false
- # sources_exclusions = [
+ # file_exclusions = [
# "//path/to/Exception.java",
# ]
# }
@@ -418,11 +410,11 @@ if (enable_java_templates && is_android) {
inputs += [ _build_config ]
if (defined(invoker.no_transitive_deps) && invoker.no_transitive_deps) {
- args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:java_sources_file)" ]
+ args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:target_sources_file)" ]
} else {
args += [
# This is a list of .sources files.
- "--sources-files=@FileArg($_rebased_build_config:deps_info:jni:all_source)",
+ "--sources-files=@FileArg($_rebased_build_config:deps_info:jni_all_source)",
]
}
}
@@ -432,34 +424,38 @@ if (enable_java_templates && is_android) {
_include_testonly = defined(testonly) && testonly
}
if (_include_testonly) {
- args += [ "--include_test_only" ]
+ args += [ "--include-test-only" ]
}
if (use_hashed_jni_names) {
- args += [ "--use_proxy_hash" ]
+ args += [ "--use-proxy-hash" ]
}
if (defined(invoker.enable_native_mocks) && invoker.enable_native_mocks) {
- args += [ "--enable_proxy_mocks" ]
+ args += [ "--enable-proxy-mocks" ]
if (defined(invoker.require_native_mocks) &&
invoker.require_native_mocks) {
- args += [ "--require_mocks" ]
+ args += [ "--require-mocks" ]
}
}
_manual_jni_registration = defined(invoker.manual_jni_registration) &&
invoker.manual_jni_registration
+ _enable_jni_multiplexing = defined(invoker.enable_jni_multiplexing) &&
+ invoker.enable_jni_multiplexing
+ if (_manual_jni_registration) {
+ args += [ "--manual-jni-registration" ]
+ }
+ if (_enable_jni_multiplexing) {
+ args += [ "--enable-jni-multiplexing" ]
+ }
- if (_manual_jni_registration || enable_jni_multiplexing) {
+ if ((!defined(invoker.prevent_header_output) ||
+ !invoker.prevent_header_output) &&
+ (_manual_jni_registration || _enable_jni_multiplexing)) {
assert(current_toolchain == default_toolchain,
"We do not need >1 toolchain copies of the same header.")
- if (_manual_jni_registration) {
- args += [ "--manual_jni_registration" ]
- }
- if (enable_jni_multiplexing) {
- args += [ "--enable_jni_multiplexing" ]
- }
_subdir = rebase_path(target_gen_dir, root_gen_dir)
_jni_header_output =
@@ -474,15 +470,19 @@ if (enable_java_templates && is_android) {
public_configs = [ "//build/config/android:jni_include_dir" ]
}
- if (defined(invoker.sources_exclusions)) {
- _rebase_sources_exclusions =
- rebase_path(invoker.sources_exclusions, root_build_dir)
- args += [ "--sources-exclusions=$_rebase_sources_exclusions" ]
+ if (defined(invoker.file_exclusions)) {
+ _rebase_file_exclusions =
+ rebase_path(invoker.file_exclusions, root_build_dir)
+ args += [ "--file-exclusions=$_rebase_file_exclusions" ]
}
if (defined(invoker.namespace)) {
args += [ "--namespace=${invoker.namespace}" ]
}
+
+ if (defined(invoker.module_name)) {
+ args += [ "--module-name=${invoker.module_name}" ]
+ }
}
}
@@ -1238,6 +1238,14 @@ if (enable_java_templates && is_android) {
_build_config = "$target_gen_dir/$target_name.build_config.json"
_build_config_target_name = "$target_name$build_config_target_suffix"
+ _sources = []
+ if (defined(invoker.sources)) {
+ _sources = invoker.sources
+ }
+ _renaming_sources = []
+ if (defined(invoker.renaming_sources)) {
+ _renaming_sources = invoker.renaming_sources
+ }
write_build_config(_build_config_target_name) {
type = "android_assets"
build_config = _build_config
@@ -1252,13 +1260,13 @@ if (enable_java_templates && is_android) {
possible_config_deps = invoker.deps
}
- if (defined(invoker.sources)) {
- asset_sources = invoker.sources
+ if (_sources != []) {
+ asset_sources = _sources
}
- if (defined(invoker.renaming_sources)) {
+ if (_renaming_sources != []) {
assert(defined(invoker.renaming_destinations))
_source_count = 0
- foreach(_, invoker.renaming_sources) {
+ foreach(_, _renaming_sources) {
_source_count += 1
}
_dest_count = 0
@@ -1268,14 +1276,36 @@ if (enable_java_templates && is_android) {
assert(
_source_count == _dest_count,
"android_assets() renaming_sources.length != renaming_destinations.length")
- asset_renaming_sources = invoker.renaming_sources
+ asset_renaming_sources = _renaming_sources
asset_renaming_destinations = invoker.renaming_destinations
}
}
- group(target_name) {
- forward_variables_from(invoker, [ "deps" ])
- public_deps = [ ":$_build_config_target_name" ]
+ # Use an action in order to mark sources as "inputs" to a GN target so that
+ # GN will fail if the appropriate deps do not exist, and so that "gn refs"
+ # will know about the sources. We do not add these inputs & deps to the
+ # __build_config target because we want building .build_config.json files
+ # to be fast (and because write_build_config.py does not need the files to
+ # exist).
+ _all_sources = _sources + _renaming_sources
+ if (_all_sources != []) {
+ action(target_name) {
+ forward_variables_from(invoker, [ "deps" ])
+ public_deps = [ ":$_build_config_target_name" ]
+
+ script = "//build/android/gyp/validate_inputs.py"
+ inputs = _all_sources
+ outputs = [ "$target_gen_dir/$target_name.stamp" ]
+ args = [
+ "--stamp",
+ rebase_path(outputs[0], root_build_dir),
+ ] + rebase_path(_all_sources, root_build_dir)
+ }
+ } else {
+ group(target_name) {
+ forward_variables_from(invoker, [ "deps" ])
+ public_deps = [ ":$_build_config_target_name" ]
+ }
}
}
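(Illustrative sketch; target and file names are assumed, not taken from the patch. With the change above, an android_assets() target that lists sources now produces the input-validation action instead of a plain group:)

  android_assets("example_assets") {
    sources = [ "assets/example.json" ]
  }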
@@ -1291,8 +1321,10 @@ if (enable_java_templates && is_android) {
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
_build_config_vars = [
"input_jars_paths",
+ "preferred_dep",
"mergeable_android_manifests",
"proguard_configs",
+ "requires_android",
]
_invoker_deps = []
if (defined(invoker.deps)) {
@@ -1860,6 +1892,8 @@ if (enable_java_templates && is_android) {
# proguard_enabled: Whether to enable R8.
# proguard_configs: List of proguard configs.
# proguard_enable_obfuscation: Whether to enable obfuscation (default=true).
+ # package_name: Used in the Proguard map ID.
+ # version_code: Used in the Proguard map ID.
#
# Example
# dist_dex("lib_fatjar") {
@@ -1893,10 +1927,12 @@ if (enable_java_templates && is_android) {
TESTONLY_AND_VISIBILITY + [
"data",
"data_deps",
+ "package_name",
"proguard_configs",
"proguard_enabled",
"proguard_enable_obfuscation",
"min_sdk_version",
+ "version_code",
])
deps = [ ":$_build_config_target_name" ] + _deps
build_config = _build_config
@@ -1911,7 +1947,7 @@ if (enable_java_templates && is_android) {
} else {
_rebased_build_config = rebase_path(_build_config, root_build_dir)
input_dex_filearg =
- "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)"
+ "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)"
}
}
}
@@ -2069,14 +2105,8 @@ if (enable_java_templates && is_android) {
# Supports all variables of java_library(), plus:
# deps: In addition to defining java deps, this can also include
# android_assets() and android_resources() targets.
- # alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
- # replaces the default android_sdk_ijar.
- # alternative_android_sdk_ijar_dep: the target that generates
- # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
- # is used.
- # alternative_android_sdk_jar: actual jar corresponding to
- # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
- # is used.
+ # alternative_android_sdk_dep: android_system_java_prebuilt target to use
+ # in place of the default android.jar.
#
# Example
# android_library("foo_java") {
@@ -2111,7 +2141,7 @@ if (enable_java_templates && is_android) {
"*/R\$*.class",
"*/Manifest.class",
"*/Manifest\$*.class",
- "*/GEN_JNI.class",
+ "*/*GEN_JNI.class",
]
}
}
@@ -2124,14 +2154,6 @@ if (enable_java_templates && is_android) {
# Supports all variables of java_library(), plus:
# deps: In addition to defining java deps, this can also include
# android_assets() and android_resources() targets.
- # alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
- # replaces the default android_sdk_ijar.
- # alternative_android_sdk_ijar_dep: the target that generates
- # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
- # is used.
- # alternative_android_sdk_jar: actual jar corresponding to
- # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
- # is used.
#
# Example
# robolectric_library("foo_junit") {
@@ -2170,7 +2192,7 @@ if (enable_java_templates && is_android) {
"*/R\$*.class",
"*/Manifest.class",
"*/Manifest\$*.class",
- "*/GEN_JNI.class",
+ "*/*GEN_JNI.class",
]
if (!defined(deps)) {
@@ -2359,9 +2381,7 @@ if (enable_java_templates && is_android) {
# generated is solely controlled by this flag. Otherwise, the default behavior
#   is that NativeLibraries.java will only be generated for the base module/apk when
# its `shared_libraries` is not empty.
- # manual_jni_registration: If true, causes the ${target_name}__final_jni target
- # to additionally output a header file for use with manual JNI registration.
- # jni_sources_exclusions: List of source path to exclude from the
+  #   jni_file_exclusions: List of source paths to exclude from the
# final_jni step.
# aapt_locale_allowlist: If set, all locales not in this list will be
# stripped from resources.arsc.
@@ -2626,10 +2646,10 @@ if (enable_java_templates && is_android) {
if (!_incremental_apk && !_omit_dex) {
# Bundle modules don't build the dex here, but need to write this path
- # to their .build_config.json file.
+ # to their .build_config.json file only when proguarding.
if (_proguard_enabled) {
_final_dex_path = "$_base_path.r8dex.jar"
- } else {
+ } else if (!_is_bundle_module) {
_final_dex_path = "$_base_path.mergeddex.jar"
}
}
@@ -2711,29 +2731,29 @@ if (enable_java_templates && is_android) {
_compile_resources_emit_ids_out =
"${target_gen_dir}/${_compile_resources_target}.resource_ids"
compile_resources(_compile_resources_target) {
- forward_variables_from(invoker,
- [
- "aapt_locale_allowlist",
- "app_as_shared_lib",
- "enforce_resource_overlays_in_tests",
- "expected_android_manifest",
- "expected_android_manifest_base",
- "extra_verification_manifest",
- "extra_verification_manifest_dep",
- "manifest_package",
- "max_sdk_version",
- "no_xml_namespaces",
- "package_id",
- "package_name",
- "png_to_webp",
- "r_java_root_package_name",
- "resource_exclusion_exceptions",
- "resource_exclusion_regex",
- "resource_values_filter_rules",
- "shared_resources",
- "shared_resources_allowlist_locales",
- "uses_split",
- ])
+ forward_variables_from(
+ invoker,
+ [
+ "aapt_locale_allowlist",
+ "app_as_shared_lib",
+ "enforce_resource_overlays_in_tests",
+ "expected_android_manifest",
+ "expected_android_manifest_base",
+ "expected_android_manifest_library_version_offset",
+ "expected_android_manifest_version_code_offset",
+ "manifest_package",
+ "max_sdk_version",
+ "no_xml_namespaces",
+ "package_id",
+ "png_to_webp",
+ "r_java_root_package_name",
+ "resource_exclusion_exceptions",
+ "resource_exclusion_regex",
+ "resource_values_filter_rules",
+ "shared_resources",
+ "shared_resources_allowlist_locales",
+ "uses_split",
+ ])
android_manifest = _android_manifest
android_manifest_dep = ":$_merge_manifest_target"
version_code = _version_code
@@ -2749,6 +2769,10 @@ if (enable_java_templates && is_android) {
resource_ids_provider_dep = _resource_ids_provider_dep
}
+ if (defined(invoker.module_name)) {
+ package_name = invoker.module_name
+ }
+
if (defined(invoker.post_process_package_resources_script)) {
post_process_script = invoker.post_process_package_resources_script
}
@@ -2823,8 +2847,9 @@ if (enable_java_templates && is_android) {
_strip_resource_names =
defined(invoker.strip_resource_names) &&
invoker.strip_resource_names && enable_arsc_obfuscation
- _strip_unused_resources = defined(invoker.strip_unused_resources) &&
- invoker.strip_unused_resources
+ _strip_unused_resources =
+ defined(invoker.strip_unused_resources) &&
+ invoker.strip_unused_resources && enable_unused_resource_stripping
_optimize_resources = _strip_resource_names || _short_resource_paths ||
_strip_unused_resources
}
@@ -2862,6 +2887,13 @@ if (enable_java_templates && is_android) {
resources_config_paths += invoker.resources_config_paths
}
}
+
+ if (_strip_unused_resources) {
+ # Copy the unused resources config to the final bundle output dir.
+ _copy_unused_resources_target =
+ "${_template_name}__copy_unused_resources"
+ _final_deps += [ ":$_copy_unused_resources_target" ]
+ }
} else {
not_needed(invoker, [ "resources_config_paths" ])
}
@@ -2988,8 +3020,8 @@ if (enable_java_templates && is_android) {
generate_jni_registration("${_template_name}__final_jni") {
forward_variables_from(invoker,
[
+ "enable_jni_multiplexing",
"enable_native_mocks",
- "manual_jni_registration",
"require_native_mocks",
])
if (defined(invoker.bundle_target)) {
@@ -2997,9 +3029,10 @@ if (enable_java_templates && is_android) {
} else {
targets = [ ":$_template_name" ]
}
- if (defined(invoker.jni_sources_exclusions)) {
- sources_exclusions = invoker.jni_sources_exclusions
+ if (defined(invoker.jni_file_exclusions)) {
+ file_exclusions = invoker.jni_file_exclusions
}
+ prevent_header_output = true
}
_srcjar_deps += [ ":${_template_name}__final_jni" ]
} else {
@@ -3028,7 +3061,9 @@ if (enable_java_templates && is_android) {
"jacoco_never_instrument",
"jar_excluded_patterns",
"javac_args",
+ "mergeable_android_manifests",
"native_lib_placeholders",
+ "parent_module_target",
"processor_args_javac",
"secondary_abi_loadable_modules",
"secondary_native_lib_placeholders",
@@ -3040,7 +3075,11 @@ if (enable_java_templates && is_android) {
if (_is_bundle_module) {
type = "android_app_bundle_module"
res_size_info_path = _res_size_info_path
- is_base_module = _is_base_module
+ if (defined(invoker.module_name)) {
+ module_name = invoker.module_name
+ } else {
+ module_name = "base"
+ }
add_view_trace_events = _add_view_trace_events
} else {
type = "android_apk"
@@ -3107,8 +3146,9 @@ if (enable_java_templates && is_android) {
}
}
- if ((_is_bundle_module && _proguard_enabled) || _omit_dex) {
- # No library dep needed.
+ if (_is_bundle_module || _omit_dex) {
+      # Dex generation for app bundle modules takes place in the
+ # android_app_bundle template.
not_needed(invoker, [ "custom_assertion_handler" ])
} else if (_incremental_apk) {
not_needed(invoker,
@@ -3117,8 +3157,6 @@ if (enable_java_templates && is_android) {
"custom_assertion_handler",
])
} else {
- # Dex generation for app bundle modules with proguarding enabled takes
- # place later due to synchronized proguarding.
_final_dex_target_name = "${_template_name}__final_dex"
dex(_final_dex_target_name) {
forward_variables_from(invoker,
@@ -3140,19 +3178,17 @@ if (enable_java_templates && is_android) {
# Generates proguard configs
deps += [ ":$_compile_resources_target" ]
proguard_mapping_path = _proguard_mapping_path
- proguard_sourcefile_suffix = "$android_channel-$_version_code"
has_apk_under_test = defined(invoker.apk_under_test)
} else {
if (_min_sdk_version >= default_min_sdk_version) {
# Enable dex merging only when min_sdk_version is >= what the library
# .dex files were created with.
input_dex_filearg =
- "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)"
+ "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)"
# Pure dex-merge.
enable_desugar = false
} else {
- unprocessed_jar_path_filearg = "@FileArg(${_rebased_build_config}:deps_info:unprocessed_jar_path)"
input_classes_filearg =
"@FileArg($_rebased_build_config:deps_info:device_classpath)"
}
@@ -3314,7 +3350,7 @@ if (enable_java_templates && is_android) {
args = [
"--apk-path=$_rebased_incremental_apk_path",
"--output-path=$_rebased_incremental_install_json_path",
- "--dex-file=@FileArg($_rebased_build_config:final_dex:all_dex_files)",
+ "--dex-file=@FileArg($_rebased_build_config:deps_info:all_dex_files)",
]
if (_proguard_enabled) {
args += [ "--show-proguard-warning" ]
@@ -3496,93 +3532,97 @@ if (enable_java_templates && is_android) {
# TODO(crbug.com/1042017): Remove.
not_needed(invoker, [ "no_build_hooks" ])
android_apk_or_module(target_name) {
- forward_variables_from(invoker,
- [
- "aapt_locale_allowlist",
- "additional_jar_files",
- "alternative_android_sdk_dep",
- "android_manifest",
- "android_manifest_dep",
- "annotation_processor_deps",
- "apk_under_test",
- "app_as_shared_lib",
- "assert_no_deps",
- "build_config_include_product_version_resource",
- "bundles_supported",
- "chromium_code",
- "command_line_flags_file",
- "create_apk_script",
- "custom_assertion_handler",
- "data",
- "data_deps",
- "deps",
- "enable_lint",
- "enable_multidex",
- "enable_native_mocks",
- "enable_proguard_checks",
- "enforce_resource_overlays_in_tests",
- "expected_android_manifest",
- "expected_android_manifest_base",
- "expected_libs_and_assets",
- "expected_libs_and_assets_base",
- "generate_buildconfig_java",
- "generate_final_jni",
- "generate_native_libraries_java",
- "include_size_info",
- "input_jars_paths",
- "jacoco_never_instrument",
- "javac_args",
- "jni_sources_exclusions",
- "keystore_name",
- "keystore_password",
- "keystore_path",
- "lint_baseline_file",
- "lint_min_sdk_version",
- "lint_suppressions_dep",
- "lint_suppressions_file",
- "loadable_modules",
- "manifest_package",
- "manual_jni_registration",
- "max_sdk_version",
- "product_config_java_packages",
- "main_component_library",
- "min_sdk_version",
- "native_lib_placeholders",
- "never_incremental",
- "no_xml_namespaces",
- "omit_dex",
- "png_to_webp",
- "post_process_package_resources_script",
- "processor_args_javac",
- "proguard_configs",
- "proguard_enabled",
- "proguard_enable_obfuscation",
- "r_java_root_package_name",
- "resource_exclusion_exceptions",
- "resource_exclusion_regex",
- "resource_ids_provider_dep",
- "resource_values_filter_rules",
- "require_native_mocks",
- "secondary_abi_loadable_modules",
- "secondary_abi_shared_libraries",
- "secondary_native_lib_placeholders",
- "shared_libraries",
- "shared_resources",
- "shared_resources_allowlist_locales",
- "shared_resources_allowlist_target",
- "sources",
- "srcjar_deps",
- "static_library_provider",
- "static_library_provider_use_secondary_abi",
- "target_sdk_version",
- "testonly",
- "uncompress_dex",
- "library_always_compress",
- "use_chromium_linker",
- "version_code",
- "version_name",
- "visibility",
- ])
+ forward_variables_from(
+ invoker,
+ [
+ "aapt_locale_allowlist",
+ "additional_jar_files",
+ "alternative_android_sdk_dep",
+ "android_manifest",
+ "android_manifest_dep",
+ "annotation_processor_deps",
+ "apk_under_test",
+ "app_as_shared_lib",
+ "assert_no_deps",
+ "build_config_include_product_version_resource",
+ "bundles_supported",
+ "chromium_code",
+ "command_line_flags_file",
+ "create_apk_script",
+ "custom_assertion_handler",
+ "data",
+ "data_deps",
+ "deps",
+ "enable_lint",
+ "enable_jni_multiplexing",
+ "enable_multidex",
+ "enable_native_mocks",
+ "enable_proguard_checks",
+ "enforce_resource_overlays_in_tests",
+ "expected_android_manifest",
+ "expected_android_manifest_base",
+ "expected_android_manifest_library_version_offset",
+ "expected_android_manifest_version_code_offset",
+ "expected_libs_and_assets",
+ "expected_libs_and_assets_base",
+ "generate_buildconfig_java",
+ "generate_final_jni",
+ "generate_native_libraries_java",
+ "include_size_info",
+ "input_jars_paths",
+ "jacoco_never_instrument",
+ "javac_args",
+ "jni_file_exclusions",
+ "keystore_name",
+ "keystore_password",
+ "keystore_path",
+ "lint_baseline_file",
+ "lint_min_sdk_version",
+ "lint_suppressions_dep",
+ "lint_suppressions_file",
+ "loadable_modules",
+ "manifest_package",
+ "max_sdk_version",
+ "mergeable_android_manifests",
+ "product_config_java_packages",
+ "main_component_library",
+ "min_sdk_version",
+ "native_lib_placeholders",
+ "never_incremental",
+ "no_xml_namespaces",
+ "omit_dex",
+ "png_to_webp",
+ "post_process_package_resources_script",
+ "processor_args_javac",
+ "proguard_configs",
+ "proguard_enabled",
+ "proguard_enable_obfuscation",
+ "r_java_root_package_name",
+ "resource_exclusion_exceptions",
+ "resource_exclusion_regex",
+ "resource_ids_provider_dep",
+ "resource_values_filter_rules",
+ "require_native_mocks",
+ "secondary_abi_loadable_modules",
+ "secondary_abi_shared_libraries",
+ "secondary_native_lib_placeholders",
+ "shared_libraries",
+ "shared_resources",
+ "shared_resources_allowlist_locales",
+ "shared_resources_allowlist_target",
+ "sources",
+ "srcjar_deps",
+ "static_library_provider",
+ "static_library_provider_use_secondary_abi",
+ "target_sdk_version",
+ "testonly",
+ "uncompress_dex",
+ "library_always_compress",
+ "use_chromium_linker",
+ "version_code",
+ "version_name",
+ "visibility",
+ ])
is_bundle_module = false
name = invoker.apk_name
if (defined(invoker.final_apk_path)) {
@@ -3625,6 +3665,15 @@ if (enable_java_templates && is_android) {
assert(!defined(invoker.bundle_target))
}
+ # android_app_bundle's write_build_config expects module targets to be named
+  # according to java_target_patterns, otherwise it ignores them when listed in
+ # possible_config_deps. See https://crbug.com/1418398.
+ if (filter_exclude([ target_name ], [ "*_bundle_module" ]) != []) {
+ assert(false,
+ "Invalid android_app_bundle_module target name ($target_name), " +
+ "must end in _bundle_module.")
+ }
+
# TODO(tiborg): We have several flags that are necessary for workarounds
# that come from the fact that the resources get compiled in the bundle
# module target, but bundle modules have to have certain flags in
@@ -3634,85 +3683,92 @@ if (enable_java_templates && is_android) {
# target. Doing so would keep the bundle modules independent from the bundle
# and potentially reuse the same bundle modules for multiple bundles.
android_apk_or_module(target_name) {
- forward_variables_from(invoker,
- [
- "add_view_trace_events",
- "aapt_locale_allowlist",
- "additional_jar_files",
- "alternative_android_sdk_dep",
- "android_manifest",
- "android_manifest_dep",
- "annotation_processor_deps",
- "app_as_shared_lib",
- "assert_no_deps",
- "base_module_target",
- "build_config_include_product_version_resource",
- "bundle_target",
- "chromium_code",
- "custom_assertion_handler",
- "data",
- "data_deps",
- "deps",
- "enable_multidex",
- "expected_android_manifest",
- "expected_android_manifest_base",
- "extra_verification_manifest",
- "extra_verification_manifest_dep",
- "generate_buildconfig_java",
- "generate_final_jni",
- "generate_native_libraries_java",
- "input_jars_paths",
- "isolated_splits_enabled",
- "is_base_module",
- "jacoco_never_instrument",
- "jar_excluded_patterns",
- "javac_args",
- "jni_sources_exclusions",
- "loadable_modules",
- "product_config_java_packages",
- "main_component_library",
- "manifest_package",
- "manual_jni_registration",
- "max_sdk_version",
- "min_sdk_version",
- "native_lib_placeholders",
- "no_xml_namespaces",
- "package_id",
- "package_name",
- "png_to_webp",
- "processor_args_javac",
- "proguard_configs",
- "proguard_enabled",
- "proguard_enable_obfuscation",
- "resource_exclusion_exceptions",
- "resource_exclusion_regex",
- "resource_ids_provider_dep",
- "resource_values_filter_rules",
- "resources_config_paths",
- "secondary_abi_loadable_modules",
- "secondary_abi_shared_libraries",
- "secondary_native_lib_placeholders",
- "shared_libraries",
- "shared_resources",
- "shared_resources_allowlist_locales",
- "shared_resources_allowlist_target",
- "short_resource_paths",
- "srcjar_deps",
- "static_library_provider",
- "static_library_provider_use_secondary_abi",
- "strip_resource_names",
- "strip_unused_resources",
- "target_sdk_version",
- "testonly",
- "library_always_compress",
- "use_chromium_linker",
- "uses_split",
- "version_code",
- "version_name",
- "visibility",
- ])
+ forward_variables_from(
+ invoker,
+ [
+ "add_view_trace_events",
+ "aapt_locale_allowlist",
+ "additional_jar_files",
+ "alternative_android_sdk_dep",
+ "android_manifest",
+ "android_manifest_dep",
+ "annotation_processor_deps",
+ "app_as_shared_lib",
+ "assert_no_deps",
+ "base_module_target",
+ "build_config_include_product_version_resource",
+ "bundle_target",
+ "chromium_code",
+ "custom_assertion_handler",
+ "data",
+ "data_deps",
+ "deps",
+ "enable_jni_multiplexing",
+ "enable_multidex",
+ "expected_android_manifest",
+ "expected_android_manifest_base",
+ "expected_android_manifest_library_version_offset",
+ "expected_android_manifest_version_code_offset",
+ "generate_buildconfig_java",
+ "generate_final_jni",
+ "generate_native_libraries_java",
+ "input_jars_paths",
+ "isolated_splits_enabled",
+ "is_base_module",
+ "jacoco_never_instrument",
+ "jar_excluded_patterns",
+ "javac_args",
+ "jni_file_exclusions",
+ "loadable_modules",
+ "product_config_java_packages",
+ "main_component_library",
+ "manifest_package",
+ "max_sdk_version",
+ "min_sdk_version",
+ "mergeable_android_manifests",
+ "module_name",
+ "native_lib_placeholders",
+ "no_xml_namespaces",
+ "package_id",
+ "parent_module_target",
+ "png_to_webp",
+ "processor_args_javac",
+ "proguard_configs",
+ "proguard_enabled",
+ "proguard_enable_obfuscation",
+ "resource_exclusion_exceptions",
+ "resource_exclusion_regex",
+ "resource_ids_provider_dep",
+ "resource_values_filter_rules",
+ "resources_config_paths",
+ "secondary_abi_loadable_modules",
+ "secondary_abi_shared_libraries",
+ "secondary_native_lib_placeholders",
+ "shared_libraries",
+ "shared_resources",
+ "shared_resources_allowlist_locales",
+ "shared_resources_allowlist_target",
+ "short_resource_paths",
+ "srcjar_deps",
+ "static_library_provider",
+ "static_library_provider_use_secondary_abi",
+ "strip_resource_names",
+ "strip_unused_resources",
+ "target_sdk_version",
+ "testonly",
+ "library_always_compress",
+ "use_chromium_linker",
+ "uses_split",
+ "version_code",
+ "version_name",
+ "visibility",
+ ])
is_bundle_module = true
generate_buildconfig_java = _is_base_module
+ if (defined(uses_split)) {
+ assert(defined(parent_module_target),
+ "Must set parent_module_target when uses_split is set")
+ }
}
}
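(Illustrative sketch; all names are assumed and other required parameters of android_app_bundle_module() are omitted. This shows a module target name that satisfies the "*_bundle_module" check added earlier in this template; per the assert above, a module that sets uses_split must also set parent_module_target:)

  android_app_bundle_module("foo_feature_bundle_module") {
    android_manifest = "java/AndroidManifest.xml"
    module_name = "foo_feature"
  }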
@@ -4570,6 +4626,7 @@ if (enable_java_templates && is_android) {
_assets_target_name = "${target_name}__assets"
android_assets(_assets_target_name) {
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+ deps = [ ":$_unpack_target_name" ]
renaming_sources = []
renaming_destinations = []
foreach(_asset_file, _scanned_files.assets) {
@@ -4583,9 +4640,12 @@ if (enable_java_templates && is_android) {
}
}
+ _target_label = get_label_info(":$target_name", "label_no_toolchain")
+
# Create android_java_prebuilt target for classes.jar.
if (_scanned_files.has_classes_jar) {
_java_library_vars = [
+ "alternative_android_sdk_dep",
"bytecode_rewriter_target",
"enable_bytecode_checks",
"jar_excluded_patterns",
@@ -4610,7 +4670,7 @@ if (enable_java_templates && is_android) {
jar_path = "$_output_path/${_tuple[1]}"
_base_output_name = get_path_info(jar_path, "name")
output_name = "${invoker.target_name}-$_base_output_name"
- public_target_label = invoker.target_name
+ public_target_label = _target_label
}
}
@@ -4646,13 +4706,16 @@ if (enable_java_templates && is_android) {
proguard_configs += [ "$_output_path/proguard.txt" ]
}
}
- public_target_label = invoker.target_name
+ public_target_label = _target_label
}
}
java_group(target_name) {
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
public_deps = [ ":$_unpack_target_name" ]
+ if (defined(invoker.public_deps)) {
+ public_deps += invoker.public_deps
+ }
deps = []
if (defined(_jar_target_name)) {
deps += [ ":$_jar_target_name" ]
@@ -4772,19 +4835,11 @@ if (enable_java_templates && is_android) {
_proguard_enabled =
defined(invoker.proguard_enabled) && invoker.proguard_enabled
- if (defined(invoker.version_code)) {
- _version_code = invoker.version_code
- } else {
- _version_code = android_default_version_code
- }
-
- if (android_override_version_code != "") {
- _version_code = android_override_version_code
+ _min_sdk_version = default_min_sdk_version
+ if (defined(invoker.min_sdk_version)) {
+ _min_sdk_version = invoker.min_sdk_version
}
- # Prevent "unused variable".
- not_needed([ "_version_code" ])
-
_bundle_base_path = "$root_build_dir/apks"
if (defined(invoker.bundle_base_path)) {
_bundle_base_path = invoker.bundle_base_path
@@ -4819,12 +4874,6 @@ if (enable_java_templates && is_android) {
},
]
- _enable_multidex =
- !defined(invoker.enable_multidex) || invoker.enable_multidex
-
- # Prevent "unused variable".
- not_needed([ "_enable_multidex" ])
-
if (_proguard_enabled) {
_dex_target = "${_target_name}__dex"
_proguard_mapping_path = "${_bundle_path}.mapping"
@@ -4952,6 +5001,7 @@ if (enable_java_templates && is_android) {
build_config = _build_config
proguard_enabled = _proguard_enabled
module_build_configs = _module_build_configs
+ modules = _modules
if (_proguard_enabled) {
add_view_trace_events = _add_view_trace_events
@@ -4960,8 +5010,6 @@ if (enable_java_templates && is_android) {
}
if (_proguard_enabled) {
- # If this Bundle uses a static library, the static library APK will
- # create the synchronized dex file path.
if (_add_view_trace_events) {
_trace_event_rewriter_target =
"//build/android/bytecode:trace_event_adder"
@@ -5005,17 +5053,15 @@ if (enable_java_templates && is_android) {
"custom_assertion_handler",
"expected_proguard_config",
"expected_proguard_config_base",
- "min_sdk_version",
"proguard_enable_obfuscation",
])
if (defined(expected_proguard_config)) {
top_target_name = _target_name
}
+ min_sdk_version = _min_sdk_version
add_view_trace_events = _add_view_trace_events
- enable_multidex = _enable_multidex
proguard_enabled = true
proguard_mapping_path = _proguard_mapping_path
- proguard_sourcefile_suffix = "$android_channel-$_version_code"
build_config = _build_config
deps = _module_java_targets + [ ":$_build_config_target" ]
@@ -5036,10 +5082,26 @@ if (enable_java_templates && is_android) {
_module_build_config_target = _module.build_config_target
if (!_proguard_enabled) {
- _dex_target_for_module = "${_module_target}__final_dex"
- } else {
- _dex_target_for_module = ":$_dex_target"
+ _module_target_name = get_label_info(_module_target, "name")
+ _dex_target = "${_module_target_name}__final_dex"
+ _dex_path = "$target_out_dir/$_module_target_name/$_module_target_name.mergeddex.jar"
+ dex(_dex_target) {
+ forward_variables_from(invoker, [ "custom_assertion_handler" ])
+ min_sdk_version = _min_sdk_version
+ output = _dex_path
+ build_config = _build_config
+
+ # This will be a pure dex-merge.
+ input_dex_filearg = "@FileArg($_rebased_build_config:modules:${_module.name}:all_dex_files)"
+ enable_desugar = false
+
+ deps = [
+ ":$_build_config_target",
+ ":${_module_target_name}__java",
+ ]
+ }
}
+ _dex_target_for_module = ":$_dex_target"
# Generate one module .zip file per bundle module.
#
@@ -5052,13 +5114,17 @@ if (enable_java_templates && is_android) {
forward_variables_from(invoker,
[
"is_multi_abi",
- "min_sdk_version",
"uncompress_dex",
])
module_name = _module.name
+ min_sdk_version = _min_sdk_version
build_config = _module_build_config
module_zip_path = _module_zip_path
native_libraries_config = _native_libraries_config
+ if (!_proguard_enabled) {
+ dex_path = _dex_path
+ # dex_path is read from the build_config in the proguard case.
+ }
if (module_name == "base" &&
defined(invoker.expected_libs_and_assets)) {
@@ -5123,6 +5189,14 @@ if (enable_java_templates && is_android) {
output_config = _unused_resources_config
output_r_txt = _unused_resources_r_txt_out
}
+ _unused_resources_final_path = "${_bundle_path}.unused_resources"
+ _copy_unused_resources_target =
+ "${_base_target_name}__copy_unused_resources"
+ copy(_copy_unused_resources_target) {
+ deps = [ ":$_unused_resources_target" ]
+ sources = [ _unused_resources_config ]
+ outputs = [ _unused_resources_final_path ]
+ }
}
_all_rebased_module_zip_paths =
@@ -5168,10 +5242,6 @@ if (enable_java_templates && is_android) {
invoker.compress_shared_libraries) {
args += [ "--compress-shared-libraries" ]
}
- _min_sdk_version = default_min_sdk_version
- if (defined(invoker.min_sdk_version)) {
- _min_sdk_version = invoker.min_sdk_version
- }
# Android P+ support loading from stored dex.
if (_min_sdk_version < 27) {
@@ -5190,7 +5260,7 @@ if (enable_java_templates && is_android) {
}
if (_enable_language_splits) {
- args += [ "--base-allowlist-rtxt-path=@FileArg(" + "${_rebased_base_module_build_config}:deps_info:base_allowlist_rtxt_path)" ]
+ args += [ "--base-allowlist-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:base_allowlist_rtxt_path)" ]
if (_strip_unused_resources) {
# Use the stripped out rtxt file to set resources that are pinned to
# the default language split.
@@ -5201,9 +5271,7 @@ if (enable_java_templates && is_android) {
args +=
[ "--base-module-rtxt-path=$_rebased_unused_resources_r_txt_out" ]
} else {
- args +=
- [ "--base-module-rtxt-path=@FileArg(" +
- "${_rebased_base_module_build_config}:deps_info:r_text_path)" ]
+ args += [ "--base-module-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:r_text_path)" ]
}
}
if (defined(invoker.validate_services) && invoker.validate_services) {
@@ -5287,8 +5355,7 @@ if (enable_java_templates && is_android) {
args = [
"--script-output-path",
rebase_path(_bundle_wrapper_script_path, root_build_dir),
- "--package-name=@FileArg(" +
- "$_rebased_base_module_build_config:deps_info:package_name)",
+ "--package-name=@FileArg($_rebased_base_module_build_config:deps_info:package_name)",
"--aapt2",
rebase_path(_android_aapt2_path, root_build_dir),
"--bundle-path",
@@ -5350,7 +5417,6 @@ if (enable_java_templates && is_android) {
"lint_baseline_file",
"lint_jar_path",
"lint_suppressions_file",
- "min_sdk_version",
])
build_config = _build_config
build_config_dep = ":$_build_config_target"
@@ -5360,6 +5426,8 @@ if (enable_java_templates && is_android) {
}
if (defined(invoker.lint_min_sdk_version)) {
min_sdk_version = invoker.lint_min_sdk_version
+ } else {
+ min_sdk_version = _min_sdk_version
}
}
} else {
diff --git a/build/config/android/sdk.gni b/build/config/android/sdk.gni
index 52ef8bc19..fb39315c4 100644
--- a/build/config/android/sdk.gni
+++ b/build/config/android/sdk.gni
@@ -7,4 +7,7 @@
default_android_sdk_release = "t"
# SDK releases against which public builds are supported.
-public_sdk_releases = [ "t" ]
+public_sdk_releases = [
+ "t",
+ "tprivacysandbox",
+]
diff --git a/build/config/android/test/classpath_order/BUILD.gn b/build/config/android/test/classpath_order/BUILD.gn
deleted file mode 100644
index 376d244b1..000000000
--- a/build/config/android/test/classpath_order/BUILD.gn
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/config/android/rules.gni")
-
-template("test_resources") {
- jinja_template_resources(target_name) {
- forward_variables_from(invoker, "*")
- testonly = true
- variables = [ "resource_name=$resource_name" ]
- res_dir = "java/res_template"
- resources = [ "java/res_template/values/values.xml" ]
- }
-}
-
-template("generate_dummy_android_library") {
- # No underscores to avoid crbug.com/908819.
- _generate_java_source_target_name = "${target_name}generatejavasource"
- jinja_template(_generate_java_source_target_name) {
- testonly = true
- input = "java/src/org/chromium/build/classpath_order/Dummy.java.jinja2"
- output = "$target_gen_dir/java/src/org/chromium/build/classpath_order/${invoker.class_name}.java"
- variables = [ "class_name=${invoker.class_name}" ]
- }
-
- android_library(target_name) {
- forward_variables_from(invoker, "*")
-
- if (!defined(invoker.deps)) {
- deps = []
- }
-
- sources = get_target_outputs(":${_generate_java_source_target_name}")
- deps += [ ":${_generate_java_source_target_name}" ]
- }
-}
-
-# Test that classpath order keeps resources accessible when multiple targets generate
-# resources for the same package. Specifically, test that an android_library precedes
-# its dependencies regardless of the relative lexographic order.
-
-test_resources("a1_dependency_resources") {
- resource_name = "a1_dependency_resource"
-}
-
-generate_dummy_android_library("a1_dependency_java") {
- testonly = true
- class_name = "A1Dependency"
- resources_package = "org.chromium.build.classpath_order.test1"
- deps = [ ":a1_dependency_resources" ]
-}
-
-test_resources("z1_master_resources") {
- resource_name = "z1_master_resource"
- deps = [ ":a1_dependency_resources" ]
-}
-
-generate_dummy_android_library("z1_master_java") {
- testonly = true
- class_name = "Z1Master"
- resources_package = "org.chromium.build.classpath_order.test1"
- deps = [
- ":a1_dependency_java",
- ":z1_master_resources",
- ]
-}
-
-test_resources("z2_dependency_resources") {
- resource_name = "z2_dependency_resource"
-}
-
-generate_dummy_android_library("z2_dependency_java") {
- testonly = true
- class_name = "Z2Dependency"
- resources_package = "org.chromium.build.classpath_order.test2"
- deps = [ ":z2_dependency_resources" ]
-}
-
-test_resources("a2_master_resources") {
- resource_name = "a2_master_resource"
- deps = [ ":z2_dependency_resources" ]
-}
-
-generate_dummy_android_library("a2_master_java") {
- testonly = true
- class_name = "A2Master"
- resources_package = "org.chromium.build.classpath_order.test2"
- deps = [
- ":a2_master_resources",
- ":z2_dependency_java",
- ]
-}
-
-robolectric_library("junit_tests") {
- sources =
- [ "java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java" ]
- deps = [
- ":a1_dependency_java",
- ":a2_master_java",
- ":z1_master_java",
- ":z2_dependency_java",
- "//testing/android/junit:junit_test_support",
- "//third_party/android_support_test_runner:runner_java",
- "//third_party/androidx:androidx_test_runner_java",
- "//third_party/junit",
- ]
-}
diff --git a/build/config/android/test/classpath_order/java/res_template/values/values.xml b/build/config/android/test/classpath_order/java/res_template/values/values.xml
deleted file mode 100644
index c163c8895..000000000
--- a/build/config/android/test/classpath_order/java/res_template/values/values.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright 2021 The Chromium Authors
- Use of this source code is governed by a BSD-style license that can be
- found in the LICENSE file. -->
-
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android">
- <integer name="{{resource_name}}">42</integer>
-</resources>
diff --git a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java b/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java
deleted file mode 100644
index 8d1c44483..000000000
--- a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2021 The Chromium Authors
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.build.classpath_order;
-
-import static org.junit.Assert.assertTrue;
-
-import androidx.test.filters.SmallTest;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.robolectric.annotation.Config;
-
-import org.chromium.testing.local.LocalRobolectricTestRunner;
-
-/**
- * Test that resources defined in different android_resources() targets but with the same
- * package are accessible.
- */
-@RunWith(LocalRobolectricTestRunner.class)
-@Config(manifest = Config.NONE)
-public final class ClassPathOrderTest {
- @Test
- @SmallTest
- public void testAll() {
- assertTrue(org.chromium.build.classpath_order.test1.R.integer.a1_dependency_resource >= 0);
- assertTrue(org.chromium.build.classpath_order.test1.R.integer.z1_master_resource >= 0);
- assertTrue(org.chromium.build.classpath_order.test2.R.integer.z2_dependency_resource >= 0);
- assertTrue(org.chromium.build.classpath_order.test2.R.integer.a2_master_resource >= 0);
- }
-}
diff --git a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 b/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2
deleted file mode 100644
index 9979f71bf..000000000
--- a/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright 2021 The Chromium Authors
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.build.classpath_order;
-
-public class {{class_name}} {
-}
diff --git a/build/config/android/test/resource_overlay/BUILD.gn b/build/config/android/test/resource_overlay/BUILD.gn
index 0a5812c65..3b7936384 100644
--- a/build/config/android/test/resource_overlay/BUILD.gn
+++ b/build/config/android/test/resource_overlay/BUILD.gn
@@ -53,7 +53,7 @@ android_library("unit_device_javatests") {
":dependency_tagged_root_resources",
":root_tagged_root_resources",
"//base:base_java_test_support",
- "//third_party/android_support_test_runner:runner_java",
+ "//third_party/androidx:androidx_test_monitor_java",
"//third_party/androidx:androidx_test_runner_java",
"//third_party/junit",
]
diff --git a/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java b/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java
index 8da993839..d42450e5e 100644
--- a/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java
+++ b/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java
@@ -7,8 +7,8 @@ package org.chromium.build.resource_overlay;
import static org.junit.Assert.assertEquals;
import android.content.res.Resources;
-import android.support.test.InstrumentationRegistry;
+import androidx.test.InstrumentationRegistry;
import androidx.test.filters.SmallTest;
import org.junit.Test;
diff --git a/build/config/apple/BUILD.gn b/build/config/apple/BUILD.gn
new file mode 100644
index 000000000..add2395d6
--- /dev/null
+++ b/build/config/apple/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/apple/symbols.gni")
+
+# The ldflags referenced below are handled by
+# //build/toolchain/apple/linker_driver.py.
+# Remove this config if a target wishes to change the arguments passed to the
+# strip command during linking. This config by default strips all symbols
+# from a binary, but some targets may wish to specify an exports file to
+# preserve specific symbols.
+config("strip_all") {
+ if (enable_stripping) {
+ ldflags = [ "-Wcrl,strip,-x,-S" ]
+ }
+}
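(Illustrative sketch; the target name and the exact strip arguments are assumed. This is the opt-out described in the comment above: a linked target removes the default config and passes its own -Wcrl,strip flags, which linker_driver.py interprets:)

  executable("needs_exported_symbols") {
    sources = [ "main.cc" ]
    configs -= [ "//build/config/apple:strip_all" ]
    ldflags = [ "-Wcrl,strip,-x" ]
  }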
diff --git a/build/config/apple/symbols.gni b/build/config/apple/symbols.gni
index ef6faf0a2..3b4dee447 100644
--- a/build/config/apple/symbols.gni
+++ b/build/config/apple/symbols.gni
@@ -17,7 +17,7 @@ declare_args() {
enable_dsyms = is_official_build || using_sanitizer
# Strip symbols from linked targets by default. If this is enabled, the
- # //build/config/mac:strip_all config will be applied to all linked targets.
+ # //build/config/apple:strip_all config will be applied to all linked targets.
# If custom stripping parameters are required, remove that config from a
# linked target and apply custom -Wcrl,strip flags. See
# //build/toolchain/apple/linker_driver.py for more information.
diff --git a/build/config/arm.gni b/build/config/arm.gni
index b50fd629f..cc82ed5ba 100644
--- a/build/config/arm.gni
+++ b/build/config/arm.gni
@@ -133,16 +133,14 @@ if (current_cpu == "arm" || v8_current_cpu == "arm") {
# - "pac": Enables Pointer Authentication Code (PAC, featured in Armv8.3)
# - "standard": Enables both PAC and Branch Target Identification (Armv8.5).
# - "none": No branch protection.
- arm_control_flow_integrity = "standard"
-
- # TODO(cavalcantii): enable the feature for the following OSes next.
- if (is_mac || is_chromeos || is_fuchsia || is_win ||
- target_cpu != "arm64") {
- # target_cpu != "arm64" covers some cases (e.g. the ChromeOS x64 MSAN
- # build) where the target platform is x64, but V8 is configured to use
- # the arm64 simulator. Pointer authentication doesn't work properly
- # in this mode (yet).
- arm_control_flow_integrity = "none"
+ arm_control_flow_integrity = "none"
+
+ if ((is_android || is_linux) && target_cpu == "arm64") {
+ # Enable PAC and BTI on AArch64 Linux/Android systems.
+ # target_cpu == "arm64" filters out some cases (e.g. the ChromeOS x64
+ # MSAN build) where the target platform is x64, but V8 is configured to
+ # use the arm64 simulator.
+ arm_control_flow_integrity = "standard"
}
}
assert(arm_control_flow_integrity == "none" ||
diff --git a/build/config/c++/BUILD.gn b/build/config/c++/BUILD.gn
index a58196c4b..c00dcef4c 100644
--- a/build/config/c++/BUILD.gn
+++ b/build/config/c++/BUILD.gn
@@ -94,19 +94,7 @@ config("runtime_library") {
]
}
}
- if (enable_safe_libcxx) {
- defines += [
- "_LIBCPP_ENABLE_ASSERTIONS_DEFAULT=1",
-
- # The following macro is a kind of a misnomer in that it only controls the
- # default verbose abort handler in the header <__verbose_abort> and not
- # the default libc++ verbose abort handler in verbose_abort.cpp. We
- # define this macro so that the <__verbose_abort> header only includes
- # the declaration for the handler function and not the definition as we
- # rely on the handler in //base:nodebug_assertion for nondebug executables
- # that include base and the default handler in verbose_abort.cpp
- # elsewhere.
- "_LIBCPP_AVAILABILITY_CUSTOM_VERBOSE_ABORT_PROVIDED=1",
- ]
+ if (use_custom_libcxx && enable_safe_libcxx) {
+ defines += [ "_LIBCPP_ENABLE_ASSERTIONS=1" ]
}
}
diff --git a/build/config/c++/c++.gni b/build/config/c++/c++.gni
index e4dae41c3..25ece4cdb 100644
--- a/build/config/c++/c++.gni
+++ b/build/config/c++/c++.gni
@@ -2,8 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//build/config/chromeos/ui_mode.gni")
import("//build/config/sanitizers/sanitizers.gni")
+import("//build_overrides/build.gni")
declare_args() {
# Use in-tree libc++ (buildtools/third_party/libc++ and
@@ -11,10 +11,8 @@ declare_args() {
# standard library support.
# Don't check in changes that set this to false for more platforms; doing so
# is not supported.
- use_custom_libcxx =
- is_fuchsia || is_android || is_apple || is_linux || is_chromeos_lacros ||
- (is_win && is_clang) ||
- (is_chromeos && default_toolchain != "//build/toolchain/cros:target")
+ use_custom_libcxx = is_fuchsia || is_android || is_apple || is_linux ||
+ is_chromeos || (is_win && is_clang)
# Use libc++ instead of stdlibc++ when using the host_cpu toolchain, even if
# use_custom_libcxx is false. This is useful for cross-compiles where a custom
@@ -55,13 +53,12 @@ declare_args() {
libcxx_is_shared = use_custom_libcxx && is_component_build
}
-# Enables assertions on safety checks in libc++.
-#
-# Typically, this should be set to true whenever we're using our custom libc++
-# in order to guard against undefined behavior. However, the asserts added by
-# hardening libc++ may introduce some performance overhead, so for now, we
-# pull this out into a separate variable to make it easier to run tests.
-enable_safe_libcxx = use_custom_libcxx
+# TODO(https://crbug.com/1385662): This is temporarily guarded to make it easier
+# to roll out this change. Once the various projects (ANGLE, v8, et cetera)
+# rolling in Chrome's //build have updated, remove this entirely.
+if (!defined(enable_safe_libcxx)) {
+ enable_safe_libcxx = true
+}
# libc++abi needs to be exported from executables to be picked up by shared
# libraries on certain instrumented builds.
diff --git a/build/config/c++/libc++.natvis b/build/config/c++/libc++.natvis
index 70c9840c5..6378548dd 100644
--- a/build/config/c++/libc++.natvis
+++ b/build/config/c++/libc++.natvis
@@ -190,7 +190,7 @@
</RightPointer>
<ValueNode>
((std::Cr::map&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
- -&gt;__value_.__cc
+ -&gt;__value_.__cc_
</ValueNode>
</TreeItems>
</Expand>
@@ -216,7 +216,7 @@
</RightPointer>
<ValueNode>
((std::Cr::multimap&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
- -&gt;__value_.__cc
+ -&gt;__value_.__cc_
</ValueNode>
</TreeItems>
</Expand>
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni
index 0e755fb7e..b5156d5c6 100644
--- a/build/config/chrome_build.gni
+++ b/build/config/chrome_build.gni
@@ -8,42 +8,46 @@ declare_args() {
# resources).
is_chrome_branded = false
- # Whether to enable the Chrome for Testing (CfT) branding flavor. This flag
- # is not compatible with `is_chrome_branded`, therefore both of them cannot
- # be simultaneously enabled.
- #
- # Note: CfT is not intended to be used as a test runner to run tests in this
- # repository.
+ # Whether to enable the Chrome for Testing (CfT) flavor. This arg is not
+ # compatible with `is_chrome_branded`.
#
# Design document: https://goo.gle/chrome-for-testing
- is_chrome_for_testing_branded = false
+ is_chrome_for_testing = false
- # Whether to use internal Chrome for Testing (CfT) icons.
- # If set to true, use Google-internal icons, otherwise fall back to Chromium icons.
+ # Whether to use internal Chrome for Testing (CfT).
+ # Requires `src-internal/` and `is_chrome_for_testing = true`.
#
- # This flag can only be set to true for CfT builds.
- use_internal_chrome_for_testing_icons = false
+ # When true, use Google-internal icons, otherwise fall back to Chromium icons.
+ is_chrome_for_testing_branded = false
# Set to true to enable settings for high end Android devices, typically
# enhancing speed at the expense of resources such as binary sizes and memory.
is_high_end_android = false
+
+ if (is_android) {
+ # By default, Trichrome channels are compiled using separate package names.
+ # Set this to 'true' to compile Trichrome channels using the Stable channel's
+ # package name. This currently only affects builds with `android_channel =
+ # "beta"`.
+ use_stable_package_name_for_trichrome = false
+ }
}
assert(
- !is_chrome_for_testing_branded || !is_chrome_branded,
- "Chrome for Testing (`is_chrome_for_testing_branded`) requires Chromium flavor (`is_chrome_branded = False`)")
+ !is_chrome_for_testing || !is_chrome_branded,
+ "`is_chrome_for_testing = true` is incompatible with `is_chrome_branded = true`")
assert(
- is_chrome_for_testing_branded || !use_internal_chrome_for_testing_icons,
- "Use of internal Chrome for Testing icons (`use_internal_chrome_for_testing_icons`) requires CfT flavor (`is_chrome_for_testing_branded = True`)")
+ is_chrome_for_testing || !is_chrome_for_testing_branded,
+ "`is_chrome_for_testing_branded = true` requires `is_chrome_for_testing = true`")
declare_args() {
# Refers to the subdirectory for branding in various places including
# chrome/app/theme.
#
# `branding_path_product` must not contain slashes.
- if (is_chrome_for_testing_branded) {
- if (use_internal_chrome_for_testing_icons) {
+ if (is_chrome_for_testing) {
+ if (is_chrome_for_testing_branded) {
branding_path_component = "google_chrome/google_chrome_for_testing"
} else {
branding_path_component = "chromium"
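(Illustrative sketch of how the renamed flags compose in a local args.gn; the branded variant additionally assumes src-internal/ is checked out, per the comments above:)

  is_chrome_for_testing = true
  is_chrome_for_testing_branded = true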
diff --git a/build/config/chromeos/rules.gni b/build/config/chromeos/rules.gni
index 6fc7921c8..10af886af 100644
--- a/build/config/chromeos/rules.gni
+++ b/build/config/chromeos/rules.gni
@@ -59,8 +59,6 @@ if (cros_sdk_version != "") {
_symlinks = [
# Tast harness & test data.
rebase_path("${_cache_path_prefix}+autotest_server_package.tar.bz2"),
- rebase_path("${_cache_path_prefix}+chromeos-base/tast-cmd"),
- rebase_path("${_cache_path_prefix}+chromeos-base/tast-remote-tests-cros"),
# Binutils (and other toolchain tools) used to deploy Chrome to the device.
rebase_path(
@@ -331,27 +329,23 @@ template("generate_runner_script") {
if (is_tast) {
# Add tast sdk items.
- _sdk_data += [
- _symlink_targets[0],
- _symlink_targets[1],
- _symlink_targets[2],
- ]
+ _sdk_data += [ _symlink_targets[0] ]
}
if (deploy_chrome) {
# To deploy chrome to the VM, it needs to be stripped down to fit into
# the VM. This is done by using binutils in the toolchain. So add the
# toolchain to the data.
_sdk_data += [
- _symlink_targets[3],
- _symlink_targets[4],
+ _symlink_targets[1],
+ _symlink_targets[2],
]
}
if (_cros_is_vm) {
# Add vm sdk items.
_sdk_data += [
+ _symlink_targets[3],
+ _symlink_targets[4],
_symlink_targets[5],
- _symlink_targets[6],
- _symlink_targets[7],
]
}
}
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
index 8fd6760f3..ed39cc68c 100644
--- a/build/config/clang/BUILD.gn
+++ b/build/config/clang/BUILD.gn
@@ -20,6 +20,11 @@ config("find_bad_constructs") {
"-plugin-arg-find-bad-constructs",
"-Xclang",
"raw-ref-template-as-trivial-member",
+
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "check-stack-allocated",
]
if (is_linux || is_chromeos || is_android || is_fuchsia) {
@@ -30,6 +35,27 @@ config("find_bad_constructs") {
"check-ipc",
]
}
+
+ if (enable_check_raw_ptr_fields) {
+ cflags += [
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "check-raw-ptr-fields",
+
+ # TODO(keishi): Remove this once crrev.com/c/4387753 is rolled out.
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "raw-ptr-exclude-path=base/no_destructor.h",
+
+ # TODO(keishi): Remove this once crrev.com/c/4086161 lands.
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "raw-ptr-exclude-path=base/containers/span.h",
+ ]
+ }
}
}
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
index 7196fcf6c..d36d9d1cd 100644
--- a/build/config/clang/clang.gni
+++ b/build/config/clang/clang.gni
@@ -14,5 +14,8 @@ declare_args() {
is_clang && !is_nacl && current_os != "zos" &&
default_toolchain != "//build/toolchain/cros:target"
+ enable_check_raw_ptr_fields =
+ build_with_chromium && !is_official_build && is_linux && !is_castos
+
clang_base_path = default_clang_base_path
}
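(Illustrative sketch; because enable_check_raw_ptr_fields is declared inside declare_args(), it can be flipped in a local args.gn, for example to try the plugin check on a configuration where the default above leaves it off:)

  enable_check_raw_ptr_fields = true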
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
index 944a039d0..d0b55ce29 100644
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -137,6 +137,10 @@ declare_args() {
# needs to be evaluated before enabling it there as well.
init_stack_vars = !(is_android && is_official_build)
+ # Zero init has favorable performance/size tradeoffs for Chrome OS
+ # but was not evaluated for other platforms.
+ init_stack_vars_zero = is_chromeos
+
# This argument is to control whether enabling text section splitting in the
# final binary. When enabled, the separated text sections with prefix
# '.text.hot', '.text.unlikely', '.text.startup' and '.text.exit' will not be
@@ -165,9 +169,18 @@ declare_args() {
# For use by tools/clang/scripts/analyze_includes.py
show_includes = false
+ # Enable Profi algorithm. Profi can infer block and edge counts.
+ # https://clang.llvm.org/docs/UsersManual.html#using-sampling-profilers
+  # TODO(crbug.com/1375958): Possibly enable this for Android too.
+ use_profi = is_chromeos
+
# If true, linker crashes will be rerun with `--reproduce` which causes
# a reproducer file to be saved.
save_reproducers_on_lld_crash = false
+
+ # Allow projects that wish to stay on C++17 to override Chromium's default.
+  # TODO(crbug.com/1402249): evaluate removing this at the end of 2023
+ use_cxx17 = false
}
declare_args() {
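(Illustrative sketch of the override mentioned in the use_cxx17 comment above, as it would appear in a dependent project's args.gn:)

  use_cxx17 = true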
@@ -560,11 +573,6 @@ config("compiler") {
}
}
- # Rust compiler setup (for either clang or rustc).
- if (enable_rust) {
- defines += [ "RUST_ENABLED" ]
- }
-
# C11/C++11 compiler flags setup.
# ---------------------------
if (is_linux || is_chromeos || is_android || (is_nacl && is_clang) ||
@@ -601,8 +609,7 @@ config("compiler") {
cflags_cc += [ "-fno-trigraphs" ]
}
} else if (is_clang) {
- if (is_chromeos_device) {
- # TODO(crbug.com/1392471): Support C++20 in CrOS toolchain.
+ if (use_cxx17) {
cflags_cc += [ "-std=${standard_prefix}++17" ]
} else {
cflags_cc += [ "-std=${standard_prefix}++20" ]
@@ -614,7 +621,7 @@ config("compiler") {
}
} else if (is_win) {
cflags_c += [ "/std:c11" ]
- if (!is_clang && defined(msvc_use_cxx17) && msvc_use_cxx17) {
+ if (use_cxx17 || (!is_clang && defined(msvc_use_cxx17) && msvc_use_cxx17)) {
cflags_cc += [ "/std:c++17" ]
} else {
cflags_cc += [ "/std:c++20" ]
@@ -627,9 +634,7 @@ config("compiler") {
# clause, above.
cflags_c += [ "-std=c11" ]
- if (is_fuchsia) {
- # TODO(crbug.com/fuchsia/108751): The FIDL compiler generates code that
- # will not compile in C++20 mode. Switch to C++20 when this is resolved.
+ if (use_cxx17) {
cflags_cc += [ "-std=c++17" ]
} else {
cflags_cc += [ "-std=c++20" ]
@@ -694,14 +699,10 @@ config("compiler") {
# of "all" which means number of hardware threads) is faster.
ldflags += [ "-Wl,--thinlto-jobs=all" ]
if (is_apple) {
- _object_path_suffix = ""
- if (is_ios) {
- _object_path_suffix = ",persist"
- }
ldflags += [
"-Wl,-cache_path_lto," +
rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
- "-Wcrl,object_path_lto" + _object_path_suffix,
+ "-Wcrl,object_path_lto",
]
} else {
ldflags +=
@@ -715,9 +716,8 @@ config("compiler") {
# ARM was originally set lower than x86 to keep the size
# bloat of ThinLTO to <10%, but that's potentially no longer true.
# FIXME(inglorion): maybe tune these?
- if (target_cpu == "arm" || target_cpu == "arm64") {
- import_instr_limit = 20
- }
+ # TODO(b/271459198): Revert limit on amd64 to 30 when fixed.
+ import_instr_limit = 20
} else if (is_android) {
# TODO(crbug.com/1308318): Investigate if we can get the > 6% perf win
# of import_instr_limit 30 with a binary size hit smaller than ~2 MiB.
@@ -770,6 +770,10 @@ config("compiler") {
ldflags += [ "-Wl,--undefined-version" ]
}
+ if (use_lld && is_apple) {
+ ldflags += [ "-Wl,--strict-auto-link" ]
+ }
+
# LLD does call-graph-sorted binary layout by default when profile data is
# present. On Android this increases binary size due to more thunks for long
# jumps. Turn it off by default and enable selectively for targets where it's
@@ -811,9 +815,8 @@ config("compiler") {
# * Windows is not supported as it doesn't use DWARF.
# * Apple platforms (e.g. MacOS, iPhone, iPad) aren't supported because xcode
# lldb doesn't have the needed changes yet.
- # * Fuchsia isn't supported as zxdb doesn't support simple template names yet.
# TODO(crbug.com/1379070): Remove if the upstream default ever changes.
- if (is_clang && !is_nacl && !is_win && !is_apple && !is_fuchsia) {
+ if (is_clang && !is_nacl && !is_win && !is_apple) {
cflags_cc += [ "-gsimple-template-names" ]
}
@@ -861,9 +864,17 @@ config("compiler") {
# we discover a reason to turn them off.
"-Coverflow-checks=on",
- # Turn warnings into the "deny" lint level, which produce compiler errors.
- # The equivalent of -Werror for clang/gcc.
- "-Dwarnings",
+ # By default Rust passes `-nodefaultlibs` to the linker; however, this
+ # conflicts with our `--unwind=none` flag for Android dylibs, as the latter
+ # is then unused and produces a warning/error. So this removes
+ # `-nodefaultlibs` from the Rust linker invocation, which would be used
+ # to compile dylibs on Android, such as for constructing unit test APKs.
+ "-Cdefault-linker-libraries",
+
+ # Require `unsafe` blocks even in `unsafe` fns. This is intended to become
+ # an error by default eventually; see
+ # https://github.com/rust-lang/rust/issues/71668
+ "-Dunsafe_op_in_unsafe_fn",
# To make Rust .d files compatible with ninja
"-Zdep-info-omit-d-target",
@@ -879,15 +890,8 @@ config("compiler") {
if (rust_abi_target != "") {
rustflags += [ "--target=$rust_abi_target" ]
}
- if (use_lto_in_rustc_linking) {
- rustflags += [ "-Clinker-plugin-lto" ]
- }
- if (!use_thin_lto || !use_chromium_rust_toolchain) {
- # Don't include bitcode if it won't be used, or can't be used. When
- # use_thin_lto is true, we will try to apply LTO to any objects that have
- # the appropriate bitcode. But we have to use Chromium's toolchain in order
- # to use LTO with rust code. Chromium's rustc will have an LLVM backend that
- # matches the C++ clang compiler.
+ if (!use_thin_lto) {
+ # Don't include bitcode if it won't be used.
rustflags += [ "-Cembed-bitcode=no" ]
}
if (is_official_build) {
@@ -895,6 +899,21 @@ config("compiler") {
}
}
+# Defers LTO optimization to the linker. Use this when:
+# * The C++ toolchain does the linking against Rust staticlibs and it
+#   will be using LTO.
+# * The Rust toolchain invokes the linker and Rust and C++ are linked
+#   together, so LTO is deferred to the linker.
+#
+# Otherwise, Rust does LTO during compilation.
+#
+# https://doc.rust-lang.org/rustc/linker-plugin-lto.html
+config("rust_defer_lto_to_linker") {
+ if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+ rustflags = [ "-Clinker-plugin-lto" ]
+ }
+}
+
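As a rough sketch of how this config might be consumed (the target below is invented, and it is assumed that rust_static_library() forwards configs like other binary targets), a Rust library linked into a ThinLTO C++ binary could opt in as follows:

import("//build/rust/rust_static_library.gni")

# Illustrative only.
rust_static_library("my_rust_support") {
  crate_root = "src/lib.rs"
  sources = [ "src/lib.rs" ]
  configs += [ "//build/config/compiler:rust_defer_lto_to_linker" ]
}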
# The BUILDCONFIG file sets this config on targets by default, which means when
# building with ThinLTO, no optimization is performed in the link step.
config("thinlto_optimize_default") {
@@ -906,6 +925,8 @@ config("thinlto_optimize_default") {
} else {
ldflags = [ "-Wl,--lto-O" + lto_opt_level ]
}
+
+ rustflags = [ "-Clto=thin" ]
}
}
@@ -930,6 +951,8 @@ config("thinlto_optimize_max") {
} else {
ldflags = [ "-Wl,--lto-O" + lto_opt_level ]
}
+
+ rustflags = [ "-Clto=thin" ]
}
}
@@ -1269,9 +1292,10 @@ config("compiler_codegen") {
configs += [ "//build/config/nacl:compiler_codegen" ]
}
- if (current_cpu == "arm64" && is_android) {
- # On arm64 disable outlining for Android. See crbug.com/931297 for more
- # information.
+ if (current_cpu == "arm64" && !is_win && is_clang) {
+ # Disable outlining everywhere on arm64 except Win. For more information see
+ # crbug.com/931297 for Android and crbug.com/1410297 for iOS.
+ # TODO(crbug.com/1411363): Enable this on Windows if possible.
cflags += [ "-mno-outline" ]
# This can be removed once https://bugs.llvm.org/show_bug.cgi?id=40348
@@ -1395,16 +1419,13 @@ config("clang_revision") {
}
config("rustc_revision") {
- if (enable_rust && defined(rustc_version)) {
- # Similar to the above config, this is here so that all files get
- # recompiled after a rustc roll. Nothing should ever read this cfg.
- # $rustc_version is a gn arg set within //build/config/rust.gni
- # so that users using a custom Rust toolchain can override it.
- # Its accuracy is checked in //build/rust/std:find_stdlib, which
- # most of our Rust targets depend upon.
+ if (rustc_revision != "") {
+ # Similar to the above config, this is here so that all files get recompiled
+ # after a rustc roll. Nothing should ever read this cfg. This will not be
+ # set if a custom toolchain is used.
rustflags = [
"--cfg",
- "rustc_version=\"$rustc_version\"",
+ "cr_rustc_revision=\"$rustc_revision\"",
]
}
}
@@ -1622,6 +1643,9 @@ config("default_warnings") {
# TODO(crbug.com/1352183) Evaluate and possibly enable.
"-Wno-bitfield-constant-conversion",
+
+ # TODO(crbug.com/1412713) Evaluate and possibly enable.
+ "-Wno-deprecated-this-capture",
]
}
}
@@ -1646,7 +1670,7 @@ config("default_warnings") {
# sizes, allocations, indices, or offsets. In cases where type conversion is not
# possible or is superfluous, use base::strict_cast<> or base::checked_cast<>
# to convert to size_t as needed.
-# See also: https://docs.google.com/document/d/14yKUwDaorqqNfgdGqHY_nck2nn02XBQcB5N0ue4fax8
+# See also: https://docs.google.com/document/d/1CTbQ-5cQjnjU8aCOtLiA7G6P0i5C6HpSDNlSNq6nl5E
#
# To enable in a GN target, use:
# configs += [ "//build/config/compiler:prevent_unsafe_narrowing" ]
@@ -1697,6 +1721,15 @@ config("chromium_code") {
cflags += [ "-Wextra" ]
}
+ if (treat_warnings_as_errors) {
+ # Turn rustc warnings into the "deny" lint level, which produce compiler
+ # errors. The equivalent of -Werror for clang/gcc.
+ #
+ # Note we apply the actual lint flags in config("compiler"). All warnings
+ # are suppressed in third-party crates.
+ rustflags = [ "-Dwarnings" ]
+ }
+
# In Chromium code, we define __STDC_foo_MACROS in order to get the
# C99 macros on Mac and Linux.
defines = [
@@ -1714,14 +1747,14 @@ config("chromium_code") {
defines += [ "_FORTIFY_SOURCE=2" ]
}
- if (is_mac) {
- cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
- cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
+ if (is_apple) {
+ cflags_objc = [ "-Wimplicit-retain-self" ]
+ cflags_objcc = [ "-Wimplicit-retain-self" ]
}
- if (is_ios) {
- cflags_objc = [ "-Wimplicit-retain-self" ]
- cflags_objcc = cflags_objc
+ if (is_mac) {
+ cflags_objc += [ "-Wobjc-missing-property-synthesis" ]
+ cflags_objcc += [ "-Wobjc-missing-property-synthesis" ]
}
}
@@ -1736,8 +1769,8 @@ config("chromium_code") {
]
# TODO(thakis): Enable this more often, https://crbug.com/346399
- # use_libfuzzer: https://crbug.com/1063180
- if ((!is_nacl || is_nacl_saigo) && !use_libfuzzer) {
+ # use_fuzzing_engine_with_lpm: https://crbug.com/1063180
+ if ((!is_nacl || is_nacl_saigo) && !use_fuzzing_engine_with_lpm) {
cflags += [ "-Wunreachable-code-aggressive" ]
}
@@ -2052,7 +2085,7 @@ if (is_win) {
}
config("default_stack_frames") {
- if (is_posix || is_fuchsia) {
+ if (!is_win) {
if (enable_frame_pointers) {
cflags = [ "-fno-omit-frame-pointer" ]
@@ -2278,10 +2311,12 @@ if (is_clang && is_a_target_toolchain) {
if (is_android || is_castos) {
_clang_sample_profile = "//chrome/android/profiles/afdo.prof"
} else {
- assert(chromeos_afdo_platform == "atom" ||
- chromeos_afdo_platform == "bigcore" ||
- chromeos_afdo_platform == "arm",
- "Only atom, bigcore and arm are valid Chrome OS profiles.")
+ assert(
+ chromeos_afdo_platform == "atom" ||
+ chromeos_afdo_platform == "bigcore" ||
+ chromeos_afdo_platform == "arm" ||
+ chromeos_afdo_platform == "arm-exp",
+ "Only 'atom', 'bigcore', 'arm' and 'arm-exp' are valid ChromeOS profiles.")
_clang_sample_profile =
"//chromeos/profiles/${chromeos_afdo_platform}.afdo.prof"
}
@@ -2316,6 +2351,9 @@ config("afdo") {
rebased_clang_sample_profile =
rebase_path(_clang_sample_profile, root_build_dir)
cflags += [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ]
+ if (use_profi) {
+ cflags += [ "-fsample-profile-use-profi" ]
+ }
inputs = [ _clang_sample_profile ]
}
} else if (auto_profile_path != "" && is_a_target_toolchain) {
@@ -2405,11 +2443,12 @@ config("symbols") {
# The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
# elsewhere in this file), so they can't have build-dir-independent output.
+ # Moreover, pnacl does not support newer flags such as -fdebug-prefix-map.
# Disable symbols for nacl object files to get deterministic,
- # build-directory-independent output. pnacl and nacl-clang do support that
- # flag, so we can use use -g1 for pnacl and nacl-clang compiles.
- # gcc nacl is is_nacl && !is_clang, pnacl and nacl-clang are && is_clang.
- if ((!is_nacl || is_clang) && current_os != "zos") {
+ # build-directory-independent output.
+ # Keeping -g2 for saigo as it's the only toolchain whose artifacts are
+ # part of the Chromium release (other nacl toolchains are used only for tests).
+ if ((!is_nacl || is_nacl_saigo) && current_os != "zos") {
cflags += [ "-g2" ]
}
@@ -2536,11 +2575,12 @@ config("minimal_symbols") {
# The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
# elsewhere in this file), so they can't have build-dir-independent output.
+ # Moreover, pnacl does not support newer flags such as -fdebug-prefix-map.
# Disable symbols for nacl object files to get deterministic,
- # build-directory-independent output. pnacl and nacl-clang do support that
- # flag, so we can use use -g1 for pnacl and nacl-clang compiles.
- # gcc nacl is is_nacl && !is_clang, pnacl and nacl-clang are && is_clang.
- if (!is_nacl || is_clang) {
+ # build-directory-independent output.
+ # Keeping -g1 for saigo as it's the only toolchain whose artifacts are
+ # part of the Chromium release (other nacl toolchains are used only for tests).
+ if (!is_nacl || is_nacl_saigo) {
cflags += [ "-g1" ]
}
@@ -2630,9 +2670,12 @@ if (is_chromeos_ash && is_chromeos_device) {
if (is_android || (is_chromeos_ash && is_chromeos_device)) {
# Use orderfile for linking Chrome on Android and Chrome OS.
# This config enables using an orderfile for linking in LLD.
- # TODO: Consider using call graph sort instead, at least on Android.
config("chrome_orderfile_config") {
- if (chrome_orderfile_path != "" && !enable_call_graph_profile_sort) {
+ # Don't try to use an orderfile with call graph sorting, except on Android,
+ # where we care about memory used by code, so we still want to mandate
+ # ordering.
+ if (chrome_orderfile_path != "" &&
+ (is_android || !enable_call_graph_profile_sort)) {
assert(use_lld)
_rebased_orderfile = rebase_path(chrome_orderfile_path, root_build_dir)
ldflags = [
@@ -2649,12 +2692,7 @@ if (is_android || (is_chromeos_ash && is_chromeos_device)) {
config("default_init_stack_vars") {
cflags = []
if (init_stack_vars && is_clang && !is_nacl && !using_sanitizer) {
- if (is_chromeos && !chromeos_is_browser_only) {
- # TODO(adriandole) remove chromeos_is_browser_only condition
- # once lacros updates toolchain
-
- # Zero init has favorable performance/size tradeoffs for Chrome OS
- # but was not evaluated for other platforms.
+ if (init_stack_vars_zero) {
cflags += [ "-ftrivial-auto-var-init=zero" ]
} else {
cflags += [ "-ftrivial-auto-var-init=pattern" ]
diff --git a/build/config/compiler/compiler.gni b/build/config/compiler/compiler.gni
index e0d44eed7..4738ee80d 100644
--- a/build/config/compiler/compiler.gni
+++ b/build/config/compiler/compiler.gni
@@ -8,6 +8,7 @@ import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/args.gni")
import("//build/config/chromeos/ui_mode.gni")
import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/cronet/config.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/goma.gni")
@@ -155,6 +156,12 @@ declare_args() {
use_custom_libcxx && !is_component_build
}
+# To try out this combination, delete this assert.
+assert(
+ !use_relative_vtables_abi || !is_cfi,
+ "is_cfi=true is known to conflict with use_relative_vtables_abi=true.\n" +
+ "See https://bugs.chromium.org/p/chromium/issues/detail?id=1375035#c53")
+
assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO")
assert(!enable_profiling || !is_component_build,
"Cannot profile component builds (crbug.com/1199271).")
@@ -166,18 +173,18 @@ if (use_thin_lto && is_debug) {
# Determine whether to enable or disable frame pointers, based on the platform
# and build arguments.
-# TODO(crbug.com/1052397): Consider changing is_chromeos_ash to is_chromeos after
-# lacros-chrome switches to target_os="chromeos".
if (is_chromeos) {
# ChromeOS generally prefers frame pointers, to support CWP.
# However, Clang does not currently generate usable frame pointers in ARM
# 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them
# there to avoid the unnecessary overhead.
enable_frame_pointers = current_cpu != "arm"
-} else if (is_apple || is_linux || is_chromeos) {
+} else if (is_apple || is_linux) {
enable_frame_pointers = true
} else if (is_win) {
# 64-bit Windows ABI doesn't support frame pointers.
+ # NOTE: This setting is actually not used in the BUILD.gn for Windows,
+ # but it still reflects correctly that we don't emit frame pointers on x64.
if (current_cpu == "x64") {
enable_frame_pointers = false
} else {
diff --git a/build/config/compiler/pgo/BUILD.gn b/build/config/compiler/pgo/BUILD.gn
index 1ebe80c4e..1a4548e2e 100644
--- a/build/config/compiler/pgo/BUILD.gn
+++ b/build/config/compiler/pgo/BUILD.gn
@@ -51,12 +51,24 @@ config("pgo_optimization_flags") {
} else {
_pgo_target = "mac"
}
- } else if (is_linux || is_chromeos_lacros) {
+ } else if (is_linux) {
_pgo_target = "linux"
+ } else if (is_chromeos_lacros) {
+ if (target_cpu == "arm") {
+ _pgo_target = "lacros-arm"
+ } else {
+ _pgo_target = "lacros64"
+ }
} else if (is_android) {
# Temporarily use mac-arm profile until Android native PGO support works.
# TODO(crbug.com/1308749): fix this.
_pgo_target = "mac-arm"
+ } else if (is_fuchsia) {
+ if (target_cpu == "arm64") {
+ _pgo_target = "mac-arm"
+ } else {
+ _pgo_target = "mac"
+ }
}
if (_pgo_target == "win64") {
@@ -66,9 +78,13 @@ config("pgo_optimization_flags") {
} else if (_pgo_target == "mac-arm") {
inputs = [ "//chrome/build/mac-arm.pgo.txt" ]
} else if (_pgo_target == "mac") {
- inputs = [ "//chrome/build/mac-arm.pgo.txt" ]
+ inputs = [ "//chrome/build/mac.pgo.txt" ]
} else if (_pgo_target == "linux") {
inputs = [ "//chrome/build/linux.pgo.txt" ]
+ } else if (_pgo_target == "lacros64") {
+ inputs = [ "//chrome/build/lacros64.pgo.txt" ]
+ } else if (_pgo_target == "lacros-arm") {
+ inputs = [ "//chrome/build/lacros-arm.pgo.txt" ]
}
if (_pgo_target != "" && pgo_data_path == "") {
diff --git a/build/config/compiler/pgo/pgo.gni b/build/config/compiler/pgo/pgo.gni
index eac98c646..9e9a0c524 100644
--- a/build/config/compiler/pgo/pgo.gni
+++ b/build/config/compiler/pgo/pgo.gni
@@ -24,7 +24,7 @@ declare_args() {
# TODO(crbug.com/1336055): Update this now-outdated condition with regard
# to chromecast and determine whether chromeos_is_browser_only is
# obsolete.
- (is_high_end_android || is_win || is_mac ||
+ (is_high_end_android || is_win || is_mac || is_fuchsia ||
(is_linux && !is_castos && !chromeos_is_browser_only))) {
chrome_pgo_phase = 2
}
diff --git a/build/config/cronet/config.gni b/build/config/cronet/config.gni
new file mode 100644
index 000000000..1468ec17a
--- /dev/null
+++ b/build/config/cronet/config.gni
@@ -0,0 +1,10 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Control whether cronet is built (this is usually set by the script
+ # components/cronet/tools/cr_cronet.py as cronet requires specific
+ # gn args to build correctly).
+ is_cronet_build = false
+}
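Moving is_cronet_build into its own .gni file lets any build file import the flag without pulling in platform-specific arguments. A hypothetical consumer (target and define invented for illustration) might look like:

import("//build/config/cronet/config.gni")

# Illustrative only.
source_set("transport_glue") {
  sources = [ "transport_glue.cc" ]
  if (is_cronet_build) {
    defines = [ "TRANSPORT_GLUE_FOR_CRONET" ]
  }
}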
diff --git a/build/config/dcheck_always_on.gni b/build/config/dcheck_always_on.gni
index 5a214f5e5..cca3a547c 100644
--- a/build/config/dcheck_always_on.gni
+++ b/build/config/dcheck_always_on.gni
@@ -26,8 +26,11 @@ declare_args() {
}
declare_args() {
- # Set to false to disable EXPENSIVE_DCHECK()s.
- enable_expensive_dchecks = is_debug || dcheck_always_on
+ # Set to false to disable EXPENSIVE_DCHECK()s or to true to enable them in
+ # official builds. These are generally used for really useful DCHECKs that are
+ # too expensive to be enabled in user-facing official+DCHECK builds.
+ enable_expensive_dchecks =
+ is_debug || (dcheck_always_on && !is_official_build)
}
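With this change, setting dcheck_always_on on an official build no longer turns on EXPENSIVE_DCHECK()s; they now have to be requested explicitly. A purely illustrative args.gn opting back in:

# args.gn - illustrative only.
is_official_build = true
dcheck_always_on = true
enable_expensive_dchecks = true  # opt back in despite is_official_build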
assert(!dcheck_is_configurable || (dcheck_always_on || is_debug),
diff --git a/build/config/features.gni b/build/config/features.gni
index 4cc9cb5e9..852ac56a8 100644
--- a/build/config/features.gni
+++ b/build/config/features.gni
@@ -36,6 +36,8 @@ declare_args() {
use_dbus = is_linux || is_chromeos
use_gio = is_linux && !is_castos
+
+ use_blink = !is_ios
}
#
# =============================================
diff --git a/build/config/fuchsia/BUILD.gn b/build/config/fuchsia/BUILD.gn
index 1becaa43b..bbcd70886 100644
--- a/build/config/fuchsia/BUILD.gn
+++ b/build/config/fuchsia/BUILD.gn
@@ -8,7 +8,7 @@ import("//build/config/fuchsia/generate_runner_scripts.gni")
import("//third_party/fuchsia-sdk/sdk/build/config/config.gni")
assert(is_fuchsia)
-assert(!is_posix)
+assert(!is_posix, "Fuchsia is not POSIX.")
config("compiler") {
configs = [ "//third_party/fuchsia-sdk/sdk/build/config:compiler" ]
@@ -30,7 +30,6 @@ group("deployment_resources") {
"//build/fuchsia/",
"//build/util/lib/",
"//third_party/fuchsia-sdk/sdk/.build-id/",
- "//third_party/fuchsia-sdk/sdk/bin/fpave.sh",
"//third_party/fuchsia-sdk/sdk/bin/fuchsia-common.sh",
"//third_party/fuchsia-sdk/sdk/meta/manifest.json",
"//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx",
diff --git a/build/config/fuchsia/build_cmx_from_fragment.py b/build/config/fuchsia/build_cmx_from_fragment.py
deleted file mode 100644
index 190a260f1..000000000
--- a/build/config/fuchsia/build_cmx_from_fragment.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2020 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Creates a complete CMX (v1) component manifest, from a program name and
- manifest fragment file."""
-
-import argparse
-import json
-import sys
-
-
-def BuildCmxFromFragment(output_file, fragment_file, program_binary):
- """Reads a CMX fragment specifying e.g. features & sandbox, and a program
- binary's filename, and writes out the full CMX.
-
- output_file: Build-relative filename at which to write the full CMX.
- fragment_file: Build-relative filename of the CMX fragment to read from.
- program_binary: Package-relative filename of the program binary.
- """
-
- with open(output_file, 'w') as component_manifest_file:
- component_manifest = json.load(open(fragment_file, 'r'))
- component_manifest.update({
- 'program': {
- 'binary': program_binary
- },
- })
- json.dump(component_manifest, component_manifest_file)
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--cmx-fragment',
- required=True,
- help='Path to the CMX fragment to read from')
- parser.add_argument(
- '--cmx', required=True, help='Path to write the complete CMX file to')
- parser.add_argument(
- '--program',
- required=True,
- help='Package-relative path to the program binary')
- args = parser.parse_args()
-
- return BuildCmxFromFragment(args.cmx, args.cmx_fragment, args.program)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/config/fuchsia/build_symbol_archive.py b/build/config/fuchsia/build_symbol_archive.py
index 3dcac945f..a595ed8a7 100755
--- a/build/config/fuchsia/build_symbol_archive.py
+++ b/build/config/fuchsia/build_symbol_archive.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
diff --git a/build/config/fuchsia/generate_runner_scripts.gni b/build/config/fuchsia/generate_runner_scripts.gni
index 66974f5d2..e7453092b 100644
--- a/build/config/fuchsia/generate_runner_scripts.gni
+++ b/build/config/fuchsia/generate_runner_scripts.gni
@@ -2,8 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-assert(is_fuchsia)
-
import("//build/config/chromecast_build.gni")
import("//build/config/fuchsia/config.gni")
import("//build/config/fuchsia/fuchsia_package_metadata.gni")
@@ -11,6 +9,8 @@ import("//build/config/gclient_args.gni")
import("//build/config/sysroot.gni")
import("//build/util/generate_wrapper.gni")
+assert(is_fuchsia)
+
declare_args() {
# Sets the Fuchsia Amber repository which will be used by default by the
# generated installation scripts. If not specified, then no default directory
@@ -34,12 +34,6 @@ declare_args() {
# This variable controls the browser included in the Telemetry based test
# targets.
fuchsia_browser_type = "web_engine_shell"
-
- # This variable controls which default version of the testing scripts to use.
- # Individual tests can override this via setting |legacy_script_required| to
- # true.
- # TODO(crbug/1280705): remove when all tests are migrated to CFv2.
- use_v2_script_default = true
}
# Generates a wrapper script under root_build_dir/bin that performs an
@@ -71,7 +65,6 @@ declare_args() {
# include_fuchsia_build_dir: If true, adds
# |default_fuchsia_build_dir_for_installation|
# to executable_args (when set in GN args).
-# use_v2_script: Whether to use the the ssh based v1 or ffx based v2 scripts.
template("fuchsia_run_script_with_packages") {
if (defined(invoker.package_name)) {
_pkg_shortname = invoker.package_name
@@ -121,25 +114,11 @@ template("fuchsia_run_script_with_packages") {
}
# Compute the list of full paths to package files, including dependencies.
- if (!invoker.use_v2_script) {
- _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") +
- "/" + _pkg_shortname
- package_paths =
- [ rebase_path("$_pkg_dir/${_pkg_shortname}.far", root_build_dir) ]
- }
if (defined(invoker.package_deps)) {
foreach(package_dep, invoker.package_deps) {
package_dep_target = package_dep[0]
deps += [ package_dep_target ]
data_deps += [ package_dep_target ]
- if (!invoker.use_v2_script) {
- package_dep_name = package_dep[1]
- package_dep_path = rebase_path(
- get_label_info(package_dep_target, "target_gen_dir") + "/" +
- package_dep_name + "/" + package_dep_name + ".far",
- root_build_dir)
- package_paths += [ package_dep_path ]
- }
}
}
@@ -148,17 +127,6 @@ template("fuchsia_run_script_with_packages") {
executable_args = []
}
- if (!invoker.use_v2_script) {
- foreach(package_path, package_paths) {
- executable_args += [
- "--package",
- "@WrappedPath(${package_path})",
- "--package-name",
- _pkg_shortname,
- ]
- }
- }
-
if (defined(include_fuchsia_build_dir) && include_fuchsia_build_dir &&
default_fuchsia_build_dir_for_installation != "") {
executable_args += [
@@ -188,11 +156,6 @@ template("fuchsia_package_installer") {
} else {
pkg_shortname = get_label_info(invoker.package, "name")
}
- if (!defined(invoker.use_v2_script)) {
- use_v2_script = use_v2_script_default
- } else {
- use_v2_script = invoker.use_v2_script
- }
fuchsia_package_metadata(pkg_shortname) {
forward_variables_from(invoker,
TESTONLY_AND_VISIBILITY + [
@@ -205,16 +168,12 @@ template("fuchsia_package_installer") {
"*",
TESTONLY_AND_VISIBILITY + [ "executable_args" ])
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
- if (use_v2_script) {
- executable = rebase_path("//build/fuchsia/test/deploy_to_fuchsia.py")
- executable_args = [
- "--out-dir",
- "@WrappedPath(.)",
- pkg_shortname,
- ]
- } else {
- executable = rebase_path("//build/fuchsia/deploy_to_pkg_repo.py")
- }
+ executable = rebase_path("//build/fuchsia/test/deploy_to_fuchsia.py")
+ executable_args = [
+ "--out-dir",
+ "@WrappedPath(.)",
+ pkg_shortname,
+ ]
output_name_format = "deploy_%package%"
include_fuchsia_build_dir = true
}
@@ -226,12 +185,6 @@ template("fuchsia_test_runner") {
_run_target = "${target_name}__runner"
_install_target = "${target_name}__installer"
- if (!defined(invoker.use_v2_script)) {
- use_v2_script = use_v2_script_default
- } else {
- use_v2_script = invoker.use_v2_script
- }
-
fuchsia_run_script_with_packages(_run_target) {
forward_variables_from(invoker,
TESTONLY_AND_VISIBILITY + [
@@ -242,11 +195,7 @@ template("fuchsia_test_runner") {
"package_deps",
])
- if (use_v2_script) {
- _test_runner_py = "//build/fuchsia/test/run_test.py"
- } else {
- _test_runner_py = "//build/fuchsia/test_runner.py"
- }
+ _test_runner_py = "//build/fuchsia/test/run_test.py"
executable = rebase_path(_test_runner_py)
@@ -262,31 +211,17 @@ template("fuchsia_test_runner") {
"@WrappedPath(.)",
]
- if (use_v2_script) {
- executable_args += [ package_name ]
- } else {
- executable_args += [
- "--target-cpu",
- target_cpu,
- ]
- }
+ executable_args += [ package_name ]
if (defined(invoker.use_test_server) && invoker.use_test_server) {
executable_args += [ "--enable-test-server" ]
}
if (default_fuchsia_device_node_name != "") {
- if (use_v2_script) {
- executable_args += [
- "--target-id",
- default_fuchsia_device_node_name,
- ]
- } else {
- executable_args += [
- "--node-name",
- default_fuchsia_device_node_name,
- ]
- }
+ executable_args += [
+ "--target-id",
+ default_fuchsia_device_node_name,
+ ]
}
# Declare the files that are needed for test execution on LUCI swarming
@@ -294,11 +229,10 @@ template("fuchsia_test_runner") {
if (!defined(data)) {
data = []
}
- data += [ _test_runner_py ]
-
- if (use_v2_script) {
- data += [ "$root_gen_dir/package_metadata/${invoker.package_name}.meta" ]
- }
+ data += [
+ _test_runner_py,
+ "$root_gen_dir/package_metadata/${invoker.package_name}.meta",
+ ]
# TODO(crbug.com/1256870): Remove this once all out-of-tree references
# to "package_name_override" are migrated to "package_name".
diff --git a/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni b/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni
index a08ed454a..f179a66d7 100644
--- a/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni
+++ b/build/config/fuchsia/packaged_content_embedder_excluded_dirs.gni
@@ -4,6 +4,8 @@
import("//build/config/devtools.gni")
+assert(is_fuchsia)
+
# List of transitively included directories that should be stripped from
# released packages for size reasons. For use with the |excluded_dirs| variable
# of fuchsia_package().
diff --git a/build/config/fuchsia/size_optimized_cast_receiver_args.gn b/build/config/fuchsia/size_optimized_cast_receiver_args.gn
index b65d05d22..9a366c7bf 100644
--- a/build/config/fuchsia/size_optimized_cast_receiver_args.gn
+++ b/build/config/fuchsia/size_optimized_cast_receiver_args.gn
@@ -20,7 +20,9 @@
enable_printing = false
enable_cast_receiver = true
+cast_streaming_enable_remoting = true
enable_dav1d_decoder = false
+enable_v8_compile_hints = false
# //chrome makes many assumptions that Extensions are enabled.
# TODO(crbug.com/1363742): Fix these assumptions or avoid building it.
diff --git a/build/config/fuchsia/sizes.gni b/build/config/fuchsia/sizes.gni
index 0e9f3f043..fc9767622 100644
--- a/build/config/fuchsia/sizes.gni
+++ b/build/config/fuchsia/sizes.gni
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+assert(is_fuchsia)
+
import("//build/util/generate_wrapper.gni")
template("compute_fuchsia_package_sizes") {
@@ -28,11 +30,14 @@ template("compute_fuchsia_package_sizes") {
# Declares the files that are needed for test execution on the
# swarming test client.
+ # TODO(crbug.com/1347172): Remove arm64 once the execution of fuchsia_sizes
+ # has been migrated to x64 machines.
data += [
"//build/fuchsia/",
"//tools/fuchsia/size_tests/",
"//third_party/fuchsia-sdk/sdk/arch/",
- "//third_party/fuchsia-sdk/sdk/tools/${target_cpu}/",
+ "//third_party/fuchsia-sdk/sdk/tools/arm64/",
+ "//third_party/fuchsia-sdk/sdk/tools/x64/",
]
executable_args = [
diff --git a/build/config/fuchsia/test/README.md b/build/config/fuchsia/test/README.md
index f2084d9c6..d21cdb79a 100644
--- a/build/config/fuchsia/test/README.md
+++ b/build/config/fuchsia/test/README.md
@@ -13,8 +13,11 @@ Runs an `archivist-without-attribution` with custom protocol routing for tests
that want to intercept events written to a `LogSink` by a component.
#### chromium_test_facet.shard.test-cml
-Runs CFv2 tests in the "chromium" test realm. This is generally required for all
-Chromium tests that must interact with true system services.
+Runs tests in the `chromium` test realm, which is mostly hermetic but has access
+to specific system services that cannot (currently) be faked. For more
+information, see https://fxbug.dev/91934. This is generally required for all
+Chromium tests not using the
+[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml).
#### fonts.shard.test-cml
For tests that test fonts by providing `fuchsia.fonts.Provider`. This shard
@@ -29,16 +32,41 @@ Required by tests that execute JavaScript. Should only be required in a small
number of tests.
#### minimum.shard.test-cml
-Capabilities required by anything that uses `//base/test`, used as the base
-fragment for all test suites.
+Capabilities required by anything that uses `//base/test` when running in the
+(default) `chromium` test realm. It is the default base fragment for most
+`test()` Components.
-`config-data` is included in the features list so that the platform can offer
-ICU timezone data to these tests when they are being run. A more general
-approach is discussed in https://fxbug.dev/85845.
+The system-wide `config-data` directory capability is routed to tests running in
+the realm so that individual tests may route subdirectories as needed.
+TODO(crbug.com/1360077): Remove this after migrating to the new mechanism.
#### logger.shard.test-cml
For tests that test logging functionality by providing `fuchsia.logger.Log`.
+#### sysmem.shard.test-cml
+For tests that depend on the sysmem service (e.g. to allocate image buffers to
+share with Vulkan and Scenic).
+
+#### system_test_minimum.shard.test-cml
+Capabilities required by anything that uses `//base/test` when running as a
+system test in the `chromium-system` test realm. It is the base fragment for
+`test()` Components that use the
+[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml).
+
+Most tests use the [`minimum`](#minimumshardtest-cml) shard.
+
+#### chromium_system_test_facet.shard.test-cml
+Runs tests in the `chromium-system` test realm. This is required for Chromium
+tests that are intended to run against the actual system and its real system
+services. This is needed, for example, by performance tests intended to
+measure system performance. Another overlapping use case is tests that need to
+be run in environments without access to the packages containing fake
+implementations of required protocols that other tests use.
+(https://crbug.com/1408597 should make that use case obsolete.)
+
+Most tests should use the
+[`chromium_test_facet`](#chromium_test_facetshardtest-cml).
+
#### test_ui_stack.shard.test-cml
For tests that need an isolated UI subsystem that supports the Flatland
API set. This allows tests to e.g. run with view-focus unaffected by any
@@ -82,6 +110,3 @@ Most services are required per the FIDL documentation.
Contains services that need to be present when creating a `fuchsia.web.Context`.
Note that the `fuchsia.scheduler.ProfileProvider` service is only used in tests
that encounter memory pressure code.
-
-#### web_instance_host.shard.test-cml
-Contains services that need to be present to use `WebInstanceHost`.
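As a sketch of how these shards are pulled in (the target name is invented, and additional_manifest_fragments is assumed to be the test() parameter through which shards are referenced), a system test might declare:

import("//testing/test.gni")

# Illustrative only.
test("example_system_perftests") {
  sources = [ "example_system_perftests.cc" ]
  additional_manifest_fragments = [
    "//build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml",
    "//build/config/fuchsia/test/sysmem.shard.test-cml",
  ]
}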
diff --git a/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml b/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml
new file mode 100644
index 000000000..cdf9ca7a0
--- /dev/null
+++ b/build/config/fuchsia/test/chromium_system_test_facet.shard.test-cml
@@ -0,0 +1,8 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+{
+ facets: {
+ "fuchsia.test": { type: "chromium-system" },
+ },
+}
diff --git a/build/config/fuchsia/test/context_provider.shard.test-cml b/build/config/fuchsia/test/context_provider.shard.test-cml
index ddd96e2ad..e5db2f1f6 100644
--- a/build/config/fuchsia/test/context_provider.shard.test-cml
+++ b/build/config/fuchsia/test/context_provider.shard.test-cml
@@ -22,8 +22,6 @@
protocol: [
"fuchsia.feedback.ComponentDataRegister",
"fuchsia.feedback.CrashReportingProductRegister",
- "fuchsia.sys.Environment",
- "fuchsia.sys.Loader",
],
from: "parent",
to: "#context_provider",
diff --git a/build/config/fuchsia/test/fonts.shard.test-cml b/build/config/fuchsia/test/fonts.shard.test-cml
index 0ae7800fb..80fb0cae1 100644
--- a/build/config/fuchsia/test/fonts.shard.test-cml
+++ b/build/config/fuchsia/test/fonts.shard.test-cml
@@ -8,28 +8,31 @@
url: "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cm",
},
],
+ use: [
+ {
+ protocol: "fuchsia.fonts.Provider",
+ from: "#isolated_font_provider",
+ },
+ ],
offer: [
{
directory: "config-data",
- subdir: "fonts",
from: "parent",
to: "#isolated_font_provider",
+ subdir: "fonts",
},
{
- protocol: "fuchsia.logger.LogSink",
+ protocol: [
+ "fuchsia.logger.LogSink",
+ "fuchsia.tracing.provider.Registry",
+ ],
from: "parent",
to: "#isolated_font_provider",
},
],
- use: [
- {
- protocol: "fuchsia.fonts.Provider",
- from: "#isolated_font_provider",
- },
- ],
facets: {
"fuchsia.test": {
- "deprecated-allowed-packages": [ "fonts" ],
+ "deprecated-allowed-packages": [ "fonts" ],
},
},
}
diff --git a/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml b/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml
index 5b0d712bb..2e51f033f 100644
--- a/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml
+++ b/build/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml
@@ -3,9 +3,12 @@
// found in the LICENSE file.
// Used in tests which are hard-coded for the Scenic/GFX API-set.
-// Use test_ui_stack.shard.test-cml when tetsing for Flatland, or when the
+// Use test_ui_stack.shard.test-cml when testing for Flatland, or when the
// choice of API-set is not important.
{
+ include: [
+ "//build/config/fuchsia/test/sysmem.shard.test-cml",
+ ],
children: [
{
name: "test_ui_stack",
diff --git a/build/config/fuchsia/test/minimum.shard.test-cml b/build/config/fuchsia/test/minimum.shard.test-cml
index 3f3784ab3..17b49278a 100644
--- a/build/config/fuchsia/test/minimum.shard.test-cml
+++ b/build/config/fuchsia/test/minimum.shard.test-cml
@@ -9,7 +9,7 @@
children: [
{
name: "build-info-service",
- url: "fuchsia-pkg://fuchsia.com/build-info-service#meta/build-info.cm",
+ url: "fuchsia-pkg://fuchsia.com/fake-build-info#meta/fake_build_info.cm",
},
{
name: "intl_property_manager",
@@ -18,14 +18,9 @@
],
offer: [
{
- directory: "build-info",
- from: "parent",
- to: "#build-info-service",
- },
- {
protocol: "fuchsia.logger.LogSink",
from: "parent",
- to: [ "#build-info-service", "#intl_property_manager" ],
+ to: [ "#intl_property_manager" ],
}
],
use: [
@@ -40,7 +35,6 @@
},
{
storage: "custom_artifacts",
- rights: [ "rw*" ],
path: "/custom_artifacts",
},
{
@@ -64,14 +58,19 @@
"fuchsia.hwinfo.Product",
"fuchsia.media.ProfileProvider",
"fuchsia.process.Launcher",
- "fuchsia.sys.Loader",
],
},
+ {
+ protocol: [
+ "fuchsia.tracing.perfetto.ProducerConnector",
+ ],
+ availability: "optional",
+ },
],
facets: {
"fuchsia.test": {
"deprecated-allowed-packages": [
- "build-info-service",
+ "fake-build-info",
"intl_property_manager",
],
},
diff --git a/build/config/fuchsia/test/platform_video_codecs.shard.test-cml b/build/config/fuchsia/test/platform_video_codecs.shard.test-cml
index 4820ecf89..13b5a1b79 100644
--- a/build/config/fuchsia/test/platform_video_codecs.shard.test-cml
+++ b/build/config/fuchsia/test/platform_video_codecs.shard.test-cml
@@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
{
+ include: [
+ "//build/config/fuchsia/test/sysmem.shard.test-cml",
+ ],
children: [
{
// Run an isolated instance of codec_factory so that tests can run on
diff --git a/build/config/fuchsia/test/cfv1_launcher.shard.test-cml b/build/config/fuchsia/test/sysmem.shard.test-cml
index 19447300a..8bebd998b 100644
--- a/build/config/fuchsia/test/cfv1_launcher.shard.test-cml
+++ b/build/config/fuchsia/test/sysmem.shard.test-cml
@@ -4,7 +4,7 @@
{
use: [
{
- protocol: "fuchsia.sys.Launcher",
+ protocol: "fuchsia.sysmem.Allocator",
},
],
}
diff --git a/build/config/fuchsia/test/system_test_minimum.shard.test-cml b/build/config/fuchsia/test/system_test_minimum.shard.test-cml
new file mode 100644
index 000000000..6efde20f7
--- /dev/null
+++ b/build/config/fuchsia/test/system_test_minimum.shard.test-cml
@@ -0,0 +1,46 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+{
+ include: [
+ "syslog/client.shard.cml",
+ ],
+ use: [
+ {
+ directory: "config-data",
+ rights: [ "r*" ],
+ path: "/config/data",
+ },
+ {
+ storage: "cache",
+ path: "/cache",
+ },
+ {
+ storage: "custom_artifacts",
+ path: "/custom_artifacts",
+ },
+ {
+ storage: "data",
+ path: "/data",
+ },
+ {
+ storage: "tmp",
+ path: "/tmp",
+ },
+ {
+ protocol: [
+ "fuchsia.buildinfo.Provider",
+ "fuchsia.hwinfo.Product",
+ "fuchsia.intl.PropertyProvider",
+ "fuchsia.media.ProfileProvider",
+ "fuchsia.process.Launcher",
+ ],
+ },
+ {
+ protocol: [
+ "fuchsia.tracing.perfetto.ProducerConnector",
+ ],
+ availability: "optional",
+ },
+ ],
+}
diff --git a/build/config/fuchsia/test/test_ui_stack.shard.test-cml b/build/config/fuchsia/test/test_ui_stack.shard.test-cml
index 33f59f792..102867cf1 100644
--- a/build/config/fuchsia/test/test_ui_stack.shard.test-cml
+++ b/build/config/fuchsia/test/test_ui_stack.shard.test-cml
@@ -2,25 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
{
+ include: [ "//build/config/fuchsia/test/sysmem.shard.test-cml" ],
children: [
{
name: "test_ui_stack",
url: "fuchsia-pkg://fuchsia.com/flatland-scene-manager-test-ui-stack#meta/test-ui-stack.cm",
},
],
- offer: [
- {
- protocol: [
- "fuchsia.logger.LogSink",
- "fuchsia.scheduler.ProfileProvider",
- "fuchsia.sysmem.Allocator",
- "fuchsia.tracing.provider.Registry",
- "fuchsia.vulkan.loader.Loader",
- ],
- from: "parent",
- to: "#test_ui_stack",
- },
- ],
use: [
{
protocol: [
@@ -34,9 +22,27 @@
from: "#test_ui_stack",
},
],
+ offer: [
+ {
+ storage: "tmp",
+ from: "parent",
+ to: "#test_ui_stack",
+ },
+ {
+ protocol: [
+ "fuchsia.logger.LogSink",
+ "fuchsia.scheduler.ProfileProvider",
+ "fuchsia.sysmem.Allocator",
+ "fuchsia.tracing.provider.Registry",
+ "fuchsia.vulkan.loader.Loader",
+ ],
+ from: "parent",
+ to: "#test_ui_stack",
+ },
+ ],
facets: {
"fuchsia.test": {
- "deprecated-allowed-packages": [ "flatland-scene-manager-test-ui-stack" ],
+ "deprecated-allowed-packages": [ "flatland-scene-manager-test-ui-stack" ],
},
},
}
diff --git a/build/config/gclient_args.gni b/build/config/gclient_args.gni
index 5ce6741bd..7bb05b5a8 100644
--- a/build/config/gclient_args.gni
+++ b/build/config/gclient_args.gni
@@ -3,7 +3,6 @@ build_with_chromium = true
checkout_android = true
checkout_android_prebuilts_build_tools = false
checkout_android_native_support = true
-checkout_google_benchmark = false
checkout_ios_webkit = false
checkout_nacl = true
checkout_openxr = false
diff --git a/build/config/get_host_byteorder.py b/build/config/get_host_byteorder.py
index 20834a35a..7cc0cdff8 100755
--- a/build/config/get_host_byteorder.py
+++ b/build/config/get_host_byteorder.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Get Byteorder of host architecture"""
-from __future__ import print_function
import sys
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
index f0b85cc65..863d1d0ec 100644
--- a/build/config/ios/BUILD.gn
+++ b/build/config/ios/BUILD.gn
@@ -208,11 +208,6 @@ config("ios_shared_library_flags") {
]
}
-config("disable_implicit_retain_self_warning") {
- cflags_objc = [ "-Wno-implicit-retain-self" ]
- cflags_objcc = cflags_objc
-}
-
config("xctest_config") {
# Add some directories to the system framework search path to make
# them available to the compiler while silencing warnings in the
diff --git a/build/config/ios/asset_catalog.gni b/build/config/ios/asset_catalog.gni
index 79ffdb7e9..8695bf7f9 100644
--- a/build/config/ios/asset_catalog.gni
+++ b/build/config/ios/asset_catalog.gni
@@ -48,69 +48,63 @@ template("asset_catalog") {
assert(defined(invoker.asset_type) && invoker.asset_type != "",
"asset_type must be defined and not empty for $target_name")
- if (is_fat_secondary_toolchain) {
- group(target_name) {
- public_deps = [ ":$target_name($primary_fat_toolchain_name)" ]
- }
- } else {
- _copy_target_name = target_name + "__copy"
- _data_target_name = target_name
+ _copy_target_name = target_name + "__copy"
+ _data_target_name = target_name
- _sources = invoker.sources
- _outputs = []
+ _sources = invoker.sources
+ _outputs = []
- # The compilation of resources into Assets.car is enabled automatically
- # by the "create_bundle" target if any of the "bundle_data" sources's
- # path is in a .xcassets directory and matches one of the know asset
- # catalog type.
- _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
- _output_dir = "$_xcassets_dir/" +
- get_path_info(get_path_info(_sources[0], "dir"), "file")
+ # The compilation of resources into Assets.car is enabled automatically
+ # by the "create_bundle" target if any of the "bundle_data" sources's
+ # path is in a .xcassets directory and matches one of the know asset
+ # catalog type.
+ _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
+ _output_dir = "$_xcassets_dir/" +
+ get_path_info(get_path_info(_sources[0], "dir"), "file")
- foreach(_source, invoker.sources) {
- _dir = get_path_info(_source, "dir")
- _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
+ foreach(_source, invoker.sources) {
+ _dir = get_path_info(_source, "dir")
+ _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
- assert(get_path_info(_dir, "extension") == invoker.asset_type,
- "$_source dirname must have .${invoker.asset_type} extension")
- }
+ assert(get_path_info(_dir, "extension") == invoker.asset_type,
+ "$_source dirname must have .${invoker.asset_type} extension")
+ }
- action(_copy_target_name) {
- # Forward "deps", "public_deps" and "testonly" in case some of the
- # source files are generated.
- forward_variables_from(invoker,
- [
- "deps",
- "public_deps",
- "testonly",
- ])
+ action(_copy_target_name) {
+ # Forward "deps", "public_deps" and "testonly" in case some of the
+ # source files are generated.
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
- script = "//build/config/ios/hardlink.py"
+ script = "//build/config/ios/hardlink.py"
- visibility = [ ":$_data_target_name" ]
- sources = _sources
- outputs = _outputs + [ _xcassets_dir ]
+ visibility = [ ":$_data_target_name" ]
+ sources = _sources
+ outputs = _outputs + [ _xcassets_dir ]
- args = [
- rebase_path(get_path_info(_sources[0], "dir"), root_build_dir),
- rebase_path(_output_dir, root_build_dir),
- ]
- }
+ args = [
+ rebase_path(get_path_info(_sources[0], "dir"), root_build_dir),
+ rebase_path(_output_dir, root_build_dir),
+ ]
+ }
- bundle_data(_data_target_name) {
- forward_variables_from(invoker,
- "*",
- [
- "deps",
- "outputs",
- "public_deps",
- "sources",
- ])
+ bundle_data(_data_target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "deps",
+ "outputs",
+ "public_deps",
+ "sources",
+ ])
- sources = _outputs
- outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
- public_deps = [ ":$_copy_target_name" ]
- }
+ sources = _outputs
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ public_deps = [ ":$_copy_target_name" ]
}
}
diff --git a/build/config/ios/bundle_data_from_filelist.gni b/build/config/ios/bundle_data_from_filelist.gni
new file mode 100644
index 000000000..763dc8673
--- /dev/null
+++ b/build/config/ios/bundle_data_from_filelist.gni
@@ -0,0 +1,24 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(current_os == "ios")
+
+template("bundle_data_from_filelist") {
+ assert(defined(invoker.filelist_name), "Requires setting filelist_name")
+
+ _filelist_content = read_file(invoker.filelist_name, "list lines")
+ bundle_data(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "filelist_name",
+ "sources",
+ ])
+ sources = filter_exclude(_filelist_content, [ "#*" ])
+ if (!defined(outputs)) {
+ outputs = [ "{{bundle_resources_dir}}/" +
+ "{{source_root_relative_dir}}/{{source_file_part}}" ]
+ }
+ }
+}
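A sketch of how the new template might be used (file and target names invented for illustration); every line of the filelist that does not start with '#' becomes a bundled resource:

import("//build/config/ios/bundle_data_from_filelist.gni")

# Illustrative only; resources.filelist lists one source path per line.
bundle_data_from_filelist("sample_resources") {
  testonly = true
  filelist_name = "resources.filelist"
}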
diff --git a/build/config/ios/codesign.py b/build/config/ios/codesign.py
index a475302e8..fd96f312d 100644
--- a/build/config/ios/codesign.py
+++ b/build/config/ios/codesign.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import codecs
diff --git a/build/config/ios/compile_ib_files.py b/build/config/ios/compile_ib_files.py
index 3dd1bb26f..e42001601 100644
--- a/build/config/ios/compile_ib_files.py
+++ b/build/config/ios/compile_ib_files.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import logging
diff --git a/build/config/ios/config.gni b/build/config/ios/config.gni
index 74d064532..c5c10c3f7 100644
--- a/build/config/ios/config.gni
+++ b/build/config/ios/config.gni
@@ -9,11 +9,6 @@ declare_args() {
# default is only there for compatibility reasons and will be removed (see
# crbug.com/1138425 for more details).
target_environment = ""
-
- # Control whether cronet is built (this is usually set by the script
- # components/cronet/tools/cr_cronet.py as cronet requires specific
- # gn args to build correctly).
- is_cronet_build = false
}
if (target_environment == "") {
diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py
index ad7983451..37b3284e9 100644
--- a/build/config/ios/find_signing_identity.py
+++ b/build/config/ios/find_signing_identity.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import argparse
import os
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
index af498ced8..141746968 100644
--- a/build/config/ios/ios_sdk.gni
+++ b/build/config/ios/ios_sdk.gni
@@ -52,38 +52,10 @@ declare_args() {
# identity description and app bundle id prefix.
ios_mobileprovision_files = []
- # If non-empty, this list must contain valid cpu architecture, and the final
- # build will be a multi-architecture build (aka fat build) supporting the
- # main $target_cpu architecture and all of $additional_target_cpus.
- #
- # For example to build an application that will run on both arm64 and armv7
- # devices, you would use the following in args.gn file when running "gn args":
- #
- # target_os = "ios"
- # target_cpu = "arm64"
- # additional_target_cpus = [ "arm" ]
- #
- # You can also pass the value via "--args" parameter for "gn gen" command by
- # using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
- additional_target_cpus = []
-
# Set to true if all test apps should use the same bundle id.
ios_use_shared_bundle_id_for_test_apps = true
}
-declare_args() {
- # This variable is set by the toolchain. It is set to true if the toolchain
- # is a secondary toolchain as part of a "fat" build.
- is_fat_secondary_toolchain = false
-
- # This variable is set by the toolchain. It is the name of the primary
- # toolchain for the fat build (could be current_toolchain).
- primary_fat_toolchain_name = ""
-}
-
-assert(custom_toolchain == "" || additional_target_cpus == [],
- "cannot define both custom_toolchain and additional_target_cpus")
-
# If codesigning is enabled, you must configure either a codesigning identity
# or a filter to automatically select the codesigning identity.
if (target_environment == "device" && ios_enable_code_signing) {
@@ -96,25 +68,6 @@ if (target_environment == "device" && ios_enable_code_signing) {
"pattern to match the identity to use).")
}
-# Initialize additional_toolchains from additional_target_cpus. Assert here
-# that the list does not contains $target_cpu nor duplicates as this would
-# cause weird errors during the build.
-additional_toolchains = []
-if (additional_target_cpus != [] && !is_fat_secondary_toolchain) {
- foreach(_additional_target_cpu, additional_target_cpus) {
- assert(_additional_target_cpu != target_cpu,
- "target_cpu must not be listed in additional_target_cpus")
-
- _toolchain = "${current_toolchain}_fat_${_additional_target_cpu}"
- foreach(_additional_toolchain, additional_toolchains) {
- assert(_toolchain != _additional_toolchain,
- "additional_target_cpus must not contains duplicate values")
- }
-
- additional_toolchains += [ _toolchain ]
- }
-}
-
if (ios_sdk_path == "") {
# Compute default target.
if (target_environment == "simulator") {
diff --git a/build/config/ios/ios_sdk_overrides.gni b/build/config/ios/ios_sdk_overrides.gni
index 03178ee93..a2373c6c9 100644
--- a/build/config/ios/ios_sdk_overrides.gni
+++ b/build/config/ios/ios_sdk_overrides.gni
@@ -7,11 +7,11 @@
declare_args() {
# Version of iOS that we're targeting.
- ios_deployment_target = "14.0"
+ ios_deployment_target = "15.0"
}
# Always assert that ios_deployment_target is used on non-iOS platforms to
# prevent unused args warnings.
if (!is_ios) {
- assert(ios_deployment_target == "14.0" || true)
+ assert(ios_deployment_target == "15.0" || true)
}
diff --git a/build/config/ios/ios_test_runner_wrapper.gni b/build/config/ios/ios_test_runner_wrapper.gni
index 25108c5dd..378323c4f 100644
--- a/build/config/ios/ios_test_runner_wrapper.gni
+++ b/build/config/ios/ios_test_runner_wrapper.gni
@@ -131,11 +131,7 @@ template("ios_test_runner_wrapper") {
_wrapper_output_name = wrapper_output_name
}
- # Test targets may attempt to generate multiple wrappers for a suite with
- # multiple different toolchains when running with additional_target_cpus.
- # Generate the wrapper script into root_out_dir rather than root_build_dir
- # to ensure those wrappers are distinct.
- wrapper_script = "${root_out_dir}/bin/${_wrapper_output_name}"
+ wrapper_script = "${root_build_dir}/bin/${_wrapper_output_name}"
data = []
if (defined(invoker.data)) {
diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni
index c41850715..2b9b6d2a5 100644
--- a/build/config/ios/rules.gni
+++ b/build/config/ios/rules.gni
@@ -18,116 +18,6 @@ _ios_xcode_appex_bundle_id = "com.apple.product-type.app-extension"
_ios_xcode_xctest_bundle_id = "com.apple.product-type.bundle.unit-test"
_ios_xcode_xcuitest_bundle_id = "com.apple.product-type.bundle.ui-testing"
-# Invokes lipo on multiple arch-specific binaries to create a fat binary.
-#
-# Arguments
-#
-# arch_binary_target
-# name of the target generating the arch-specific binaries, they must
-# be named $target_out_dir/$toolchain_cpu/$arch_binary_output.
-#
-# arch_binary_output
-# (optional, defaults to the name of $arch_binary_target) base name of
-# the arch-specific binary generated by arch_binary_target.
-#
-# output_name
-# (optional, defaults to $target_name) base name of the target output,
-# the full path will be $target_out_dir/$output_name.
-#
-# configs
-# (optional) a list of configurations, this is used to check whether
-# the binary should be stripped, when "enable_stripping" is true.
-#
-template("lipo_binary") {
- assert(defined(invoker.arch_binary_target),
- "arch_binary_target must be defined for $target_name")
-
- assert(!is_fat_secondary_toolchain,
- "lipo_binary can only be used in the primary toolchain of a fat build")
-
- _target_name = target_name
- _output_name = target_name
- if (defined(invoker.output_name)) {
- _output_name = invoker.output_name
- }
-
- _all_target_cpu = [ current_cpu ] + additional_target_cpus
- _all_toolchains = [ current_toolchain ] + additional_toolchains
-
- _arch_binary_target = invoker.arch_binary_target
- _arch_binary_output = get_label_info(_arch_binary_target, "name")
- if (defined(invoker.arch_binary_output)) {
- _arch_binary_output = invoker.arch_binary_output
- }
-
- action(_target_name) {
- forward_variables_from(invoker,
- "*",
- [
- "arch_binary_output",
- "arch_binary_target",
- "configs",
- "output_name",
- ])
-
- script = "//build/toolchain/apple/linker_driver.py"
-
- # http://crbug.com/762840. Fix for bots running out of memory.
- pool = "//build/toolchain:link_pool($default_toolchain)"
-
- outputs = [ "$target_out_dir/$_output_name" ]
-
- deps = []
- _index = 0
- inputs = []
- foreach(_cpu, _all_target_cpu) {
- _toolchain = _all_toolchains[_index]
- _index = _index + 1
-
- inputs +=
- [ get_label_info("$_arch_binary_target($_toolchain)",
- "target_out_dir") + "/$_cpu/$_arch_binary_output" ]
-
- deps += [ "$_arch_binary_target($_toolchain)" ]
- }
-
- args = [
- "xcrun",
- "lipo",
- "-create",
- "-output",
- rebase_path("$target_out_dir/$_output_name", root_build_dir),
- ] + rebase_path(inputs, root_build_dir)
-
- if (enable_dsyms) {
- _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM"
- outputs += [
- "$_dsyms_output_dir/",
- "$_dsyms_output_dir/Contents/Info.plist",
- "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name",
- ]
- args += [
- "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir),
- "-Wcrl,dsymutilpath," +
- rebase_path("//tools/clang/dsymutil/bin/dsymutil", root_build_dir),
- ]
- }
-
- if (enable_stripping) {
- args += [ "-Wcrl,strip,-x,-S" ]
- if (save_unstripped_output) {
- outputs += [ "$root_out_dir/$_output_name.unstripped" ]
- args += [ "-Wcrl,unstripped," +
- rebase_path("$root_out_dir/.", root_build_dir) ]
- }
- }
- if (!is_debug && use_thin_lto && is_a_target_toolchain) {
- args += [ "-Wcrl,clean_objects," +
- string_join(",", rebase_path(inputs, root_build_dir)) ]
- }
- }
-}
-
# Wrapper around create_bundle taking care of code signature settings.
#
# Arguments
@@ -611,11 +501,6 @@ template("ios_app_bundle") {
_output_name = invoker.output_name
}
- _primary_toolchain = current_toolchain
- if (is_fat_secondary_toolchain) {
- _primary_toolchain = primary_fat_toolchain_name
- }
-
assert(
!defined(invoker.bundle_extension),
"bundle_extension must not be set for ios_app_bundle template for $target_name")
@@ -631,10 +516,6 @@ template("ios_app_bundle") {
"$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-")
}
- _arch_executable_source = _target_name + "_arch_executable_sources"
- _arch_executable_target = _target_name + "_arch_executable"
- _lipo_executable_target = _target_name + "_executable"
-
if (defined(invoker.variants) && invoker.variants != []) {
_variants = []
@@ -671,35 +552,11 @@ template("ios_app_bundle") {
_default_variant = _variants[0]
- source_set(_arch_executable_source) {
- forward_variables_from(invoker,
- "*",
- [
- "bundle_deps",
- "bundle_deps_filter",
- "bundle_extension",
- "enable_code_signing",
- "entitlements_path",
- "entitlements_target",
- "extra_substitutions",
- "extra_system_frameworks",
- "info_plist",
- "info_plist_target",
- "output_name",
- "product_type",
- "visibility",
- "xcode_extra_attributes",
- ])
-
- visibility = [ ":$_arch_executable_target" ]
- }
-
- if (!is_fat_secondary_toolchain || target_environment == "simulator") {
- _generate_entitlements_target = _target_name + "_gen_entitlements"
- _generate_entitlements_output =
- get_label_info(":$_generate_entitlements_target($_primary_toolchain)",
- "target_out_dir") + "/$_output_name.xcent"
- }
+ _executable_target = _target_name + "_executable"
+ _generate_entitlements_target = _target_name + "_gen_entitlements"
+ _generate_entitlements_output =
+ get_label_info(":$_generate_entitlements_target", "target_out_dir") +
+ "/$_output_name.xcent"
_product_type = _ios_xcode_app_bundle_id
if (defined(invoker.product_type)) {
@@ -716,7 +573,7 @@ template("ios_app_bundle") {
_is_app_bundle = _product_type == _ios_xcode_app_bundle_id
- executable(_arch_executable_target) {
+ executable(_executable_target) {
forward_variables_from(invoker,
"*",
[
@@ -732,20 +589,14 @@ template("ios_app_bundle") {
"info_plist_target",
"output_name",
"product_type",
- "sources",
"visibility",
"xcode_extra_attributes",
])
- visibility = [ ":$_lipo_executable_target($_primary_toolchain)" ]
- if (is_fat_secondary_toolchain) {
- visibility += [ ":$_target_name" ]
- }
-
- if (!defined(deps)) {
- deps = []
+ visibility = []
+ foreach(_variant, _variants) {
+ visibility += [ ":${_variant.target_name}" ]
}
- deps += [ ":$_arch_executable_source" ]
if (!defined(frameworks)) {
frameworks = []
@@ -753,7 +604,10 @@ template("ios_app_bundle") {
frameworks += [ "UIKit.framework" ]
if (target_environment == "simulator") {
- deps += [ ":$_generate_entitlements_target($_primary_toolchain)" ]
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_generate_entitlements_target" ]
if (!defined(inputs)) {
inputs = []
@@ -769,207 +623,171 @@ template("ios_app_bundle") {
output_name = _output_name
output_prefix_override = true
- output_dir = "$target_out_dir/$current_cpu"
+ output_dir = target_out_dir
}
- if (is_fat_secondary_toolchain) {
- # For fat builds, only the default toolchain will generate an application
- # bundle. For the other toolchains, the template is only used for building
- # the arch-specific binary, thus the default target is just a group().
+ _generate_info_plist = target_name + "_generate_info_plist"
+ ios_info_plist(_generate_info_plist) {
+ forward_variables_from(invoker,
+ [
+ "info_plist",
+ "info_plist_target",
+ ])
- group(_target_name) {
- forward_variables_from(invoker,
- [
- "visibility",
- "testonly",
- ])
- public_deps = [ ":$_arch_executable_target" ]
+ executable_name = _output_name
+
+ extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
+ }
+ }
+
+ if (!defined(invoker.entitlements_target)) {
+ _entitlements_path = "//build/config/ios/entitlements.plist"
+ if (defined(invoker.entitlements_path)) {
+ _entitlements_path = invoker.entitlements_path
}
} else {
- lipo_binary(_lipo_executable_target) {
- forward_variables_from(invoker,
- [
- "configs",
- "testonly",
- ])
+ assert(!defined(invoker.entitlements_path),
+ "Cannot define both entitlements_path and entitlements_target" +
+ "for $_target_name")
- visibility = []
- foreach(_variant, _variants) {
- visibility += [ ":${_variant.target_name}" ]
- }
+ _entitlements_target_outputs =
+ get_target_outputs(invoker.entitlements_target)
+ _entitlements_path = _entitlements_target_outputs[0]
+ }
- output_name = _output_name
- arch_binary_target = ":$_arch_executable_target"
- arch_binary_output = _output_name
- }
+ action(_generate_entitlements_target) {
+ _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
+ _info_plist_path = _gen_info_plist_outputs[0]
- _generate_info_plist = target_name + "_generate_info_plist"
- ios_info_plist(_generate_info_plist) {
- forward_variables_from(invoker,
- [
- "info_plist",
- "info_plist_target",
- ])
+ script = "//build/config/ios/codesign.py"
+ deps = [ ":$_generate_info_plist" ]
+ if (defined(invoker.entitlements_target)) {
+ deps += [ invoker.entitlements_target ]
+ }
+ sources = [
+ _entitlements_path,
+ _info_plist_path,
+ ]
+ sources += ios_mobileprovision_files
- executable_name = _output_name
+ outputs = [ _generate_entitlements_output ]
- extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
- if (defined(invoker.extra_substitutions)) {
- extra_substitutions += invoker.extra_substitutions
- }
+ args = [
+ "generate-entitlements",
+ "-e=" + rebase_path(_entitlements_path, root_build_dir),
+ "-p=" + rebase_path(_info_plist_path, root_build_dir),
+ ]
+ foreach(mobileprovision, ios_mobileprovision_files) {
+ args += [ "-m=" + rebase_path(mobileprovision, root_build_dir) ]
}
+ args += rebase_path(outputs, root_build_dir)
+ }
- if (!is_fat_secondary_toolchain) {
- if (!defined(invoker.entitlements_target)) {
- _entitlements_path = "//build/config/ios/entitlements.plist"
- if (defined(invoker.entitlements_path)) {
- _entitlements_path = invoker.entitlements_path
- }
- } else {
- assert(!defined(invoker.entitlements_path),
- "Cannot define both entitlements_path and entitlements_target" +
- "for $_target_name")
+ # Only write PkgInfo for real application, not application extension.
+ if (_is_app_bundle) {
+ _create_pkg_info = target_name + "_pkg_info"
+ action(_create_pkg_info) {
+ forward_variables_from(invoker, [ "testonly" ])
+ script = "//build/apple/write_pkg_info.py"
+ inputs = [ "//build/apple/plist_util.py" ]
+ sources = get_target_outputs(":$_generate_info_plist")
+ outputs = [
+        # Cannot name the output PkgInfo as the name will not be unique if
+        # multiple ios_app_bundle targets are defined in the same BUILD.gn
+        # file. The file is renamed in the bundle_data outputs to the correct
+        # name.
+ "$target_gen_dir/$target_name",
+ ]
+ args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+ [ "--output" ] + rebase_path(outputs, root_build_dir)
+ deps = [ ":$_generate_info_plist" ]
+ }
- _entitlements_target_outputs =
- get_target_outputs(invoker.entitlements_target)
- _entitlements_path = _entitlements_target_outputs[0]
- }
+ _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
+ bundle_data(_bundle_data_pkg_info) {
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = get_target_outputs(":$_create_pkg_info")
+ outputs = [ "{{bundle_resources_dir}}/PkgInfo" ]
+ public_deps = [ ":$_create_pkg_info" ]
+ }
+ }
- action(_generate_entitlements_target) {
- _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
- _info_plist_path = _gen_info_plist_outputs[0]
+ foreach(_variant, _variants) {
+ create_signed_bundle(_variant.target_name) {
+ forward_variables_from(invoker,
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "deps",
+ "enable_code_signing",
+ "entitlements_path",
+ "entitlements_target",
+ "extra_system_frameworks",
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ "xcode_extra_attributes",
+ ])
- script = "//build/config/ios/codesign.py"
- deps = [ ":$_generate_info_plist" ]
- if (defined(invoker.entitlements_target)) {
- deps += [ invoker.entitlements_target ]
- }
- sources = [
- _entitlements_path,
- _info_plist_path,
- ]
- sources += ios_mobileprovision_files
+ output_name = _output_name
+ bundle_gen_dir = _variant.bundle_gen_dir
+ bundle_binary_target = ":$_executable_target"
+ bundle_binary_output = _output_name
+ bundle_extension = _bundle_extension
+ product_type = _product_type
+ xcode_product_bundle_id = _bundle_identifier
- outputs = [ _generate_entitlements_output ]
+ _generate_info_plist_outputs =
+ get_target_outputs(":$_generate_info_plist")
+ primary_info_plist = _generate_info_plist_outputs[0]
+ partial_info_plist =
+ "$target_gen_dir/${_variant.target_name}_partial_info.plist"
- args = [
- "generate-entitlements",
- "-e=" + rebase_path(_entitlements_path, root_build_dir),
- "-p=" + rebase_path(_info_plist_path, root_build_dir),
- ]
- foreach(mobileprovision, ios_mobileprovision_files) {
- args += [ "-m=" + rebase_path(mobileprovision, root_build_dir) ]
- }
- args += rebase_path(outputs, root_build_dir)
+ if (!defined(deps)) {
+ deps = []
}
- }
+ deps += [ ":$_generate_info_plist" ]
- # Only write PkgInfo for real application, not application extension.
- if (_is_app_bundle) {
- _create_pkg_info = target_name + "_pkg_info"
- action(_create_pkg_info) {
- forward_variables_from(invoker, [ "testonly" ])
- script = "//build/apple/write_pkg_info.py"
- inputs = [ "//build/apple/plist_util.py" ]
- sources = get_target_outputs(":$_generate_info_plist")
- outputs = [
- # Cannot name the output PkgInfo as the name will not be unique if
- # multiple ios_app_bundle are defined in the same BUILD.gn file. The
- # file is renamed in the bundle_data outputs to the correct name.
- "$target_gen_dir/$target_name",
- ]
- args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
- [ "--output" ] + rebase_path(outputs, root_build_dir)
- deps = [ ":$_generate_info_plist" ]
+ if (!defined(bundle_deps)) {
+ bundle_deps = []
}
-
- _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
- bundle_data(_bundle_data_pkg_info) {
- forward_variables_from(invoker, [ "testonly" ])
- sources = get_target_outputs(":$_create_pkg_info")
- outputs = [ "{{bundle_resources_dir}}/PkgInfo" ]
- public_deps = [ ":$_create_pkg_info" ]
+ if (_is_app_bundle) {
+ bundle_deps += [ ":$_bundle_data_pkg_info" ]
}
- }
-
- foreach(_variant, _variants) {
- create_signed_bundle(_variant.target_name) {
- forward_variables_from(invoker,
- [
- "bundle_deps",
- "bundle_deps_filter",
- "data_deps",
- "deps",
- "enable_code_signing",
- "entitlements_path",
- "entitlements_target",
- "extra_system_frameworks",
- "public_configs",
- "public_deps",
- "testonly",
- "visibility",
- "xcode_extra_attributes",
- ])
-
- output_name = _output_name
- bundle_gen_dir = _variant.bundle_gen_dir
- bundle_binary_target = ":$_lipo_executable_target"
- bundle_binary_output = _output_name
- bundle_extension = _bundle_extension
- product_type = _product_type
- xcode_product_bundle_id = _bundle_identifier
-
- _generate_info_plist_outputs =
- get_target_outputs(":$_generate_info_plist")
- primary_info_plist = _generate_info_plist_outputs[0]
- partial_info_plist =
- "$target_gen_dir/${_variant.target_name}_partial_info.plist"
+ bundle_deps += _variant.bundle_deps
- if (!defined(deps)) {
- deps = []
+ if (target_environment == "simulator") {
+ if (!defined(data_deps)) {
+ data_deps = []
}
- deps += [ ":$_generate_info_plist" ]
-
- if (!defined(bundle_deps)) {
- bundle_deps = []
- }
- if (_is_app_bundle) {
- bundle_deps += [ ":$_bundle_data_pkg_info" ]
- }
- bundle_deps += _variant.bundle_deps
-
- if (target_environment == "simulator") {
- if (!defined(data_deps)) {
- data_deps = []
- }
+ if (build_with_chromium) {
data_deps += [ "//testing/iossim" ]
}
}
}
+ }
- if (_default_variant.name != "") {
- _bundle_short_name = "$_output_name$_bundle_extension"
- action(_target_name) {
- forward_variables_from(invoker, [ "testonly" ])
+ if (_default_variant.name != "") {
+ _bundle_short_name = "$_output_name$_bundle_extension"
+ action(_target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
- script = "//build/config/ios/hardlink.py"
- public_deps = []
- foreach(_variant, _variants) {
- public_deps += [ ":${_variant.target_name}" ]
- }
+ script = "//build/config/ios/hardlink.py"
+ public_deps = []
+ foreach(_variant, _variants) {
+ public_deps += [ ":${_variant.target_name}" ]
+ }
- sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ]
- outputs = [ "$root_out_dir/$_bundle_short_name" ]
+ sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ]
+ outputs = [ "$root_out_dir/$_bundle_short_name" ]
- args = rebase_path(sources, root_build_dir) +
- rebase_path(outputs, root_build_dir)
- }
+ args = rebase_path(sources, root_build_dir) +
+ rebase_path(outputs, root_build_dir)
}
}
-
- if (is_fat_secondary_toolchain) {
- not_needed("*")
- }
}
set_defaults("ios_app_bundle") {
@@ -1272,27 +1090,12 @@ template("ios_framework_bundle") {
_has_public_headers =
defined(invoker.public_headers) && invoker.public_headers != []
- _primary_toolchain = current_toolchain
- if (is_fat_secondary_toolchain) {
- _primary_toolchain = primary_fat_toolchain_name
- }
-
- # Public configs are not propagated across toolchain (see crbug.com/675224)
- # so some configs have to be defined for both default_toolchain and all others
- # toolchains when performing a fat build. Use "get_label_info" to construct
- # the path since they need to be relative to the default_toolchain.
-
- _default_toolchain_root_out_dir =
- get_label_info("$_target_name($_primary_toolchain)", "root_out_dir")
-
- _arch_shared_library_source = _target_name + "_arch_shared_library_sources"
- _arch_shared_library_target = _target_name + "_arch_shared_library"
- _lipo_shared_library_target = _target_name + "_shared_library"
+ _shared_library_target = _target_name + "_shared_library"
_link_target_name = _target_name + "+link"
if (_has_public_headers) {
_default_toolchain_target_gen_dir =
- get_label_info("$_target_name($_primary_toolchain)", "target_gen_dir")
+ get_label_info("$_target_name", "target_gen_dir")
_framework_headers_target = _target_name + "_framework_headers"
@@ -1301,7 +1104,7 @@ template("ios_framework_bundle") {
"$_default_toolchain_target_gen_dir/$_output_name.headers.hmap"
config(_headers_map_config) {
visibility = [
- ":${_arch_shared_library_source}",
+ ":${_shared_library_target}",
":${_target_name}_signed_bundle",
]
include_dirs = [ _header_map_filename ]
@@ -1310,7 +1113,7 @@ template("ios_framework_bundle") {
_framework_headers_config = _target_name + "_framework_headers_config"
config(_framework_headers_config) {
- framework_dirs = [ _default_toolchain_root_out_dir ]
+ framework_dirs = [ root_out_dir ]
}
_framework_public_config = _target_name + "_public_config"
@@ -1319,7 +1122,7 @@ template("ios_framework_bundle") {
frameworks = [ "$_output_name.framework" ]
}
- source_set(_arch_shared_library_source) {
+ shared_library(_shared_library_target) {
forward_variables_from(invoker,
"*",
[
@@ -1335,7 +1138,13 @@ template("ios_framework_bundle") {
"visibility",
])
- visibility = [ ":$_arch_shared_library_target" ]
+ visibility = [ ":${_target_name}_signed_bundle" ]
+
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags +=
+ [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ]
if (_has_public_headers) {
configs += [ ":$_headers_map_config" ]
@@ -1343,309 +1152,214 @@ template("ios_framework_bundle") {
if (!defined(deps)) {
deps = []
}
- deps += [ ":$_framework_headers_target($_primary_toolchain)" ]
- }
- }
-
- shared_library(_arch_shared_library_target) {
- forward_variables_from(invoker,
- "*",
- [
- "bundle_deps",
- "bundle_deps_filter",
- "data_deps",
- "enable_code_signing",
- "extra_substitutions",
- "info_plist",
- "info_plist_target",
- "output_name",
- "sources",
- "public_configs",
- "visibility",
- ])
-
- visibility = [ ":$_lipo_shared_library_target($_primary_toolchain)" ]
- if (is_fat_secondary_toolchain) {
- visibility += [
- ":${_target_name}",
- ":${_target_name}_signed_bundle",
- ]
- }
-
- if (!defined(deps)) {
- deps = []
- }
- deps += [ ":$_arch_shared_library_source" ]
- if (_has_public_headers) {
- deps += [ ":$_framework_headers_target($_primary_toolchain)" ]
+ deps += [ ":$_framework_headers_target" ]
}
- if (!defined(ldflags)) {
- ldflags = []
- }
- ldflags +=
- [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ]
output_extension = ""
output_name = _output_name
output_prefix_override = true
- output_dir = "$target_out_dir/$current_cpu"
+ output_dir = target_out_dir
}
- if (is_fat_secondary_toolchain) {
- # For fat builds, only the default toolchain will generate a framework
- # bundle. For the other toolchains, the template is only used for building
- # the arch-specific binary, thus the default target is just a group().
+ if (_has_public_headers) {
+ _public_headers = invoker.public_headers
- group(_target_name) {
- forward_variables_from(invoker,
- [
- "visibility",
- "testonly",
- ])
- public_deps = [ ":$_arch_shared_library_target" ]
+ _framework_root_dir = "$root_out_dir/$_output_name.framework"
+ if (target_environment == "simulator" || target_environment == "device") {
+ _framework_contents_dir = _framework_root_dir
+ } else if (target_environment == "catalyst") {
+ _framework_contents_dir = "$_framework_root_dir/Versions/A"
}
- group(_link_target_name) {
+ _compile_headers_map_target = _target_name + "_compile_headers_map"
+ action(_compile_headers_map_target) {
+ visibility = [ ":$_framework_headers_target" ]
forward_variables_from(invoker,
[
- "public_configs",
- "visibility",
+ "deps",
+ "public_deps",
"testonly",
])
- public_deps = [ ":$_link_target_name($_primary_toolchain)" ]
-
- if (_has_public_headers) {
- if (!defined(public_configs)) {
- public_configs = []
+ script = "//build/config/ios/write_framework_hmap.py"
+ outputs = [ _header_map_filename ]
+
+ # The header map generation only wants the list of headers, not all of
+ # sources, so filter any non-header source files from "sources". It is
+      # less error prone than having the developer duplicate the list of all
+ # headers in addition to "sources".
+ sources = []
+ foreach(_source, invoker.sources) {
+ if (get_path_info(_source, "extension") == "h") {
+ sources += [ _source ]
}
- public_configs += [ ":$_framework_headers_config" ]
}
- if (!defined(all_dependent_configs)) {
- all_dependent_configs = []
- }
- all_dependent_configs += [ ":$_framework_public_config" ]
- }
- group("$_target_name+bundle") {
- forward_variables_from(invoker, [ "testonly" ])
- public_deps = [ ":$_target_name+bundle($_primary_toolchain)" ]
+ args = [
+ rebase_path(_header_map_filename, root_build_dir),
+ rebase_path(_framework_root_dir, root_build_dir),
+ ] + rebase_path(sources, root_build_dir)
}
- not_needed(invoker, "*")
- } else {
- if (_has_public_headers) {
- _public_headers = invoker.public_headers
-
- _framework_root_dir = "$root_out_dir/$_output_name.framework"
- if (target_environment == "simulator" || target_environment == "device") {
- _framework_contents_dir = _framework_root_dir
- } else if (target_environment == "catalyst") {
- _framework_contents_dir = "$_framework_root_dir/Versions/A"
- }
-
- _compile_headers_map_target = _target_name + "_compile_headers_map"
- action(_compile_headers_map_target) {
- visibility = [ ":$_framework_headers_target" ]
- forward_variables_from(invoker,
- [
- "deps",
- "public_deps",
- "testonly",
- ])
- script = "//build/config/ios/write_framework_hmap.py"
- outputs = [ _header_map_filename ]
-
- # The header map generation only wants the list of headers, not all of
- # sources, so filter any non-header source files from "sources". It is
- # less error prone that having the developer duplicate the list of all
- # headers in addition to "sources".
- sources = []
- foreach(_source, invoker.sources) {
- if (get_path_info(_source, "extension") == "h") {
- sources += [ _source ]
- }
- }
-
- args = [
- rebase_path(_header_map_filename, root_build_dir),
- rebase_path(_framework_root_dir, root_build_dir),
- ] + rebase_path(sources, root_build_dir)
- }
-
- _create_module_map_target = _target_name + "_module_map"
- action(_create_module_map_target) {
- visibility = [ ":$_framework_headers_target" ]
- script = "//build/config/ios/write_framework_modulemap.py"
- outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ]
- args = [
- _output_name,
- rebase_path("$_framework_contents_dir/Modules", root_build_dir),
- ]
- }
-
- _copy_public_headers_target = _target_name + "_copy_public_headers"
- copy(_copy_public_headers_target) {
- forward_variables_from(invoker,
- [
- "testonly",
- "deps",
- ])
- visibility = [ ":$_framework_headers_target" ]
- sources = _public_headers
- outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ]
-
- # Do not use forward_variables_from for "public_deps" as
- # we do not want to forward those dependencies.
- if (defined(invoker.public_deps)) {
- if (!defined(deps)) {
- deps = []
- }
- deps += invoker.public_deps
- }
- }
-
- group(_framework_headers_target) {
- forward_variables_from(invoker, [ "testonly" ])
- deps = [
- ":$_compile_headers_map_target",
- ":$_create_module_map_target",
- ]
- public_deps = [ ":$_copy_public_headers_target" ]
- }
+ _create_module_map_target = _target_name + "_module_map"
+ action(_create_module_map_target) {
+ visibility = [ ":$_framework_headers_target" ]
+ script = "//build/config/ios/write_framework_modulemap.py"
+ outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ]
+ args = [
+ _output_name,
+ rebase_path("$_framework_contents_dir/Modules", root_build_dir),
+ ]
}
- lipo_binary(_lipo_shared_library_target) {
+ _copy_public_headers_target = _target_name + "_copy_public_headers"
+ copy(_copy_public_headers_target) {
forward_variables_from(invoker,
[
- "configs",
"testonly",
+ "deps",
])
+ visibility = [ ":$_framework_headers_target" ]
+ sources = _public_headers
+ outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ]
- visibility = [ ":${_target_name}_signed_bundle" ]
- output_name = _output_name
- arch_binary_target = ":$_arch_shared_library_target"
- arch_binary_output = _output_name
+ # Do not use forward_variables_from for "public_deps" as
+ # we do not want to forward those dependencies.
+ if (defined(invoker.public_deps)) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += invoker.public_deps
+ }
}
- # Bundle identifier should respect rfc1034, so replace "_" with "-".
- _bundle_identifier =
- "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-")
+ group(_framework_headers_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+ deps = [
+ ":$_compile_headers_map_target",
+ ":$_create_module_map_target",
+ ]
+ public_deps = [ ":$_copy_public_headers_target" ]
+ }
+ }
- _info_plist_target = _target_name + "_info_plist"
- _info_plist_bundle = _target_name + "_info_plist_bundle"
- ios_info_plist(_info_plist_target) {
- visibility = [ ":$_info_plist_bundle" ]
- executable_name = _output_name
- forward_variables_from(invoker,
- [
- "info_plist",
- "info_plist_target",
- ])
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier =
+ "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-")
- extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
- if (defined(invoker.extra_substitutions)) {
- extra_substitutions += invoker.extra_substitutions
- }
+ _info_plist_target = _target_name + "_info_plist"
+ _info_plist_bundle = _target_name + "_info_plist_bundle"
+ ios_info_plist(_info_plist_target) {
+ visibility = [ ":$_info_plist_bundle" ]
+ executable_name = _output_name
+ forward_variables_from(invoker,
+ [
+ "info_plist",
+ "info_plist_target",
+ ])
+
+ extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ]
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions += invoker.extra_substitutions
}
+ }
- bundle_data(_info_plist_bundle) {
- visibility = [ ":${_target_name}_signed_bundle" ]
- forward_variables_from(invoker, [ "testonly" ])
- sources = get_target_outputs(":$_info_plist_target")
- public_deps = [ ":$_info_plist_target" ]
+ bundle_data(_info_plist_bundle) {
+ visibility = [ ":${_target_name}_signed_bundle" ]
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = get_target_outputs(":$_info_plist_target")
+ public_deps = [ ":$_info_plist_target" ]
- if (target_environment != "catalyst") {
- outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
- } else {
- outputs = [ "{{bundle_resources_dir}}/Info.plist" ]
- }
+ if (target_environment != "catalyst") {
+ outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+ } else {
+ outputs = [ "{{bundle_resources_dir}}/Info.plist" ]
}
+ }
- create_signed_bundle(_target_name + "_signed_bundle") {
- forward_variables_from(invoker,
- [
- "bundle_deps",
- "bundle_deps_filter",
- "data_deps",
- "deps",
- "enable_code_signing",
- "public_configs",
- "public_deps",
- "testonly",
- "visibility",
- ])
+ create_signed_bundle(_target_name + "_signed_bundle") {
+ forward_variables_from(invoker,
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "deps",
+ "enable_code_signing",
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
- product_type = "com.apple.product-type.framework"
- bundle_extension = ".framework"
+ product_type = "com.apple.product-type.framework"
+ bundle_extension = ".framework"
- output_name = _output_name
- bundle_binary_target = ":$_lipo_shared_library_target"
- bundle_binary_output = _output_name
+ output_name = _output_name
+ bundle_binary_target = ":$_shared_library_target"
+ bundle_binary_output = _output_name
- has_public_headers = _has_public_headers
+ has_public_headers = _has_public_headers
- # Framework do not have entitlements nor mobileprovision because they use
- # the one from the bundle using them (.app or .appex) as they are just
- # dynamic library with shared code.
- disable_entitlements = true
- disable_embedded_mobileprovision = true
+    # Frameworks do not have entitlements nor mobileprovision files because
+    # they use the ones from the bundle embedding them (.app or .appex), as
+    # they are just dynamic libraries with shared code.
+ disable_entitlements = true
+ disable_embedded_mobileprovision = true
- if (!defined(deps)) {
- deps = []
- }
- deps += [ ":$_info_plist_bundle" ]
+ if (!defined(deps)) {
+ deps = []
}
+ deps += [ ":$_info_plist_bundle" ]
+ }
- group(_target_name) {
- forward_variables_from(invoker,
- [
- "public_configs",
- "public_deps",
- "testonly",
- "visibility",
- ])
- if (!defined(public_deps)) {
- public_deps = []
- }
- public_deps += [ ":${_target_name}_signed_bundle" ]
-
- if (_has_public_headers) {
- if (!defined(public_configs)) {
- public_configs = []
- }
- public_configs += [ ":$_framework_headers_config" ]
- }
+ group(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(public_deps)) {
+ public_deps = []
}
+ public_deps += [ ":${_target_name}_signed_bundle" ]
- group(_link_target_name) {
- forward_variables_from(invoker,
- [
- "public_configs",
- "public_deps",
- "testonly",
- "visibility",
- ])
- if (!defined(public_deps)) {
- public_deps = []
+ if (_has_public_headers) {
+ if (!defined(public_configs)) {
+ public_configs = []
}
- public_deps += [ ":$_target_name" ]
+ public_configs += [ ":$_framework_headers_config" ]
+ }
+ }
- if (!defined(all_dependent_configs)) {
- all_dependent_configs = []
- }
- all_dependent_configs += [ ":$_framework_public_config" ]
+ group(_link_target_name) {
+ forward_variables_from(invoker,
+ [
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(public_deps)) {
+ public_deps = []
}
+ public_deps += [ ":$_target_name" ]
- bundle_data(_target_name + "+bundle") {
- forward_variables_from(invoker,
- [
- "testonly",
- "visibility",
- ])
- public_deps = [ ":$_target_name" ]
- sources = [ "$root_out_dir/$_output_name.framework" ]
- outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ]
+ if (!defined(all_dependent_configs)) {
+ all_dependent_configs = []
}
+ all_dependent_configs += [ ":$_framework_public_config" ]
+ }
+
+ bundle_data(_target_name + "+bundle") {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ public_deps = [ ":$_target_name" ]
+ sources = [ "$root_out_dir/$_output_name.framework" ]
+ outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ]
}
}
@@ -1693,13 +1407,6 @@ template("ios_xctest_bundle") {
assert(defined(invoker.xcode_test_application_name),
"xcode_test_application_name must be defined for $target_name")
- # Silence "assignment had no effect" error for non-default toolchains as
- # following variables are only used in the expansion of the template for the
- # default toolchain.
- if (is_fat_secondary_toolchain) {
- not_needed(invoker, "*")
- }
-
_target_name = target_name
_output_name = target_name
@@ -1707,159 +1414,132 @@ template("ios_xctest_bundle") {
_output_name = invoker.output_name
}
- _arch_loadable_module_source = _target_name + "_arch_loadable_module_source"
- _arch_loadable_module_target = _target_name + "_arch_loadable_module"
- _lipo_loadable_module_target = _target_name + "_loadable_module"
+ _loadable_module_target = _target_name + "_loadable_module"
- _primary_toolchain = current_toolchain
- if (is_fat_secondary_toolchain) {
- _primary_toolchain = primary_fat_toolchain_name
- }
-
- source_set(_arch_loadable_module_source) {
- forward_variables_from(invoker, [ "deps" ])
-
- testonly = true
- visibility = [ ":$_arch_loadable_module_target" ]
- }
+ loadable_module(_loadable_module_target) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "host_target",
+ "output_dir",
+ "output_extension",
+ "output_name",
+ "output_prefix_override",
+ "product_type",
+ "testonly",
+ "visibility",
+ "xcode_test_application_name",
+ "xcode_test_application_output_name",
+ "xctest_bundle_principal_class",
+ ])
- loadable_module(_arch_loadable_module_target) {
testonly = true
- visibility = [ ":$_lipo_loadable_module_target($_primary_toolchain)" ]
- if (is_fat_secondary_toolchain) {
- visibility += [ ":$_target_name" ]
- }
+ visibility = [ ":$_target_name" ]
- deps = [ ":$_arch_loadable_module_source" ]
configs += [ "//build/config/ios:xctest_config" ]
- output_dir = "$target_out_dir/$current_cpu"
+ output_dir = target_out_dir
output_name = _output_name
output_prefix_override = true
output_extension = ""
}
- if (is_fat_secondary_toolchain) {
- # For fat builds, only the default toolchain will generate a test bundle.
- # For the other toolchains, the template is only used for building the
- # arch-specific binary, thus the default target is just a group().
- group(_target_name) {
- forward_variables_from(invoker, [ "visibility" ])
- testonly = true
-
- public_deps = [ ":$_arch_loadable_module_target" ]
- }
-
- not_needed(invoker, "*")
- } else {
- _info_plist_target = _target_name + "_info_plist"
- _info_plist_bundle = _target_name + "_info_plist_bundle"
+ _info_plist_target = _target_name + "_info_plist"
+ _info_plist_bundle = _target_name + "_info_plist_bundle"
- # Bundle identifier should respect rfc1034, so replace "_" with "-".
- _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." +
- string_replace(_output_name, "_", "-")
+ # Bundle identifier should respect rfc1034, so replace "_" with "-".
+ _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." +
+ string_replace(_output_name, "_", "-")
- ios_info_plist(_info_plist_target) {
- testonly = true
- visibility = [ ":$_info_plist_bundle" ]
+ ios_info_plist(_info_plist_target) {
+ testonly = true
+ visibility = [ ":$_info_plist_bundle" ]
- info_plist = "//build/config/ios/Module-Info.plist"
- executable_name = _output_name
+ info_plist = "//build/config/ios/Module-Info.plist"
+ executable_name = _output_name
- if (defined(invoker.xctest_bundle_principal_class)) {
- _principal_class = invoker.xctest_bundle_principal_class
- } else {
- # Fall back to a reasonable default value.
- _principal_class = "NSObject"
- }
- extra_substitutions = [
- "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}",
- "BUNDLE_IDENTIFIER=$_bundle_identifier",
- ]
+ if (defined(invoker.xctest_bundle_principal_class)) {
+ _principal_class = invoker.xctest_bundle_principal_class
+ } else {
+ # Fall back to a reasonable default value.
+ _principal_class = "NSObject"
}
+ extra_substitutions = [
+ "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}",
+ "BUNDLE_IDENTIFIER=$_bundle_identifier",
+ ]
+ }
- bundle_data(_info_plist_bundle) {
- testonly = true
- visibility = [ ":$_target_name" ]
-
- public_deps = [ ":$_info_plist_target" ]
+ bundle_data(_info_plist_bundle) {
+ testonly = true
+ visibility = [ ":$_target_name" ]
- sources = get_target_outputs(":$_info_plist_target")
- outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
- }
+ public_deps = [ ":$_info_plist_target" ]
- lipo_binary(_lipo_loadable_module_target) {
- forward_variables_from(invoker, [ "configs" ])
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+ }
- testonly = true
- visibility = [ ":$_target_name" ]
+ _xctest_bundle = _target_name + "_bundle"
+ create_signed_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "bundle_id",
+ "data_deps",
+ "enable_code_signing",
+ "product_type",
+ "xcode_test_application_name",
+ ])
- output_name = _output_name
- arch_binary_target = ":$_arch_loadable_module_target"
- arch_binary_output = _output_name
- }
+ testonly = true
+ visibility = [ ":$_xctest_bundle" ]
- _xctest_bundle = _target_name + "_bundle"
- create_signed_bundle(_target_name) {
- forward_variables_from(invoker,
- [
- "bundle_id",
- "data_deps",
- "enable_code_signing",
- "product_type",
- "xcode_test_application_name",
- ])
+ bundle_extension = ".xctest"
- testonly = true
- visibility = [ ":$_xctest_bundle" ]
+ output_name = _output_name
+ bundle_binary_target = ":$_loadable_module_target"
+ bundle_binary_output = _output_name
- bundle_extension = ".xctest"
+ xcode_extra_attributes = {
+ IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
+ PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier
+ CODE_SIGNING_REQUIRED = "NO"
+ CODE_SIGNING_ALLOWED = "NO"
+ CODE_SIGN_IDENTITY = ""
+ DONT_GENERATE_INFOPLIST_FILE = "YES"
- output_name = _output_name
- bundle_binary_target = ":$_lipo_loadable_module_target"
- bundle_binary_output = _output_name
+ # For XCUITest, Xcode requires specifying the host application name
+ # via the TEST_TARGET_NAME attribute.
+ if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) {
+ TEST_TARGET_NAME = invoker.xcode_test_application_name
+ }
- xcode_extra_attributes = {
- IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
- PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier
- CODE_SIGNING_REQUIRED = "NO"
- CODE_SIGNING_ALLOWED = "NO"
- CODE_SIGN_IDENTITY = ""
- DONT_GENERATE_INFOPLIST_FILE = "YES"
-
- # For XCUITest, Xcode requires specifying the host application name
- # via the TEST_TARGET_NAME attribute.
- if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) {
- TEST_TARGET_NAME = invoker.xcode_test_application_name
+ # For XCTest, Xcode requires specifying the host application path via
+ # both BUNDLE_LOADER and TEST_HOST attributes.
+ if (invoker.product_type == _ios_xcode_xctest_bundle_id) {
+ _xcode_app_name = invoker.xcode_test_application_name
+ if (defined(invoker.xcode_test_application_output_name)) {
+ _xcode_app_name = invoker.xcode_test_application_output_name
}
- # For XCTest, Xcode requires specifying the host application path via
- # both BUNDLE_LOADER and TEST_HOST attributes.
- if (invoker.product_type == _ios_xcode_xctest_bundle_id) {
- _xcode_app_name = invoker.xcode_test_application_name
- if (defined(invoker.xcode_test_application_output_name)) {
- _xcode_app_name = invoker.xcode_test_application_output_name
- }
-
- BUNDLE_LOADER = "\$(TEST_HOST)"
- TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" +
- "${_xcode_app_name}.app/${_xcode_app_name}"
- }
+ BUNDLE_LOADER = "\$(TEST_HOST)"
+ TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" +
+ "${_xcode_app_name}.app/${_xcode_app_name}"
}
-
- deps = [ ":$_info_plist_bundle" ]
}
- bundle_data(_xctest_bundle) {
- forward_variables_from(invoker, [ "host_target" ])
+ deps = [ ":$_info_plist_bundle" ]
+ }
- testonly = true
- visibility = [ ":$host_target" ]
+ bundle_data(_xctest_bundle) {
+ forward_variables_from(invoker, [ "host_target" ])
- public_deps = [ ":$_target_name" ]
- sources = [ "$root_out_dir/$_output_name.xctest" ]
- outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ]
- }
+ testonly = true
+ visibility = [ ":$host_target" ]
+
+ public_deps = [ ":$_target_name" ]
+ sources = [ "$root_out_dir/$_output_name.xctest" ]
+ outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ]
}
}
@@ -1946,12 +1626,10 @@ template("ios_xctest_test") {
}
_xctest_bundle = _xctest_target + "_bundle"
- if (!is_fat_secondary_toolchain) {
- if (!defined(bundle_deps)) {
- bundle_deps = []
- }
- bundle_deps += [ ":$_xctest_bundle" ]
+ if (!defined(bundle_deps)) {
+ bundle_deps = []
}
+ bundle_deps += [ ":$_xctest_bundle" ]
}
}
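For reference, a minimal usage sketch of the simplified ios_framework_bundle template after this change; every target, path, and dependency name below is hypothetical and not part of the diff:

# Hypothetical usage sketch only. Headers appear once, in "sources"; the
# template's header-map action filters them out by the ".h" extension.
ios_framework_bundle("example_framework") {
  output_name = "ExampleFramework"
  info_plist = "resources/Info.plist"
  public_headers = [ "example_api.h" ]
  sources = [
    "example_api.h",
    "example_api.mm",
  ]
  deps = [ "//base" ]
}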
diff --git a/build/config/ios/write_framework_hmap.py b/build/config/ios/write_framework_hmap.py
index 7688e7376..888925349 100644
--- a/build/config/ios/write_framework_hmap.py
+++ b/build/config/ios/write_framework_hmap.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import os
import struct
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
index f4133637f..2e38c7ffb 100755
--- a/build/config/linux/pkg-config.py
+++ b/build/config/linux/pkg-config.py
@@ -1,9 +1,8 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import json
import os
diff --git a/build/config/logging.gni b/build/config/logging.gni
index a08195b7d..9f315e533 100644
--- a/build/config/logging.gni
+++ b/build/config/logging.gni
@@ -4,13 +4,15 @@
import("//build/buildflag_header.gni")
import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/compiler/compiler.gni")
import("//build/config/dcheck_always_on.gni")
declare_args() {
# Use LogErrorNotReached() for NOTREACHED().
enable_log_error_not_reached =
is_chromeos_ash && !(is_debug || dcheck_always_on)
- enable_stack_trace_line_numbers = false
+
+ enable_stack_trace_line_numbers = symbol_level > 0
# Use runtime vlog everywhere except for ash-chrome.
# When `use_runtime_vlog` is true,
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
index 8cfb2778c..7af3124d4 100644
--- a/build/config/mac/BUILD.gn
+++ b/build/config/mac/BUILD.gn
@@ -98,18 +98,6 @@ config("mac_dynamic_flags") {
}
}
-# The ldflags referenced below are handled by
-# //build/toolchain/apple/linker_driver.py.
-# Remove this config if a target wishes to change the arguments passed to the
-# strip command during linking. This config by default strips all symbols
-# from a binary, but some targets may wish to specify an exports file to
-# preserve specific symbols.
-config("strip_all") {
- if (enable_stripping) {
- ldflags = [ "-Wcrl,strip,-x,-S" ]
- }
-}
-
# When building with Goma, all inputs must be relative to the build directory.
# If using the system Xcode, which typically resides outside the build root, a
# symlink to the SDK is created in the build directory, and the path to that
diff --git a/build/config/ozone.gni b/build/config/ozone.gni
index 9eee45d2d..8bb512ad6 100644
--- a/build/config/ozone.gni
+++ b/build/config/ozone.gni
@@ -92,7 +92,7 @@ declare_args() {
ozone_platform_wayland = true
ozone_platform_x11 = true
} else if (is_fuchsia) {
- ozone_platform = "scenic"
+ ozone_platform = "flatland"
ozone_platform_scenic = true
ozone_platform_flatland = true
}
diff --git a/build/config/riscv.gni b/build/config/riscv.gni
new file mode 100644
index 000000000..b9597a0a9
--- /dev/null
+++ b/build/config/riscv.gni
@@ -0,0 +1,19 @@
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+if (current_cpu == "riscv64" || v8_current_cpu == "riscv64") {
+ declare_args() {
+ # RISCV Vector extension compilation flag.
+ riscv_use_rvv = false
+
+    # RISCV Vector extension VLEN. Possible values are:
+ # 128
+ # 256
+ # 512
+ # 1024
+ riscv_rvv_vlen = 128
+ }
+}
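A minimal sketch, assuming a hypothetical consumer config, of how these new arguments would typically be used; the exact -march spelling is an assumption and is not defined by this change:

# Hypothetical usage sketch only.
import("//build/config/riscv.gni")

config("rvv_flags_example") {
  cflags = []
  defines = []
  if (current_cpu == "riscv64") {
    if (riscv_use_rvv) {
      # "-march=rv64gcv" is an illustrative value, not taken from this change.
      cflags += [ "-march=rv64gcv" ]
      defines += [ "RVV_VLEN=$riscv_rvv_vlen" ]
    }
  }
}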
diff --git a/build/config/rust.gni b/build/config/rust.gni
index e7032a6c3..6f0584185 100644
--- a/build/config/rust.gni
+++ b/build/config/rust.gni
@@ -4,6 +4,7 @@
import("//build/config/chrome_build.gni")
import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/toolchain.gni")
if (is_android) {
@@ -14,27 +15,34 @@ declare_args() {
# Whether to allow Rust code to be part of the Chromium *build process*.
# This can be used to create Rust test binaries, even if the flag below
# is false.
- enable_rust = false
+ # This only applies to Chromium itself, so the build_with_chromium check
+ # should not be removed.
+ # TODO(crbug.com/1386212): Mac
+ # TODO(crbug.com/1271215): Windows
+ # TODO(crbug.com/1426472): use_clang_coverage
+ # TODO(crbug.com/1427362): using_sanitizer
+ # TODO(crbug.com/1427364): target_cpu != "x86"
+  # There is no specific bug for target_os == host_os nor for
+ # !is_official_build, since this is just a matter of rolling things out
+ # slowly and carefully and there may be no actual bugs there.
+ enable_rust = is_linux && !is_official_build && !using_sanitizer &&
+ target_cpu != "x86" && !use_clang_coverage && is_clang &&
+ target_os == host_os && build_with_chromium
- # Individual Rust components.
- #
- # The base::JSONReader implementation.
- enable_rust_json = true
+ # As we incrementally enable Rust on mainstream builders, we want to enable
+ # the toolchain (by switching 'enable_rust' to true) while still disabling
+  # almost all Rust features. Yet we still want to have some builders with
+ # all Rust features enabled.
+ enable_all_rust_features = false
- # Use experimental Rust toolchain built in-tree. See //tools/rust. For now,
- # only use it for linux targets. The package only has prebuilt libs for linux.
- # More targets will be added later.
- #
- # Ideally this should check `current_os` so that e.g. Android builds will use
- # the Android toolchain for target artifacts and the Chromium Rust toolchain
- # for host artifacts. Currently there is an std mixup in //build/rust/std that
- # prevents this.
- #
- # TODO(https://crbug.com/1245714): fix std handling and check `current_os`.
- use_chromium_rust_toolchain = target_os == "linux" && host_os == "linux"
+ # Use the Rust toolchain built in-tree. See //tools/rust.
+ use_chromium_rust_toolchain = true
- # The version string returned by rustc -V, if using an alternative toolchain.
- rustc_version = ""
+ # Build libstd locally with GN and use that instead of the prebuilts, where
+ # applicable. If this is false the prebuilt libstd will always be used. If
+ # true, the local build is only used with the Chromium Rust toolchain and only
+ # on supported platforms and GN targets.
+ enable_local_libstd = true
# Chromium currently has a Rust toolchain for Android and Linux, but
# if you wish to experiment on more platforms you can use this
@@ -44,51 +52,67 @@ declare_args() {
# <home dir>/.rustup/toolchains/nightly-<something>-<something>
rust_sysroot_absolute = ""
+  # If you're using an external Rust toolchain, set this to
+ # the output of rustc -V.
+ rustc_version = ""
+
+ # If you're using a Rust toolchain as specified by rust_sysroot_absolute,
+ # you can specify whether it supports nacl here.
+ rust_toolchain_supports_nacl = false
+
# Any extra std rlibs in your Rust toolchain, relative to the standard
- # Rust toolchain. Typically used with 'use_unverified_rust_toolchain' = true
+ # Rust toolchain. Typically used with 'rust_sysroot_absolute'
added_rust_stdlib_libs = []
# Any removed std rlibs in your Rust toolchain, relative to the standard
- # Rust toolchain. Typically used with 'use_unverified_rust_toolchain' = true
+ # Rust toolchain. Typically used with 'rust_sysroot_absolute'
removed_rust_stdlib_libs = []
- # Use LTO when using rustc to link binaries. Experimental. Currently
- # incompatible with the options we use in our C++ toolchain to split LTO
- # units. This has no effect on the production of normal Chrome binaries, which
- # are linked by clang/lld rather than rustc. https://crbug.com/1229419
- use_lto_in_rustc_linking = false
+ # Non-rlib libs provided in the toolchain sysroot. Usually this is empty, but
+ # e.g. the Android Rust Toolchain provides a libunwind.a that rustc expects.
+ extra_sysroot_libs = []
# Use goma for Rust builds. Experimental. The only known problem is
# b/193072381, but then again, we don't expect a build speedup before much
# more work is done.
use_goma_rust = false
- # Rust code may end up being linked into a final executable by:
- # * rustc (which calls lld)
- # * our pre-existing C++ linker invocations
- # At the moment, this first pipeline is incompatible with the ldflags we use
- # for thin LTO, due to some problem in escaping gn rules. There's a further
- # problem with -lunwind on Android.
- # However, Rust code is still useful if it's contributing to our existing
- # C++ linker invocations, so this doesn't disable Rust entirely. It does
- # disable Rust unit test executables, so we do need to fix this.
- # https://crbug.com/1229423
- # NB this may be overridden by individual toolchains
- rustc_can_link = !is_android
+ # The host toolchain to use when you don't want sanitizers enabled. By default
+ # it is the regular toolchain, but when that toolchain has sanitizers, then
+ # this variable is changed to avoid them.
+ host_toolchain_no_sanitizers = host_toolchain
}
-# Set rustc_version to the in-tree toolchain version, if enabled, or otherwise
-# the Android toolchain version. If using a custom toolchain it is not changed.
-if (enable_rust) {
- if (use_chromium_rust_toolchain) {
- assert(rustc_version == "",
- "Cannot override rustc_version when using in-tree rust build")
- rustc_version =
- read_file("//third_party/rust-toolchain/VERSION", "trim string")
- } else if (rustc_version == "") {
- # Android toolchain version.
- rustc_version = "rustc 1.64.0-dev (Android Rust Toolchain version 9099361)"
- }
+declare_args() {
+ # Use a separate declare_args so these variables' defaults can depend on the
+ # ones above.
+
+ # When true, uses the locally-built std in all Rust targets.
+ #
+ # As an internal implementation detail this can be overridden on specific
+ # targets (e.g. to run build.rs scripts while building std), but this
+ # generally should not be done.
+ use_local_std_by_default = enable_local_libstd && use_chromium_rust_toolchain
+
+ # Individual Rust components.
+
+ # Conversions between Rust types and C++ types.
+ enable_rust_base_conversions = enable_all_rust_features
+
+ # The base::JSONReader implementation. Requires base conversions.
+ enable_rust_json = enable_all_rust_features
+
+ # Support for chrome://crash-rust to check crash dump collection works.
+ enable_rust_crash = enable_all_rust_features
+
+ # Support for Rust mojo bindings.
+ enable_rust_mojo = enable_all_rust_features
+
+ # Support for the 'gnrt' Rust tool.
+ enable_rust_gnrt = enable_all_rust_features
+
+ # Rust gtest interop
+ enable_rust_gtest_interop = enable_all_rust_features
}
# Platform support for "official" toolchains (Android or Chromium)
@@ -97,19 +121,40 @@ android_toolchain_supports_platform =
(is_android && (current_cpu == "arm" || current_cpu == "arm64" ||
current_cpu == "x64" || current_cpu == "x86"))) ||
(is_linux && current_cpu == "x64")
-chromium_toolchain_supports_platform =
- !is_nacl && is_linux && current_cpu == "x64"
+chromium_toolchain_supports_platform = !is_nacl
+custom_toolchain_supports_platform = !is_nacl || rust_toolchain_supports_nacl
toolchain_has_rust =
enable_rust &&
((use_chromium_rust_toolchain && chromium_toolchain_supports_platform) ||
(!use_chromium_rust_toolchain && android_toolchain_supports_platform) ||
- rust_sysroot_absolute != "")
+ (rust_sysroot_absolute != "" && custom_toolchain_supports_platform))
-# We use the Rust linker for building test executables, so we only build them
-# if we're able to use the Rust linker. We could use the C++ linker for this
-# too, we've just not set up GN to do so at the moment.
-can_build_rust_unit_tests = toolchain_has_rust && rustc_can_link
+# The rustc_revision is used to introduce a dependency on the toolchain version
+# (so e.g. rust targets are rebuilt, and the standard library is re-copied when
+# the toolchain changes). It is left empty for custom toolchains.
+rustc_revision = ""
+if (toolchain_has_rust) {
+ if (use_chromium_rust_toolchain) {
+ update_rust_args = [ "--print-package-version" ]
+ rustc_revision = exec_script("//tools/rust/update_rust.py",
+ update_rust_args,
+ "trim string")
+ } else if (rust_sysroot_absolute != "") {
+ rustc_revision = rustc_version
+ } else {
+ # Android toolchain version.
+ rustc_revision = "rustc 1.64.0-dev (Android Rust Toolchain version 9099361)"
+ }
+}
+
+# TODO(crbug.com/1278030): To build unit tests for Android we need to build
+# them as a dylib and put them into an APK. We should reuse all the same logic
+# for gtests from the `//testing/test:test` template.
+can_build_rust_unit_tests = toolchain_has_rust && !is_android
+
+# Whether to build chrome://crash/rust support.
+build_rust_crash = toolchain_has_rust && enable_rust_crash
# We want to store rust_sysroot as a source-relative variable for ninja
# portability. In practice if an external toolchain was specified, it might
@@ -119,12 +164,6 @@ if (enable_rust) {
rust_sysroot = get_path_info(rust_sysroot_absolute, "abspath")
use_unverified_rust_toolchain = true
} else if (use_chromium_rust_toolchain) {
- if (host_os != "linux") {
- assert(
- false,
- "Attempt to use Chromium Rust toolchain on an unsupported platform")
- }
-
rust_sysroot = "//third_party/rust-toolchain"
use_unverified_rust_toolchain = false
} else {
@@ -135,24 +174,31 @@ if (enable_rust) {
rust_sysroot = "//third_party/android_rust_toolchain/toolchain"
use_unverified_rust_toolchain = false
+ extra_sysroot_libs += [ "libunwind.a" ]
}
}
# Figure out the Rust target triple (aka 'rust_abi_target')
#
-# This is here rather than in the toolchain files because it's used
-# also by //build/rust/std to find the Rust standard library.
+# This is here rather than in the toolchain files because it's used also by
+# //build/rust/std to find the Rust standard library and construct a sysroot for
+# rustc invocations.
#
# The list of architectures supported by Rust is here:
-# https://doc.rust-lang.org/nightly/rustc/platform-support.html.
-# We map Chromium targets to Rust targets comprehensively despite not having
-# official support (see '*_toolchain_supports_platform above') to enable
-# experimentation with other toolchains.
-#
-# It's OK if rust_abi_target is blank. That means we're building for the host
-# and the host stdlib will be used.
+# https://doc.rust-lang.org/nightly/rustc/platform-support.html. We map Chromium
+# targets to Rust targets comprehensively despite not having official support
+# (see '*_toolchain_supports_platform' above) to enable experimentation with
+# other toolchains.
rust_abi_target = ""
-if (is_android) {
+if (is_linux) {
+ cpu = current_cpu
+ if (cpu == "arm64") {
+ cpu = "aarch64"
+ } else if (cpu == "x64") {
+ cpu = "x86_64"
+ }
+ rust_abi_target = cpu + "-unknown-linux-gnu"
+} else if (is_android) {
import("//build/config/android/abi.gni")
rust_abi_target = android_abi_target
if (rust_abi_target == "arm-linux-androideabi") {
@@ -184,10 +230,74 @@ if (is_android) {
} else {
assert(false, "Architecture not supported")
}
+} else if (is_mac) {
+ if (current_cpu == "arm64") {
+ rust_abi_target = "aarch64-apple-darwin"
+ } else if (current_cpu == "x64") {
+ rust_abi_target = "x86_64-apple-darwin"
+ } else {
+ assert(false, "Architecture not supported")
+ }
+} else if (is_win) {
+ if (current_cpu == "arm64") {
+ rust_abi_target = "aarch64-pc-windows-msvc"
+ } else if (current_cpu == "x86" || current_cpu == "x64") {
+ rust_abi_target = "x86_64-pc-windows-msvc"
+ } else {
+ assert(false, "Architecture not supported")
+ }
}
+assert(!toolchain_has_rust || rust_abi_target != "")
+
+# This variable is passed to the Rust libstd build.
+rust_target_arch = ""
+if (current_cpu == "x86") {
+ rust_target_arch = "x86"
+} else if (current_cpu == "x64") {
+ rust_target_arch = "x86_64"
+} else if (current_cpu == "arm") {
+ rust_target_arch = "arm"
+} else if (current_cpu == "arm64") {
+ rust_target_arch = "aarch64"
+} else if (current_cpu == "mipsel") {
+ rust_target_arch = "mips"
+} else if (current_cpu == "mips64el") {
+ rust_target_arch = "mips64"
+} else if (current_cpu == "s390x") {
+ rust_target_arch = "s390x"
+} else if (current_cpu == "ppc64") {
+ rust_target_arch = "powerpc64"
+} else if (current_cpu == "riscv64") {
+ rust_target_arch = "riscv64"
+}
+
+assert(!toolchain_has_rust || rust_target_arch != "")
+
+# Must use Chromium Rust toolchain to get precisely matching LLVM versions
+# in order to enable LTO. Some say that LTO probably works if LLVM is "close
+# enough", but we don't want to take that risk.
+assert(!use_thin_lto || !enable_rust || use_chromium_rust_toolchain ||
+ use_unverified_rust_toolchain,
+ "Must use Chromium Rust toolchain for LTO")
+
+# Determine whether the local libstd can and should be built.
+local_libstd_supported = enable_local_libstd && use_chromium_rust_toolchain
+
# Arguments for Rust invocation.
# This is common between gcc/clang, Mac and Windows toolchains so specify once,
# here. This is not the complete command-line: toolchains should add -o
# and probably --emit arguments too.
rustc_common_args = "--crate-name {{crate_name}} {{source}} --crate-type {{crate_type}} {{rustflags}}"
+
+# Rust procedural macros are shared objects loaded into a prebuilt host rustc
+# binary. To build them, we obviously need to build for the host. Not only that,
+# but because the host rustc is prebuilt, it lacks the machinery to be able to
+# load shared objects built using sanitizers (ASAN etc.) For that reason, we need
+# to use a host toolchain that lacks sanitizers. This is only strictly necessary
+# for procedural macros, but we may also choose to build standalone Rust host
+# executable tools using the same toolchain, as they're likely to depend upon
+# similar dependencies (syn, quote etc.) and it saves a little build time.
+if (using_sanitizer || toolchain_disables_sanitizers) {
+ host_toolchain_no_sanitizers = "${host_toolchain}_no_sanitizers"
+}
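A minimal sketch, with hypothetical labels, of how a dependency on a procedural macro might be redirected to the sanitizer-free host toolchain described in the comment above:

# Hypothetical usage sketch only; the macro target label is illustrative.
group("example_macro_user") {
  deps = [ "//example/macros:my_proc_macro($host_toolchain_no_sanitizers)" ]
}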
diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn
index cb07c5471..c320ed8d9 100644
--- a/build/config/sanitizers/BUILD.gn
+++ b/build/config/sanitizers/BUILD.gn
@@ -53,6 +53,12 @@ group("deps") {
public_deps = [ ":asan_runtime_bundle_data" ]
}
}
+ if (use_centipede) {
+ # For executables which aren't actual fuzzers, we need stubs for
+ # the sanitizer coverage symbols, because we'll still be generating
+ # .o files which depend on them.
+ deps += [ "//third_party/centipede:centipede_weak_sancov_stubs" ]
+ }
}
assert(!(is_win && is_asan && current_cpu == "x86"),
@@ -113,7 +119,7 @@ static_library("options_sources") {
# unconditionally linked into targets.
visibility = [
":deps",
- "//:gn_visibility",
+ "//:gn_all",
]
sources = [ "//build/sanitizers/sanitizer_options.cc" ]
@@ -156,21 +162,26 @@ config("default_sanitizer_ldflags") {
]
if (is_posix || is_fuchsia) {
+ sanitizers = [] # sanitizers applicable to both clang and rustc
ldflags = []
+ rustflags = []
if (is_asan) {
- ldflags += [ "-fsanitize=address" ]
+ sanitizers += [ "address" ]
}
if (is_hwasan) {
- ldflags += [ "-fsanitize=hwaddress" ]
+ sanitizers += [ "hwaddress" ]
}
if (is_lsan) {
+ # In Chromium, is_lsan always implies is_asan. ASAN includes LSAN.
+ # It seems harmless to pass both options to clang, but it doesn't
+ # work on rustc, so apply this option to clang only.
ldflags += [ "-fsanitize=leak" ]
}
if (is_tsan) {
- ldflags += [ "-fsanitize=thread" ]
+ sanitizers += [ "thread" ]
}
if (is_msan) {
- ldflags += [ "-fsanitize=memory" ]
+ sanitizers += [ "memory" ]
}
if (is_ubsan || is_ubsan_security) {
ldflags += [ "-fsanitize=undefined" ]
@@ -178,6 +189,10 @@ config("default_sanitizer_ldflags") {
if (is_ubsan_vptr) {
ldflags += [ "-fsanitize=vptr" ]
}
+ foreach(sanitizer, sanitizers) {
+ ldflags += [ "-fsanitize=$sanitizer" ]
+ rustflags += [ "-Zsanitizer=$sanitizer" ]
+ }
if (use_sanitizer_coverage) {
if (use_libfuzzer) {
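As a worked example of the clang/rustc split above (not part of the change): with is_asan and is_lsan both true, the branches plus the final foreach yield roughly the following, before the later additions in this config:

#   sanitizers = [ "address" ]
#   ldflags    = [ "-fsanitize=leak", "-fsanitize=address" ]
#   rustflags  = [ "-Zsanitizer=address" ]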
diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni
index ebc3c3e23..446736e79 100644
--- a/build/config/sanitizers/sanitizers.gni
+++ b/build/config/sanitizers/sanitizers.gni
@@ -68,6 +68,10 @@ declare_args() {
# See http://www.chromium.org/developers/testing/libfuzzer
use_libfuzzer = false
+ # Compile for fuzzing with centipede.
+ # See https://github.com/google/centipede
+ use_centipede = false
+
# Compile for fuzzing with AFL.
use_afl = false
@@ -119,7 +123,7 @@ assert(!is_hwasan || (target_os == "android" && target_cpu == "arm64"),
"HWASan only supported on Android ARM64 builds.")
# Disable sanitizers for non-target toolchains.
-if (!is_a_target_toolchain) {
+if (!is_a_target_toolchain || toolchain_disables_sanitizers) {
is_asan = false
is_cfi = false
is_hwasan = false
@@ -133,6 +137,7 @@ if (!is_a_target_toolchain) {
msan_track_origins = 0
sanitizer_coverage_flags = ""
use_afl = false
+ use_centipede = false
use_cfi_diag = false
use_cfi_recover = false
use_libfuzzer = false
@@ -150,8 +155,18 @@ use_prebuilt_instrumented_libraries = is_msan
# Whether we are doing a fuzzer build. Normally this should be checked instead
# of checking "use_libfuzzer || use_afl" because often developers forget to
-# check for "use_afl".
-use_fuzzing_engine = use_libfuzzer || use_afl || use_external_fuzzing_engine
+# check for "use_afl", and "use_centipede" is new.
+use_fuzzing_engine =
+ use_libfuzzer || use_afl || use_centipede || use_external_fuzzing_engine
+
+# Whether the current fuzzing engine supports libprotobuf_mutator. Right now
+# these are libfuzzer and centipede, but other engines are likely to support
+# it in the future, so it's preferable to check this.
+use_fuzzing_engine_with_lpm = use_libfuzzer || use_centipede
+
+# Whether the fuzzing engine supports fuzzers which supply their own
+# "main" function.
+fuzzing_engine_supports_custom_main = use_libfuzzer || use_centipede
# Args that are in turn dependent on other args must be in a separate
# declare_args block. User overrides are only applied at the end of a
@@ -178,6 +193,16 @@ declare_args() {
if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
sanitizer_coverage_flags = "trace-pc-guard"
+ if (use_centipede) {
+ # Centipede's minimal flags are listed in //third_party/centipede/src/clang-flags.txt.
+ # But, for users like Chromium using an up-to-date clang, we can also
+ # enable extra optional types of coverage which may make Centipede more
+ # effective. This list is not currently documented and has been derived
+  # from discussion with the Centipede creators (though one of the flags is
+  # warned about at
+  # https://github.com/google/centipede/blob/main/centipede_callbacks.cc#L68).
+ sanitizer_coverage_flags = sanitizer_coverage_flags +
+ ",pc-table,trace-cmp,control-flow,trace-loads"
+ }
} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
}
@@ -284,3 +309,22 @@ if (use_fuzzing_engine) {
}
}
}
+
+# Options common to different fuzzer engines.
+# Engine should be compiled without coverage (infinite loop in trace_cmp).
+fuzzing_engine_remove_configs = [
+ "//build/config/coverage:default_coverage",
+ "//build/config/sanitizers:default_sanitizer_flags",
+]
+
+# Add any sanitizer flags back. In MSAN builds, instrumenting libfuzzer with
+# MSAN is necessary since all parts of the binary need to be instrumented for it
+# to work. ASAN builds are more subtle: libfuzzer depends on features from the
+# C++ STL. If it were not instrumented, templates would be instantiated without
+# ASAN from libfuzzer and with ASAN in other TUs. The linker might merge
+# instrumented template instantiations with non-instrumented ones (which could
+# have a different ABI) in the final binary, which is problematic for TUs
+# expecting one particular ABI (https://crbug.com/915422). The other sanitizers
+# are added back for the same reason.
+fuzzing_engine_add_configs =
+ [ "//build/config/sanitizers:default_sanitizer_flags_but_coverage" ]
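
As a rough illustration of how the new variables are meant to be consumed (the target and file names below are hypothetical; the real consumers live in the fuzzing-related BUILD files), an engine support library strips the default coverage and sanitizer configs and adds the sanitizer-only config back, while proto-based fuzzers can key off use_fuzzing_engine_with_lpm instead of listing engines individually:

    import("//build/config/sanitizers/sanitizers.gni")

    # Hypothetical engine glue: sanitized, but without coverage
    # instrumentation, as described above.
    static_library("example_fuzzing_engine_glue") {
      sources = [ "engine_glue.cc" ]
      configs -= fuzzing_engine_remove_configs
      configs += fuzzing_engine_add_configs
    }

    # Hypothetical proto fuzzer that only builds when the selected engine
    # supports libprotobuf_mutator (currently libFuzzer and Centipede).
    if (use_fuzzing_engine_with_lpm) {
      executable("example_proto_fuzzer") {
        sources = [ "proto_fuzzer.cc" ]
      }
    }
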
diff --git a/build/config/siso/README.md b/build/config/siso/README.md
new file mode 100644
index 000000000..ff38eba47
--- /dev/null
+++ b/build/config/siso/README.md
@@ -0,0 +1,8 @@
+# Build config for Siso
+
+This directory contains configurations for the
+[siso](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/)
+build tool.
+
+Please refer to [the config specifications](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/docs/starlark_config.md) in the Siso repo.
+
diff --git a/build/config/siso/linux.star b/build/config/siso/linux.star
new file mode 100644
index 000000000..2e2cc5831
--- /dev/null
+++ b/build/config/siso/linux.star
@@ -0,0 +1,52 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+load("@builtin//struct.star", "module")
+load("./simple.star", "simple")
+
+__filegroups = {}
+
+__handlers = {}
+
+def __step_config(ctx, step_config):
+ step_config["platforms"] = {
+ "default": {
+ "OSFamily": "Linux",
+ "container-image": "docker://gcr.io/chops-private-images-prod/rbe/siso-chromium/linux@sha256:d4fcda628ebcdb3dd79b166619c56da08d5d7bd43d1a7b1f69734904cc7a1bb2",
+ },
+ }
+ step_config["input_deps"] = {
+ "third_party/llvm-build/Release+Asserts/bin/clang++": [
+ "third_party/llvm-build/Release+Asserts/bin/clang",
+ ],
+ }
+ step_config["rules"] = [
+ {
+ "name": "clang/cxx",
+ "action": "cxx",
+ "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang++ ",
+ "inputs": [
+ "third_party/llvm-build/Release+Asserts/bin/clang++",
+ ],
+ "remote": True,
+ },
+ {
+ "name": "clang/cc",
+ "action": "cc",
+ "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang ",
+ "inputs": [
+ "third_party/llvm-build/Release+Asserts/bin/clang",
+ ],
+ "remote": True,
+ },
+ ]
+ return step_config
+
+chromium = module(
+ "chromium",
+ step_config = __step_config,
+ filegroups = __filegroups,
+ handlers = __handlers,
+)
diff --git a/build/config/siso/main.star b/build/config/siso/main.star
new file mode 100644
index 000000000..6714344c6
--- /dev/null
+++ b/build/config/siso/main.star
@@ -0,0 +1,39 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+load("@builtin//encoding.star", "json")
+load("@builtin//runtime.star", "runtime")
+load("@builtin//struct.star", "module")
+load("./linux.star", chromium_linux = "chromium")
+load("./simple.star", "simple")
+
+def init(ctx):
+ print("runtime: os:%s arch:%s run:%d" % (
+ runtime.os,
+ runtime.arch,
+ runtime.num_cpu,
+ ))
+ host = {
+ "linux": chromium_linux,
+ # add mac, windows
+ }[runtime.os]
+ step_config = {}
+ step_config = host.step_config(ctx, step_config)
+ step_config = simple.step_config(ctx, step_config)
+
+ filegroups = {}
+ filegroups.update(host.filegroups)
+ filegroups.update(simple.filegroups)
+
+ handlers = {}
+ handlers.update(host.handlers)
+ handlers.update(simple.handlers)
+
+ return module(
+ "config",
+ step_config = json.encode(step_config),
+ filegroups = filegroups,
+ handlers = handlers,
+ )
diff --git a/build/config/siso/simple.star b/build/config/siso/simple.star
new file mode 100644
index 000000000..d3fa6a4fe
--- /dev/null
+++ b/build/config/siso/simple.star
@@ -0,0 +1,45 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+load("@builtin//struct.star", "module")
+
+def __copy(ctx, cmd):
+ input = cmd.inputs[0]
+ out = cmd.outputs[0]
+ ctx.actions.copy(input, out, recursive = ctx.fs.is_dir(input))
+ ctx.actions.exit(exit_status = 0)
+
+def __stamp(ctx, cmd):
+ out = cmd.outputs[0]
+ ctx.actions.write(out)
+ ctx.actions.exit(exit_status = 0)
+
+__handlers = {
+ "copy": __copy,
+ "stamp": __stamp,
+}
+
+def __step_config(ctx, step_config):
+ step_config["rules"].extend([
+ {
+ "name": "simple/copy",
+ "action": "copy",
+ "handler": "copy",
+ },
+ {
+ "name": "simple/stamp",
+ "action": ".*stamp",
+ "handler": "stamp",
+ "replace": True,
+ },
+ ])
+ return step_config
+
+simple = module(
+ "simple",
+ step_config = __step_config,
+ filegroups = {},
+ handlers = __handlers,
+)
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
index a5075393b..a02f55d83 100644
--- a/build/config/win/BUILD.gn
+++ b/build/config/win/BUILD.gn
@@ -11,6 +11,7 @@ import("//build/config/win/control_flow_guard.gni")
import("//build/config/win/visual_studio_version.gni")
import("//build/timestamp.gni")
import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
assert(is_win)
@@ -43,9 +44,7 @@ declare_args() {
use_clang_diagnostics_format = false
# Indicates whether to use /pdbpagesize:8192 to allow PDBs larger than 4 GiB.
- # This requires updated debugging and profiling tools which are not widely
- # distributed yet which is why it is currently opt-in.
- use_large_pdbs = false
+ use_large_pdbs = true
}
# This is included by reference in the //build/config/compiler config that
@@ -133,13 +132,13 @@ config("compiler") {
cflags += [ "-msse3" ]
}
- if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+ # Enable ANSI escape codes if something emulating them is around (cmd.exe
+ # doesn't understand ANSI escape codes by default). Make sure to not enable
+ # this if goma/remoteexec is in use, because this will lower cache hits.
+ if (!use_goma && !use_remoteexec &&
+ exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
"True") {
- cflags += [
- # cmd.exe doesn't understand ANSI escape codes by default,
- # so only enable them if something emulating them is around.
- "-fansi-escape-codes",
- ]
+ cflags += [ "-fansi-escape-codes" ]
}
if (use_clang_diagnostics_format) {
@@ -175,7 +174,10 @@ config("compiler") {
if (use_large_pdbs) {
# This allows PDBs up to 8 GiB in size. This requires lld-link.exe or
# link.exe from VS 2022 or later.
- ldflags += [ "/pdbpagesize:8192" ]
+ if (!defined(configs)) {
+ configs = []
+ }
+ configs += [ ":pdb_larger_than_4gb" ]
}
if (!is_debug && !is_component_build) {
@@ -284,16 +286,13 @@ config("runtime_library") {
}
}
-# Chromium supports running on Windows 7, but if these constants are set to
-# Windows 7, then newer APIs aren't made available by the Windows SDK.
-# So we set this to Windows 10 and then are careful to check at runtime
-# to only call newer APIs when they're available.
+# Chromium only supports Windows 10+.
# Some third-party libraries assume that these defines set what version of
# Windows is available at runtime. Targets using these libraries need to
# manually override this config for their compiles.
config("winver") {
defines = [
- "NTDDI_VERSION=NTDDI_WIN10_FE",
+ "NTDDI_VERSION=NTDDI_WIN10_NI",
# We can't say `=_WIN32_WINNT_WIN10` here because some files do
# `#if WINVER < 0x0600` without including windows.h before,
@@ -338,12 +337,16 @@ config("default_cfg_compiler") {
# This is needed to allow functions to be called by code that is built
# with CFG enabled, such as system libraries.
# The CFG guards are only emitted if |win_enable_cfg_guards| is enabled.
- if (is_clang) {
- if (win_enable_cfg_guards) {
+ if (win_enable_cfg_guards) {
+ if (is_clang) {
cflags = [ "/guard:cf" ]
- } else {
+ }
+ rustflags = [ "-Ccontrol-flow-guard" ]
+ } else {
+ if (is_clang) {
cflags = [ "/guard:cf,nochecks" ]
}
+ rustflags = [ "-Ccontrol-flow-guard=nochecks" ]
}
}
@@ -356,6 +359,7 @@ config("disable_guards_cfg_compiler") {
if (is_clang) {
cflags = [ "/guard:cf,nochecks" ]
}
+ rustflags = [ "-Ccontrol-flow-guard=nochecks" ]
}
config("cfi_linker") {
@@ -599,3 +603,15 @@ config("lean_and_mean") {
config("nominmax") {
defines = [ "NOMINMAX" ]
}
+
+# Some binaries create PDBs larger than 4 GiB. Increasing the PDB page size
+# to 8 KiB allows 8 GiB PDBs. The larger page size also allows larger block maps,
+# which is a PDB limit that was hit in https://crbug.com/1406510. The page size
+# can easily be increased in the future to allow even larger PDBs or larger
+# block maps.
+config("pdb_larger_than_4gb") {
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [ "/pdbpagesize:8192" ]
+}
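
The new config is normally applied build-wide via use_large_pdbs in config("compiler") above; as a sketch, a single link target outside that path could also opt in directly (target name hypothetical):

    executable("example_huge_debug_binary") {
      sources = [ "main.cc" ]

      # Allow this binary's PDB to exceed 4 GiB.
      configs += [ "//build/config/win:pdb_larger_than_4gb" ]
    }
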
diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni
index d90244682..1da479dd5 100644
--- a/build/config/win/visual_studio_version.gni
+++ b/build/config/win/visual_studio_version.gni
@@ -5,11 +5,10 @@
declare_args() {
# Path to Visual Studio. If empty, the default is used which is to use the
# automatic toolchain in depot_tools. If set, you must also set the
- # visual_studio_version and wdk_path.
+ # visual_studio_version, wdk_path and windows_sdk_version.
visual_studio_path = ""
# Version of Visual Studio pointed to by the visual_studio_path.
- # Currently always "2015".
visual_studio_version = ""
# Directory of the Windows driver kit. If visual_studio_path is empty, this
@@ -20,12 +19,16 @@ declare_args() {
# This value is the default location, override if you have a different
# installation location.
windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
+
+ # Version of the Windows SDK pointed to by the windows_sdk_path.
+ windows_sdk_version = ""
}
if (visual_studio_path == "") {
toolchain_data =
exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
visual_studio_path = toolchain_data.vs_path
+ windows_sdk_version = toolchain_data.sdk_version
windows_sdk_path = toolchain_data.sdk_path
visual_studio_version = toolchain_data.vs_version
wdk_path = toolchain_data.wdk_dir
@@ -33,6 +36,8 @@ if (visual_studio_path == "") {
} else {
assert(visual_studio_version != "",
"You must set the visual_studio_version if you set the path")
+ assert(windows_sdk_version != "",
+ "You must set the windows_sdk_version if you set the path")
assert(wdk_path != "",
"You must set the wdk_path if you set the visual studio path")
visual_studio_runtime_dirs = []
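
With the added assert, an args.gn that points at a local toolchain must now also name the SDK version; a sketch with purely illustrative paths and version numbers:

    # args.gn -- values below are examples only, not defaults from this change.
    visual_studio_path = "C:\Program Files\Microsoft Visual Studio\2022\Professional"
    visual_studio_version = "2022"
    wdk_path = "C:\Program Files (x86)\Windows Kits\10"
    windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
    windows_sdk_version = "10.0.22621.0"
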
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
index bb77f9b00..69b957a72 100755
--- a/build/copy_test_data_ios.py
+++ b/build/copy_test_data_ios.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Copies test data files or directories into a given output directory."""
-from __future__ import print_function
import optparse
import os
diff --git a/build/cp.py b/build/cp.py
index 483b2747c..2bcf55cbc 100755
--- a/build/cp.py
+++ b/build/cp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
index 7c7d6d654..c9d47e913 100755
--- a/build/detect_host_arch.py
+++ b/build/detect_host_arch.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Outputs host CPU architecture in format recognized by gyp."""
-from __future__ import print_function
import platform
import re
diff --git a/build/dir_exists.py b/build/dir_exists.py
index f95a52d07..da9813f60 100755
--- a/build/dir_exists.py
+++ b/build/dir_exists.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2011 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/docs/writing_gn_templates.md b/build/docs/writing_gn_templates.md
index e6eb1fb6e..9171265ec 100644
--- a/build/docs/writing_gn_templates.md
+++ b/build/docs/writing_gn_templates.md
@@ -40,6 +40,9 @@ won't exist for the initial build.
depfiles.
* Stale paths in depfiles can cause ninja to complain of circular
dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042).
+ * Use [`action_helpers.write_depfile()`] to write these.
+
+[`action_helpers.write_depfile()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbwrite_depfile
### Ensuring "gn analyze" Knows About your Inputs
"gn analyze" is used by bots to run only affected tests and build only affected
@@ -136,15 +139,19 @@ Outputs should be atomic and take advantage of `restat=1`.
short-circuits a build when output timestamps do not change. This feature is
the reason that the total number of build steps sometimes decreases when
building.
-* Use [`build_utils.AtomicOutput()`](https://cs.chromium.org/chromium/src/build/android/gyp/util/build_utils.py?rcl=7d6ba28e92bec865a7b7876c35b4621d56fb37d8&l=128)
- to perform both of these techniques.
+* Use [`action_helpers.atomic_output()`] to perform both of these techniques.
+
+[`action_helpers.atomic_output()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbatomic_output
Actions should be deterministic in order to avoid hard-to-reproduce bugs.
Given identical inputs, they should produce byte-for-byte identical outputs.
* Some common mistakes:
* Depending on filesystem iteration order.
- * Writing timestamps in files (or in zip entries).
* Writing absolute paths in outputs.
+ * Writing timestamps in files (or in zip entries).
+ * Tip: Use [`zip_helpers.py`] when writing `.zip` files.
+
+[`zip_helpers.py`]: https://source.chromium.org/chromium/chromium/src/+/main:build/zip_helpers.py
## Style Guide
Chromium GN files follow
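
To connect the helper references above, here is a minimal GN action sketch (script path and names hypothetical) whose Python script would write its output via action_helpers.atomic_output() and its depfile via action_helpers.write_depfile():

    action("example_generate_resources") {
      script = "//tools/example/gen.py"
      sources = [ "resources.list" ]
      outputs = [ "$target_gen_dir/generated.zip" ]

      # gen.py is expected to write this with action_helpers.write_depfile().
      depfile = "$target_gen_dir/generated.d"

      args = [
        "--output",
        rebase_path(outputs[0], root_build_dir),
        "--depfile",
        rebase_path(depfile, root_build_dir),
      ]
    }
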
diff --git a/build/dotfile_settings.gni b/build/dotfile_settings.gni
index a799786a6..50c04a8c0 100644
--- a/build/dotfile_settings.gni
+++ b/build/dotfile_settings.gni
@@ -23,9 +23,11 @@ build_dotfile_settings = {
"//build/config/mac/mac_sdk.gni",
"//build/config/mac/rules.gni",
"//build/config/posix/BUILD.gn",
+ "//build/config/rust.gni",
"//build/config/sysroot.gni",
"//build/config/win/BUILD.gn",
"//build/config/win/visual_studio_version.gni",
+ "//build/rust/analyze.gni",
"//build/timestamp.gni",
"//build/toolchain/apple/toolchain.gni",
"//build/toolchain/BUILD.gn",
@@ -34,7 +36,7 @@ build_dotfile_settings = {
"//build/toolchain/nacl/BUILD.gn",
"//build/toolchain/toolchain.gni",
"//build/toolchain/win/BUILD.gn",
- "//build/toolchain/win/toolchain.gni",
+ "//build/toolchain/win/win_toolchain_data.gni",
"//build/toolchain/zos/BUILD.gn",
"//build/util/branding.gni",
]
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
index 8347a3913..1b86a4bb9 100755
--- a/build/download_nacl_toolchains.py
+++ b/build/download_nacl_toolchains.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shim to run nacl toolchain download script only if there is a nacl dir."""
-from __future__ import print_function
import os
import shutil
diff --git a/build/env_dump.py b/build/env_dump.py
index 2474b48af..1eaf8dc92 100755
--- a/build/env_dump.py
+++ b/build/env_dump.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
index 4c003ba69..c7ae6d9f4 100755
--- a/build/extract_from_cab.py
+++ b/build/extract_from_cab.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
-from __future__ import print_function
import os
import shutil
diff --git a/build/extract_partition.py b/build/extract_partition.py
index bbe0e70b3..319ce8fc7 100755
--- a/build/extract_partition.py
+++ b/build/extract_partition.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/find_depot_tools.py b/build/find_depot_tools.py
index 94985fe7b..f891a4148 100755
--- a/build/find_depot_tools.py
+++ b/build/find_depot_tools.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2011 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -11,7 +11,6 @@ This can also be used as a standalone script to print out the depot_tools
directory location.
"""
-from __future__ import print_function
import os
import sys
diff --git a/build/fix_gn_headers.py b/build/fix_gn_headers.py
index 7b8086b7a..5111b5db4 100755
--- a/build/fix_gn_headers.py
+++ b/build/fix_gn_headers.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -10,7 +10,6 @@ try to fix them by adding them to the GN files.
Manual cleaning up is likely required afterwards.
"""
-from __future__ import print_function
import argparse
import os
diff --git a/build/fuchsia/PRESUBMIT.py b/build/fuchsia/PRESUBMIT.py
index f8c7df28f..f42f4c230 100644
--- a/build/fuchsia/PRESUBMIT.py
+++ b/build/fuchsia/PRESUBMIT.py
@@ -23,16 +23,12 @@ def CommonChecks(input_api, output_api):
unit_tests = [
J('binary_sizes_test.py'),
J('binary_size_differ_test.py'),
- J('device_target_test.py'),
J('gcs_download_test.py'),
J('update_images_test.py'),
J('update_product_bundles_test.py'),
J('update_sdk_test.py'),
]
- # TODO(1309977): enable on Windows when fixed.
- if os.name != 'nt':
- unit_tests.extend([J('fvdl_target_test.py')])
tests.extend(
input_api.canned_checks.GetUnitTests(input_api,
output_api,
diff --git a/build/fuchsia/binary_size_differ.py b/build/fuchsia/binary_size_differ.py
index d976a73cd..190a1731c 100755
--- a/build/fuchsia/binary_size_differ.py
+++ b/build/fuchsia/binary_size_differ.py
@@ -6,23 +6,11 @@
'''Implements Chrome-Fuchsia package binary size differ.'''
import argparse
-import collections
-import copy
import json
-import logging
-import math
import os
-import re
-import shutil
-import subprocess
import sys
-import tempfile
-import time
import traceback
-import uuid
-from common import GetHostToolPathFromPlatform, GetHostArchFromPlatform
-from common import SDK_ROOT, DIR_SOURCE_ROOT
from binary_sizes import ReadPackageSizesJson
from binary_sizes import PACKAGES_SIZES_FILE
@@ -72,6 +60,8 @@ def ComputePackageDiffs(before_sizes_file, after_sizes_file, author=None):
' {} bytes).<br>').format(
package_name, growth['compressed'][package_name],
growth['uncompressed'][package_name]))
+ summary += ('Note that this bot compares growth against trunk, and is '
+ 'not aware of CL chaining.<br>')
# Allow rollers to pass even with size increases. See crbug.com/1355914.
if author and '-autoroll' in author and status_code == SIZE_FAILURE:
diff --git a/build/fuchsia/binary_size_differ_test.py b/build/fuchsia/binary_size_differ_test.py
index e18f1d0db..6192bf2d6 100755
--- a/build/fuchsia/binary_size_differ_test.py
+++ b/build/fuchsia/binary_size_differ_test.py
@@ -12,8 +12,6 @@ import unittest
import binary_size_differ
import binary_sizes
-from common import DIR_SOURCE_ROOT
-
_EXAMPLE_BLOBS_BEFORE = """
{
"web_engine": [
diff --git a/build/fuchsia/binary_sizes.py b/build/fuchsia/binary_sizes.py
index 2e64e8b0e..b1aa938c4 100755
--- a/build/fuchsia/binary_sizes.py
+++ b/build/fuchsia/binary_sizes.py
@@ -7,9 +7,7 @@
import argparse
import collections
-import copy
import json
-import logging
import math
import os
import re
@@ -21,8 +19,10 @@ import time
import traceback
import uuid
-from common import GetHostToolPathFromPlatform, GetHostArchFromPlatform
-from common import SDK_ROOT, DIR_SOURCE_ROOT
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
+
+from common import DIR_SRC_ROOT, SDK_ROOT, get_host_tool_path
PACKAGES_BLOBS_FILE = 'package_blobs.json'
PACKAGES_SIZES_FILE = 'package_sizes.json'
@@ -102,7 +102,9 @@ def CreateTestResults(test_status, timestamp):
results = {
'tests': {},
'interrupted': False,
- 'path_delimiter': '.',
+ 'metadata': {
+ 'test_name_prefix': 'build/fuchsia/'
+ },
'version': 3,
'seconds_since_epoch': timestamp,
}
@@ -273,7 +275,7 @@ def ReadPackageSizesJson(json_path):
def GetCompressedSize(file_path):
"""Measures file size after blobfs compression."""
- compressor_path = GetHostToolPathFromPlatform('blobfs-compression')
+ compressor_path = get_host_tool_path('blobfs-compression')
try:
temp_dir = tempfile.mkdtemp()
compressed_file_path = os.path.join(temp_dir, os.path.basename(file_path))
@@ -311,7 +313,7 @@ def GetCompressedSize(file_path):
def ExtractFarFile(file_path, extract_dir):
"""Extracts contents of a Fuchsia archive file to the specified directory."""
- far_tool = GetHostToolPathFromPlatform('far')
+ far_tool = get_host_tool_path('far')
if not os.path.isfile(far_tool):
raise Exception('Could not find FAR host tool "%s".' % far_tool)
@@ -376,7 +378,7 @@ def GetPackageMerkleRoot(far_file_path):
"""Returns a package's Merkle digest."""
# The digest is the first word on the first line of the merkle tool's output.
- merkle_tool = GetHostToolPathFromPlatform('merkleroot')
+ merkle_tool = get_host_tool_path('merkleroot')
output = subprocess.check_output([merkle_tool, far_file_path])
return output.splitlines()[0].split()[0]
@@ -552,7 +554,7 @@ def main():
raise Exception('Could not find build output directory "%s".' %
args.build_out_dir)
- with open(os.path.join(DIR_SOURCE_ROOT, args.sizes_path)) as sizes_file:
+ with open(os.path.join(DIR_SRC_ROOT, args.sizes_path)) as sizes_file:
sizes_config = json.load(sizes_file)
if args.verbose:
diff --git a/build/fuchsia/binary_sizes_test.py b/build/fuchsia/binary_sizes_test.py
index b25c5f235..2f9dcf217 100755
--- a/build/fuchsia/binary_sizes_test.py
+++ b/build/fuchsia/binary_sizes_test.py
@@ -6,14 +6,11 @@
import json
import os
import shutil
-import subprocess
import tempfile
import unittest
import binary_sizes
-from common import DIR_SOURCE_ROOT
-
_EXAMPLE_BLOBS = """
{
diff --git a/build/fuchsia/boot_data.py b/build/fuchsia/boot_data.py
deleted file mode 100644
index df9e45c09..000000000
--- a/build/fuchsia/boot_data.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Functions used to provision Fuchsia boot images."""
-
-import common
-import logging
-import os
-import subprocess
-import tempfile
-import time
-import uuid
-
-_SSH_CONFIG_TEMPLATE = """
-Host *
- CheckHostIP no
- StrictHostKeyChecking no
- ForwardAgent no
- ForwardX11 no
- User fuchsia
- IdentitiesOnly yes
- IdentityFile {identity}
- ServerAliveInterval 2
- ServerAliveCountMax 5
- ControlMaster auto
- ControlPersist 1m
- ControlPath /tmp/ssh-%r@%h:%p
- ConnectTimeout 5
- """
-
-# Specifies boot files intended for use by an emulator.
-TARGET_TYPE_QEMU = 'qemu'
-
-# Specifies boot files intended for use by anything (incl. physical devices).
-TARGET_TYPE_GENERIC = 'generic'
-
-# Defaults used by Fuchsia SDK
-_SSH_DIR = os.path.expanduser('~/.ssh')
-_SSH_CONFIG_DIR = os.path.expanduser('~/.fuchsia')
-
-
-def _GetAuthorizedKeysPath():
- """Returns a path to the authorized keys which get copied to your Fuchsia
- device during paving"""
-
- return os.path.join(_SSH_DIR, 'fuchsia_authorized_keys')
-
-
-def ProvisionSSH():
- """Generates a key pair and config file for SSH using the GN SDK."""
-
- returncode, out, err = common.RunGnSdkFunction('fuchsia-common.sh',
- 'check-fuchsia-ssh-config')
- if returncode != 0:
- logging.error('Command exited with error code %d' % (returncode))
- logging.error('Stdout: %s' % out)
- logging.error('Stderr: %s' % err)
- raise Exception('Failed to provision ssh keys')
-
-
-def GetTargetFile(filename, image_path):
- """Computes a path to |filename| in the Fuchsia boot image directory specific
- to |image_path|."""
-
- return os.path.join(common.IMAGES_ROOT, image_path, filename)
-
-
-def GetSSHConfigPath():
- return os.path.join(_SSH_CONFIG_DIR, 'sshconfig')
-
-
-def GetBootImage(output_dir, image_path, image_name):
- """"Gets a path to the Zircon boot image, with the SSH client public key
- added."""
- ProvisionSSH()
- authkeys_path = _GetAuthorizedKeysPath()
- zbi_tool = common.GetHostToolPathFromPlatform('zbi')
- image_source_path = GetTargetFile(image_name, image_path)
- image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi')
-
- cmd = [
- zbi_tool, '-o', image_dest_path, image_source_path, '-e',
- 'data/ssh/authorized_keys=' + authkeys_path
- ]
- subprocess.check_call(cmd)
-
- return image_dest_path
-
-
-def GetKernelArgs():
- """Returns a list of Zircon commandline arguments to use when booting a
- system."""
- return [
- 'devmgr.epoch=%d' % time.time(),
- 'blobfs.write-compression-algorithm=UNCOMPRESSED'
- ]
-
-
-def AssertBootImagesExist(image_path):
- assert os.path.exists(GetTargetFile('fuchsia.zbi', image_path)), \
- 'This checkout is missing the files necessary for\n' \
- 'booting this configuration of Fuchsia.\n' \
- 'To check out the files, add this entry to the "custom_vars"\n' \
- 'section of your .gclient file:\n\n' \
- ' "checkout_fuchsia_boot_images": "%s"\n\n' % \
- image_path
diff --git a/build/fuchsia/boot_data_test.py b/build/fuchsia/boot_data_test.py
deleted file mode 100755
index 1c0f58bb3..000000000
--- a/build/fuchsia/boot_data_test.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import boot_data
-import os
-import unittest
-from boot_data import _SSH_CONFIG_DIR, _SSH_DIR, _GetAuthorizedKeysPath, \
- GetSSHConfigPath
-
-
-class TestBootData(unittest.TestCase):
- def testProvisionSSHGeneratesFiles(self):
- fuchsia_authorized_keys_path = _GetAuthorizedKeysPath()
- fuchsia_id_key_path = os.path.join(_SSH_DIR, 'fuchsia_ed25519')
- fuchsia_pub_key_path = os.path.join(_SSH_DIR, 'fuchsia_ed25519.pub')
- ssh_config_path = GetSSHConfigPath()
- # Check if the keys exists before generating. If they do, delete them
- # afterwards before asserting if ProvisionSSH works.
- authorized_key_before = os.path.exists(fuchsia_authorized_keys_path)
- id_keys_before = os.path.exists(fuchsia_id_key_path)
- pub_keys_before = os.path.exists(fuchsia_pub_key_path)
- ssh_config_before = os.path.exists(ssh_config_path)
- ssh_dir_before = os.path.exists(_SSH_CONFIG_DIR)
- boot_data.ProvisionSSH()
- authorized_key_after = os.path.exists(fuchsia_authorized_keys_path)
- id_keys_after = os.path.exists(fuchsia_id_key_path)
- ssh_config_after = os.path.exists(ssh_config_path)
- if not authorized_key_before:
- os.remove(fuchsia_authorized_keys_path)
- if not id_keys_before:
- os.remove(fuchsia_id_key_path)
- if not pub_keys_before:
- os.remove(fuchsia_pub_key_path)
- if not ssh_config_before:
- os.remove(ssh_config_path)
- if not ssh_dir_before:
- os.rmdir(_SSH_CONFIG_DIR)
- self.assertTrue(os.path.exists(authorized_key_after))
- self.assertTrue(os.path.exists(id_keys_after))
- self.assertTrue(os.path.exists(ssh_config_after))
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/build/fuchsia/cipd/BUILD.gn b/build/fuchsia/cipd/BUILD.gn
index f8c7e8a16..0019b8645 100644
--- a/build/fuchsia/cipd/BUILD.gn
+++ b/build/fuchsia/cipd/BUILD.gn
@@ -44,16 +44,16 @@ _host_tools_directory = "host_tools"
_archive_suffix = "_archive"
-# Extracts the numeric Chrome build ID and writes it to a file in the output
+# Extracts the numeric Chrome version and writes it to a file in the output
# directory.
#
-# To check out the repository on the commit where the build ID was generated,
-# simply call `git checkout <build-id>`, and Git will check out the commit
-# associated with the <build-id> tag.
-process_version("build_id") {
- template_file = "build_id.template"
+# To check out the repository on the commit where the version was generated,
+# simply call `git checkout <version>`, and Git will check out the commit
+# associated with the <version> tag.
+process_version("version") {
+ template_file = "version.template"
sources = [ "//chrome/VERSION" ]
- output = "${target_gen_dir}/build_id.txt"
+ output = "${target_gen_dir}/VERSION"
process_only = true
}
@@ -145,13 +145,15 @@ template("cipd_archive") {
deps += [
":${target_name}_license",
":${target_name}_license_spdx",
+ ":version",
]
if (!defined(sources)) {
sources = []
}
- sources += get_target_outputs(":${target_name}_license")
- sources += get_target_outputs(":${target_name}_license_spdx")
+ sources += get_target_outputs(":${target_name}_license") +
+ get_target_outputs(":${target_name}_license_spdx") +
+ [ "${target_gen_dir}/VERSION" ]
fuchsia_cipd_package("${target_name}${_archive_suffix}") {
package = "${package_base_path}/${package_subdirectory}/${targetarch}/${invoker.target_name}"
@@ -224,14 +226,6 @@ template("cipd_test_archive") {
}
}
-cipd_archive("web_runner") {
- package_subdirectory = _web_engine_directory
- description = "Prebuilt WebRunner binaries for Fuchsia."
-
- deps = [ "//fuchsia_web/runners:web_runner_pkg" ]
- sources = [ "${root_gen_dir}/fuchsia_web/runners/web_runner/web_runner.far" ]
-}
-
cipd_archive("web_engine") {
package_subdirectory = _web_engine_directory
description = "Prebuilt WebEngine binaries for Fuchsia."
@@ -302,8 +296,6 @@ cipd_test_archive("tests") {
deps = [
"//base:base_unittests_pkg",
"//fuchsia_web/runners:cast_runner_integration_tests_pkg",
- "//fuchsia_web/runners:web_runner_integration_tests_pkg",
- "//fuchsia_web/webengine:web_engine_integration_tests_cfv1_pkg",
"//fuchsia_web/webengine:web_engine_integration_tests_pkg",
"//ipc:ipc_tests_pkg",
"//media:media_unittests_pkg",
@@ -318,9 +310,7 @@ cipd_test_archive("tests") {
far_sources = [
"${root_gen_dir}/base/base_unittests/base_unittests.far",
"${root_gen_dir}/fuchsia_web/runners/cast_runner_integration_tests/cast_runner_integration_tests.far",
- "${root_gen_dir}/fuchsia_web/runners/web_runner_integration_tests/web_runner_integration_tests.far",
"${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far",
- "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests_cfv1/web_engine_integration_tests_cfv1.far",
"${root_gen_dir}/ipc/ipc_tests/ipc_tests.far",
"${root_gen_dir}/media/media_unittests/media_unittests.far",
"${root_gen_dir}/mojo/mojo_unittests/mojo_unittests.far",
@@ -341,11 +331,7 @@ cipd_test_archive("tests") {
},
{
manifest_path = "${target_gen_dir}/web_engine_tests_manifest.json"
- far_sources = [
- "${root_gen_dir}/fuchsia_web/runners/web_runner_integration_tests/web_runner_integration_tests.far",
- "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far",
- "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests_cfv1/web_engine_integration_tests_cfv1.far",
- ]
+ far_sources = [ "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far" ]
},
{
manifest_path = "${target_gen_dir}/cast_runner_tests_manifest.json"
@@ -416,7 +402,6 @@ group("web_engine_production_archives") {
deps = [
":cast_runner${_archive_suffix}",
":web_engine${_archive_suffix}",
- ":web_runner${_archive_suffix}",
]
}
diff --git a/build/fuchsia/cipd/build_id.template b/build/fuchsia/cipd/version.template
index 32a49a4ae..32a49a4ae 100644
--- a/build/fuchsia/cipd/build_id.template
+++ b/build/fuchsia/cipd/version.template
diff --git a/build/fuchsia/common.py b/build/fuchsia/common.py
deleted file mode 100644
index 73c14cd25..000000000
--- a/build/fuchsia/common.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import platform
-import shutil
-import socket
-import subprocess
-import sys
-
-DIR_SOURCE_ROOT = os.path.abspath(
- os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
-IMAGES_ROOT = os.path.join(
- DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'images')
-SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'sdk')
-
-# The number of seconds to wait when trying to attach to a target.
-ATTACH_RETRY_SECONDS = 120
-
-
-def EnsurePathExists(path):
- """Checks that the file |path| exists on the filesystem and returns the path
- if it does, raising an exception otherwise."""
-
- if not os.path.exists(path):
- raise IOError('Missing file: ' + path)
-
- return path
-
-def GetHostOsFromPlatform():
- host_platform = sys.platform
- if host_platform.startswith('linux'):
- return 'linux'
- elif host_platform.startswith('darwin'):
- return 'mac'
- raise Exception('Unsupported host platform: %s' % host_platform)
-
-def GetHostArchFromPlatform():
- host_arch = platform.machine()
- # platform.machine() returns AMD64 on 64-bit Windows.
- if host_arch in ['x86_64', 'AMD64']:
- return 'x64'
- elif host_arch == 'aarch64':
- return 'arm64'
- raise Exception('Unsupported host architecture: %s' % host_arch)
-
-def GetHostToolPathFromPlatform(tool):
- host_arch = platform.machine()
- return os.path.join(SDK_ROOT, 'tools', GetHostArchFromPlatform(), tool)
-
-
-# Remove when arm64 emulator is also included in Fuchsia SDK.
-def GetEmuRootForPlatform(emulator):
- if GetHostArchFromPlatform() == 'x64':
- return GetHostToolPathFromPlatform('{0}_internal'.format(emulator))
- return os.path.join(
- DIR_SOURCE_ROOT, 'third_party', '{0}-{1}-{2}'.format(
- emulator, GetHostOsFromPlatform(), GetHostArchFromPlatform()))
-
-
-def ConnectPortForwardingTask(target, local_port, remote_port = 0):
- """Establishes a port forwarding SSH task to a localhost TCP endpoint hosted
- at port |local_port|. Blocks until port forwarding is established.
-
- Returns the remote port number."""
-
- forwarding_flags = ['-O', 'forward', # Send SSH mux control signal.
- '-R', '%d:localhost:%d' % (remote_port, local_port),
- '-v', # Get forwarded port info from stderr.
- '-NT'] # Don't execute command; don't allocate terminal.
-
- if remote_port != 0:
- # Forward to a known remote port.
- task = target.RunCommand([], ssh_args=forwarding_flags)
- if task.returncode != 0:
- raise Exception('Could not establish a port forwarding connection.')
- return
-
- task = target.RunCommandPiped([],
- ssh_args=forwarding_flags,
- stdout=subprocess.PIPE,
- stderr=open('/dev/null'))
- output = task.stdout.readlines()
- task.wait()
- if task.returncode != 0:
- raise Exception('Got an error code when requesting port forwarding: %d' %
- task.returncode)
-
- parsed_port = int(output[0].strip())
- logging.debug('Port forwarding established (local=%d, device=%d)' %
- (local_port, parsed_port))
- return parsed_port
-
-
-def GetAvailableTcpPort():
- """Finds a (probably) open port by opening and closing a listen socket."""
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.bind(("", 0))
- port = sock.getsockname()[1]
- sock.close()
- return port
-
-
-def RunGnSdkFunction(script, function):
- script_path = os.path.join(SDK_ROOT, 'bin', script)
- function_cmd = ['bash', '-c', '. %s; %s' % (script_path, function)]
- return SubprocessCallWithTimeout(function_cmd)
-
-
-def SubprocessCallWithTimeout(command, timeout_secs=None):
- """Helper function for running a command.
-
- Args:
- command: The command to run.
- timeout_secs: Maximum amount of time allowed for the command to finish.
-
- Returns:
- A tuple of (return code, stdout, stderr) of the command. Raises
- an exception if the subprocess times out.
- """
-
- process = None
- try:
- process = subprocess.run(command,
- capture_output=True,
- timeout=timeout_secs,
- encoding='utf-8')
- except subprocess.TimeoutExpired as te:
- raise TimeoutError(str(te))
-
- return process.returncode, process.stdout, process.stderr
-
-
-def IsRunningUnattended():
- """Returns true if running non-interactively.
-
- When running unattended, confirmation prompts and the like are suppressed.
- """
- # Chromium tests only for the presence of the variable, so match that here.
- return 'CHROME_HEADLESS' in os.environ
-
-
-def MakeCleanDirectory(directory_name):
- """If the directory exists, delete it and then remake it with no contents."""
- if os.path.exists(directory_name):
- shutil.rmtree(directory_name)
- os.mkdir(directory_name)
diff --git a/build/fuchsia/common_args.py b/build/fuchsia/common_args.py
deleted file mode 100644
index 3def52f79..000000000
--- a/build/fuchsia/common_args.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import importlib
-import logging
-import multiprocessing
-import os
-import sys
-
-from common import GetHostArchFromPlatform
-
-BUILTIN_TARGET_NAMES = ['qemu', 'device', 'fvdl']
-
-
-def _AddTargetSpecificationArgs(arg_parser):
- """Returns a parser that handles the target type used for the test run."""
-
- device_args = arg_parser.add_argument_group(
- 'target',
- 'Arguments specifying the Fuchsia target type. To see a list of '
- 'arguments available for a specific target type, specify the desired '
- 'target to use and add the --help flag.')
- device_args.add_argument('--target-cpu',
- default=GetHostArchFromPlatform(),
- help='GN target_cpu setting for the build. Defaults '
- 'to the same architecture as host cpu.')
- device_args.add_argument('--device',
- default=None,
- choices=BUILTIN_TARGET_NAMES + ['custom'],
- help='Choose to run on fvdl|qemu|device. '
- 'By default, Fuchsia will run on Fvdl on x64 '
- 'hosts and QEMU on arm64 hosts. Alternatively, '
- 'setting to custom will require specifying the '
- 'subclass of Target class used via the '
- '--custom-device-target flag.')
- device_args.add_argument('-d',
- action='store_const',
- dest='device',
- const='device',
- help='Run on device instead of emulator.')
- device_args.add_argument('--custom-device-target',
- default=None,
- help='Specify path to file that contains the '
- 'subclass of Target that will be used. Only '
- 'needed if device specific operations is '
- 'required.')
-
-
-def _GetPathToBuiltinTarget(target_name):
- return '%s_target' % target_name
-
-
-def _LoadTargetClass(target_path):
- try:
- loaded_target = importlib.import_module(target_path)
- except ImportError:
- logging.error(
- 'Cannot import from %s. Make sure that --custom-device-target '
- 'is pointing to a file containing a target '
- 'module.' % target_path)
- raise
- return loaded_target.GetTargetType()
-
-
-def _GetDefaultEmulatedCpuCoreCount():
- # Revise the processor count on arm64, the trybots on arm64 are in
- # dockers and cannot use all processors.
- # For x64, fvdl always assumes hyperthreading is supported by intel
- # processors, but the cpu_count returns the number regarding if the core
- # is a physical one or a hyperthreading one, so the number should be
- # divided by 2 to avoid creating more threads than the processor
- # supports.
- if GetHostArchFromPlatform() == 'x64':
- return max(int(multiprocessing.cpu_count() / 2) - 1, 4)
- return 4
-
-
-def AddCommonArgs(arg_parser):
- """Adds command line arguments to |arg_parser| for options which are shared
- across test and executable target types.
-
- Args:
- arg_parser: an ArgumentParser object."""
-
- common_args = arg_parser.add_argument_group('common', 'Common arguments')
- common_args.add_argument('--logs-dir', help='Directory to write logs to.')
- common_args.add_argument('--verbose',
- '-v',
- default=False,
- action='store_true',
- help='Enable debug-level logging.')
- common_args.add_argument(
- '--out-dir',
- type=os.path.realpath,
- help=('Path to the directory in which build files are located. '
- 'Defaults to current directory.'))
- common_args.add_argument('--fuchsia-out-dir',
- default=None,
- help='Path to a Fuchsia build output directory. '
- 'Setting the GN arg '
- '"default_fuchsia_build_dir_for_installation" '
- 'will cause it to be passed here.')
-
- package_args = arg_parser.add_argument_group('package', 'Fuchsia Packages')
- package_args.add_argument(
- '--package',
- action='append',
- help='Paths of the packages to install, including '
- 'all dependencies.')
- package_args.add_argument(
- '--package-name',
- help='Name of the package to execute, defined in ' + 'package metadata.')
-
- emu_args = arg_parser.add_argument_group('emu', 'General emulator arguments')
- emu_args.add_argument('--cpu-cores',
- type=int,
- default=_GetDefaultEmulatedCpuCoreCount(),
- help='Sets the number of CPU cores to provide.')
- emu_args.add_argument('--ram-size-mb',
- type=int,
- default=8192,
- help='Sets the emulated RAM size (MB).'),
- emu_args.add_argument('--allow-no-kvm',
- action='store_false',
- dest='require_kvm',
- default=True,
- help='Do not require KVM acceleration for '
- 'emulators.')
-
-
-# Register the arguments for all known target types and the optional custom
-# target type (specified on the commandline).
-def AddTargetSpecificArgs(arg_parser):
- # Parse the minimal set of arguments to determine if custom targets need to
- # be loaded so that their arguments can be registered.
- target_spec_parser = argparse.ArgumentParser(add_help=False)
- _AddTargetSpecificationArgs(target_spec_parser)
- target_spec_args, _ = target_spec_parser.parse_known_args()
- _AddTargetSpecificationArgs(arg_parser)
-
- for target in BUILTIN_TARGET_NAMES:
- _LoadTargetClass(_GetPathToBuiltinTarget(target)).RegisterArgs(arg_parser)
- if target_spec_args.custom_device_target:
- _LoadTargetClass(
- target_spec_args.custom_device_target).RegisterArgs(arg_parser)
-
-
-def ConfigureLogging(args):
- """Configures the logging level based on command line |args|."""
-
- logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO),
- format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
-
- # The test server spawner is too noisy with INFO level logging, so tweak
- # its verbosity a bit by adjusting its logging level.
- logging.getLogger('chrome_test_server_spawner').setLevel(
- logging.DEBUG if args.verbose else logging.WARN)
-
- # Verbose SCP output can be useful at times but oftentimes is just too noisy.
- # Only enable it if -vv is passed.
- logging.getLogger('ssh').setLevel(
- logging.DEBUG if args.verbose else logging.WARN)
-
-
-def InitializeTargetArgs():
- """Set args for all targets to default values. This is used by test scripts
- that have their own parser but still uses the target classes."""
- parser = argparse.ArgumentParser()
- AddCommonArgs(parser)
- AddTargetSpecificArgs(parser)
- return parser.parse_args([])
-
-
-def GetDeploymentTargetForArgs(args):
- """Constructs a deployment target object using command line arguments.
- If needed, an additional_args dict can be used to supplement the
- command line arguments."""
-
- if args.device == 'custom':
- return _LoadTargetClass(args.custom_device_target).CreateFromArgs(args)
-
- if args.device:
- device = args.device
- else:
- device = 'fvdl' if args.target_cpu == 'x64' else 'qemu'
-
- return _LoadTargetClass(_GetPathToBuiltinTarget(device)).CreateFromArgs(args)
diff --git a/build/fuchsia/deploy_to_pkg_repo.py b/build/fuchsia/deploy_to_pkg_repo.py
deleted file mode 100755
index a1945a67f..000000000
--- a/build/fuchsia/deploy_to_pkg_repo.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env vpython3
-#
-# Copyright 2019 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Deploys Fuchsia packages to a package repository in a Fuchsia
-build output directory."""
-
-import pkg_repo
-import argparse
-import os
-import sys
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--package',
- action='append',
- required=True,
- help='Paths to packages to install.')
- parser.add_argument('--fuchsia-out-dir',
- required=True,
- help='Path to a Fuchsia build output directory. '
- 'Setting the GN arg '
- '"default_fuchsia_build_dir_for_installation" '
- 'will cause it to be passed here.')
- args, _ = parser.parse_known_args()
- assert args.package
-
- fuchsia_out_dir = os.path.expanduser(args.fuchsia_out_dir)
- assert os.path.exists(fuchsia_out_dir), \
- '{} not found, check that --fuchsia-out-dir points to a valid out dir.' \
- ' eg. /path/to/fuchsia/out/default'.format(fuchsia_out_dir)
-
- repo = pkg_repo.ExternalPkgRepo(fuchsia_out_dir,
- os.path.join(fuchsia_out_dir, '.build-id'))
- print('Installing packages and symbols in package repo %s...' %
- repo.GetPath())
-
- for package in args.package:
- repo.PublishPackage(package)
-
- print('Installation success.')
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/fuchsia/device_target.py b/build/fuchsia/device_target.py
deleted file mode 100644
index 8f2c48f6a..000000000
--- a/build/fuchsia/device_target.py
+++ /dev/null
@@ -1,404 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running and interacting with Fuchsia on devices."""
-
-import errno
-import itertools
-import logging
-import os
-import pkg_repo
-import re
-import subprocess
-import sys
-import target
-import time
-
-import legacy_ermine_ctl
-import ffx_session
-
-from common import ATTACH_RETRY_SECONDS, EnsurePathExists, \
- GetHostToolPathFromPlatform, RunGnSdkFunction, SDK_ROOT
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-from compatible_utils import get_sdk_hash, pave, find_image_in_sdk
-
-# The maximum times to attempt mDNS resolution when connecting to a freshly
-# booted Fuchsia instance before aborting.
-BOOT_DISCOVERY_ATTEMPTS = 30
-
-# Number of failed connection attempts before redirecting system logs to stdout.
-CONNECT_RETRY_COUNT_BEFORE_LOGGING = 10
-
-# Number of seconds between each device discovery.
-BOOT_DISCOVERY_DELAY_SECS = 4
-
-# Time between a reboot command is issued and when connection attempts from the
-# host begin.
-_REBOOT_SLEEP_PERIOD = 20
-
-# File on device that indicates Fuchsia version.
-_ON_DEVICE_VERSION_FILE = '/config/build-info/version'
-
-# File on device that indicates Fuchsia product.
-_ON_DEVICE_PRODUCT_FILE = '/config/build-info/product'
-
-
-def GetTargetType():
- return DeviceTarget
-
-
-class ProvisionDeviceException(Exception):
- def __init__(self, message: str):
- super(ProvisionDeviceException, self).__init__(message)
-
-
-class DeviceTarget(target.Target):
- """Prepares a device to be used as a deployment target. Depending on the
- command line parameters, it automatically handling a number of preparatory
- steps relating to address resolution.
-
- If |_node_name| is unset:
- If there is one running device, use it for deployment and execution.
-
- If there are more than one running devices, then abort and instruct the
- user to re-run the command with |_node_name|
-
- If |_node_name| is set:
- If there is a running device with a matching nodename, then it is used
- for deployment and execution.
-
- If |_host| is set:
- Deploy to a device at the host IP address as-is."""
-
- def __init__(self, out_dir, target_cpu, host, node_name, port, ssh_config,
- fuchsia_out_dir, os_check, logs_dir, system_image_dir):
- """out_dir: The directory which will contain the files that are
- generated to support the deployment.
- target_cpu: The CPU architecture of the deployment target. Can be
- "x64" or "arm64".
- host: The address of the deployment target device.
- node_name: The node name of the deployment target device.
- port: The port of the SSH service on the deployment target device.
- ssh_config: The path to SSH configuration data.
- fuchsia_out_dir: The path to a Fuchsia build output directory, for
- deployments to devices paved with local Fuchsia builds.
- os_check: If 'check', the target's SDK version must match.
- If 'update', the target will be repaved if the SDK versions
- mismatch.
- If 'ignore', the target's SDK version is ignored.
- system_image_dir: The directory which contains the files used to pave the
- device."""
-
- super(DeviceTarget, self).__init__(out_dir, target_cpu, logs_dir)
-
- self._host = host
- self._port = port
- self._fuchsia_out_dir = None
- self._node_name = node_name or os.environ.get('FUCHSIA_NODENAME')
- self._system_image_dir = system_image_dir
- self._os_check = os_check
- self._pkg_repo = None
- self._target_context = None
- self._ffx_target = None
- self._ermine_ctl = legacy_ermine_ctl.LegacyErmineCtl(self)
-
- if self._os_check != 'ignore':
- if not self._system_image_dir:
- raise Exception(
- "Image directory must be provided if a repave is needed.")
- # Determine if system_image_dir exists and find dynamically if not.
- if not os.path.exists(system_image_dir):
- logging.warning('System image dir does not exist. Assuming it\'s a '
- 'product-bundle and dynamically searching for it')
- sdk_root_parent = os.path.split(SDK_ROOT)[0]
- new_dir = find_image_in_sdk(system_image_dir,
- product_bundle=True,
- sdk_root=sdk_root_parent)
- if not new_dir:
- raise FileNotFoundError(
- errno.ENOENT,
- 'Could not find system image directory in SDK path ' +
- sdk_root_parent, system_image_dir)
- self._system_image_dir = new_dir
-
- if self._host and self._node_name:
- raise Exception('Only one of "--host" or "--name" can be specified.')
-
- if fuchsia_out_dir:
- if ssh_config:
- raise Exception('Only one of "--fuchsia-out-dir" or "--ssh_config" can '
- 'be specified.')
-
- self._fuchsia_out_dir = os.path.expanduser(fuchsia_out_dir)
- # Use SSH keys from the Fuchsia output directory.
- self._ssh_config_path = os.path.join(self._fuchsia_out_dir, 'ssh-keys',
- 'ssh_config')
- self._os_check = 'ignore'
-
- elif ssh_config:
- # Use the SSH config provided via the commandline.
- self._ssh_config_path = os.path.expanduser(ssh_config)
-
- else:
- return_code, ssh_config_raw, _ = RunGnSdkFunction(
- 'fuchsia-common.sh', 'get-fuchsia-sshconfig-file')
- if return_code != 0:
- raise Exception('Could not get Fuchsia ssh config file.')
- self._ssh_config_path = os.path.expanduser(ssh_config_raw.strip())
-
- @staticmethod
- def CreateFromArgs(args):
- return DeviceTarget(args.out_dir, args.target_cpu, args.host,
- args.node_name, args.port, args.ssh_config,
- args.fuchsia_out_dir, args.os_check, args.logs_dir,
- args.system_image_dir)
-
- @staticmethod
- def RegisterArgs(arg_parser):
- device_args = arg_parser.add_argument_group(
- 'device', 'External device deployment arguments')
- device_args.add_argument('--host',
- help='The IP of the target device. Optional.')
- device_args.add_argument('--node-name',
- help='The node-name of the device to boot or '
- 'deploy to. Optional, will use the first '
- 'discovered device if omitted.')
- device_args.add_argument('--port',
- '-p',
- type=int,
- default=None,
- help='The port of the SSH service running on the '
- 'device. Optional.')
- device_args.add_argument('--ssh-config',
- '-F',
- help='The path to the SSH configuration used for '
- 'connecting to the target device.')
- device_args.add_argument(
- '--os-check',
- choices=['check', 'update', 'ignore'],
- default='ignore',
- help="Sets the OS version enforcement policy. If 'check', then the "
- "deployment process will halt if the target\'s version doesn\'t "
- "match. If 'update', then the target device will automatically "
- "be repaved. If 'ignore', then the OS version won\'t be checked.")
- device_args.add_argument('--system-image-dir',
- help="Specify the directory that contains the "
- "Fuchsia image used to pave the device. Only "
- "needs to be specified if 'os_check' is not "
- "'ignore'.")
-
- def _Discover(self):
- """Queries mDNS for the IP address of a booted Fuchsia instance whose name
- matches |_node_name| on the local area network. If |_node_name| is not
- specified and there is only one device on the network, |_node_name| is set
- to that device's name.
-
- Returns:
- True if exactly one device is found, after setting |_host| and |_port| to
- its SSH address. False if no devices are found.
-
- Raises:
- Exception: If more than one device is found.
- """
-
- if self._node_name:
- target = ffx_session.FfxTarget.from_node_name(self._ffx_runner,
- self._node_name)
- else:
- # Get the node name of a single attached target.
- try:
- # Get at most the first 2 valid targets
- targets = list(
- itertools.islice(self._ffx_runner.list_active_targets(), 2))
- except subprocess.CalledProcessError:
- # A failure to list targets could mean that the device is in zedboot.
- # Return false in this case so that Start() will attempt to provision.
- return False
- if not targets:
- return False
-
- if len(targets) > 1:
- raise Exception('More than one device was discovered on the network. '
- 'Use --node-name <name> to specify the device to use.'
- 'List of devices: {}'.format(targets))
- target = targets[0]
-
- # Get the ssh address of the target.
- ssh_address = target.get_ssh_address()
- if ssh_address:
- self._host, self._port = ssh_address
- else:
- return False
-
- logging.info('Found device "%s" at %s.' %
- (self._node_name if self._node_name else '<unknown>',
- ffx_session.format_host_port(self._host, self._port)))
-
- # TODO(crbug.com/1307220): Remove this once the telemetry scripts can handle
- # specifying the port for a device that is not listening on localhost.
- if self._port == 22:
- self._port = None
-
- return True
-
- def _Login(self):
- """Attempts to log into device, if possible.
-
- This method should not be called from anything other than Start,
- though calling it multiple times should have no adverse effect.
- """
- if self._ermine_ctl.exists:
- self._ermine_ctl.take_to_shell()
-
- def Start(self):
- if self._host:
- self._ConnectToTarget()
- self._Login()
- elif self._Discover():
- self._ConnectToTarget()
- if self._os_check == 'ignore':
- self._Login()
- return
-
- # If accessible, check version.
- new_version = get_sdk_hash(self._system_image_dir)
- installed_version = self._GetInstalledSdkVersion()
- if new_version == installed_version:
- logging.info('Fuchsia version installed on device matches Chromium '
- 'SDK version. Skipping pave.')
- else:
- if self._os_check == 'check':
- raise Exception('Image and Fuchsia version installed on device '
-                          'do not match. Abort.')
- logging.info('Putting device in recovery mode')
- self.RunCommandPiped(['dm', 'reboot-recovery'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- self._ProvisionDevice()
- self._Login()
- else:
- if self._node_name:
- logging.info('Could not detect device %s.' % self._node_name)
- if self._os_check == 'update':
- logging.info('Assuming it is in zedboot. Continuing with paving...')
- self._ProvisionDevice()
- self._Login()
- return
- raise Exception('Could not find device. If the device is connected '
- 'to the host remotely, make sure that --host flag '
- 'is set and that remote serving is set up.')
-
- def GetFfxTarget(self):
- assert self._ffx_target
- return self._ffx_target
-
- def _GetInstalledSdkVersion(self):
- """Retrieves installed OS version from device.
-
- Returns:
- Tuple of strings, containing (product, version number)
- """
- return (self.GetFileAsString(_ON_DEVICE_PRODUCT_FILE).strip(),
- self.GetFileAsString(_ON_DEVICE_VERSION_FILE).strip())
-
- def GetPkgRepo(self):
- if not self._pkg_repo:
- if self._fuchsia_out_dir:
- # Deploy to an already-booted device running a local Fuchsia build.
- self._pkg_repo = pkg_repo.ExternalPkgRepo(
- os.path.join(self._fuchsia_out_dir, 'amber-files'),
- os.path.join(self._fuchsia_out_dir, '.build-id'))
- else:
- # Create an ephemeral package repository, then start both "pm serve" as
- # well as the bootserver.
- self._pkg_repo = pkg_repo.ManagedPkgRepo(self)
-
- return self._pkg_repo
-
- def _ParseNodename(self, output):
- # Parse the nodename from bootserver stdout.
- m = re.search(r'.*Proceeding with nodename (?P<nodename>.*)$', output,
- re.MULTILINE)
- if not m:
- raise Exception('Couldn\'t parse nodename from bootserver output.')
- self._node_name = m.groupdict()['nodename']
- logging.info('Booted device "%s".' % self._node_name)
-
- # Repeatedly search for a device for |BOOT_DISCOVERY_ATTEMPT|
- # number of attempts. If a device isn't found, wait
- # |BOOT_DISCOVERY_DELAY_SECS| before searching again.
- logging.info('Waiting for device to join network.')
- for _ in range(BOOT_DISCOVERY_ATTEMPTS):
- if self._Discover():
- break
- time.sleep(BOOT_DISCOVERY_DELAY_SECS)
-
- if not self._host:
- raise Exception('Device %s couldn\'t be discovered via mDNS.' %
- self._node_name)
-
- self._ConnectToTarget()
-
- def _GetEndpoint(self):
- return (self._host, self._port)
-
- def _ConnectToTarget(self):
- logging.info('Connecting to Fuchsia using ffx.')
- # Prefer connecting via node name over address:port.
- if self._node_name:
- # Assume that ffx already knows about the target, so there's no need to
- # add/remove it.
- self._ffx_target = ffx_session.FfxTarget.from_node_name(
- self._ffx_runner, self._node_name)
- else:
- # The target may not be known by ffx. Probe to see if it has already been
- # added.
- ffx_target = ffx_session.FfxTarget.from_address(self._ffx_runner,
- self._host, self._port)
- if ffx_target.get_ssh_address():
-        # If we could look up the address, the target must be reachable. Do not
- # open a new scoped_target_context, as that will `ffx target add` now
- # and then `ffx target remove` later, which will break subsequent
- # interactions with a persistent emulator.
- self._ffx_target = ffx_target
- else:
- # The target is not known, so take on responsibility of adding and
- # removing it.
- self._target_context = self._ffx_runner.scoped_target_context(
- self._host, self._port)
- self._ffx_target = self._target_context.__enter__()
- self._ffx_target.wait(ATTACH_RETRY_SECONDS)
- return super(DeviceTarget, self)._ConnectToTarget()
-
- def _DisconnectFromTarget(self):
- self._ffx_target = None
- if self._target_context:
- self._target_context.__exit__(None, None, None)
- self._target_context = None
- super(DeviceTarget, self)._DisconnectFromTarget()
-
- def _GetSshConfigPath(self):
- return self._ssh_config_path
-
- def _ProvisionDevice(self):
- self._ParseNodename(pave(self._system_image_dir, self._node_name).stderr)
-
- def Restart(self):
- """Restart the device."""
-
- self.RunCommandPiped('dm reboot')
- time.sleep(_REBOOT_SLEEP_PERIOD)
- self.Start()
-
- def Stop(self):
- try:
- self._DisconnectFromTarget()
- # End multiplexed ssh connection, ensure that ssh logging stops before
- # tests/scripts return.
- if self.IsStarted():
- self.RunCommand(['-O', 'exit'])
- finally:
- super(DeviceTarget, self).Stop()
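
The Start() flow above enforces the --os-check policy ('ignore', 'check', or
'update') before deciding whether to pave. A minimal standalone sketch of that
decision, using a hypothetical pave callback in place of the real provisioning
path:

    def enforce_os_check(os_check, sdk_version, installed_version, pave_fn):
      """Applies the --os-check policy; pave_fn is a hypothetical callback."""
      if os_check == 'ignore' or sdk_version == installed_version:
        return
      if os_check == 'check':
        raise RuntimeError('Installed Fuchsia version does not match the SDK.')
      pave_fn()  # 'update': repave the device with the SDK image.

    if __name__ == '__main__':
      enforce_os_check('update', '2.0', '1.0', lambda: print('paving...'))
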
diff --git a/build/fuchsia/device_target_test.py b/build/fuchsia/device_target_test.py
deleted file mode 100755
index 04c4d0d1e..000000000
--- a/build/fuchsia/device_target_test.py
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Tests scenarios with number of devices and invalid devices"""
-import common
-import os
-import subprocess
-import time
-import unittest
-import unittest.mock as mock
-
-from argparse import Namespace
-from device_target import DeviceTarget
-from legacy_ermine_ctl import LegacyErmineCtl
-from ffx_session import FfxRunner, FfxTarget
-from target import Target, FuchsiaTargetException
-
-
-@mock.patch.object(FfxRunner, 'daemon_stop')
-class TestDiscoverDeviceTarget(unittest.TestCase):
- def setUp(self):
- self.args = Namespace(out_dir='out/fuchsia',
- target_cpu='x64',
- host=None,
- node_name=None,
- port=None,
- ssh_config='mock_config',
- fuchsia_out_dir=None,
- os_check='ignore',
- logs_dir=None,
- system_image_dir=None)
-
- def testUnspecifiedNodeNameOneDeviceReturnNoneCheckNameAndAddress(
- self, mock_daemon_stop):
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(FfxRunner, 'list_targets') as mock_list_targets, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- mock.patch.object(
- DeviceTarget, '_ConnectToTarget') as mock_connecttotarget, \
- mock.patch.object(
- DeviceTarget, '_Login') as mock_login:
- mock_list_targets.return_value = [{
- "nodename": "device_name",
- "rcs_state": "Y",
- "serial": "<unknown>",
- "target_type": "terminal.qemu-x64",
- "target_state": "Product",
- }]
- mock_get_ssh_address.return_value = ('address', 12345)
- mock_connecttotarget.return_value = True
- self.assertIsNone(device_target_instance.Start())
- self.assertEqual(device_target_instance._host, 'address')
- self.assertEqual(device_target_instance._port, 12345)
- mock_daemon_stop.assert_called_once()
-
- def testUnspecifiedNodeNameOneUnknownDeviceReturnNoneCheckAddressAndPort(
- self, mock_daemon_stop):
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(FfxRunner, 'list_targets') as mock_list_targets, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- mock.patch.object(
- DeviceTarget, '_ConnectToTarget') as mock_connecttotarget, \
- mock.patch.object(
- DeviceTarget, '_Login') as mock_login:
- mock_list_targets.return_value = [{
- "nodename": "<unknown>",
- "rcs_state": "Y",
- "serial": "<unknown>",
- "target_type": "terminal.qemu-x64",
- "target_state": "Product",
- "addresses": ["address"]
- }]
- mock_get_ssh_address.return_value = ('address', 12345)
- mock_connecttotarget.return_value = True
- self.assertIsNone(device_target_instance.Start())
- self.assertEqual(device_target_instance._host, 'address')
- self.assertEqual(device_target_instance._port, 12345)
- mock_login.assert_called_once()
- mock_daemon_stop.assert_called_once()
-
- def testUnspecifiedNodeNameTwoDevicesRaiseExceptionAmbiguousTarget(
- self, mock_daemon_stop):
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(FfxRunner, 'list_targets') as mock_list_targets, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- self.assertRaisesRegex(Exception, \
- 'More than one device was discovered'):
- mock_get_ssh_address.return_value = ('address', 12345)
- mock_list_targets.return_value = [{
- "nodename": "device_name1",
- "rcs_state": "Y",
- "serial": "<unknown>",
- "target_type": "terminal.qemu-x64",
- "target_state": "Product",
- "addresses": ["address1"]
- }, {
- "nodename": "device_name2",
- "rcs_state": "Y",
- "serial": "<unknown>",
- "target_type": "terminal.qemu-x64",
- "target_state": "Product",
- "addresses": ["address2"]
- }]
- device_target_instance.Start()
- self.assertIsNone(device_target_instance._node_name)
- self.assertIsNone(device_target_instance._host)
- mock_daemon_stop.assert_called_once()
-
- def testNodeNameDefinedDeviceFoundReturnNoneCheckNameAndHost(
- self, mock_daemon_stop):
- self.args.node_name = 'device_name'
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- mock.patch.object(
- DeviceTarget, '_ConnectToTarget') as mock_connecttotarget, \
- mock.patch.object(
- DeviceTarget, '_Login') as mock_login:
- mock_get_ssh_address.return_value = ('address', 12345)
- mock_connecttotarget.return_value = True
- self.assertIsNone(device_target_instance.Start())
- self.assertEqual(device_target_instance._node_name, 'device_name')
- self.assertEqual(device_target_instance._host, 'address')
- self.assertEqual(device_target_instance._port, 12345)
- mock_login.assert_called_once()
- mock_daemon_stop.assert_called_once()
-
- def testNodeNameDefinedDeviceNotFoundRaiseExceptionCouldNotFind(
- self, mock_daemon_stop):
- self.args.node_name = 'wrong_device_name'
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- self.assertRaisesRegex(Exception, 'Could not find device.'):
- mock_get_ssh_address.return_value = None
- device_target_instance.Start()
- self.assertIsNone(device_target_instance._node_name)
- self.assertIsNone(device_target_instance._host)
- mock_daemon_stop.assert_called_once()
-
- def testNoDevicesFoundRaiseExceptionCouldNotFind(self, mock_daemon_stop):
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(FfxRunner, 'list_targets') as mock_list_targets, \
- self.assertRaisesRegex(Exception, 'Could not find device.'):
- mock_list_targets.return_value = []
- device_target_instance.Start()
- self.assertIsNone(device_target_instance._node_name)
- self.assertIsNone(device_target_instance._host)
- mock_daemon_stop.assert_called_once()
-
- @mock.patch('os.path.exists', return_value=True)
- def testNoProvisionDeviceIfVersionsMatch(self, unused_mock, mock_daemon_stop):
- self.args.os_check = 'update'
- self.args.system_image_dir = 'mockdir'
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(DeviceTarget, '_Discover') as mock_discover, \
- mock.patch.object(DeviceTarget, '_ConnectToTarget') as mock_connect, \
- mock.patch('device_target.get_sdk_hash') as mock_hash, \
- mock.patch.object(
- DeviceTarget, '_GetInstalledSdkVersion') as mock_version, \
- mock.patch.object(
- DeviceTarget, '_ProvisionDevice') as mock_provision, \
- mock.patch.object(
- DeviceTarget, '_Login') as mock_login:
- mock_discover.return_value = True
- mock_hash.return_value = '1.0'
- mock_version.return_value = '1.0'
- device_target_instance.Start()
- self.assertEqual(mock_provision.call_count, 0)
- mock_login.assert_called_once()
- mock_daemon_stop.assert_called_once()
-
- @mock.patch('os.path.exists', return_value=True)
- def testRaiseExceptionIfCheckVersionsNoMatch(self, unused_mock,
- mock_daemon_stop):
- self.args.os_check = 'check'
- self.args.system_image_dir = 'mockdir'
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(DeviceTarget, '_Discover') as mock_discover, \
- mock.patch.object(DeviceTarget, '_ConnectToTarget') as mock_ready, \
- mock.patch('device_target.get_sdk_hash') as mock_hash, \
- mock.patch.object(
- DeviceTarget, '_GetInstalledSdkVersion') as mock_version, \
- mock.patch.object(
- DeviceTarget, '_ProvisionDevice') as mock_provision, \
- self.assertRaisesRegex(Exception, 'Image and Fuchsia version'):
- mock_discover.return_value = True
- mock_hash.return_value = '2.0'
- mock_version.return_value = '1.0'
- device_target_instance.Start()
- mock_daemon_stop.assert_called_once()
-
- def testLoginCallsOnlyIfErmineExists(self, mock_daemon_stop):
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(
- LegacyErmineCtl, 'exists',
- new_callable=mock.PropertyMock) as mock_exists, \
- mock.patch.object(LegacyErmineCtl, 'take_to_shell') as mock_shell:
- mock_exists.return_value = True
-
- device_target_instance._Login()
-
- mock_exists.assert_called_once()
- mock_shell.assert_called_once()
-
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(
- LegacyErmineCtl, 'exists',
- new_callable=mock.PropertyMock) as mock_exists, \
- mock.patch.object(LegacyErmineCtl, 'take_to_shell') as mock_shell:
- mock_exists.return_value = False
-
- device_target_instance._Login()
-
- mock_exists.assert_called_once()
- self.assertEqual(mock_shell.call_count, 0)
-
- @mock.patch('os.path.exists', return_value=True)
- def testProvisionIfOneNonDetectableDevice(self, unused_mock,
- mock_daemon_stop):
- self.args.os_check = 'update'
- self.args.node_name = 'mocknode'
- self.args.system_image_dir = 'mockdir'
- with DeviceTarget.CreateFromArgs(self.args) as device_target_instance, \
- mock.patch.object(
- FfxTarget, 'get_ssh_address') as mock_get_ssh_address, \
- mock.patch.object(DeviceTarget,
- '_ProvisionDevice') as mock_provision, \
- mock.patch.object(DeviceTarget, '_Login') as mock_bypass:
- mock_get_ssh_address.return_value = None
- device_target_instance.Start()
- self.assertEqual(mock_provision.call_count, 1)
- mock_daemon_stop.assert_called_once()
-
- def testRaiseExceptionIfNoTargetDir(self, mock_daemon_stop):
- self.args.os_check = 'update'
- self.args.system_image_dir = ''
- with self.assertRaises(Exception):
- DeviceTarget.CreateFromArgs(self.args)
-
- def testSearchSDKIfImageDirNotFound(self, mock_daemon_stop):
- self.args.os_check = 'update'
- self.args.system_image_dir = 'product-bundle-instead-of-image'
- with mock.patch('os.path.exists', return_value=False), \
- mock.patch('device_target.find_image_in_sdk',
- return_value='some/path/to/image') as mock_find, \
- mock.patch('device_target.SDK_ROOT', 'some/path/to/sdk'), \
- self.assertLogs():
- target = DeviceTarget.CreateFromArgs(self.args)
- mock_find.assert_called_once_with('product-bundle-instead-of-image',
- product_bundle=True,
- sdk_root='some/path/to')
- self.assertEqual(target._system_image_dir, 'some/path/to/image')
-
- def testSearchSDKThrowsExceptionIfNoPathReturned(self, mock_daemon_stop):
- self.args.os_check = 'update'
- self.args.system_image_dir = 'product-bundle-instead-of-image'
- with mock.patch('os.path.exists', return_value=False), \
- mock.patch('device_target.find_image_in_sdk',
- return_value=None), \
- mock.patch('device_target.SDK_ROOT', 'some/path/to/sdk'), \
- self.assertLogs(), \
- self.assertRaises(FileNotFoundError):
- target = DeviceTarget.CreateFromArgs(self.args)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/build/fuchsia/emu_target.py b/build/fuchsia/emu_target.py
deleted file mode 100644
index d430cdf8a..000000000
--- a/build/fuchsia/emu_target.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# Copyright 2019 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running/interacting with Fuchsia on an emulator."""
-
-import json
-import logging
-import os
-import subprocess
-import sys
-import tempfile
-
-import boot_data
-import common
-import ffx_session
-import pkg_repo
-import target
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-from compatible_utils import find_image_in_sdk, running_unattended
-
-
-class EmuTarget(target.Target):
- LOCAL_ADDRESS = 'localhost'
-
- def __init__(self, out_dir, target_cpu, logs_dir, image):
- """out_dir: The directory which will contain the files that are
- generated to support the emulator deployment.
- target_cpu: The emulated target CPU architecture.
- Can be 'x64' or 'arm64'."""
-
- super(EmuTarget, self).__init__(out_dir, target_cpu, logs_dir)
- self._emu_process = None
- self._pkg_repo = None
- self._target_context = None
- self._ffx_target = None
-
- self._pb_path = self._GetPbPath(image)
- metadata = self._GetEmuMetadata()
- self._disk_image = metadata['disk_images'][0]
- self._kernel = metadata['kernel']
- self._ramdisk = metadata['initial_ramdisk']
-
- def _GetPbPath(self, image):
- if not image:
- image = 'terminal.qemu-%s' % self._target_cpu
- image_path = find_image_in_sdk(image,
- product_bundle=True,
- sdk_root=os.path.dirname(common.IMAGES_ROOT))
- if not image_path:
- raise FileNotFoundError(f'Product bundle {image} is not downloaded. Add '
- 'the image and run "gclient sync" again.')
- return image_path
-
- def _GetEmuMetadata(self):
- with open(os.path.join(self._pb_path, 'product_bundle.json')) as f:
- return json.load(f)['data']['manifests']['emu']
-
- def __enter__(self):
- return self
-
- def _BuildCommand(self):
- """Build the command that will be run to start Fuchsia in the emulator."""
- pass
-
- def _SetEnv(self):
- return os.environ.copy()
-
- def Start(self):
- if running_unattended() and not self._HasNetworking():
- # Bots may accumulate stale manually-added targets with the same address
-      # as the one to be added here. Preemptively remove any unknown targets at
- # this address before starting the emulator and adding it as a target.
- self._ffx_runner.remove_stale_targets('127.0.0.1')
- emu_command = self._BuildCommand()
- logging.debug(' '.join(emu_command))
-
- # Zircon sends debug logs to serial port (see kernel.serial=legacy flag
- # above). Serial port is redirected to a file through emulator stdout.
- # If runner_logs are not enabled, we output the kernel serial log
- # to a temporary file, and print that out if we are unable to connect to
- # the emulator guest, to make it easier to diagnose connectivity issues.
- temporary_log_file = None
- if self._log_manager.IsLoggingEnabled():
- stdout = self._log_manager.Open('serial_log')
- else:
- temporary_log_file = tempfile.NamedTemporaryFile('w')
- stdout = temporary_log_file
-
- self.LogProcessStatistics('proc_stat_start_log')
- self.LogSystemStatistics('system_statistics_start_log')
-
- self._emu_process = subprocess.Popen(emu_command,
- stdin=open(os.devnull),
- stdout=stdout,
- stderr=subprocess.STDOUT,
- env=self._SetEnv())
- try:
- self._ConnectToTarget()
- self.LogProcessStatistics('proc_stat_ready_log')
- except target.FuchsiaTargetException:
- self._DisconnectFromTarget()
- if temporary_log_file:
- logging.info('Kernel logs:\n' +
- open(temporary_log_file.name, 'r').read())
- raise
-
- def GetFfxTarget(self):
- assert self._ffx_target
- return self._ffx_target
-
- def Stop(self):
- try:
- self._DisconnectFromTarget()
- self._Shutdown()
- finally:
- self.LogProcessStatistics('proc_stat_end_log')
- self.LogSystemStatistics('system_statistics_end_log')
- super(EmuTarget, self).Stop()
-
- def GetPkgRepo(self):
- if not self._pkg_repo:
- self._pkg_repo = pkg_repo.ManagedPkgRepo(self)
-
- return self._pkg_repo
-
- def _Shutdown(self):
- """Shuts down the emulator."""
- raise NotImplementedError()
-
- def _HasNetworking(self):
- """Returns `True` if the emulator will be started with networking (e.g.,
- TUN/TAP emulated networking).
- """
- raise NotImplementedError()
-
- def _IsEmuStillRunning(self):
- """Returns `True` if the emulator is still running."""
- raise NotImplementedError()
-
- def _GetEndpoint(self):
- raise NotImplementedError()
-
- def _ConnectToTarget(self):
- with_network = self._HasNetworking()
- if not with_network:
- # The target was started without networking, so tell ffx how to find it.
- logging.info('Connecting to Fuchsia using ffx.')
- _, host_ssh_port = self._GetEndpoint()
- self._target_context = self._ffx_runner.scoped_target_context(
- '127.0.0.1', host_ssh_port)
- self._ffx_target = self._target_context.__enter__()
- self._ffx_target.wait(common.ATTACH_RETRY_SECONDS)
- super(EmuTarget, self)._ConnectToTarget()
- if with_network:
- # Interact with the target via its address:port, which ffx should now know
- # about.
- self._ffx_target = ffx_session.FfxTarget.from_address(
- self._ffx_runner, *self._GetEndpoint())
-
- def _DisconnectFromTarget(self):
- self._ffx_target = None
- if self._target_context:
- self._target_context.__exit__(None, None, None)
- self._target_context = None
- super(EmuTarget, self)._DisconnectFromTarget()
-
- def _GetSshConfigPath(self):
- return boot_data.GetSSHConfigPath()
-
- def LogSystemStatistics(self, log_file_name):
- self._LaunchSubprocessWithLogs(['top', '-b', '-n', '1'], log_file_name)
- self._LaunchSubprocessWithLogs(['ps', '-ax'], log_file_name)
-
- def LogProcessStatistics(self, log_file_name):
- self._LaunchSubprocessWithLogs(['cat', '/proc/stat'], log_file_name)
-
- def _LaunchSubprocessWithLogs(self, command, log_file_name):
- """Launch a subprocess and redirect stdout and stderr to log_file_name.
- Command will not be run if logging directory is not set."""
-
- if not self._log_manager.IsLoggingEnabled():
- return
- log = self._log_manager.Open(log_file_name)
- subprocess.call(command,
- stdin=open(os.devnull),
- stdout=log,
- stderr=subprocess.STDOUT)
diff --git a/build/fuchsia/exit_on_sig_term.py b/build/fuchsia/exit_on_sig_term.py
deleted file mode 100644
index cea81f8b5..000000000
--- a/build/fuchsia/exit_on_sig_term.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2022 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import signal
-import sys
-
-
-# TODO(grt): Derive from contextlib.AbstractContextManager when p3 is supported.
-class ExitOnSigTerm():
- """A context manager that calls sys.exit(0) upon receipt of SIGTERM. This
- results in a SystemExit exception being raised, which causes any finally
- clauses to be run and other contexts to be cleaned up.
- """
-
- def __init__(self):
- self._previous_handler = None
-
- def __enter__(self):
- self._previous_handler = signal.signal(
- signal.SIGTERM, lambda sig_num, frame: sys.exit(0))
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- signal.signal(signal.SIGTERM, self._previous_handler)
- return False
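
A usage sketch for the ExitOnSigTerm context manager above, assuming
exit_on_sig_term.py is on the import path: a SIGTERM delivered inside the
`with` block raises SystemExit, so the finally clause still runs.

    import time

    from exit_on_sig_term import ExitOnSigTerm

    def main():
      with ExitOnSigTerm():
        try:
          time.sleep(60)  # Stand-in for long-running emulator or test work.
        finally:
          print('Cleanup runs even if the process receives SIGTERM.')

    if __name__ == '__main__':
      main()
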
diff --git a/build/fuchsia/ffx_session.py b/build/fuchsia/ffx_session.py
deleted file mode 100755
index e95940fb3..000000000
--- a/build/fuchsia/ffx_session.py
+++ /dev/null
@@ -1,620 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2022 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""A helper tool for running Fuchsia's `ffx`.
-"""
-
-# Enable use of the print() built-in function.
-from __future__ import print_function
-
-import argparse
-import contextlib
-import errno
-import json
-import logging
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-
-import common
-import log_manager
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-from compatible_utils import parse_host_port
-
-RUN_SUMMARY_SCHEMA = \
- 'https://fuchsia.dev/schema/ffx_test/run_summary-8d1dd964.json'
-
-
-def get_ffx_path():
- """Returns the full path to `ffx`."""
- return os.path.join(common.SDK_ROOT, 'tools',
- common.GetHostArchFromPlatform(), 'ffx')
-
-
-def format_host_port(host, port):
- """Formats a host name or IP address and port number into a host:port string.
- """
- # Wrap `host` in brackets if it looks like an IPv6 address
- return ('[%s]:%d' if ':' in host else '%s:%d') % (host, port)
-
-
-class FfxRunner():
- """A helper to run `ffx` commands."""
-
- def __init__(self, log_manager):
- self._ffx = get_ffx_path()
- self._log_manager = log_manager
-
- def _get_daemon_status(self):
- """Determines daemon status via `ffx daemon socket`.
-
- Returns:
- dict of status of the socket. Status will have a key Running or
- NotRunning to indicate if the daemon is running.
- """
- status = json.loads(
- self.run_ffx(['--machine', 'json', 'daemon', 'socket'],
- check=True,
- suppress_repair=True))
- if status.get('pid') and status.get('pid', {}).get('status', {}):
- return status['pid']['status']
- return {'NotRunning': True}
-
- def _is_daemon_running(self):
- return 'Running' in self._get_daemon_status()
-
- def _wait_for_daemon(self, start=True, timeout_seconds=100):
- """Waits for daemon to reach desired state in a polling loop.
-
- Sleeps for 5s between polls.
-
- Args:
- start: bool. Indicates to wait for daemon to start up. If False,
- indicates waiting for daemon to die.
- timeout_seconds: int. Number of seconds to wait for the daemon to reach
- the desired status.
- Raises:
- TimeoutError: if the daemon does not reach the desired state in time.
- """
- wanted_status = 'start' if start else 'stop'
- sleep_period_seconds = 5
- attempts = int(timeout_seconds / sleep_period_seconds)
- for i in range(attempts):
- if self._is_daemon_running() == start:
- return
-      if i != attempts - 1:
- logging.info('Waiting for daemon to %s...', wanted_status)
- time.sleep(sleep_period_seconds)
-
- raise TimeoutError(f'Daemon did not {wanted_status} in time.')
-
- def _run_repair_command(self, output):
- """Scans `output` for a self-repair command to run and, if found, runs it.
-
- If logging is enabled, `ffx` is asked to emit its own logs to the log
- directory.
-
- Returns:
- True if a repair command was found and ran successfully. False otherwise.
- """
- # Check for a string along the lines of:
- # "Run `ffx doctor --restart-daemon` for further diagnostics."
- match = re.search('`ffx ([^`]+)`', output)
- if not match or len(match.groups()) != 1:
- return False # No repair command found.
- args = match.groups()[0].split()
- # Tell ffx to include the configuration file without prompting in case
- # logging is enabled.
- with self.scoped_config('doctor.record_config', 'true'):
- # If the repair command is `ffx doctor` and logging is enabled, add the
- # options to emit ffx logs to the logging directory.
- if len(args) and args[0] == 'doctor' and \
- self._log_manager.IsLoggingEnabled():
- args.extend(
- ('--record', '--output-dir', self._log_manager.GetLogDirectory()))
- try:
- self.run_ffx(args, suppress_repair=True)
- self._wait_for_daemon(start=True)
- except subprocess.CalledProcessError as cpe:
- return False # Repair failed.
- return True # Repair succeeded.
-
- def run_ffx(self, args, check=True, suppress_repair=False):
- """Runs `ffx` with the given arguments, waiting for it to exit.
-
- If `ffx` exits with a non-zero exit code, the output is scanned for a
- recommended repair command (e.g., "Run `ffx doctor --restart-daemon` for
- further diagnostics."). If such a command is found, it is run and then the
- original command is retried. This behavior can be suppressed via the
- `suppress_repair` argument.
-
- Args:
- args: A sequence of arguments to ffx.
- check: If True, CalledProcessError is raised if ffx returns a non-zero
- exit code.
- suppress_repair: If True, do not attempt to find and run a repair command.
- Returns:
- A string containing combined stdout and stderr.
- Raises:
-      CalledProcessError: If `check` is True and ffx exits with a non-zero
-        return code.
- """
- log_file = self._log_manager.Open('ffx_log') \
- if self._log_manager.IsLoggingEnabled() else None
- command = [self._ffx]
- command.extend(args)
- logging.debug(command)
- if log_file:
- print(command, file=log_file)
- repair_succeeded = False
- try:
- # TODO(grt): Switch to subprocess.run() with encoding='utf-8' when p3 is
- # supported.
- process = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout_data, stderr_data = process.communicate()
- stdout_data = stdout_data.decode('utf-8')
- stderr_data = stderr_data.decode('utf-8')
- if check and process.returncode != 0:
- # TODO(grt): Pass stdout and stderr as two args when p2 support is no
- # longer needed.
- raise subprocess.CalledProcessError(
- process.returncode, command, '\n'.join((stdout_data, stderr_data)))
- except subprocess.CalledProcessError as cpe:
- if log_file:
- log_file.write('Process exited with code %d. Output: %s\n' %
- (cpe.returncode, cpe.output.strip()))
- # Let the exception fly unless a repair command is found and succeeds.
- if suppress_repair or not self._run_repair_command(cpe.output):
- raise
- repair_succeeded = True
-
- # If the original command failed but a repair command was found and
- # succeeded, try one more time with the original command.
- if repair_succeeded:
- return self.run_ffx(args, check, suppress_repair=True)
-
- stripped_stdout = stdout_data.strip()
- stripped_stderr = stderr_data.strip()
- if log_file:
- if process.returncode != 0 or stripped_stderr:
- log_file.write('Process exited with code %d.' % process.returncode)
- if stripped_stderr:
- log_file.write(' Stderr:\n%s\n' % stripped_stderr)
- if stripped_stdout:
- log_file.write(' Stdout:\n%s\n' % stripped_stdout)
- if not stripped_stderr and not stripped_stdout:
- log_file.write('\n')
- elif stripped_stdout:
- log_file.write('%s\n' % stripped_stdout)
- logging.debug(
- 'ffx command returned %d with %s%s', process.returncode,
- ('output "%s"' % stripped_stdout if stripped_stdout else 'no output'),
- (' and error "%s".' % stripped_stderr if stripped_stderr else '.'))
- return stdout_data
-
- def open_ffx(self, args):
- """Runs `ffx` with some arguments.
- Args:
- args: A sequence of arguments to ffx.
- Returns:
- A subprocess.Popen object.
- """
- log_file = self._log_manager.Open('ffx_log') \
- if self._log_manager.IsLoggingEnabled() else None
- command = [self._ffx]
- command.extend(args)
- logging.debug(command)
- if log_file:
- print(command, file=log_file)
- try:
- # TODO(grt): Add encoding='utf-8' when p3 is supported.
- return subprocess.Popen(command,
- stdin=open(os.devnull, 'r'),
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- except:
- logging.exception('Failed to open ffx')
- if log_file:
-        print('Exception caught while opening ffx: %s' %
-              str(sys.exc_info()[1]), file=log_file)
- raise
-
- @contextlib.contextmanager
- def scoped_config(self, name, value):
- """Temporarily overrides `ffx` configuration.
-
- Args:
- name: The name of the property to set.
- value: The value to associate with `name`.
-
-    Yields:
-      Nothing. The previous value is restored upon exit.
- """
- assert value is not None
- # Cache the current value.
- old_value = None
- try:
- old_value = self.run_ffx(['config', 'get', name]).strip()
- except subprocess.CalledProcessError as cpe:
- if cpe.returncode != 2:
- raise # The failure was for something other than value not found.
- # Set the new value if it is different.
- if value != old_value:
- self.run_ffx(['config', 'set', name, value])
- try:
- yield None
- finally:
- if value == old_value:
- return # There is no need to restore an old value.
- # Clear the new value.
- self.run_ffx(['config', 'remove', name])
- if old_value is None:
- return
- # Did removing the new value restore the original value on account of it
- # either being the default or being set in a different scope?
- if (self.run_ffx(['config', 'get', name],
- check=False).strip() == old_value):
- return
- # If not, explicitly set the original value.
- self.run_ffx(['config', 'set', name, old_value])
-
- def list_targets(self):
- """Returns the (possibly empty) list of targets known to ffx.
-
- Returns:
- The list of targets parsed from the JSON output of `ffx target list`.
- """
- json_targets = self.run_ffx(['target', 'list', '-f', 'json'])
- if not json_targets:
- return []
- try:
- return json.loads(json_targets)
- except ValueError:
- # TODO(grt): Change to json.JSONDecodeError once p3 is supported.
- return []
-
- def list_active_targets(self):
- """Gets the list of targets and filters down to the targets that are active.
-
- Returns:
- An iterator over active FfxTargets.
- """
- targets = [
- FfxTarget.from_target_list_json(self, json_target)
- for json_target in self.list_targets()
- ]
- return filter(lambda target: target.get_ssh_address(), targets)
-
- def remove_stale_targets(self, address):
- """Removes any targets from ffx that are listening at a given address.
-
- Args:
- address: A string representation of the target's ip address.
- """
- for target in self.list_targets():
- if target['rcs_state'] == 'N' and address in target['addresses']:
- self.run_ffx(['target', 'remove', address])
-
- @contextlib.contextmanager
- def scoped_target_context(self, address, port):
- """Temporarily adds a new target.
-
- Args:
- address: The IP address at which the target is listening.
- port: The port number on which the target is listening.
-
- Yields:
- An FfxTarget for interacting with the target.
- """
- target_id = format_host_port(address, port)
- # -n allows `target add` to skip waiting for the device to come up,
- # as this can take longer than the default wait period.
- self.run_ffx(['target', 'add', '-n', target_id])
- try:
- yield FfxTarget.from_address(self, address, port)
- finally:
- self.run_ffx(['target', 'remove', target_id], check=False)
-
- def get_node_name(self, address, port):
- """Returns the node name for a target given its SSH address.
-
- Args:
- address: The address at which the target's SSH daemon is listening.
- port: The port number on which the daemon is listening.
-
- Returns:
- The target's node name.
-
- Raises:
- Exception: If the target cannot be found.
- """
- for target in self.list_targets():
- if target['nodename'] and address in target['addresses']:
-        ssh_address = FfxTarget.from_target_list_json(
-            self, target).get_ssh_address()
- if ssh_address and ssh_address[1] == port:
- return target['nodename']
- raise Exception('Failed to determine node name for target at %s' %
- format_host_port(address, port))
-
- def daemon_stop(self):
- """Stops the ffx daemon."""
- self.run_ffx(['daemon', 'stop'], check=False, suppress_repair=True)
- # Daemon should stop at this point.
- self._wait_for_daemon(start=False)
-
-
-class FfxTarget():
- """A helper to run `ffx` commands for a specific target."""
-
- @classmethod
- def from_address(cls, ffx_runner, address, port=None):
- """Args:
- ffx_runner: The runner to use to run ffx.
- address: The target's address.
- port: The target's port, defaults to None in which case it will target
- the first device at the specified address
- """
- return cls(ffx_runner, format_host_port(address, port) if port else address)
-
- @classmethod
- def from_node_name(cls, ffx_runner, node_name):
- """Args:
- ffx_runner: The runner to use to run ffx.
- node_name: The target's node name.
- """
- return cls(ffx_runner, node_name)
-
- @classmethod
- def from_target_list_json(cls, ffx_runner, json_target):
- """Args:
- ffx_runner: The runner to use to run ffx.
- json_target: the json dict as returned from `ffx list targets`
- """
- # Targets seen via `fx serve-remote` frequently have no name, so fall back
- # to using the first address.
- if json_target['nodename'].startswith('<unknown'):
- return cls.from_address(ffx_runner, json_target['addresses'][0])
- return cls.from_node_name(ffx_runner, json_target['nodename'])
-
- def __init__(self, ffx_runner, target_id):
- """Args:
- ffx_runner: The runner to use to run ffx.
- target_id: The target's node name or addr:port string.
- """
- self._ffx_runner = ffx_runner
- self._target_id = target_id
- self._target_args = ('--target', target_id)
-
- def format_runner_options(self):
- """Returns a string holding options suitable for use with the runner scripts
- to run tests on this target."""
- try:
- # First try extracting host:port from the target_id.
- return '-d --host %s --port %d' % parse_host_port(self._target_args[1])
- except ValueError:
- # Must be a simple node name.
- pass
- return '-d --node-name %s' % self._target_args[1]
-
- def wait(self, timeout=None):
- """Waits for ffx to establish a connection with the target.
-
- Args:
- timeout: The number of seconds to wait (60 if not specified).
- """
- command = list(self._target_args)
- command.extend(('target', 'wait'))
- if timeout is not None:
- command.extend(('-t', '%d' % int(timeout)))
- self._ffx_runner.run_ffx(command)
-
- def get_ssh_address(self):
- """Returns the host and port of the target's SSH address
-
- Returns:
- A tuple of a host address string and a port number integer,
- or None if there was an exception
- """
- command = list(self._target_args)
- command.extend(('target', 'get-ssh-address'))
- try:
- return parse_host_port(self._ffx_runner.run_ffx(command))
- except:
- return None
-
- def open_ffx(self, command):
- """Runs `ffx` for the target with some arguments.
- Args:
- command: A command and its arguments to run on the target.
- Returns:
- A subprocess.Popen object.
- """
- args = list(self._target_args)
- args.extend(command)
- return self._ffx_runner.open_ffx(args)
-
- def __str__(self):
- return self._target_id
-
- def __repr__(self):
- return self._target_id
-
-
-# TODO(grt): Derive from contextlib.AbstractContextManager when p3 is supported.
-class FfxSession():
- """A context manager that manages a session for running a test via `ffx`.
-
- Upon entry, an instance of this class configures `ffx` to retrieve files
- generated by a test and prepares a directory to hold these files either in a
- LogManager's log directory or in tmp. On exit, any previous configuration of
- `ffx` is restored and the temporary directory, if used, is deleted.
-
- The prepared directory is used when invoking `ffx test run`.
- """
-
- def __init__(self, log_manager):
- """Args:
- log_manager: A Target's LogManager.
- """
- self._log_manager = log_manager
- self._ffx = FfxRunner(log_manager)
- self._own_output_dir = False
- self._output_dir = None
- self._run_summary = None
- self._suite_summary = None
- self._custom_artifact_directory = None
- self._debug_data_directory = None
-
- def __enter__(self):
- if self._log_manager.IsLoggingEnabled():
- # Use a subdir of the configured log directory to hold test outputs.
- self._output_dir = os.path.join(self._log_manager.GetLogDirectory(),
- 'test_outputs')
- # TODO(grt): Use exist_ok=True when p3 is supported.
- try:
- os.makedirs(self._output_dir)
- except OSError as ose:
- if ose.errno != errno.EEXIST:
- raise
- else:
- # Create a temporary directory to hold test outputs.
- # TODO(grt): Switch to tempfile.TemporaryDirectory when p3 is supported.
- self._own_output_dir = True
- self._output_dir = tempfile.mkdtemp(prefix='ffx_session_tmp_')
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
-    if self._own_output_dir:
-      # Clean up the temporary output directory created for this session.
- shutil.rmtree(self._output_dir, ignore_errors=True)
- self._own_output_dir = False
- self._output_dir = None
- return False
-
- def get_output_dir(self):
- """Returns the temporary output directory for the session."""
- assert self._output_dir, 'FfxSession must be used in a context'
- return self._output_dir
-
- def test_run(self, ffx_target, component_uri, package_args):
- """Runs a test on a target.
- Args:
- ffx_target: The target on which to run the test.
- component_uri: The test component URI.
- package_args: Arguments to the test package.
- Returns:
- A subprocess.Popen object.
- """
- command = [
- '--config', 'test.experimental_structured_output=false', 'test', 'run',
- '--output-directory', self._output_dir, component_uri, '--'
- ]
- command.extend(package_args)
- return ffx_target.open_ffx(command)
-
- def _parse_test_outputs(self):
- """Parses the output files generated by the test runner.
-
- The instance's `_custom_artifact_directory` member is set to the directory
- holding output files emitted by the test.
-
- This function is idempotent, and performs no work if it has already been
- called.
- """
- if self._run_summary:
- return # The files have already been parsed.
-
- # Parse the main run_summary.json file.
- run_summary_path = os.path.join(self._output_dir, 'run_summary.json')
- try:
- with open(run_summary_path) as run_summary_file:
- self._run_summary = json.load(run_summary_file)
- except IOError as io_error:
- logging.error('Error reading run summary file: %s', str(io_error))
- return
- except ValueError as value_error:
- logging.error('Error parsing run summary file %s: %s', run_summary_path,
- str(value_error))
- return
-
- assert self._run_summary['schema_id'] == RUN_SUMMARY_SCHEMA, \
- 'Unsupported version found in %s' % run_summary_path
-
- run_artifact_dir = self._run_summary.get('data', {})['artifact_dir']
- for artifact_path, artifact in self._run_summary.get(
- 'data', {})['artifacts'].items():
- if artifact['artifact_type'] == 'DEBUG':
- self._debug_data_directory = os.path.join(self._output_dir,
- run_artifact_dir,
- artifact_path)
- break
-
- # There should be precisely one suite for the test that ran.
- self._suite_summary = self._run_summary.get('data', {}).get('suites',
- [{}])[0]
-
- # Get the top-level directory holding all artifacts for this suite.
- artifact_dir = self._suite_summary.get('artifact_dir')
- if not artifact_dir:
- logging.error('Failed to find suite\'s artifact_dir in %s',
- run_summary_path)
- return
-
- # Get the path corresponding to artifacts
- for artifact_path, artifact in self._suite_summary['artifacts'].items():
- if artifact['artifact_type'] == 'CUSTOM':
- self._custom_artifact_directory = os.path.join(self._output_dir,
- artifact_dir,
- artifact_path)
- break
-
- def get_custom_artifact_directory(self):
- """Returns the full path to the directory holding custom artifacts emitted
- by the test, or None if the path cannot be determined.
- """
- self._parse_test_outputs()
- return self._custom_artifact_directory
-
- def get_debug_data_directory(self):
-    """Returns the full path to the directory holding debug data emitted
- by the test, or None if the path cannot be determined.
- """
- self._parse_test_outputs()
- return self._debug_data_directory
-
-
-def make_arg_parser():
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument('--logs-dir', help='Directory to write logs to.')
- parser.add_argument('--verbose',
- '-v',
- action='store_true',
- default=False,
- help='Enable debug logging')
- return parser
-
-
-def main(args):
- args = make_arg_parser().parse_args(args)
-
- logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
- level=logging.DEBUG if args.verbose else logging.INFO)
- log_mgr = log_manager.LogManager(args.logs_dir)
-
- with FfxSession(log_mgr) as ffx_session:
- logging.info(ffx_session.get_output_dir())
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
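
A self-contained illustration of the host:port convention implemented by
format_host_port() above: IPv6 literals, which contain ':', are wrapped in
brackets so the port separator stays unambiguous.

    def format_host_port(host, port):
      # Wrap IPv6 literals in brackets; plain hostnames and IPv4 need none.
      return ('[%s]:%d' if ':' in host else '%s:%d') % (host, port)

    assert format_host_port('192.168.1.10', 22) == '192.168.1.10:22'
    assert format_host_port('fe80::1', 8022) == '[fe80::1]:8022'
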
diff --git a/build/fuchsia/fvdl_target.py b/build/fuchsia/fvdl_target.py
deleted file mode 100644
index 724ebcfc4..000000000
--- a/build/fuchsia/fvdl_target.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Implements commands for running and interacting with Fuchsia on FVDL."""
-
-import boot_data
-import common
-import emu_target
-import logging
-import os
-import re
-import subprocess
-import tempfile
-
-_SSH_KEY_DIR = os.path.expanduser('~/.ssh')
-_DEFAULT_SSH_PORT = 22
-_DEVICE_PROTO_TEMPLATE = """
-device_spec: {{
- horizontal_resolution: 1024
- vertical_resolution: 600
- vm_heap: 192
- ram: {ramsize}
- cache: 32
- screen_density: 240
-}}
-"""
-
-
-def GetTargetType():
- return FvdlTarget
-
-
-class EmulatorNetworkNotFoundError(Exception):
- """Raised when emulator's address cannot be found"""
- pass
-
-
-class FvdlTarget(emu_target.EmuTarget):
- EMULATOR_NAME = 'aemu'
- _FVDL_PATH = os.path.join(common.SDK_ROOT, 'tools', 'x64', 'fvdl')
-
- def __init__(self, out_dir, target_cpu, require_kvm, enable_graphics,
- hardware_gpu, with_network, cpu_cores, ram_size_mb, logs_dir,
- custom_image):
-
- super(FvdlTarget, self).__init__(out_dir, target_cpu, logs_dir,
- custom_image)
- self._require_kvm = require_kvm
- self._enable_graphics = enable_graphics
- self._hardware_gpu = hardware_gpu
- self._with_network = with_network
- self._cpu_cores = cpu_cores
- self._ram_size_mb = ram_size_mb
-
- self._host = None
- self._pid = None
-
- # Use a temp file for vdl output.
- self._vdl_output_file = tempfile.NamedTemporaryFile()
-
- # Use a temp file for the device proto and write the ram size.
- self._device_proto_file = tempfile.NamedTemporaryFile()
- with open(self._device_proto_file.name, 'w') as file:
- file.write(_DEVICE_PROTO_TEMPLATE.format(ramsize=self._ram_size_mb))
-
- @staticmethod
- def CreateFromArgs(args):
- return FvdlTarget(args.out_dir, args.target_cpu, args.require_kvm,
- args.enable_graphics, args.hardware_gpu,
- args.with_network, args.cpu_cores, args.ram_size_mb,
- args.logs_dir, args.custom_image)
-
- @staticmethod
- def RegisterArgs(arg_parser):
- fvdl_args = arg_parser.add_argument_group('fvdl', 'FVDL arguments')
- fvdl_args.add_argument('--with-network',
- action='store_true',
- default=False,
- help='Run emulator with emulated nic via tun/tap.')
- fvdl_args.add_argument('--custom-image',
- help='Specify an image used for booting up the '\
- 'emulator.')
- fvdl_args.add_argument('--enable-graphics',
- action='store_true',
- default=False,
- help='Start emulator with graphics instead of '\
- 'headless.')
- fvdl_args.add_argument('--hardware-gpu',
- action='store_true',
- default=False,
-                           help='Use local GPU hardware instead of Swiftshader.')
-
- def _BuildCommand(self):
- boot_data.ProvisionSSH()
- self._host_ssh_port = common.GetAvailableTcpPort()
- kernel_image = common.EnsurePathExists(
- boot_data.GetTargetFile(self._kernel, self._pb_path))
- zbi_image = common.EnsurePathExists(
- boot_data.GetTargetFile(self._ramdisk, self._pb_path))
- fvm_image = common.EnsurePathExists(
- boot_data.GetTargetFile(self._disk_image, self._pb_path))
- aemu_path = common.EnsurePathExists(
- os.path.join(common.GetHostToolPathFromPlatform('aemu_internal'),
- 'emulator'))
- emu_command = [
- self._FVDL_PATH,
- '--sdk',
- 'start',
- '--nointeractive',
-
- # Host port mapping for user-networking mode.
- '--port-map',
- 'hostfwd=tcp::{}-:22'.format(self._host_ssh_port),
-
- # no-interactive requires a --vdl-output flag to shutdown the emulator.
- '--vdl-output',
- self._vdl_output_file.name,
- '-c',
- ' '.join(boot_data.GetKernelArgs()),
-
- # Use an existing emulator checked out by Chromium.
- '--aemu-path',
- aemu_path,
-
- # Use existing images instead of downloading new ones.
- '--kernel-image',
- kernel_image,
- '--zbi-image',
- zbi_image,
- '--fvm-image',
- fvm_image,
- '--image-architecture',
- self._target_cpu,
-
- # Use this flag and temp file to define ram size.
- '--device-proto',
- self._device_proto_file.name,
- '--cpu-count',
- str(self._cpu_cores)
- ]
- self._ConfigureEmulatorLog(emu_command)
-
- if not self._require_kvm:
- emu_command.append('--noacceleration')
- if not self._enable_graphics:
- emu_command.append('--headless')
- if self._hardware_gpu:
- emu_command.append('--host-gpu')
- if self._with_network:
- emu_command.append('-N')
-
- return emu_command
-
- def _ConfigureEmulatorLog(self, emu_command):
- if self._log_manager.IsLoggingEnabled():
- emu_command.extend([
- '--emulator-log',
- os.path.join(self._log_manager.GetLogDirectory(), 'emulator_log')
- ])
-
- env_flags = [
- 'ANDROID_EMUGL_LOG_PRINT=1',
- 'ANDROID_EMUGL_VERBOSE=1',
- 'VK_LOADER_DEBUG=info,error',
- ]
- if self._hardware_gpu:
- vulkan_icd_file = os.path.join(
- common.GetHostToolPathFromPlatform('aemu_internal'), 'lib64',
- 'vulkan', 'vk_swiftshader_icd.json')
- env_flags.append('VK_ICD_FILENAMES=%s' % vulkan_icd_file)
- for flag in env_flags:
- emu_command.extend(['--envs', flag])
-
- def _HasNetworking(self):
- return self._with_network
-
- def _ConnectToTarget(self):
- # Wait for the emulator to finish starting up.
- logging.info('Waiting for fvdl to start...')
- self._emu_process.communicate()
- super(FvdlTarget, self)._ConnectToTarget()
-
- def _IsEmuStillRunning(self):
- if not self._pid:
- try:
- with open(self._vdl_output_file.name) as vdl_file:
- for line in vdl_file:
- if 'pid' in line:
- match = re.match(r'.*pid:\s*(\d*).*', line)
- if match:
- self._pid = match.group(1)
- except IOError:
- logging.error('vdl_output file no longer found. '
- 'Cannot get emulator pid.')
- return False
- try:
- if subprocess.check_output(['ps', '-p', self._pid, 'o', 'comm=']):
- return True
- except subprocess.CalledProcessError:
- # The process must be gone.
- pass
- logging.error('Emulator pid no longer found. Emulator must be down.')
- return False
-
- def _GetEndpoint(self):
- if self._with_network:
- return self._GetNetworkAddress()
- return (self.LOCAL_ADDRESS, self._host_ssh_port)
-
- def _GetNetworkAddress(self):
- if self._host:
- return (self._host, _DEFAULT_SSH_PORT)
- try:
- with open(self._vdl_output_file.name) as vdl_file:
- for line in vdl_file:
- if 'network_address' in line:
- address = re.match(r'.*network_address:\s*"\[(.*)\]".*', line)
- if address:
- self._host = address.group(1)
- return (self._host, _DEFAULT_SSH_PORT)
- logging.error('Network address not found.')
- raise EmulatorNetworkNotFoundError()
- except IOError as e:
- logging.error('vdl_output file not found. Cannot get network address.')
- raise
-
- def _Shutdown(self):
- if not self._emu_process:
- logging.error('%s did not start' % (self.EMULATOR_NAME))
- return
- femu_command = [
- self._FVDL_PATH, '--sdk', 'kill', '--launched-proto',
- self._vdl_output_file.name
- ]
- femu_process = subprocess.Popen(femu_command)
- returncode = femu_process.wait()
- if returncode == 0:
- logging.info('FVDL shutdown successfully')
- else:
- logging.info('FVDL kill returned error status {}'.format(returncode))
- self.LogProcessStatistics('proc_stat_end_log')
- self.LogSystemStatistics('system_statistics_end_log')
- self._vdl_output_file.close()
- self._device_proto_file.close()
-
- def _GetSshConfigPath(self):
- return boot_data.GetSSHConfigPath()
diff --git a/build/fuchsia/fvdl_target_test.py b/build/fuchsia/fvdl_target_test.py
deleted file mode 100755
index 77df45ee9..000000000
--- a/build/fuchsia/fvdl_target_test.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests different flags to see if they are being used correctly"""
-
-import boot_data
-import common
-import os
-import tempfile
-import unittest
-import unittest.mock as mock
-
-from argparse import Namespace
-from ffx_session import FfxRunner
-from fvdl_target import FvdlTarget, _SSH_KEY_DIR
-
-_EMU_METADATA = {
- "disk_images": ["fuchsia.blk"],
- "initial_ramdisk": "fuchsia.zbi",
- "kernel": "fuchsia.bin"
-}
-
-
-@mock.patch.object(FfxRunner, 'daemon_stop')
-class TestBuildCommandFvdlTarget(unittest.TestCase):
- def setUp(self):
- self.args = Namespace(out_dir='outdir',
- system_log_file=None,
- target_cpu='x64',
- require_kvm=True,
- enable_graphics=False,
- hardware_gpu=False,
- with_network=False,
- ram_size_mb=8192,
- logs_dir=None,
- custom_image=None,
- cpu_cores=10)
- common.EnsurePathExists = mock.MagicMock(return_value='image')
- boot_data.ProvisionSSH = mock.MagicMock()
- FvdlTarget._GetPbPath = mock.MagicMock(return_value='path')
- FvdlTarget._GetEmuMetadata = mock.MagicMock(return_value=_EMU_METADATA)
- FvdlTarget._Shutdown = mock.MagicMock()
-
- def testBasicEmuCommand(self, mock_daemon_stop):
- with FvdlTarget.CreateFromArgs(self.args) as target:
- build_command = target._BuildCommand()
- self.assertIn(target._FVDL_PATH, build_command)
- self.assertIn('--sdk', build_command)
- self.assertIn('start', build_command)
- self.assertNotIn('--noacceleration', build_command)
- self.assertIn('--headless', build_command)
- self.assertNotIn('--host-gpu', build_command)
- self.assertNotIn('-N', build_command)
- self.assertIn('--device-proto', build_command)
- self.assertNotIn('--emulator-log', build_command)
- self.assertNotIn('--envs', build_command)
- self.assertTrue(os.path.exists(target._device_proto_file.name))
- correct_ram_amount = False
- with open(target._device_proto_file.name) as file:
- for line in file:
- if line.strip() == 'ram: 8192':
- correct_ram_amount = True
- break
- self.assertTrue(correct_ram_amount)
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckIfNotRequireKVMSetNoAcceleration(
- self, mock_daemon_stop):
- self.args.require_kvm = False
- with FvdlTarget.CreateFromArgs(self.args) as target:
- self.assertIn('--noacceleration', target._BuildCommand())
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckIfNotEnableGraphicsSetHeadless(
- self, mock_daemon_stop):
- self.args.enable_graphics = True
- with FvdlTarget.CreateFromArgs(self.args) as target:
- self.assertNotIn('--headless', target._BuildCommand())
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckIfHardwareGpuSetHostGPU(self, mock_daemon_stop):
- self.args.hardware_gpu = True
- with FvdlTarget.CreateFromArgs(self.args) as target:
- self.assertIn('--host-gpu', target._BuildCommand())
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckIfWithNetworkSetTunTap(self, mock_daemon_stop):
- self.args.with_network = True
- with FvdlTarget.CreateFromArgs(self.args) as target:
- self.assertIn('-N', target._BuildCommand())
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckRamSizeNot8192SetRamSize(self, mock_daemon_stop):
- custom_ram_size = 4096
- self.args.ram_size_mb = custom_ram_size
- with FvdlTarget.CreateFromArgs(self.args) as target:
- self.assertIn('--device-proto', target._BuildCommand())
- self.assertTrue(os.path.exists(target._device_proto_file.name))
- correct_ram_amount = False
- with open(target._device_proto_file.name, 'r') as f:
- self.assertTrue(' ram: {}\n'.format(custom_ram_size) in f.readlines())
- mock_daemon_stop.assert_called_once()
-
- def testBuildCommandCheckEmulatorLogSetup(self, mock_daemon_stop):
- with tempfile.TemporaryDirectory() as logs_dir:
- self.args.logs_dir = logs_dir
- with FvdlTarget.CreateFromArgs(self.args) as target:
- build_command = target._BuildCommand()
- self.assertIn('--emulator-log', build_command)
- self.assertIn('--envs', build_command)
- mock_daemon_stop.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/build/fuchsia/gcs_download.py b/build/fuchsia/gcs_download.py
index 7c8f021e1..534091bce 100644
--- a/build/fuchsia/gcs_download.py
+++ b/build/fuchsia/gcs_download.py
@@ -7,10 +7,14 @@ import os
import subprocess
import sys
import tarfile
+import tempfile
-from common import DIR_SOURCE_ROOT
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
-sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build'))
+from common import DIR_SRC_ROOT
+
+sys.path.append(os.path.join(DIR_SRC_ROOT, 'build'))
import find_depot_tools
@@ -19,28 +23,29 @@ def DownloadAndUnpackFromCloudStorage(url, output_dir):
# Pass the compressed stream directly to 'tarfile'; don't bother writing it
# to disk first.
- cmd = [
- sys.executable,
- os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'cp', url,
- '-'
- ]
-
- logging.debug('Running "%s"', ' '.join(cmd))
- task = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
- tar_data = task.stdout
- task.stdout = None # don't want Popen.communicate() to eat the output
-
- try:
- tarfile.open(mode='r|gz', fileobj=tar_data).extractall(path=output_dir)
- except tarfile.ReadError as exc:
- _, stderr_data = task.communicate()
- stderr_data = stderr_data.decode()
- raise subprocess.CalledProcessError(
- task.returncode, cmd,
- 'Failed to read a tarfile from gsutil.py.\n{}'.format(
- stderr_data if stderr_data else '')) from exc
-
- if task.wait():
- _, stderr_data = task.communicate()
- stderr_data = stderr_data.decode()
- raise subprocess.CalledProcessError(task.returncode, cmd, stderr_data)
+ tmp_file = 'image.tgz'
+ with tempfile.TemporaryDirectory() as tmp_d:
+ tmp_file_location = os.path.join(tmp_d, tmp_file)
+ cmd = [
+ sys.executable,
+ os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'cp', url,
+ tmp_file_location
+ ]
+
+ logging.debug('Running "%s"', ' '.join(cmd))
+ task = subprocess.run(cmd,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ check=True,
+ encoding='utf-8')
+
+ try:
+ tarfile.open(name=tmp_file_location,
+ mode='r|gz').extractall(path=output_dir)
+ except tarfile.ReadError as exc:
+      stderr_data = task.stderr
+ raise subprocess.CalledProcessError(
+ task.returncode, cmd,
+ 'Failed to read a tarfile from gsutil.py.\n{}'.format(
+ stderr_data if stderr_data else '')) from exc
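For readers skimming the change above: the rewrite stops streaming gsutil output straight into tarfile and instead downloads the archive into a temporary directory, then extracts it. A minimal, self-contained sketch of that download-then-extract pattern (the gsutil path, URL, and file name here are placeholders, not values from this change):

    import os
    import subprocess
    import sys
    import tarfile
    import tempfile


    def download_and_unpack(gsutil_py, url, output_dir):
        """Downloads a .tgz from GCS into a temp dir, then extracts it."""
        with tempfile.TemporaryDirectory() as tmp_d:
            archive = os.path.join(tmp_d, 'image.tgz')
            # check=True raises CalledProcessError with the captured stderr
            # if the copy fails, so no manual wait()/poll() handling is needed.
            subprocess.run([sys.executable, gsutil_py, 'cp', url, archive],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           check=True,
                           encoding='utf-8')
            with tarfile.open(archive, mode='r:gz') as tar:
                tar.extractall(path=output_dir)

Writing to disk first trades a little I/O for simpler error handling: a truncated download surfaces as a tarfile.ReadError on a real file rather than a broken pipe.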
diff --git a/build/fuchsia/gcs_download_test.py b/build/fuchsia/gcs_download_test.py
index c78cfe39c..50b2bf1a0 100755
--- a/build/fuchsia/gcs_download_test.py
+++ b/build/fuchsia/gcs_download_test.py
@@ -3,6 +3,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import os
import subprocess
import tarfile
import unittest
@@ -23,26 +24,64 @@ def _mock_task(status_code: int = 0, stderr: str = '') -> mock.Mock:
return task_mock
-@mock.patch('subprocess.Popen')
+@mock.patch('tempfile.TemporaryDirectory')
+@mock.patch('subprocess.run')
@mock.patch('tarfile.open')
+@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows')
class TestDownloadAndUnpackFromCloudStorage(unittest.TestCase):
- def testHappyPath(self, mock_tarfile, mock_popen):
- mock_popen.return_value = _mock_task()
+ def testHappyPath(self, mock_tarfile, mock_run, mock_tmp_dir):
+ mock_run.return_value = _mock_task()
- DownloadAndUnpackFromCloudStorage('', '')
+ tmp_dir = os.path.join('some', 'tmp', 'dir')
+ mock_tmp_dir.return_value.__enter__.return_value = tmp_dir
- def testFailedTarOpen(self, mock_tarfile, mock_popen):
- mock_popen.return_value = _mock_task(stderr='some error')
+ mock_seq = mock.Mock()
+ mock_seq.attach_mock(mock_run, 'Run')
+ mock_seq.attach_mock(mock_tarfile, 'Untar')
+ mock_seq.attach_mock(mock_tmp_dir, 'MkTmpD')
+
+ output_dir = os.path.join('output', 'dir')
+ DownloadAndUnpackFromCloudStorage('gs://some/url', output_dir)
+
+ image_tgz_path = os.path.join(tmp_dir, 'image.tgz')
+ mock_seq.assert_has_calls([
+ mock.call.MkTmpD(),
+ mock.call.MkTmpD().__enter__(),
+ mock.call.Run(mock.ANY,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ check=True,
+ encoding='utf-8'),
+ mock.call.Untar(name=image_tgz_path, mode='r|gz'),
+ mock.call.Untar().extractall(path=output_dir),
+ mock.call.MkTmpD().__exit__(None, None, None)
+ ],
+ any_order=False)
+
+ # Verify cmd.
+ cmd = ' '.join(mock_run.call_args[0][0])
+ self.assertRegex(
+ cmd, r'.*python3?\s.*gsutil.py\s+cp\s+gs://some/url\s+' + image_tgz_path)
+
+ def testFailedTarOpen(self, mock_tarfile, mock_run, mock_tmp_dir):
+ mock_run.return_value = _mock_task(stderr='some error')
mock_tarfile.side_effect = tarfile.ReadError()
with self.assertRaises(subprocess.CalledProcessError):
DownloadAndUnpackFromCloudStorage('', '')
+ mock_tmp_dir.assert_called_once()
+ mock_run.assert_called_once()
+ mock_tarfile.assert_called_once()
- def testBadTaskStatusCode(self, mock_tarfile, mock_popen):
- mock_popen.return_value = _mock_task(stderr='some error', status_code=1)
+ def testBadTaskStatusCode(self, mock_tarfile, mock_run, mock_tmp_dir):
+ mock_run.side_effect = subprocess.CalledProcessError(cmd='some/command',
+ returncode=1)
with self.assertRaises(subprocess.CalledProcessError):
DownloadAndUnpackFromCloudStorage('', '')
+ mock_run.assert_called_once()
+ mock_tarfile.assert_not_called()
+ mock_tmp_dir.assert_called_once()
if __name__ == '__main__':
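The updated test leans on unittest.mock's attach_mock to verify the relative ordering of the patched temp-dir, subprocess, and tarfile calls. A small, self-contained illustration of that technique, with purely illustrative names:

    import unittest
    from unittest import mock


    def do_work(setup, run, teardown):
        setup()
        run()
        teardown()


    class OrderingTest(unittest.TestCase):
        def test_call_order(self):
            setup, run, teardown = mock.Mock(), mock.Mock(), mock.Mock()
            sequencer = mock.Mock()
            # attach_mock records calls to the children on the parent,
            # so assert_has_calls can check their relative order.
            sequencer.attach_mock(setup, 'Setup')
            sequencer.attach_mock(run, 'Run')
            sequencer.attach_mock(teardown, 'Teardown')

            do_work(setup, run, teardown)

            sequencer.assert_has_calls(
                [mock.call.Setup(), mock.call.Run(), mock.call.Teardown()],
                any_order=False)


    if __name__ == '__main__':
        unittest.main()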
diff --git a/build/fuchsia/legacy_ermine_ctl.py b/build/fuchsia/legacy_ermine_ctl.py
deleted file mode 100644
index ed2f3f335..000000000
--- a/build/fuchsia/legacy_ermine_ctl.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2022 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Adds python interface to erminectl tools on workstation products."""
-
-import os
-import subprocess
-import sys
-from typing import Any, List
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-import base_ermine_ctl
-
-
-class LegacyErmineCtl(base_ermine_ctl.BaseErmineCtl):
- def __init__(self, target: Any):
- super().__init__()
- self._target = target
-
- def execute_command_async(self, args: List[str]) -> subprocess.Popen:
- return self._target.RunCommandPiped(args,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- encoding='utf-8')
diff --git a/build/fuchsia/linux_internal.sdk.sha1 b/build/fuchsia/linux_internal.sdk.sha1
index 086956f0d..87334bd75 100644
--- a/build/fuchsia/linux_internal.sdk.sha1
+++ b/build/fuchsia/linux_internal.sdk.sha1
@@ -1 +1 @@
-11.20221214.2.1
+12.20230410.3.1
diff --git a/build/fuchsia/log_manager.py b/build/fuchsia/log_manager.py
deleted file mode 100644
index f2d142af0..000000000
--- a/build/fuchsia/log_manager.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2020 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Creates and manages log file objects.
-
-Provides an object that handles opening and closing file streams for
-logging purposes.
-"""
-
-import os
-
-
-class LogManager(object):
- def __init__(self, logs_dir):
-
- # A dictionary with the log file path as the key and a file stream as value.
- self._logs = {}
-
- self._logs_dir = logs_dir
- if self._logs_dir:
- if not os.path.isdir(self._logs_dir):
- os.makedirs(self._logs_dir)
-
- def IsLoggingEnabled(self):
- return self._logs_dir is not None
-
- def GetLogDirectory(self):
- """Get the directory logs are placed into."""
-
- return self._logs_dir
-
- def Open(self, log_file_name):
- """Open a file stream with log_file_name in the logs directory."""
-
- parent_dir = self.GetLogDirectory()
- if not parent_dir:
- return open(os.devnull, 'w')
- log_file_path = os.path.join(parent_dir, log_file_name)
- if log_file_path in self._logs:
- return self._logs[log_file_path]
- log_file = open(log_file_path, 'w', buffering=1)
- self._logs[log_file_path] = log_file
- return log_file
-
- def Stop(self):
- for log in self._logs.values():
- log.close()
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- self.Stop()
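The removed LogManager above is a small lazily-opening file-stream cache. A condensed sketch of the same shape, for readers who want the pattern without the Fuchsia plumbing (paths and file names are made up):

    import os


    class MiniLogManager:
        """Opens log files under |logs_dir| on demand and closes them on exit."""

        def __init__(self, logs_dir):
            self._logs_dir = logs_dir
            self._logs = {}
            if logs_dir:
                os.makedirs(logs_dir, exist_ok=True)

        def open(self, name):
            if not self._logs_dir:
                return open(os.devnull, 'w')
            path = os.path.join(self._logs_dir, name)
            if path not in self._logs:
                self._logs[path] = open(path, 'w', buffering=1)  # Line-buffered.
            return self._logs[path]

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            for stream in self._logs.values():
                stream.close()


    # Streams are reused per name and closed when the block exits.
    with MiniLogManager(os.path.join('/tmp', 'example_logs')) as logs:
        logs.open('system_log').write('hello\n')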
diff --git a/build/fuchsia/net_test_server.py b/build/fuchsia/net_test_server.py
deleted file mode 100644
index 4a617e7a5..000000000
--- a/build/fuchsia/net_test_server.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-import json
-import logging
-import os
-import re
-import socket
-import sys
-import subprocess
-import tempfile
-
-DIR_SOURCE_ROOT = os.path.abspath(
- os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
-sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
-import chrome_test_server_spawner
-
-
-# Implementation of chrome_test_server_spawner.PortForwarder that uses SSH's
-# remote port forwarding feature to forward ports.
-class SSHPortForwarder(chrome_test_server_spawner.PortForwarder):
- def __init__(self, target):
- self._target = target
-
- # Maps the host (server) port to the device port number.
- self._port_mapping = {}
-
- def Map(self, port_pairs):
- for p in port_pairs:
- _, host_port = p
- self._port_mapping[host_port] = \
- common.ConnectPortForwardingTask(self._target, host_port)
-
- def GetDevicePortForHostPort(self, host_port):
- return self._port_mapping[host_port]
-
- def Unmap(self, device_port):
- for host_port, entry in self._port_mapping.items():
- if entry == device_port:
- forwarding_args = [
- '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port]
- task = self._target.RunCommandPiped([],
- ssh_args=forwarding_args,
- stdout=open(os.devnull, 'w'),
- stderr=subprocess.PIPE)
- task.wait()
- if task.returncode != 0:
- raise Exception(
- 'Error %d when unmapping port %d' % (task.returncode,
- device_port))
- del self._port_mapping[host_port]
- return
-
- raise Exception('Unmap called for unknown port: %d' % device_port)
-
-
-def SetupTestServer(target, test_concurrency):
- """Provisions a forwarding test server and configures |target| to use it.
-
- Args:
- target: The target to which port forwarding to the test server will be
- established.
- test_concurrency: The number of parallel test jobs that will be run.
-
- Returns a tuple of a Popen object for the test server process and the local
- url to use on `target` to reach the test server."""
-
- logging.debug('Starting test server.')
- # The TestLauncher can launch more jobs than the limit specified with
- # --test-launcher-jobs so the max number of spawned test servers is set to
- # twice that limit here. See https://crbug.com/913156#c19.
- spawning_server = chrome_test_server_spawner.SpawningServer(
- 0, SSHPortForwarder(target), test_concurrency * 2)
- forwarded_port = common.ConnectPortForwardingTask(
- target, spawning_server.server_port)
- spawning_server.Start()
-
- logging.debug('Test server listening for connections (port=%d)' %
- spawning_server.server_port)
- logging.debug('Forwarded port is %d' % forwarded_port)
-
- return (spawning_server, 'http://localhost:%d' % forwarded_port)
diff --git a/build/fuchsia/pkg_repo.py b/build/fuchsia/pkg_repo.py
deleted file mode 100644
index e772db21d..000000000
--- a/build/fuchsia/pkg_repo.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright 2019 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-import json
-import logging
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-from compatible_utils import install_symbols
-
-# Maximum amount of time to block while waiting for "pm serve" to come up.
-_PM_SERVE_LISTEN_TIMEOUT_SECS = 10
-
-# Amount of time to sleep in between busywaits for "pm serve"'s port file.
-_PM_SERVE_POLL_INTERVAL = 0.1
-
-_MANAGED_REPO_NAME = 'chromium-test-package-server'
-
-_HOSTS = ['fuchsia.com', 'chrome.com', 'chromium.org']
-
-
-class PkgRepo(object):
- """Abstract interface for a repository used to serve packages to devices."""
-
- def __init__(self):
- pass
-
- def PublishPackage(self, package_path):
- pm_tool = common.GetHostToolPathFromPlatform('pm')
- # Flags for `pm publish`:
- # https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/src/sys/pkg/bin/pm/cmd/pm/publish/publish.go
- # https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/src/sys/pkg/bin/pm/repo/config.go
- # -a: Publish archived package
- # -f <path>: Path to packages
- # -r <path>: Path to repository
- # -vt: Repo versioning based on time rather than monotonic version number
- # increase
- # -v: Verbose output
- subprocess.check_call([
- pm_tool, 'publish', '-a', '-f', package_path, '-r',
- self.GetPath(), '-vt', '-v'
- ], stderr=subprocess.STDOUT)
-
- def GetPath(self):
- pass
-
-
-class ManagedPkgRepo(PkgRepo):
- """Creates and serves packages from an ephemeral repository."""
-
- def __init__(self, target):
- super(ManagedPkgRepo, self).__init__()
- self._with_count = 0
- self._target = target
-
- self._pkg_root = tempfile.mkdtemp()
- pm_tool = common.GetHostToolPathFromPlatform('pm')
- subprocess.check_call([pm_tool, 'newrepo', '-repo', self._pkg_root])
- logging.debug('Creating and serving temporary package root: {}.'.format(
- self._pkg_root))
-
- with tempfile.NamedTemporaryFile() as pm_port_file:
- # Flags for `pm serve`:
- # https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/src/sys/pkg/bin/pm/cmd/pm/serve/serve.go
- self._pm_serve_task = subprocess.Popen([
- pm_tool, 'serve',
- '-d', os.path.join(self._pkg_root, 'repository'),
- '-c', '2', # Use config.json format v2, the default for pkgctl.
- '-q', # Don't log transfer activity.
- '-l', ':0', # Bind to ephemeral port.
- '-f', pm_port_file.name # Publish port number to |pm_port_file|.
- ]) # yapf: disable
-
- # Busywait until 'pm serve' starts the server and publishes its port to
- # a temporary file.
- timeout = time.time() + _PM_SERVE_LISTEN_TIMEOUT_SECS
- serve_port = None
- while not serve_port:
- if time.time() > timeout:
- raise Exception(
- 'Timeout waiting for \'pm serve\' to publish its port.')
-
- with open(pm_port_file.name, 'r', encoding='utf8') as serve_port_file:
- serve_port = serve_port_file.read()
-
- time.sleep(_PM_SERVE_POLL_INTERVAL)
-
- serve_port = int(serve_port)
- logging.debug('pm serve is active on port {}.'.format(serve_port))
-
- remote_port = common.ConnectPortForwardingTask(target, serve_port, 0)
- self._RegisterPkgRepository(self._pkg_root, remote_port)
-
- def __enter__(self):
- self._with_count += 1
- return self
-
- def __exit__(self, type, value, tb):
- # Allows the repository to delete itself when it leaves the scope of a
- # 'with' block.
- self._with_count -= 1
- if self._with_count > 0:
- return
-
- self._UnregisterPkgRepository()
- self._pm_serve_task.kill()
- self._pm_serve_task = None
-
- logging.info('Cleaning up package root: ' + self._pkg_root)
- shutil.rmtree(self._pkg_root)
- self._pkg_root = None
-
- def GetPath(self):
- return self._pkg_root
-
- def _RegisterPkgRepository(self, tuf_repo, remote_port):
- """Configures a device to use a local TUF repository as an installation
- source for packages.
- |tuf_repo|: The host filesystem path to the TUF repository.
- |remote_port|: The reverse-forwarded port used to connect to instance of
- `pm serve` that is serving the contents of |tuf_repo|."""
-
- # Extract the public signing key for inclusion in the config file.
- root_keys = []
- root_json_path = os.path.join(tuf_repo, 'repository', 'root.json')
- root_json = json.load(open(root_json_path, 'r'))
- for root_key_id in root_json['signed']['roles']['root']['keyids']:
- root_keys.append({
- 'type':
- root_json['signed']['keys'][root_key_id]['keytype'],
- 'value':
- root_json['signed']['keys'][root_key_id]['keyval']['public']
- })
-
- # "pm serve" can automatically generate a "config.json" file at query time,
- # but the file is unusable because it specifies URLs with port
- # numbers that are unreachable from across the port forwarding boundary.
-    # So we generate our own config file with the forwarded port numbers
-    # instead.
- config_file = open(os.path.join(tuf_repo, 'repository', 'repo_config.json'),
- 'w')
- json.dump(
- {
- 'repo_url':
- 'fuchsia-pkg://{}'.format(_MANAGED_REPO_NAME),
- 'root_keys':
- root_keys,
- 'mirrors': [{
- "mirror_url": 'http://127.0.0.1:{}'.format(remote_port),
- "subscribe": True
- }],
- 'root_threshold':
- 1,
- 'root_version':
- 1
- }, config_file)
- config_file.close()
-
- # Register the repo.
- return_code = self._target.RunCommand([
- ('pkgctl repo rm fuchsia-pkg://{}; ' +
- 'pkgctl repo add url http://127.0.0.1:{}/repo_config.json; ').format(
- _MANAGED_REPO_NAME, remote_port)
- ])
- if return_code != 0:
- raise Exception(
- 'Error code {} when running pkgctl repo add.'.format(return_code))
-
- self._AddHostReplacementRule(_MANAGED_REPO_NAME)
-
- def _UnregisterPkgRepository(self):
- """Unregisters the package repository."""
-
- logging.debug('Unregistering package repository.')
- self._target.RunCommand(
- ['pkgctl', 'repo', 'rm', 'fuchsia-pkg://{}'.format(_MANAGED_REPO_NAME)])
-
- # Re-enable 'devhost' repo if it's present. This is useful for devices that
- # were booted with 'fx serve'.
- self._AddHostReplacementRule('devhost', silent=True)
-
- def _AddHostReplacementRule(self, host_replacement, silent=False):
- rule = json.dumps({
- 'version':
- '1',
- 'content': [{
- 'host_match': host,
- 'host_replacement': host_replacement,
- 'path_prefix_match': '/',
- 'path_prefix_replacement': '/'
- } for host in _HOSTS]
- })
-
- return_code = self._target.RunCommand(
- ['pkgctl', 'rule', 'replace', 'json', "'{}'".format(rule)])
- if not silent and return_code != 0:
- raise Exception(
- 'Error code {} when running pkgctl rule replace with {}'.format(
- return_code, rule))
-
-
-class ExternalPkgRepo(PkgRepo):
- """Publishes packages to a package repository located and served externally
-  (i.e. located under a Fuchsia build directory and served by "fx serve")."""
-
- def __init__(self, fuchsia_out_dir):
- super(PkgRepo, self).__init__()
-
- self._fuchsia_out_dir = fuchsia_out_dir
- self._pkg_root = os.path.join(fuchsia_out_dir, 'amber-files')
-
- logging.info('Using existing package root: {}'.format(self._pkg_root))
- logging.info('ATTENTION: This will not start a package server. ' +
- 'Please run "fx serve" manually.')
-
- def GetPath(self):
- return self._pkg_root
-
- def PublishPackage(self, package_path):
- super(ExternalPkgRepo, self).PublishPackage(package_path)
-
- install_symbols((package_path), self._fuchsia_out_dir)
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, tb):
- pass
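ManagedPkgRepo above starts `pm serve` on an ephemeral port and busywaits until the tool publishes its chosen port to a file. That polling idiom generalizes; here is a hedged sketch with made-up timeout values and file name:

    import os
    import time


    def wait_for_port_file(port_file, timeout_secs=10, poll_interval=0.1):
        """Polls |port_file| until it contains a port number or times out."""
        deadline = time.time() + timeout_secs
        while time.time() < deadline:
            if os.path.exists(port_file):
                with open(port_file, 'r', encoding='utf8') as f:
                    contents = f.read().strip()
                if contents:
                    return int(contents)
            time.sleep(poll_interval)
        raise TimeoutError('Timed out waiting for %s to be published.' % port_file)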
diff --git a/build/fuchsia/qemu_image.py b/build/fuchsia/qemu_image.py
deleted file mode 100644
index 8c2f9b851..000000000
--- a/build/fuchsia/qemu_image.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2020 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Workaround for qemu-img bug on arm64 platforms with multiple cores.
-
-Runs qemu-img command with timeout and retries the command if it hangs.
-
-See:
-crbug.com/1046861 QEMU is out of date; current version of qemu-img
-is unstable
-
-https://bugs.launchpad.net/qemu/+bug/1805256 qemu-img hangs on
-rcu_call_ready_event logic in Aarch64 when converting images
-
-TODO(crbug.com/1046861): Remove this workaround when the bug is fixed.
-"""
-
-import logging
-import subprocess
-import tempfile
-import time
-
-
-# qemu-img p99 run time on Cavium ThunderX2 servers is 26 seconds.
-# Using 2x the p99 time as the timeout.
-QEMU_IMG_TIMEOUT_SEC = 52
-
-
-def _ExecQemuImgWithTimeout(command):
- """Execute qemu-img command in subprocess with timeout.
-
- Returns: None if command timed out or return code if command completed.
- """
-
- logging.info('qemu-img starting')
- command_output_file = tempfile.NamedTemporaryFile('w')
- p = subprocess.Popen(command, stdout=command_output_file,
- stderr=subprocess.STDOUT)
- start_sec = time.time()
- while p.poll() is None and time.time() - start_sec < QEMU_IMG_TIMEOUT_SEC:
- time.sleep(1)
- stop_sec = time.time()
- logging.info('qemu-img duration: %f' % float(stop_sec - start_sec))
-
- if p.poll() is None:
- returncode = None
- p.kill()
- p.wait()
- else:
- returncode = p.returncode
-
- log_level = logging.WARN if returncode else logging.DEBUG
- for line in open(command_output_file.name, 'r'):
- logging.log(log_level, 'qemu-img stdout: ' + line.strip())
-
- return returncode
-
-
-def ExecQemuImgWithRetry(command):
- """ Execute qemu-img command in subprocess with 2 retries.
-
- Raises CalledProcessError if command does not complete successfully.
- """
-
- tries = 0
- status = None
- while status is None and tries <= 2:
- tries += 1
- status = _ExecQemuImgWithTimeout(command)
-
- if status is None:
- raise subprocess.CalledProcessError(-1, command)
- if status:
- raise subprocess.CalledProcessError(status, command)
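The removed qemu_image.py hand-rolls a poll loop to bound qemu-img's runtime and retries on hangs. The same behaviour can be sketched more compactly with subprocess.run's timeout support; the 52-second figure mirrors the comment above, but the wrapper itself is only an approximation of the deleted helper:

    import subprocess

    QEMU_IMG_TIMEOUT_SEC = 52  # Roughly 2x the observed p99 runtime.


    def exec_with_retry(command, retries=2):
        """Runs |command|, retrying on timeout; raises on failure."""
        for attempt in range(retries + 1):
            try:
                # run() kills the child and raises TimeoutExpired if it hangs.
                subprocess.run(command, check=True, timeout=QEMU_IMG_TIMEOUT_SEC)
                return
            except subprocess.TimeoutExpired:
                if attempt == retries:
                    raise subprocess.CalledProcessError(-1, command)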
diff --git a/build/fuchsia/qemu_target.py b/build/fuchsia/qemu_target.py
deleted file mode 100644
index da3458f8d..000000000
--- a/build/fuchsia/qemu_target.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Implements commands for running and interacting with Fuchsia on QEMU."""
-
-import boot_data
-import common
-import emu_target
-import hashlib
-import logging
-import os
-import platform
-import qemu_image
-import shutil
-import subprocess
-import sys
-import tempfile
-
-from common import EnsurePathExists, GetHostArchFromPlatform, \
- GetEmuRootForPlatform
-from qemu_image import ExecQemuImgWithRetry
-from target import FuchsiaTargetException
-
-
-# Virtual networking configuration data for QEMU.
-HOST_IP_ADDRESS = '10.0.2.2'
-GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b'
-
-# Capacity of the system's blobstore volume.
-EXTENDED_BLOBSTORE_SIZE = 2147483648 # 2GB
-
-
-def GetTargetType():
- return QemuTarget
-
-
-class QemuTarget(emu_target.EmuTarget):
- EMULATOR_NAME = 'qemu'
-
- def __init__(self, out_dir, target_cpu, cpu_cores, require_kvm, ram_size_mb,
- logs_dir):
- super(QemuTarget, self).__init__(out_dir, target_cpu, logs_dir, None)
- self._cpu_cores=cpu_cores
- self._require_kvm=require_kvm
- self._ram_size_mb=ram_size_mb
- self._host_ssh_port = None
-
- @staticmethod
- def CreateFromArgs(args):
- return QemuTarget(args.out_dir, args.target_cpu, args.cpu_cores,
- args.require_kvm, args.ram_size_mb, args.logs_dir)
-
- def _IsKvmEnabled(self):
- kvm_supported = sys.platform.startswith('linux') and \
- os.access('/dev/kvm', os.R_OK | os.W_OK)
- same_arch = \
- (self._target_cpu == 'arm64' and platform.machine() == 'aarch64') or \
- (self._target_cpu == 'x64' and platform.machine() == 'x86_64')
- if kvm_supported and same_arch:
- return True
- elif self._require_kvm:
- if same_arch:
- if not os.path.exists('/dev/kvm'):
- kvm_error = 'File /dev/kvm does not exist. Please install KVM first.'
- else:
- kvm_error = 'To use KVM acceleration, add user to the kvm group '\
- 'with "sudo usermod -a -G kvm $USER". Log out and back '\
- 'in for the change to take effect.'
- raise FuchsiaTargetException(kvm_error)
- else:
- raise FuchsiaTargetException('KVM unavailable when CPU architecture '\
- 'of host is different from that of'\
- ' target. See --allow-no-kvm.')
- else:
- return False
-
- def _BuildQemuConfig(self):
- boot_data.AssertBootImagesExist(self._pb_path)
-
- emu_command = [
- '-kernel',
- EnsurePathExists(boot_data.GetTargetFile(self._kernel, self._pb_path)),
- '-initrd',
- EnsurePathExists(
- boot_data.GetBootImage(self._out_dir, self._pb_path,
- self._ramdisk)),
- '-m',
- str(self._ram_size_mb),
- '-smp',
- str(self._cpu_cores),
-
- # Attach the blobstore and data volumes. Use snapshot mode to discard
- # any changes.
- '-snapshot',
- '-drive',
- 'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on,cache=unsafe' %
- _EnsureBlobstoreQcowAndReturnPath(self._out_dir, self._disk_image,
- self._pb_path),
- '-object',
- 'iothread,id=iothread0',
- '-device',
- 'virtio-blk-pci,drive=blobstore,iothread=iothread0',
-
- # Use stdio for the guest OS only; don't attach the QEMU interactive
- # monitor.
- '-serial',
- 'stdio',
- '-monitor',
- 'none',
- ]
-
- # Configure the machine to emulate, based on the target architecture.
- if self._target_cpu == 'arm64':
- emu_command.extend([
- '-machine',
- 'virt-2.12,gic-version=host',
- ])
- else:
- emu_command.extend([
- '-machine', 'q35',
- ])
-
- # Configure virtual network.
- netdev_type = 'virtio-net-pci'
- netdev_config = 'type=user,id=net0,restrict=off'
-
- self._host_ssh_port = common.GetAvailableTcpPort()
- netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port
- emu_command.extend([
- '-netdev', netdev_config,
- '-device', '%s,netdev=net0,mac=%s' % (netdev_type, GUEST_MAC_ADDRESS),
- ])
-
- # Configure the CPU to emulate.
- # On Linux, we can enable lightweight virtualization (KVM) if the host and
- # guest architectures are the same.
- if self._IsKvmEnabled():
- kvm_command = ['-enable-kvm', '-cpu']
- if self._target_cpu == 'arm64':
- kvm_command.append('host')
- else:
- kvm_command.append('host,migratable=no,+invtsc')
- else:
- logging.warning('Unable to launch %s with KVM acceleration. '
- 'The guest VM will be slow.' % (self.EMULATOR_NAME))
- if self._target_cpu == 'arm64':
- kvm_command = ['-cpu', 'cortex-a53']
- else:
- kvm_command = ['-cpu', 'Haswell,+smap,-check,-fsgsbase']
-
- emu_command.extend(kvm_command)
-
- kernel_args = boot_data.GetKernelArgs()
-
- # TERM=dumb tells the guest OS to not emit ANSI commands that trigger
- # noisy ANSI spew from the user's terminal emulator.
- kernel_args.append('TERM=dumb')
-
- # Construct kernel cmd line
- kernel_args.append('kernel.serial=legacy')
-
- # Don't 'reboot' the emulator if the kernel crashes
- kernel_args.append('kernel.halt-on-panic=true')
-
- emu_command.extend(['-append', ' '.join(kernel_args)])
-
- return emu_command
-
- def _BuildCommand(self):
- if self._target_cpu == 'arm64':
- qemu_exec = 'qemu-system-' + 'aarch64'
- elif self._target_cpu == 'x64':
- qemu_exec = 'qemu-system-' + 'x86_64'
- else:
- raise Exception('Unknown target_cpu %s:' % self._target_cpu)
-
- qemu_command = [
- os.path.join(GetEmuRootForPlatform(self.EMULATOR_NAME), 'bin',
- qemu_exec)
- ]
- qemu_command.extend(self._BuildQemuConfig())
- qemu_command.append('-nographic')
- return qemu_command
-
- def _Shutdown(self):
- if not self._emu_process:
- logging.error('%s did not start' % (self.EMULATOR_NAME))
- return
- returncode = self._emu_process.poll()
- if returncode == None:
- logging.info('Shutting down %s' % (self.EMULATOR_NAME))
- self._emu_process.kill()
- elif returncode == 0:
- logging.info('%s quit unexpectedly without errors' % self.EMULATOR_NAME)
- elif returncode < 0:
- logging.error('%s was terminated by signal %d' %
- (self.EMULATOR_NAME, -returncode))
- else:
- logging.error('%s quit unexpectedly with exit code %d' %
- (self.EMULATOR_NAME, returncode))
-
- def _HasNetworking(self):
- return False
-
- def _IsEmuStillRunning(self):
- if not self._emu_process:
- return False
- return os.waitpid(self._emu_process.pid, os.WNOHANG)[0] == 0
-
- def _GetEndpoint(self):
- if not self._IsEmuStillRunning():
- raise Exception('%s quit unexpectedly.' % (self.EMULATOR_NAME))
- return (self.LOCAL_ADDRESS, self._host_ssh_port)
-
-
-def _ComputeFileHash(filename):
- hasher = hashlib.md5()
- with open(filename, 'rb') as f:
- buf = f.read(4096)
- while buf:
- hasher.update(buf)
- buf = f.read(4096)
-
- return hasher.hexdigest()
-
-
-def _EnsureBlobstoreQcowAndReturnPath(out_dir, kernel, image_path):
- """Returns a file containing the Fuchsia blobstore in a QCOW format,
- with extra buffer space added for growth."""
-
- qimg_tool = os.path.join(common.GetEmuRootForPlatform('qemu'),
- 'bin', 'qemu-img')
- fvm_tool = common.GetHostToolPathFromPlatform('fvm')
- blobstore_path = boot_data.GetTargetFile(kernel, image_path)
- qcow_path = os.path.join(out_dir, 'gen', 'blobstore.qcow')
-
- # Check a hash of the blobstore to determine if we can re-use an existing
- # extended version of it.
- blobstore_hash_path = os.path.join(out_dir, 'gen', 'blobstore.hash')
- current_blobstore_hash = _ComputeFileHash(blobstore_path)
-
- if os.path.exists(blobstore_hash_path) and os.path.exists(qcow_path):
- if current_blobstore_hash == open(blobstore_hash_path, 'r').read():
- return qcow_path
-
- # Add some extra room for growth to the Blobstore volume.
- # Fuchsia is unable to automatically extend FVM volumes at runtime so the
- # volume enlargement must be performed prior to QEMU startup.
-
- # The 'fvm' tool only supports extending volumes in-place, so make a
- # temporary copy of 'blobstore.bin' before it's mutated.
- extended_blobstore = tempfile.NamedTemporaryFile()
- shutil.copyfile(blobstore_path, extended_blobstore.name)
- subprocess.check_call([fvm_tool, extended_blobstore.name, 'extend',
- '--length', str(EXTENDED_BLOBSTORE_SIZE),
- blobstore_path])
-
- # Construct a QCOW image from the extended, temporary FVM volume.
- # The result will be retained in the build output directory for re-use.
- qemu_img_cmd = [qimg_tool, 'convert', '-f', 'raw', '-O', 'qcow2',
- '-c', extended_blobstore.name, qcow_path]
- # TODO(crbug.com/1046861): Remove arm64 call with retries when bug is fixed.
- if common.GetHostArchFromPlatform() == 'arm64':
- qemu_image.ExecQemuImgWithRetry(qemu_img_cmd)
- else:
- subprocess.check_call(qemu_img_cmd)
-
- # Write out a hash of the original blobstore file, so that subsequent runs
- # can trivially check if a cached extended FVM volume is available for reuse.
- with open(blobstore_hash_path, 'w') as blobstore_hash_file:
- blobstore_hash_file.write(current_blobstore_hash)
-
- return qcow_path
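_EnsureBlobstoreQcowAndReturnPath above avoids repeating the expensive fvm/qemu-img step by keying the cached QCOW on an MD5 of the source blobstore. A stripped-down sketch of that cache check; the rebuild callback and file names are hypothetical:

    import hashlib
    import os


    def file_md5(path, chunk_size=4096):
        hasher = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                hasher.update(chunk)
        return hasher.hexdigest()


    def ensure_derived_artifact(source_path, cached_path, hash_path, rebuild):
        """Rebuilds |cached_path| only when |source_path| has changed."""
        current_hash = file_md5(source_path)
        if os.path.exists(hash_path) and os.path.exists(cached_path):
            with open(hash_path, 'r') as f:
                if f.read() == current_hash:
                    return cached_path  # Cache hit: reuse the existing artifact.
        rebuild(source_path, cached_path)
        with open(hash_path, 'w') as f:
            f.write(current_hash)
        return cached_path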
diff --git a/build/fuchsia/qemu_target_test.py b/build/fuchsia/qemu_target_test.py
deleted file mode 100755
index bdec7992b..000000000
--- a/build/fuchsia/qemu_target_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import qemu_target
-import shutil
-import subprocess
-import tempfile
-import time
-import unittest
-
-TEST_PAYLOAD = "Let's get this payload across the finish line!"
-
-tmpdir = tempfile.mkdtemp()
-
-# Register the target with the context manager so that it always gets
-# torn down on process exit. Otherwise there might be lingering QEMU instances
-# if Python crashes or is interrupted.
-with qemu_target.QemuTarget(tmpdir, 'x64') as target:
- class TestQemuTarget(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- target.Start()
-
- @classmethod
- def tearDownClass(cls):
- target.Shutdown()
- shutil.rmtree(tmpdir)
-
- def testRunCommand(self):
- self.assertEqual(0, target.RunCommand(['true']))
- self.assertEqual(1, target.RunCommand(['false']))
-
- def testRunCommandPiped(self):
- proc = target.RunCommandPiped(['cat'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE)
- proc.stdin.write(TEST_PAYLOAD)
- proc.stdin.flush()
- proc.stdin.close()
- self.assertEqual(TEST_PAYLOAD, proc.stdout.readline())
- proc.kill()
-
-
- if __name__ == '__main__':
- unittest.main()
diff --git a/build/fuchsia/remote_cmd.py b/build/fuchsia/remote_cmd.py
deleted file mode 100644
index e103851bd..000000000
--- a/build/fuchsia/remote_cmd.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import subprocess
-
-from common import SubprocessCallWithTimeout
-
-_SSH = ['ssh']
-_SCP = ['scp', '-C'] # Use gzip compression.
-_SSH_LOGGER = logging.getLogger('ssh')
-
-COPY_TO_TARGET = 0
-COPY_FROM_TARGET = 1
-
-
-def _IsLinkLocalIPv6(hostname):
- return hostname.startswith('fe80::')
-
-def _EscapeIfIPv6Address(address):
- if ':' in address:
- return '[' + address + ']'
- else:
- return address
-
-class CommandRunner(object):
- """Helper class used to execute commands on a remote host over SSH."""
-
- def __init__(self, config_path, host, port):
- """Creates a CommandRunner that connects to the specified |host| and |port|
- using the ssh config at the specified |config_path|.
-
- config_path: Full path to SSH configuration.
- host: The hostname or IP address of the remote host.
- port: The port to connect to."""
-
- self._config_path = config_path
- self._host = host
- self._port = port
-
- def _GetSshCommandLinePrefix(self):
- cmd_prefix = _SSH + ['-F', self._config_path, self._host]
- if self._port:
- cmd_prefix += ['-p', str(self._port)]
- return cmd_prefix
-
- def RunCommand(self, command, silent=False, timeout_secs=None):
- """Executes an SSH command on the remote host and blocks until completion.
-
- command: A list of strings containing the command and its arguments.
- silent: Suppresses all logging in case of success or failure.
- timeout_secs: If set, limits the amount of time that |command| may run.
- Commands which exceed the timeout are killed.
-
- Returns the exit code from the remote command."""
-
- ssh_command = self._GetSshCommandLinePrefix() + command
- _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command))
- retval, stdout, stderr = SubprocessCallWithTimeout(ssh_command,
- timeout_secs)
- if silent:
- return retval
-
- stripped_stdout = stdout.strip()
- stripped_stderr = stderr.strip()
- if retval:
- _SSH_LOGGER.error('"%s" failed with exit code %d%s%s',
- ' '.join(ssh_command),
- retval,
- (' and stdout: "%s"' % stripped_stdout) \
- if stripped_stdout else '',
- (' and stderr: "%s"' % stripped_stderr) \
- if stripped_stderr else '',
- )
- elif stripped_stdout or stripped_stderr:
- _SSH_LOGGER.debug('succeeded with%s%s',
- (' stdout: "%s"' % stripped_stdout) \
- if stripped_stdout else '',
- (' stderr: "%s"' % stripped_stderr) \
- if stripped_stderr else '',
- )
-
- return retval
-
- def RunCommandPiped(self, command, stdout, stderr, ssh_args = None, **kwargs):
- """Executes an SSH command on the remote host and returns a process object
- with access to the command's stdio streams. Does not block.
-
- command: A list of strings containing the command and its arguments.
- stdout: subprocess stdout. Must not be None.
- stderr: subprocess stderr. Must not be None.
- ssh_args: Arguments that will be passed to SSH.
- kwargs: A dictionary of parameters to be passed to subprocess.Popen().
- The parameters can be used to override stdin and stdout, for
- example.
-
- Returns a Popen object for the command."""
-
- if not stdout or not stderr:
- raise Exception('Stdout/stderr must be specified explicitly')
-
- if not ssh_args:
- ssh_args = []
-
- ssh_command = self._GetSshCommandLinePrefix() + ssh_args + ['--'] + command
- _SSH_LOGGER.debug(' '.join(ssh_command))
- return subprocess.Popen(ssh_command, stdout=stdout, stderr=stderr, **kwargs)
-
-
- def RunScp(self, sources, dest, direction, recursive=False):
- """Copies a file to or from a remote host using SCP and blocks until
- completion.
-
- sources: Paths of the files to be copied.
- dest: The path that |source| will be copied to.
- direction: Indicates whether the file should be copied to
- or from the remote side.
- Valid values are COPY_TO_TARGET or COPY_FROM_TARGET.
- recursive: If true, performs a recursive copy.
-
- Function will raise an assertion if a failure occurred."""
-
- scp_command = _SCP[:]
- if _SSH_LOGGER.getEffectiveLevel() == logging.DEBUG:
- scp_command.append('-v')
- if recursive:
- scp_command.append('-r')
-
- host = _EscapeIfIPv6Address(self._host)
-
- if direction == COPY_TO_TARGET:
- dest = "%s:%s" % (host, dest)
- else:
- sources = ["%s:%s" % (host, source) for source in sources]
-
- scp_command += ['-F', self._config_path]
- if self._port:
- scp_command += ['-P', str(self._port)]
- scp_command += sources
- scp_command += [dest]
-
- _SSH_LOGGER.debug(' '.join(scp_command))
- try:
- scp_output = subprocess.check_output(scp_command,
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as error:
- _SSH_LOGGER.info(error.output)
- raise
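The removed CommandRunner derives every remote invocation from one ssh prefix (config file, host, optional port). A minimal sketch of that prefix construction with a blocking run; host, port, and config path are whatever the caller supplies, and the timeout semantics here are simplified relative to the original:

    import subprocess


    class MiniSshRunner:
        """Builds and runs ssh commands against a fixed host/port/config."""

        def __init__(self, config_path, host, port=None):
            self._config_path = config_path
            self._host = host
            self._port = port

        def _prefix(self):
            cmd = ['ssh', '-F', self._config_path, self._host]
            if self._port:
                cmd += ['-p', str(self._port)]
            return cmd

        def run(self, command, timeout_secs=None):
            """Runs |command| remotely and returns its exit code."""
            return subprocess.run(self._prefix() + command,
                                  timeout=timeout_secs).returncode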
diff --git a/build/fuchsia/run_test_package.py b/build/fuchsia/run_test_package.py
deleted file mode 100644
index 13c9ac30c..000000000
--- a/build/fuchsia/run_test_package.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Contains a helper function for deploying and executing a packaged
-executable on a Target."""
-
-from __future__ import print_function
-
-import common
-import hashlib
-import logging
-import multiprocessing
-import os
-import re
-import select
-import subprocess
-import sys
-import threading
-import time
-import uuid
-
-from exit_on_sig_term import ExitOnSigTerm
-from symbolizer import BuildIdsPaths, RunSymbolizer
-
-FAR = common.GetHostToolPathFromPlatform('far')
-
-# Amount of time to wait for the termination of the system log output thread.
-_JOIN_TIMEOUT_SECS = 5
-
-
-def _AttachKernelLogReader(target):
- """Attaches a kernel log reader as a long-running SSH task."""
-
- logging.info('Attaching kernel logger.')
- return target.RunCommandPiped(['log_listener',
- '--since_now',
- '--hide_metadata',
- '--tag',
- 'klog',
- ],
- stdin=open(os.devnull, 'r'),
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
-
-
-class MergedInputStream(object):
- """Merges a number of input streams into a UTF-8 encoded UNIX pipe on a
- dedicated thread. Terminates when the file descriptor of the primary stream
- (the first in the sequence) is closed."""
-
- def __init__(self, streams):
- assert len(streams) > 0
- self._streams = streams
- self._output_stream = None
- self._thread = None
-
- def Start(self):
- """Returns a pipe to the merged output stream."""
-
- read_pipe, write_pipe = os.pipe()
-
- self._output_stream = os.fdopen(write_pipe, 'wb', 0)
- self._thread = threading.Thread(target=self._Run)
- self._thread.start()
-
- return os.fdopen(read_pipe, 'r')
-
- def _Run(self):
- streams_by_fd = {}
- primary_fd = self._streams[0].fileno()
- for s in self._streams:
- streams_by_fd[s.fileno()] = s
-
- # Set when the primary FD is closed. Input from other FDs will continue to
- # be processed until select() runs dry.
- flush = False
-
- # The lifetime of the MergedInputStream is bound to the lifetime of
- # |primary_fd|.
- while primary_fd:
- # When not flushing: block until data is read or an exception occurs.
- rlist, _, xlist = select.select(streams_by_fd, [], streams_by_fd)
-
- if len(rlist) == 0 and flush:
- break
-
- for fileno in xlist:
- del streams_by_fd[fileno]
- if fileno == primary_fd:
- primary_fd = None
-
- for fileno in rlist:
- line = streams_by_fd[fileno].readline()
- if line:
- self._output_stream.write(line)
- else:
- del streams_by_fd[fileno]
- if fileno == primary_fd:
- primary_fd = None
-
- # Flush the streams by executing nonblocking reads from the input file
- # descriptors until no more data is available, or all the streams are
- # closed.
- while streams_by_fd:
- rlist, _, _ = select.select(streams_by_fd, [], [], 0)
-
- if not rlist:
- break
-
- for fileno in rlist:
- line = streams_by_fd[fileno].readline()
- if line:
- self._output_stream.write(line)
- else:
- del streams_by_fd[fileno]
-
-
-def _GetComponentUri(package_name):
- return 'fuchsia-pkg://fuchsia.com/%s#meta/%s.cm' % (package_name,
- package_name)
-
-
-def _DrainStreamToStdout(stream, quit_event):
- """Outputs the contents of |stream| until |quit_event| is set."""
-
- while not quit_event.is_set():
- rlist, _, _ = select.select([stream], [], [], 0.1)
- if rlist:
- line = rlist[0].readline()
- if not line:
- return
- print(line.rstrip())
-
-
-def _SymbolizeStream(input_fd, ids_txt_files):
- """Returns a Popen object for a symbolizer process invocation.
- input_fd: The data to symbolize.
- ids_txt_files: A list of ids.txt files which contain symbol data."""
-
- return RunSymbolizer(input_fd, subprocess.PIPE, ids_txt_files)
-
-
-def RunTestPackage(target, ffx_session, package_paths, package_name,
- package_args):
- """Installs the Fuchsia package at |package_path| on the target,
- executes it with |package_args|, and symbolizes its output.
-
- target: The deployment Target object that will run the package.
- ffx_session: An FfxSession object.
- package_paths: The paths to the .far packages to be installed.
- package_name: The name of the primary package to run.
- package_args: The arguments which will be passed to the Fuchsia process.
-
- Returns the exit code of the remote package process."""
-
- assert ffx_session
- kernel_logger = _AttachKernelLogReader(target)
- try:
- # Spin up a thread to asynchronously dump the system log to stdout
- # for easier diagnoses of early, pre-execution failures.
- log_output_quit_event = multiprocessing.Event()
- log_output_thread = threading.Thread(target=lambda: _DrainStreamToStdout(
- kernel_logger.stdout, log_output_quit_event))
- log_output_thread.daemon = True
- log_output_thread.start()
-
- with ExitOnSigTerm(), target.GetPkgRepo():
- on_target = True
- start_time = time.time()
- target.InstallPackage(package_paths)
- logging.info('Test installed in {:.2f} seconds.'.format(time.time() -
- start_time))
-
- log_output_quit_event.set()
- log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
-
- logging.info('Running application.')
-
- component_uri = _GetComponentUri(package_name)
- process = ffx_session.test_run(target.GetFfxTarget(), component_uri,
- package_args)
-
- # Symbolize klog and systemlog as separate streams. The symbolizer
- # protocol is stateful, so comingled raw stack dumps can yield
- # unsymbolizable garbage data.
- ids_txt_paths = BuildIdsPaths(package_paths)
- with _SymbolizeStream(process.stdout, ids_txt_paths) as \
- symbolized_stdout, \
- _SymbolizeStream(kernel_logger.stdout, ids_txt_paths) as \
- symbolized_klog:
- output_stream = MergedInputStream([symbolized_stdout.stdout,
- symbolized_klog.stdout]).Start()
- for next_line in output_stream:
- print(next_line.rstrip())
- symbolized_stdout.wait() # Should return instantly.
- symbolized_klog.kill() # klog is never-ending and must be killed.
-
- process.wait()
- if process.returncode == 0:
- logging.info('Process exited normally with status code 0.')
- else:
- # The test runner returns an error status code if *any* tests fail,
- # so we should proceed anyway.
- logging.warning('Process exited with status code %d.' %
- process.returncode)
-
- finally:
- logging.info('Terminating kernel log reader.')
- log_output_quit_event.set()
- log_output_thread.join()
- kernel_logger.kill()
-
- return process.returncode
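run_test_package.py drains the kernel log on a background thread guarded by an Event so pre-execution failures stay visible without blocking the main flow. A self-contained sketch of that thread-plus-select wiring, feeding the drain loop from a local pipe instead of an SSH stream:

    import os
    import select
    import threading
    import time


    def drain_stream_to_stdout(stream, quit_event, poll_secs=0.1):
        """Echoes lines from |stream| until it closes or |quit_event| is set."""
        while not quit_event.is_set():
            readable, _, _ = select.select([stream], [], [], poll_secs)
            if readable:
                line = readable[0].readline()
                if not line:
                    return  # EOF: the writer closed its end.
                print(line.rstrip())


    read_fd, write_fd = os.pipe()
    reader = os.fdopen(read_fd, 'r')
    writer = os.fdopen(write_fd, 'w')
    quit_event = threading.Event()
    thread = threading.Thread(target=drain_stream_to_stdout,
                              args=(reader, quit_event), daemon=True)
    thread.start()
    writer.write('klog: boot complete\n')
    writer.flush()
    time.sleep(0.2)  # Give the drain thread a chance to echo the line.
    quit_event.set()
    thread.join(timeout=5)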
diff --git a/build/fuchsia/runner_exceptions.py b/build/fuchsia/runner_exceptions.py
deleted file mode 100644
index ca465c4a9..000000000
--- a/build/fuchsia/runner_exceptions.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2020 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Converts exceptions to return codes and prints error messages.
-
-This makes it easier to query build tables for particular error types as
-exit codes are visible to queries while exception stack traces are not."""
-
-import errno
-import fcntl
-import logging
-import os
-import subprocess
-import sys
-import traceback
-
-from device_target import ProvisionDeviceException
-from target import FuchsiaTargetException
-
-def _PrintException(value, trace):
- """Prints stack trace and error message for the current exception."""
-
- traceback.print_tb(trace)
- print(str(value))
-
-
-def IsStdoutBlocking():
- """Returns True if sys.stdout is blocking or False if non-blocking.
-
- sys.stdout should always be blocking. Non-blocking is associated with
- intermittent IOErrors (crbug.com/1080858).
- """
-
- nonblocking = fcntl.fcntl(sys.stdout, fcntl.F_GETFL) & os.O_NONBLOCK
- return not nonblocking
-
-
-def HandleExceptionAndReturnExitCode():
- """Maps the current exception to a return code and prints error messages.
-
- Mapped exception types are assigned blocks of 8 return codes starting at 64.
- The choice of 64 as the starting code is based on the Advanced Bash-Scripting
- Guide (http://tldp.org/LDP/abs/html/exitcodes.html).
-
- A generic exception is mapped to the start of the block. More specific
- exceptions are mapped to numbers inside the block. For example, a
- FuchsiaTargetException is mapped to return code 64, unless it involves SSH
- in which case it is mapped to return code 65.
-
- Exceptions not specifically mapped go to return code 1.
-
- Returns the mapped return code."""
-
- (type, value, trace) = sys.exc_info()
- _PrintException(value, trace)
-
- if type is FuchsiaTargetException:
- if 'ssh' in str(value).lower():
- print('Error: FuchsiaTargetException: SSH to Fuchsia target failed.')
- return 65
- return 64
- elif type is IOError:
- if value.errno == errno.EAGAIN:
- logging.info('Python print to sys.stdout probably failed')
- if not IsStdoutBlocking():
- logging.warn('sys.stdout is non-blocking')
- return 73
- return 72
- elif type is subprocess.CalledProcessError:
- if os.path.basename(value.cmd[0]) == 'scp':
- print('Error: scp operation failed - %s' % str(value))
- return 81
- if os.path.basename(value.cmd[0]) == 'qemu-img':
- print('Error: qemu-img fuchsia image generation failed.')
- return 82
- return 80
- elif type is ProvisionDeviceException:
- print('Error: Failed to pave device')
- return 90
- else:
- return 1
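runner_exceptions.py groups failures into blocks of eight exit codes starting at 64 so that dashboards can distinguish failure categories without parsing stack traces. A compact sketch of that mapping idea; the exception classes and code assignments below are illustrative, not the original table:

    import subprocess
    import sys


    def exit_code_for(exc):
        """Maps an exception to a coarse exit-code block (64+), else 1."""
        if isinstance(exc, ConnectionError):
            return 65 if 'ssh' in str(exc).lower() else 64
        if isinstance(exc, IOError):
            return 72
        if isinstance(exc, subprocess.CalledProcessError):
            return 80
        return 1


    def main():
        try:
            raise ConnectionError('ssh handshake failed')
        except Exception as exc:  # pylint: disable=broad-except
            sys.exit(exit_code_for(exc))


    if __name__ == '__main__':
        main()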
diff --git a/build/fuchsia/start_emulator.py b/build/fuchsia/start_emulator.py
deleted file mode 100755
index 29005bd58..000000000
--- a/build/fuchsia/start_emulator.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env vpython3
-# Copyright 2021 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Starts up a long running emulator for unit testing and developer use."""
-
-import argparse
-import common
-import common_args
-import logging
-import os
-import time
-import subprocess
-
-from exit_on_sig_term import ExitOnSigTerm
-from fvdl_target import FvdlTarget
-
-
-def main():
- parser = argparse.ArgumentParser(
- description='Launches a long-running emulator that can '
- 'be re-used for multiple test runs.')
- AddLongRunningArgs(parser)
- FvdlTarget.RegisterArgs(parser)
- common_args.AddCommonArgs(parser)
- args = parser.parse_args()
- args.out_dir = None
- args.device = 'fvdl'
- args.cpu_cores = 4
- common_args.ConfigureLogging(args)
- with ExitOnSigTerm(), \
- common_args.GetDeploymentTargetForArgs(args) as fvdl_target:
- if fvdl_target._with_network:
- logging.info('If you haven\'t set up tuntap, you may be prompted '
- 'for your sudo password to set up tuntap.')
- fvdl_target.Start()
- logging.info(
- 'Emulator successfully started. You can now run Chrome '
- 'Fuchsia tests with "%s" to target this emulator.',
- fvdl_target.GetFfxTarget().format_runner_options())
- logging.info('Type Ctrl-C in this terminal to shut down the emulator.')
- try:
- while fvdl_target._IsEmuStillRunning():
- time.sleep(10)
- except KeyboardInterrupt:
- logging.info('Ctrl-C received; shutting down the emulator.')
- pass # Silently shut down the emulator
- except SystemExit:
- logging.info('SIGTERM received; shutting down the emulator.')
- pass # Silently shut down the emulator
-
-
-def AddLongRunningArgs(arg_parser):
- fvdl_args = arg_parser.add_argument_group('FVDL arguments')
- fvdl_args.add_argument('--target-cpu',
- default=common_args.GetHostArchFromPlatform(),
- help='Set target_cpu for the emulator. Defaults '
- 'to the same architecture as host cpu.')
- fvdl_args.add_argument('--without-network',
- action='store_false',
- dest='with_network',
- default=True,
- help='Run emulator without emulated nic via tun/tap.')
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/fuchsia/symbolizer.py b/build/fuchsia/symbolizer.py
deleted file mode 100644
index aea537740..000000000
--- a/build/fuchsia/symbolizer.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import subprocess
-
-from common import SDK_ROOT
-from common import GetHostArchFromPlatform
-from common import GetHostToolPathFromPlatform
-
-
-def BuildIdsPaths(package_paths):
- """Generates build ids paths for symbolizer processes."""
-
- return [
- os.path.join(os.path.dirname(package_path), 'ids.txt')
- for package_path in package_paths
- ]
-
-
-def RunSymbolizer(input_fd, output_fd, ids_txt_paths):
- """Starts a symbolizer process.
-
- input_fd: Input file to be symbolized.
- output_fd: Output file for symbolizer stdout and stderr.
- ids_txt_paths: Path to the ids.txt files which map build IDs to
- unstripped binaries on the filesystem.
- Returns a Popen object for the started process."""
-
- symbolizer = GetHostToolPathFromPlatform('symbolizer')
- symbolizer_cmd = [
- symbolizer, '--omit-module-lines', '--build-id-dir',
- os.path.join(SDK_ROOT, '.build-id')
- ]
- for ids_txt in ids_txt_paths:
- symbolizer_cmd.extend(['--ids-txt', ids_txt])
-
- logging.debug('Running "%s".' % ' '.join(symbolizer_cmd))
- return subprocess.Popen(symbolizer_cmd,
- stdin=input_fd,
- stdout=output_fd,
- stderr=subprocess.STDOUT,
- close_fds=True)
diff --git a/build/fuchsia/target.py b/build/fuchsia/target.py
deleted file mode 100644
index bf09a7405..000000000
--- a/build/fuchsia/target.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import subprocess
-import time
-
-import common
-import ffx_session
-import remote_cmd
-
-from log_manager import LogManager
-from symbolizer import BuildIdsPaths, RunSymbolizer
-
-
-_SHUTDOWN_CMD = ['dm', 'poweroff']
-_ATTACH_RETRY_INTERVAL = 1
-
-# Amount of time to wait for a complete package installation, as a
-# mitigation against hangs due to pkg/network-related failures.
-_INSTALL_TIMEOUT_SECS = 10 * 60
-
-
-def _GetPackageUri(package_name):
- """Returns the URI for the specified package name."""
- return 'fuchsia-pkg://fuchsia.com/%s' % (package_name)
-
-
-def _GetPackageInfo(package_path):
- """Returns a tuple with the name and version of a package."""
-
- # Query the metadata file which resides next to the package file.
- package_info = json.load(
- open(os.path.join(os.path.dirname(package_path), 'package')))
- return package_info['name'], package_info['version'],
-
-
-class _MapIsolatedPathsForPackage:
- """Callable object which remaps /data and /tmp paths to their component-
- specific locations, based on the package name and test realm path."""
-
- def __init__(self, package_name, package_version, realms):
- realms_path_fragment = '/r/'.join(['r/sys'] + realms)
- package_sub_path = '{2}/fuchsia.com:{0}:{1}#meta:{0}.cmx/'.format(
- package_name, package_version, realms_path_fragment)
- self.isolated_format = '{0}' + package_sub_path + '{1}'
-
- def __call__(self, path):
- for isolated_directory in ['/data/' , '/tmp/']:
- if (path+'/').startswith(isolated_directory):
- return self.isolated_format.format(isolated_directory,
- path[len(isolated_directory):])
- return path
-
-
-class FuchsiaTargetException(Exception):
- def __init__(self, message):
- super(FuchsiaTargetException, self).__init__(message)
-
-
-# TODO(crbug.com/1250803): Factor high level commands out of target.
-class Target(object):
- """Base class representing a Fuchsia deployment target."""
-
- def __init__(self, out_dir, target_cpu, logs_dir):
- self._out_dir = out_dir
- self._target_cpu = target_cpu
- self._command_runner = None
- self._symbolizer_proc = None
- self._log_listener_proc = None
- self._dry_run = False
- self._started = False
- self._log_manager = LogManager(logs_dir)
- self._ffx_runner = ffx_session.FfxRunner(self._log_manager)
-
- @staticmethod
- def CreateFromArgs(args):
- raise NotImplementedError()
-
- @staticmethod
- def RegisterArgs(arg_parser):
- pass
-
- # Functions used by the Python context manager for teardown.
- def __enter__(self):
- return self
- def __exit__(self, exc_type, exc_val, exc_tb):
- try:
- self.Stop()
- finally:
- # Stop the ffx daemon, since the target device is going / has gone away.
- # This ensures that the daemon does not become "hung" if the target device
- # stops responding to network I/O (e.g., due to emulator instance
- # teardown). The daemon will be automatically restarted by the next `ffx`
- # call.
- self._ffx_runner.daemon_stop()
- # Stop the log manager only after the last use of _ffx_runner.
- self._log_manager.Stop()
-
- def Start(self):
- """Handles the instantiation and connection process for the Fuchsia
- target instance."""
- raise NotImplementedError()
-
- def IsStarted(self):
- """Returns True if the Fuchsia target instance is ready to accept
- commands."""
- return self._started
-
- def GetFfxTarget(self):
- """Returns the FfxTarget instance to use to interact with this target."""
- raise NotImplementedError()
-
- def Stop(self):
- """Stop all subprocesses and close log streams."""
- if self._symbolizer_proc:
- self._symbolizer_proc.kill()
- if self._log_listener_proc:
- self._log_listener_proc.kill()
-
- def IsNewInstance(self):
- """Returns True if the connected target instance is newly provisioned."""
- return True
-
- def GetCommandRunner(self):
- """Returns CommandRunner that can be used to execute commands on the
- target. Most clients should prefer RunCommandPiped() and RunCommand()."""
- self._AssertIsStarted()
-
- if self._command_runner is None:
- host, port = self._GetEndpoint()
- self._command_runner = \
- remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
-
- return self._command_runner
-
- def StartSystemLog(self, package_paths):
- """Start a system log reader as a long-running SSH task."""
- system_log = self._log_manager.Open('system_log')
- if package_paths:
- self._log_listener_proc = self.RunCommandPiped(['log_listener'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- self._symbolizer_proc = RunSymbolizer(self._log_listener_proc.stdout,
- system_log,
- BuildIdsPaths(package_paths))
- else:
- self._log_listener_proc = self.RunCommandPiped(['log_listener'],
- stdout=system_log,
- stderr=subprocess.STDOUT)
-
- def RunCommandPiped(self, command, **kwargs):
- """Starts a remote command and immediately returns a Popen object for the
- command. The caller may interact with the streams, inspect the status code,
- wait on command termination, etc.
-
- command: A list of strings representing the command and arguments.
- kwargs: A dictionary of parameters to be passed to subprocess.Popen().
- The parameters can be used to override stdin and stdout, for
- example.
-
- Returns: a Popen object.
-
- Note: method does not block.
- """
- logging.debug('running (non-blocking) \'%s\'.', ' '.join(command))
- return self.GetCommandRunner().RunCommandPiped(command, **kwargs)
-
- def RunCommand(self, command, silent=False, timeout_secs=None):
- """Executes a remote command and waits for it to finish executing.
-
- Returns the exit code of the command.
- """
- return self.GetCommandRunner().RunCommand(command, silent, timeout_secs)
-
- def EnsureIsolatedPathsExist(self, for_package, for_realms):
- """Ensures that the package's isolated /data and /tmp exist."""
- for isolated_directory in ['/data', '/tmp']:
- self.RunCommand([
- 'mkdir', '-p',
- _MapIsolatedPathsForPackage(for_package, 0,
- for_realms)(isolated_directory)
- ])
-
- def GetFile(self,
- source,
- dest,
- for_package=None,
- for_realms=(),
- recursive=False):
- """Copies a file from the target filesystem to the local filesystem.
-
- source: The path of the file being copied.
- dest: The path on the local filesystem which will be copied to.
- for_package: If specified, /data in paths in |sources| is mapped to the
- package's isolated /data location.
- for_realms: If specified, identifies the sub-realm of 'sys' under which
- isolated paths (see |for_package|) are stored.
- recursive: If true, performs a recursive copy.
- """
- assert type(source) is str
- self.GetFiles([source], dest, for_package, for_realms, recursive)
-
- def GetFiles(self,
- sources,
- dest,
- for_package=None,
- for_realms=(),
- recursive=False):
- """Copies files from the target filesystem to the local filesystem.
-
- sources: List of remote file paths to copy.
- dest: The path on the local filesystem which will be copied to.
- for_package: If specified, /data in paths in |sources| is mapped to the
- package's isolated /data location.
- for_realms: If specified, identifies the sub-realm of 'sys' under which
- isolated paths (see |for_package|) are stored.
- recursive: If true, performs a recursive copy.
- """
- assert type(sources) is tuple or type(sources) is list
- self._AssertIsStarted()
- if for_package:
- sources = map(_MapIsolatedPathsForPackage(for_package, 0, for_realms),
- sources)
- logging.debug('copy remote:%s => local:%s', sources, dest)
- return self.GetCommandRunner().RunScp(sources, dest,
- remote_cmd.COPY_FROM_TARGET,
- recursive)
-
- def GetFileAsString(self, source):
- """Reads a file on the device and returns it as a string.
-
- source: The remote file path to read.
- """
- cat_proc = self.RunCommandPiped(['cat', source],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- stdout, _ = cat_proc.communicate()
- if cat_proc.returncode != 0:
- raise Exception('Could not read file %s on device.', source)
- return stdout.decode('utf-8')
-
- def _GetEndpoint(self):
- """Returns a (host, port) tuple for the SSH connection to the target."""
- raise NotImplementedError()
-
- def _GetTargetSdkArch(self):
- """Returns the Fuchsia SDK architecture name for the target CPU."""
- if self._target_cpu == 'arm64' or self._target_cpu == 'x64':
- return self._target_cpu
- raise FuchsiaTargetException('Unknown target_cpu:' + self._target_cpu)
-
- def _AssertIsStarted(self):
- assert self.IsStarted()
-
- def _ConnectToTarget(self):
- logging.info('Connecting to Fuchsia using SSH.')
-
- host, port = self._GetEndpoint()
- end_time = time.time() + common.ATTACH_RETRY_SECONDS
- ssh_diagnostic_log = self._log_manager.Open('ssh_diagnostic_log')
- while time.time() < end_time:
- runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
- ssh_proc = runner.RunCommandPiped(['true'],
- ssh_args=['-v'],
- stdout=ssh_diagnostic_log,
- stderr=subprocess.STDOUT)
- if ssh_proc.wait() == 0:
- logging.info('Connected!')
- self._started = True
- self._command_runner = runner
- return True
- time.sleep(_ATTACH_RETRY_INTERVAL)
-
- logging.error('Timeout limit reached.')
-
- raise FuchsiaTargetException('Couldn\'t connect using SSH.')
-
- def _DisconnectFromTarget(self):
- pass
-
- def _GetSshConfigPath(self, path):
- raise NotImplementedError()
-
- def GetPkgRepo(self):
- """Returns an PkgRepo instance which serves packages for this Target.
- Callers should typically call GetPkgRepo() in a |with| statement, and
- install and execute commands inside the |with| block, so that the returned
- PkgRepo can teardown correctly, if necessary.
- """
- raise NotImplementedError()
-
- def InstallPackage(self, package_paths):
- """Installs a package and it's dependencies on the device. If the package is
- already installed then it will be updated to the new version.
-
- package_paths: Paths to the .far files to install.
- """
- with self.GetPkgRepo() as pkg_repo:
- # Publish all packages to the serving TUF repository under |tuf_root|.
- for package_path in package_paths:
- pkg_repo.PublishPackage(package_path)
-
- # Resolve all packages, to have them pulled into the device/VM cache.
- for package_path in package_paths:
- package_name, package_version = _GetPackageInfo(package_path)
- logging.info('Installing %s...', package_name)
- return_code = self.RunCommand(
- ['pkgctl', 'resolve',
- _GetPackageUri(package_name)],
- timeout_secs=_INSTALL_TIMEOUT_SECS)
- if return_code != 0:
- raise Exception(
- 'Error {} while resolving {}.'.format(return_code, package_name))
-
- # Verify that the newly resolved versions of packages are reported.
- for package_path in package_paths:
- # Use pkgctl get-hash to determine which version will be resolved.
- package_name, package_version = _GetPackageInfo(package_path)
- pkgctl = self.RunCommandPiped(
- ['pkgctl', 'get-hash',
- _GetPackageUri(package_name)],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- pkgctl_out, pkgctl_err = pkgctl.communicate()
- pkgctl_out = pkgctl_out.strip()
-
- # Read the expected version from the meta.far Merkel hash file alongside
- # the package's FAR.
- meta_far_path = os.path.join(os.path.dirname(package_path), 'meta.far')
- meta_far_merkle = subprocess.check_output(
- [common.GetHostToolPathFromPlatform('merkleroot'),
- meta_far_path]).split()[0]
- if pkgctl_out != meta_far_merkle:
- raise Exception('Hash mismatch for %s after resolve (%s vs %s).' %
- (package_name, pkgctl_out, meta_far_merkle))
diff --git a/build/fuchsia/test/PRESUBMIT.py b/build/fuchsia/test/PRESUBMIT.py
index 9047d2dba..fc5dcfe8f 100644
--- a/build/fuchsia/test/PRESUBMIT.py
+++ b/build/fuchsia/test/PRESUBMIT.py
@@ -13,6 +13,10 @@ _EXTRA_PATHS_COMPONENTS = [('testing', )]
# pylint: disable=invalid-name,missing-function-docstring
def CommonChecks(input_api, output_api):
+ # Neither running nor linting Fuchsia tests is supported on Windows.
+ if input_api.is_windows:
+ return []
+
tests = []
chromium_src_path = input_api.os_path.realpath(
diff --git a/build/fuchsia/test/common.py b/build/fuchsia/test/common.py
index 2e1b1f56e..406af0853 100644
--- a/build/fuchsia/test/common.py
+++ b/build/fuchsia/test/common.py
@@ -3,37 +3,133 @@
# found in the LICENSE file.
"""Common methods and variables used by Cr-Fuchsia testing infrastructure."""
+import enum
import json
import logging
import os
import re
+import signal
+import shutil
import subprocess
+import sys
import time
from argparse import ArgumentParser
-from typing import Iterable, List, Optional
+from typing import Iterable, List, Optional, Tuple
-from compatible_utils import get_ssh_prefix, get_host_arch, running_unattended
+from compatible_utils import get_ssh_prefix, get_host_arch
DIR_SRC_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
+IMAGES_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk',
+ 'images')
REPO_ALIAS = 'fuchsia.com'
SDK_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk', 'sdk')
-
SDK_TOOLS_DIR = os.path.join(SDK_ROOT, 'tools', get_host_arch())
+_ENABLE_ZEDBOOT = 'discovery.zedboot.enabled=true'
_FFX_TOOL = os.path.join(SDK_TOOLS_DIR, 'ffx')
# This global variable is used to set the environment variable
# |FFX_ISOLATE_DIR| when running ffx commands in E2E testing scripts.
_FFX_ISOLATE_DIR = None
-# TODO(crbug.com/1280705): Remove each entry when they are migrated to v2.
-_V1_PACKAGE_LIST = [
- 'chrome_v1',
- 'web_engine',
- 'web_engine_with_webui',
- 'web_runner',
-]
+
+class TargetState(enum.Enum):
+ """State of a target."""
+ UNKNOWN = enum.auto()
+ DISCONNECTED = enum.auto()
+ PRODUCT = enum.auto()
+ FASTBOOT = enum.auto()
+ ZEDBOOT = enum.auto()
+
+
+class BootMode(enum.Enum):
+ """Specifies boot mode for device."""
+ REGULAR = enum.auto()
+ RECOVERY = enum.auto()
+ BOOTLOADER = enum.auto()
+
+
+_STATE_TO_BOOTMODE = {
+ TargetState.PRODUCT: BootMode.REGULAR,
+ TargetState.FASTBOOT: BootMode.BOOTLOADER,
+ TargetState.ZEDBOOT: BootMode.RECOVERY
+}
+
+_BOOTMODE_TO_STATE = {value: key for key, value in _STATE_TO_BOOTMODE.items()}
+
+
+def _state_string_to_state(state_str: str) -> TargetState:
+ state_str = state_str.strip().lower()
+ if state_str == 'product':
+ return TargetState.PRODUCT
+ if state_str == 'zedboot (r)':
+ return TargetState.ZEDBOOT
+ if state_str == 'fastboot':
+ return TargetState.FASTBOOT
+ if state_str == 'unknown':
+ return TargetState.UNKNOWN
+ if state_str == 'disconnected':
+ return TargetState.DISCONNECTED
+
+ raise NotImplementedError(f'State {state_str} not supported')
+
+
+def _retry(count: int, sleep: Optional[int] = None):
+ def first_func(func):
+ def wrapper(*args, **kwargs):
+ exception = None
+ for _ in range(count):
+ try:
+ return func(*args, **kwargs)
+ # pylint: disable=broad-except
+ except Exception as generic_exception:
+ exception = generic_exception
+ logging.warning('Function %s failed. Retrying...',
+ str(func))
+ if sleep:
+ time.sleep(sleep)
+ # pylint: enable=broad-except
+ raise exception
+
+ return wrapper
+
+ return first_func
+
+
+@_retry(count=3, sleep=30)
+def get_target_state(target_id: Optional[str]) -> TargetState:
+ """Return state of target or the default target.
+
+ Args:
+ target_id: Optional nodename of the target. If not given, default target
+ is used.
+
+ Returns:
+ TargetState of the given node, if found.
+
+ Raises:
+        RuntimeError: If the target cannot be found, or if |target_id| is not
+            given and no default target is defined.
+ """
+ targets = json.loads(
+ run_ffx_command(('target', 'list'),
+ configs=[_ENABLE_ZEDBOOT],
+ check=True,
+ capture_output=True,
+ json_out=True).stdout.strip())
+ for target in targets:
+ if target_id is None and target['is_default']:
+ return _state_string_to_state(target['target_state'])
+ if target_id == target['nodename']:
+ return _state_string_to_state(target['target_state'])
+
+ # Could not find a state for given target.
+ error_target = target_id
+ if target_id is None:
+ error_target = 'default target'
+
+ raise RuntimeError(f'Could not find state for {error_target}')
def set_ffx_isolate_dir(isolate_dir: str) -> None:
@@ -43,6 +139,31 @@ def set_ffx_isolate_dir(isolate_dir: str) -> None:
_FFX_ISOLATE_DIR = isolate_dir
+def get_host_tool_path(tool):
+ """Get a tool from the SDK."""
+
+ return os.path.join(SDK_TOOLS_DIR, tool)
+
+
+def get_host_os():
+ """Get host operating system."""
+
+ host_platform = sys.platform
+ if host_platform.startswith('linux'):
+ return 'linux'
+ if host_platform.startswith('darwin'):
+ return 'mac'
+ raise Exception('Unsupported host platform: %s' % host_platform)
+
+
+def make_clean_directory(directory_name):
+ """If the directory exists, delete it and remake with no contents."""
+
+ if os.path.exists(directory_name):
+ shutil.rmtree(directory_name)
+ os.mkdir(directory_name)
+
+
def _get_daemon_status():
"""Determines daemon status via `ffx daemon socket`.
@@ -51,9 +172,10 @@ def _get_daemon_status():
NotRunning to indicate if the daemon is running.
"""
status = json.loads(
- run_ffx_command(['--machine', 'json', 'daemon', 'socket'],
+ run_ffx_command(('daemon', 'socket'),
check=True,
capture_output=True,
+ json_out=True,
suppress_repair=True).stdout.strip())
return status.get('pid', {}).get('status', {'NotRunning': True})
@@ -123,6 +245,7 @@ def run_ffx_command(cmd: Iterable[str],
check: bool = True,
suppress_repair: bool = False,
configs: Optional[List[str]] = None,
+ json_out: bool = False,
**kwargs) -> subprocess.CompletedProcess:
"""Runs `ffx` with the given arguments, waiting for it to exit.
@@ -141,6 +264,8 @@ def run_ffx_command(cmd: Iterable[str],
suppress_repair: If True, do not attempt to find and run a repair
command.
configs: A list of configs to be applied to the current command.
+ json_out: Have command output returned as JSON. Must be parsed by
+ caller.
Returns:
A CompletedProcess instance
Raises:
@@ -148,6 +273,8 @@ def run_ffx_command(cmd: Iterable[str],
"""
ffx_cmd = [_FFX_TOOL]
+ if json_out:
+ ffx_cmd.extend(('--machine', 'json'))
if target_id:
ffx_cmd.extend(('--target', target_id))
if configs:
@@ -174,13 +301,19 @@ def run_ffx_command(cmd: Iterable[str],
env=env,
**kwargs)
except subprocess.CalledProcessError as cpe:
+ logging.error('%s %s failed with returncode %s.',
+ os.path.relpath(_FFX_TOOL),
+ subprocess.list2cmdline(ffx_cmd[1:]), cpe.returncode)
+ if cpe.output:
+ logging.error('stdout of the command: %s', cpe.output)
if suppress_repair or (cpe.output
and not _run_repair_command(cpe.output)):
raise
# If the original command failed but a repair command was found and
# succeeded, try one more time with the original command.
- return run_ffx_command(cmd, target_id, check, True, **kwargs)
+ return run_ffx_command(cmd, target_id, check, True, configs, json_out,
+ **kwargs)
def run_continuous_ffx_command(cmd: Iterable[str],
@@ -251,10 +384,7 @@ def resolve_packages(packages: List[str], target_id: Optional[str]) -> None:
"""Ensure that all |packages| are installed on a device."""
ssh_prefix = get_ssh_prefix(get_ssh_address(target_id))
-
- # Garbage collection for swarming bots.
- if running_unattended():
- subprocess.run(ssh_prefix + ['--', 'pkgctl', 'gc'], check=False)
+ subprocess.run(ssh_prefix + ['--', 'pkgctl', 'gc'], check=False)
for package in packages:
resolve_cmd = [
@@ -284,3 +414,154 @@ def get_ssh_address(target_id: Optional[str]) -> str:
return run_ffx_command(('target', 'get-ssh-address'),
target_id,
capture_output=True).stdout.strip()
+
+
+def find_in_dir(target_name: str, parent_dir: str) -> Optional[str]:
+ """Finds path in SDK.
+
+ Args:
+ target_name: Name of target to find, as a string.
+ parent_dir: Directory to start search in.
+
+ Returns:
+ Full path to the target, None if not found.
+ """
+ # Doesn't make sense to look for a full path. Only extract the basename.
+ target_name = os.path.basename(target_name)
+ for root, dirs, _ in os.walk(parent_dir):
+ if target_name in dirs:
+ return os.path.abspath(os.path.join(root, target_name))
+
+ return None
+
+
+def find_image_in_sdk(product_name: str) -> Optional[str]:
+ """Finds image dir in SDK for product given.
+
+ Args:
+ product_name: Name of product's image directory to find.
+
+ Returns:
+ Full path to the target, None if not found.
+ """
+ top_image_dir = os.path.join(SDK_ROOT, os.pardir, 'images')
+ path = find_in_dir(product_name, parent_dir=top_image_dir)
+ if path:
+ return find_in_dir('images', parent_dir=path)
+ return path
+
+
+def catch_sigterm() -> None:
+ """Catches the kill signal and allows the process to exit cleanly."""
+ def _sigterm_handler(*_):
+ sys.exit(0)
+
+ signal.signal(signal.SIGTERM, _sigterm_handler)
+
+
+def get_system_info(target: Optional[str] = None) -> Tuple[str, str]:
+ """Retrieves installed OS version frm device.
+
+ Returns:
+        Tuple of strings, containing (product, version number), or a pair of
+ empty strings to indicate an error.
+ """
+ info_cmd = run_ffx_command(('target', 'show', '--json'),
+ target_id=target,
+ capture_output=True,
+ check=False)
+ if info_cmd.returncode == 0:
+ info_json = json.loads(info_cmd.stdout.strip())
+ for info in info_json:
+ if info['title'] == 'Build':
+ return (info['child'][1]['value'], info['child'][0]['value'])
+
+ # If the information was not retrieved, return empty strings to indicate
+ # unknown system info.
+ return ('', '')
+
+
+def boot_device(target_id: Optional[str],
+ mode: BootMode,
+ must_boot: bool = False) -> None:
+ """Boot device into desired mode, with fallback to SSH on failure.
+
+ Args:
+ target_id: Optional target_id of device.
+ mode: Desired boot mode.
+ must_boot: Forces device to boot, regardless of current state.
+ """
+    wanted_state = _BOOTMODE_TO_STATE.get(mode)
+    # Unless a boot is forced, skip when already in the wanted state.
+    if not must_boot:
+        state = get_target_state(target_id)
+        logging.debug('Current state %s. Want state %s', str(state),
+                      str(wanted_state))
+        must_boot = state != wanted_state
+
+ if not must_boot:
+ logging.debug('Skipping boot - already in good state')
+ return
+
+ _boot_device_ffx(target_id, mode)
+
+ exception = None
+ for _ in range(30):
+ try:
+ state = get_target_state(target_id)
+ if state == wanted_state:
+ return
+ raise RuntimeError('Mode is not correct. Expected '
+ f'{wanted_state}, got {state}')
+ except RuntimeError as runtime_e:
+ exception = runtime_e
+ time.sleep(2)
+ if exception:
+        # Fall back to SSH only if the device never reached the wanted state.
+ if state != _BOOTMODE_TO_STATE.get(mode):
+ _boot_device_dm(target_id, mode)
+ else:
+ raise exception
+
+
+def _boot_device_ffx(target_id: Optional[str], mode: BootMode):
+ cmd = ['target', 'reboot']
+ if mode == BootMode.REGULAR:
+ logging.info('Triggering regular boot')
+ elif mode == BootMode.RECOVERY:
+ cmd.append('-r')
+ elif mode == BootMode.BOOTLOADER:
+ cmd.append('-b')
+ else:
+ raise NotImplementedError(f'BootMode {mode} not supported')
+
+ run_ffx_command(cmd,
+ target_id=target_id,
+ configs=[_ENABLE_ZEDBOOT],
+ check=False)
+
+
+def _boot_device_dm(target_id: Optional[str], mode: BootMode):
+ # Can only use DM if device is in regular boot.
+ state = get_target_state(target_id)
+ if state != TargetState.PRODUCT:
+        _boot_device_ffx(target_id, BootMode.REGULAR)
+ if mode == BootMode.REGULAR:
+ return
+
+ ssh_prefix = get_ssh_prefix(get_ssh_address(target_id))
+
+ reboot_cmd = None
+
+ if mode == BootMode.REGULAR:
+ reboot_cmd = 'reboot'
+ elif mode == BootMode.RECOVERY:
+ reboot_cmd = 'reboot-recovery'
+ elif mode == BootMode.BOOTLOADER:
+ reboot_cmd = 'reboot-bootloader'
+ else:
+ raise NotImplementedError(f'BootMode {mode} not supported')
+
+    # Boot commands can fail due to SSH connection timeouts.
+ full_cmd = ssh_prefix + ['--', 'dm', reboot_cmd]
+ subprocess.run(full_cmd, check=False)
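For orientation, here is a hypothetical caller sketch (not part of this commit or the tree) showing how the common.py helpers added above compose: get_target_state() retries internally, and boot_device() skips the reboot when the target is already in the wanted mode. It assumes build/fuchsia/test is on sys.path, a Fuchsia SDK checkout is present, and a default target is reachable.

    import sys

    sys.path.insert(0, 'build/fuchsia/test')

    from common import BootMode, boot_device, get_target_state  # noqa: E402

    if __name__ == '__main__':
        # get_target_state(None) inspects the default target and retries up to
        # three times, sleeping 30 seconds between attempts.
        print('Current state:', get_target_state(None))
        # boot_device() is a no-op when the target already reports the wanted
        # mode, and falls back to an SSH 'dm' reboot if the ffx reboot fails.
        boot_device(None, BootMode.RECOVERY)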
diff --git a/build/fuchsia/test/common_unittests.py b/build/fuchsia/test/common_unittests.py
new file mode 100755
index 000000000..4e419c902
--- /dev/null
+++ b/build/fuchsia/test/common_unittests.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env vpython3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""File for testing common.py."""
+
+import os
+import tempfile
+import unittest
+import unittest.mock as mock
+
+import common
+
+
+@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows')
+class CommonTest(unittest.TestCase):
+ """Test common.py methods."""
+ def test_find_in_dir_returns_file_or_dir_if_searching(self) -> None:
+ """Test |find_in_dir| returns files if searching for file, or None."""
+ # Make the directory structure.
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_file, \
+ tempfile.TemporaryDirectory(dir=tmp_dir) as inner_tmp_dir:
+
+ # Structure is now:
+ # temp_dir/
+ # temp_dir/inner_dir1
+ # temp_dir/tempfile1
+ # File is not a dir, so returns None.
+ self.assertIsNone(
+ common.find_in_dir(os.path.basename(tmp_file.name),
+ parent_dir=tmp_dir))
+
+ # Repeat for directory.
+ self.assertEqual(
+ common.find_in_dir(inner_tmp_dir, parent_dir=tmp_dir),
+ inner_tmp_dir)
+
+ def test_find_image_in_sdk_searches_images_in_product_bundle(self):
+ """Test |find_image_in_sdk| searches for 'images' if product-bundle."""
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ os.makedirs(os.path.join(tmp_dir, 'sdk'), exist_ok=True)
+ os.makedirs(os.path.join(tmp_dir, 'images', 'workstation-product',
+ 'images'),
+ exist_ok=True)
+ with mock.patch('common.SDK_ROOT', os.path.join(tmp_dir, 'sdk')):
+ self.assertEqual(
+ common.find_image_in_sdk('workstation-product'),
+ os.path.join(tmp_dir, 'images', 'workstation-product',
+ 'images'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/build/fuchsia/test/compatible_utils.py b/build/fuchsia/test/compatible_utils.py
index 39b4664ba..b917a656d 100644
--- a/build/fuchsia/test/compatible_utils.py
+++ b/build/fuchsia/test/compatible_utils.py
@@ -36,8 +36,8 @@ def running_unattended() -> bool:
When running unattended, confirmation prompts and the like are suppressed.
"""
- # Chromium tests only for the presence of the variable, so match that here.
- return 'CHROME_HEADLESS' in os.environ
+ # TODO(crbug/1401387): Change to mixin based approach.
+ return 'SWARMING_SERVER' in os.environ
def get_host_arch() -> str:
@@ -46,7 +46,7 @@ def get_host_arch() -> str:
# platform.machine() returns AMD64 on 64-bit Windows.
if host_arch in ['x86_64', 'AMD64']:
return 'x64'
- if host_arch == 'aarch64':
+ if host_arch in ['aarch64', 'arm64']:
return 'arm64'
raise NotImplementedError('Unsupported host architecture: %s' % host_arch)
@@ -205,61 +205,3 @@ def get_sdk_hash(system_image_dir: str) -> Tuple[str, str]:
(args_file, contents))
return (version_info[product_key], version_info[version_key])
-
-
-def find_in_dir(target_name: str,
- parent_dir: str,
- search_for_dir: bool = False) -> Optional[str]:
- """Finds path in SDK.
-
- Args:
- target_name: Name of target to find, as a string.
- parent_dir: Directory to start search in.
- search_for_dir: boolean, whether to search for a directory or file.
-
- Returns:
- Optional full path to the target, if found. None if not found.
- """
- # Doesn't make sense to look for a full path. Only extract the basename.
- target_name = os.path.basename(target_name)
- for root, dirs, files in os.walk(parent_dir):
- # Removing these parens causes the following equivalent operation order:
- # if (target_name in dirs) if search_for_dir else files, which is
- # incorrect.
- #pylint: disable=superfluous-parens
- if target_name in (dirs if search_for_dir else files):
- return os.path.abspath(os.path.join(root, target_name))
- #pylint: enable=superfluous-parens
-
- return None
-
-
-def find_image_in_sdk(product_name: str, product_bundle: bool,
- sdk_root: str) -> Optional[str]:
- """Finds image dir in SDK for product given.
-
- Args:
- product_name: Name of product's image directory to find.
- product_bundle: boolean, whether image will be in a product-bundle or not.
- Product bundle images use a different directory format.
- sdk_root: String path to root of SDK (third_party/fuchsia-sdk).
-
- Returns:
- Optional full path to the target, if found. None if not found.
- """
- if product_bundle:
- top_image_dir = os.path.join(sdk_root, 'images')
- path = find_in_dir(product_name,
- parent_dir=top_image_dir,
- search_for_dir=True)
- return find_in_dir('images', parent_dir=path, search_for_dir=True)
-
- # Non-product-bundle directories take some massaging.
- top_image_dir = os.path.join(sdk_root, 'images-internal')
- product, board = product_name.split('.')
- board_dir = find_in_dir(board,
- parent_dir=top_image_dir,
- search_for_dir=True)
-
- # The board dir IS the images dir
- return find_in_dir(product, parent_dir=board_dir, search_for_dir=True)
diff --git a/build/fuchsia/test/compatible_utils_unittests.py b/build/fuchsia/test/compatible_utils_unittests.py
index 88c570fff..02815921c 100755
--- a/build/fuchsia/test/compatible_utils_unittests.py
+++ b/build/fuchsia/test/compatible_utils_unittests.py
@@ -14,12 +14,13 @@ import unittest.mock as mock
import compatible_utils
+@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows')
class CompatibleUtilsTest(unittest.TestCase):
"""Test compatible_utils.py methods."""
def test_running_unattended_returns_true_if_headless_set(self) -> None:
"""Test |running_unattended| returns True if CHROME_HEADLESS is set."""
- with mock.patch('os.environ', {'CHROME_HEADLESS': 0}):
+ with mock.patch('os.environ', {'SWARMING_SERVER': 0}):
self.assertTrue(compatible_utils.running_unattended())
with mock.patch('os.environ', {'FOO_HEADLESS': 0}):
@@ -190,80 +191,20 @@ universe_package_labels += []
self.assertRaises(compatible_utils.VersionNotFoundError):
compatible_utils.get_sdk_hash('some/dir')
- def test_find_in_dir_returns_file_or_dir_if_searching(self) -> None:
- """Test |find_in_dir| returns files if searching for file, or None."""
- # Make the directory structure.
- with tempfile.TemporaryDirectory() as tmp_dir:
- with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_file, \
- tempfile.TemporaryDirectory(dir=tmp_dir) as inner_tmp_dir:
-
- # Structure is now:
- # temp_dir/
- # temp_dir/inner_dir1
- # temp_dir/tempfile1
- self.assertEqual(
- compatible_utils.find_in_dir(
- os.path.basename(tmp_file.name),
- parent_dir=tmp_dir,
- search_for_dir=False), tmp_file.name)
- # File is not a dir, so returns None.
- self.assertIsNone(
- compatible_utils.find_in_dir(os.path.basename(
- tmp_file.name),
- parent_dir=tmp_dir,
- search_for_dir=True))
-
- # Repeat for directory.
- self.assertEqual(
- compatible_utils.find_in_dir(inner_tmp_dir,
- parent_dir=tmp_dir,
- search_for_dir=True),
- inner_tmp_dir)
- self.assertIsNone(
- compatible_utils.find_in_dir(inner_tmp_dir,
- parent_dir=tmp_dir,
- search_for_dir=False))
- with tempfile.NamedTemporaryFile(
- dir=inner_tmp_dir) as inner_tmp_file:
- self.assertEqual(
- compatible_utils.find_in_dir(
- os.path.basename(inner_tmp_file.name),
- parent_dir=tmp_dir,
- search_for_dir=False), inner_tmp_file.name)
- self.assertEqual(
- compatible_utils.find_in_dir(
- os.path.basename(inner_tmp_file.name),
- parent_dir=inner_tmp_dir,
- search_for_dir=False), inner_tmp_file.name)
-
- def test_find_image_in_sdk_searches_images_in_product_bundle(self):
- """Test |find_image_in_sdk| searches for 'images' if product-bundle."""
- with tempfile.TemporaryDirectory() as tmp_dir:
- os.makedirs(os.path.join(tmp_dir, 'images', 'workstation-product',
- 'images'),
- exist_ok=True)
- self.assertEqual(
- compatible_utils.find_image_in_sdk('workstation-product',
- product_bundle=True,
- sdk_root=tmp_dir),
- os.path.join(tmp_dir, 'images', 'workstation-product',
- 'images'))
-
- def test_find_image_in_sdk_searches_images_in_prebuilt(self):
- """Test |find_image_in_sdk| searches dir if not product-bundle."""
- with tempfile.TemporaryDirectory() as tmp_dir:
- os.makedirs(os.path.join(tmp_dir, 'images-internal',
- 'chromebook-x64', 'workstation_eng'),
- exist_ok=True)
- self.assertEqual(
- compatible_utils.find_image_in_sdk(
- 'workstation_eng.chromebook-x64',
- product_bundle=False,
- sdk_root=tmp_dir),
- os.path.join(tmp_dir, 'images-internal', 'chromebook-x64',
- 'workstation_eng'))
+ def trim_noop_prefixes(self, path):
+ """Helper function to trim no-op path name prefixes that are
+ introduced by os.path.realpath on some platforms. These break
+ the unit tests, but have no actual effect on behavior."""
+ # These must all end in the path separator character for the
+ # string length computation to be correct on all platforms.
+ noop_prefixes = ['/private/']
+ for prefix in noop_prefixes:
+ if path.startswith(prefix):
+ return path[len(prefix) - 1:]
+ return path
def test_install_symbols(self):
+
"""Test |install_symbols|."""
with tempfile.TemporaryDirectory() as fuchsia_out_dir:
@@ -277,8 +218,9 @@ universe_package_labels += []
f.write(f'{build_id} {binary_relpath}')
compatible_utils.install_symbols([id_path], fuchsia_out_dir)
self.assertTrue(os.path.islink(symbol_file))
- self.assertEqual(os.path.realpath(symbol_file),
- os.path.join(fuchsia_out_dir, binary_relpath))
+ self.assertEqual(
+ self.trim_noop_prefixes(os.path.realpath(symbol_file)),
+ os.path.join(fuchsia_out_dir, binary_relpath))
new_binary_relpath = 'path/to/new/binary'
with open(id_path, 'w') as f:
@@ -286,7 +228,7 @@ universe_package_labels += []
compatible_utils.install_symbols([id_path], fuchsia_out_dir)
self.assertTrue(os.path.islink(symbol_file))
self.assertEqual(
- os.path.realpath(symbol_file),
+ self.trim_noop_prefixes(os.path.realpath(symbol_file)),
os.path.join(fuchsia_out_dir, new_binary_relpath))
finally:
os.remove(id_path)
diff --git a/build/fuchsia/test/coveragetest.py b/build/fuchsia/test/coveragetest.py
index 6d8121ac7..3a82e53c2 100755
--- a/build/fuchsia/test/coveragetest.py
+++ b/build/fuchsia/test/coveragetest.py
@@ -6,26 +6,36 @@
import importlib
import io
+import os
import sys
import unittest
import coverage # pylint: disable=import-error
+# These files need to have sufficient coverage.
COVERED_FILES = [
'compatible_utils.py', 'deploy_to_fuchsia.py', 'flash_device.py',
'log_manager.py', 'publish_package.py', 'serve_repo.py', 'test_server.py'
]
+# The files will be tested without coverage requirements.
+TESTED_FILES = ['common.py', 'ffx_emulator.py']
+
def main():
"""Gather coverage data, ensure included files are 100% covered."""
+ # Fuchsia tests not supported on Windows
+ if os.name == 'nt':
+ return 0
+
cov = coverage.coverage(data_file=None,
include=COVERED_FILES,
config_file=True)
cov.start()
- for file in COVERED_FILES:
+ for file in COVERED_FILES + TESTED_FILES:
+ print('Testing ' + file + ' ...')
# pylint: disable=import-outside-toplevel
# import tests after coverage start to also cover definition lines.
module = importlib.import_module(file.replace('.py', '_unittests'))
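As a rough standalone sketch of the coverage gate that coveragetest.py implements (illustrative only, not how the harness is wired up in the tree; the file and module names below are placeholders): start coverage before importing a module's unit tests, run them, then require full line coverage on the gated file.

    import importlib
    import unittest

    import coverage  # pylint: disable=import-error

    cov = coverage.coverage(data_file=None, include=['compatible_utils.py'])
    cov.start()
    # Import after coverage starts so that definition lines are counted too.
    module = importlib.import_module('compatible_utils_unittests')
    suite = unittest.TestLoader().loadTestsFromModule(module)
    unittest.TextTestRunner().run(suite)
    cov.stop()
    # Coverage.report() returns the total covered percentage.
    assert cov.report() == 100.0, 'compatible_utils.py is not fully covered'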
diff --git a/build/fuchsia/test/ffx_emulator.py b/build/fuchsia/test/ffx_emulator.py
new file mode 100644
index 000000000..3860ec8ab
--- /dev/null
+++ b/build/fuchsia/test/ffx_emulator.py
@@ -0,0 +1,158 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Provide helpers for running Fuchsia's `ffx emu`."""
+
+import argparse
+import ast
+import logging
+import os
+import json
+import random
+import subprocess
+
+from contextlib import AbstractContextManager
+
+from common import check_ssh_config_file, find_image_in_sdk, get_system_info, \
+ run_ffx_command, SDK_ROOT
+from compatible_utils import get_host_arch, get_sdk_hash
+
+_EMU_COMMAND_RETRIES = 3
+
+
+class FfxEmulator(AbstractContextManager):
+ """A helper for managing emulators."""
+ def __init__(self, args: argparse.Namespace) -> None:
+ if args.product_bundle:
+ self._product_bundle = args.product_bundle
+ else:
+ self._product_bundle = 'terminal.qemu-' + get_host_arch()
+
+ self._enable_graphics = args.enable_graphics
+ self._hardware_gpu = args.hardware_gpu
+ self._logs_dir = args.logs_dir
+ self._with_network = args.with_network
+ if args.everlasting:
+            # Do not change this name; the logic below depends on it.
+            # ffx matches node names by prefix, so a plain 'fuchsia-emulator'
+            # would also match the ephemeral names below and break the local
+            # development workflow: developers can create an everlasting
+            # emulator and an ephemeral one without them interfering.
+ self._node_name = 'fuchsia-everlasting-emulator'
+ assert self._everlasting()
+ else:
+ self._node_name = 'fuchsia-emulator-' + str(random.randint(
+ 1, 9999))
+
+        # Set the download path alongside the Fuchsia SDK directory
+        # permanently so that scripts can always find the product bundles.
+ run_ffx_command(('config', 'set', 'pbms.storage.path',
+ os.path.join(SDK_ROOT, os.pardir, 'images')))
+
+ def _everlasting(self) -> bool:
+ return self._node_name == 'fuchsia-everlasting-emulator'
+
+ def _start_emulator(self) -> None:
+ """Start the emulator."""
+ logging.info('Starting emulator %s', self._node_name)
+ check_ssh_config_file()
+ emu_command = [
+ 'emu', 'start', self._product_bundle, '--name', self._node_name
+ ]
+ if not self._enable_graphics:
+ emu_command.append('-H')
+ if self._hardware_gpu:
+ emu_command.append('--gpu')
+ if self._logs_dir:
+ emu_command.extend(
+ ('-l', os.path.join(self._logs_dir, 'emulator_log')))
+ if self._with_network:
+ emu_command.extend(('--net', 'tap'))
+
+ # TODO(https://crbug.com/1336776): remove when ffx has native support
+ # for starting emulator on arm64 host.
+ if get_host_arch() == 'arm64':
+
+ arm64_qemu_dir = os.path.join(SDK_ROOT, 'tools', 'arm64',
+ 'qemu_internal')
+
+ # The arm64 emulator binaries are downloaded separately, so add
+ # a symlink to the expected location inside the SDK.
+ if not os.path.isdir(arm64_qemu_dir):
+ os.symlink(
+ os.path.join(SDK_ROOT, '..', '..', 'qemu-linux-arm64'),
+ arm64_qemu_dir)
+
+ # Add the arm64 emulator binaries to the SDK's manifest.json file.
+ sdk_manifest = os.path.join(SDK_ROOT, 'meta', 'manifest.json')
+ with open(sdk_manifest, 'r+') as f:
+ data = json.load(f)
+ for part in data['parts']:
+ if part['meta'] == 'tools/x64/qemu_internal-meta.json':
+ part['meta'] = 'tools/arm64/qemu_internal-meta.json'
+ break
+ f.seek(0)
+ json.dump(data, f)
+ f.truncate()
+
+ # Generate a meta file for the arm64 emulator binaries using its
+ # x64 counterpart.
+ qemu_arm64_meta_file = os.path.join(SDK_ROOT, 'tools', 'arm64',
+ 'qemu_internal-meta.json')
+ qemu_x64_meta_file = os.path.join(SDK_ROOT, 'tools', 'x64',
+ 'qemu_internal-meta.json')
+ with open(qemu_x64_meta_file) as f:
+ data = str(json.load(f))
+ qemu_arm64_meta = data.replace(r'tools/x64', 'tools/arm64')
+ with open(qemu_arm64_meta_file, "w+") as f:
+ json.dump(ast.literal_eval(qemu_arm64_meta), f)
+ emu_command.extend(['--engine', 'qemu'])
+
+ for _ in range(_EMU_COMMAND_RETRIES):
+
+ # If the ffx daemon fails to establish a connection with
+ # the emulator after 85 seconds, that means the emulator
+ # failed to be brought up and a retry is needed.
+ # TODO(fxb/103540): Remove retry when start up issue is fixed.
+ try:
+ run_ffx_command(emu_command,
+ timeout=85,
+ configs=['emu.start.timeout=90'])
+ break
+ except (subprocess.TimeoutExpired, subprocess.CalledProcessError):
+ run_ffx_command(('emu', 'stop'))
+
+ def _shutdown_emulator(self) -> None:
+ """Shutdown the emulator."""
+
+ logging.info('Stopping the emulator %s', self._node_name)
+ # The emulator might have shut down unexpectedly, so this command
+ # might fail.
+ run_ffx_command(('emu', 'stop', self._node_name), check=False)
+
+ def __enter__(self) -> str:
+ """Start the emulator if necessary.
+
+ Returns:
+ The node name of the emulator.
+ """
+
+ if self._everlasting():
+ sdk_hash = get_sdk_hash(find_image_in_sdk(self._product_bundle))
+ sys_info = get_system_info(self._node_name)
+ if sdk_hash == sys_info:
+ return self._node_name
+ logging.info(
+ ('The emulator version [%s] does not match the SDK [%s], '
+ 'updating...'), sys_info, sdk_hash)
+
+ self._start_emulator()
+ return self._node_name
+
+ def __exit__(self, exc_type, exc_value, traceback) -> bool:
+ """Shutdown the emulator if necessary."""
+
+ if not self._everlasting():
+ self._shutdown_emulator()
+ # Do not suppress exceptions.
+ return False
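A hypothetical usage sketch of the relocated FfxEmulator class (not part of this commit; it assumes build/fuchsia/test is on sys.path and a Fuchsia SDK checkout with ffx is available): the class is a context manager whose __enter__ returns the emulator node name; ephemeral emulators are torn down on exit, while everlasting ones are kept and reused.

    import argparse
    import sys

    sys.path.insert(0, 'build/fuchsia/test')

    from ffx_emulator import FfxEmulator  # noqa: E402

    args = argparse.Namespace(product_bundle=None,
                              enable_graphics=False,
                              hardware_gpu=False,
                              logs_dir=None,
                              with_network=False,
                              everlasting=False)
    with FfxEmulator(args) as node_name:
        # Interact with the emulator here, e.g. run tests against node_name.
        print('Emulator node:', node_name)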
diff --git a/build/fuchsia/test/ffx_emulator_unittests.py b/build/fuchsia/test/ffx_emulator_unittests.py
new file mode 100755
index 000000000..e12f13aa9
--- /dev/null
+++ b/build/fuchsia/test/ffx_emulator_unittests.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env vpython3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""File for testing ffx_emulator.py."""
+
+import argparse
+import unittest
+
+from ffx_emulator import FfxEmulator
+
+
+class FfxEmulatorTest(unittest.TestCase):
+ """Unittests for ffx_emulator.py"""
+ def test_use_fixed_node_name(self) -> None:
+ """FfxEmulator should use a fixed node name."""
+ # Allowing the test case to access FfxEmulator._node_name directly.
+ # pylint: disable=protected-access
+ self.assertEqual(
+ FfxEmulator(
+ argparse.Namespace(
+ **{
+ 'product_bundle': None,
+ 'enable_graphics': False,
+ 'hardware_gpu': False,
+ 'logs_dir': '.',
+ 'with_network': False,
+ 'everlasting': True
+ }))._node_name, 'fuchsia-everlasting-emulator')
+
+ def test_use_random_node_name(self) -> None:
+ """FfxEmulator should not use a fixed node name."""
+ # Allowing the test case to access FfxEmulator._node_name directly.
+ # pylint: disable=protected-access
+ self.assertNotEqual(
+ FfxEmulator(
+ argparse.Namespace(
+ **{
+ 'product_bundle': None,
+ 'enable_graphics': False,
+ 'hardware_gpu': False,
+ 'logs_dir': '.',
+ 'with_network': False,
+ 'everlasting': False
+ }))._node_name, 'fuchsia-everlasting-emulator')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/build/fuchsia/test/ffx_integration.py b/build/fuchsia/test/ffx_integration.py
index 0c09c0841..08cd6fa37 100644
--- a/build/fuchsia/test/ffx_integration.py
+++ b/build/fuchsia/test/ffx_integration.py
@@ -3,23 +3,18 @@
# found in the LICENSE file.
"""Provide helpers for running Fuchsia's `ffx`."""
-import ast
import logging
import os
import json
-import random
import subprocess
import sys
import tempfile
from contextlib import AbstractContextManager
-from typing import Iterable, Optional
+from typing import IO, Iterable, List, Optional
-from common import check_ssh_config_file, run_ffx_command, \
- run_continuous_ffx_command, SDK_ROOT
-from compatible_utils import get_host_arch
+from common import run_continuous_ffx_command, run_ffx_command, SDK_ROOT
-_EMU_COMMAND_RETRIES = 3
RUN_SUMMARY_SCHEMA = \
'https://fuchsia.dev/schema/ffx_test/run_summary-8d1dd964.json'
@@ -62,7 +57,14 @@ class ScopedFfxConfig(AbstractContextManager):
def __exit__(self, exc_type, exc_val, exc_tb) -> bool:
if self._new_value != self._old_value:
- run_ffx_command(['config', 'remove', self._name])
+
+ # Allow removal of config to fail.
+ remove_cmd = run_ffx_command(['config', 'remove', self._name],
+ check=False)
+ if remove_cmd.returncode != 0:
+ logging.warning('Error when removing ffx config %s',
+ self._name)
+
if self._old_value is not None:
# Explicitly set the value back only if removing the new value
# doesn't already restore the old value.
@@ -82,134 +84,6 @@ def test_connection(target_id: Optional[str]) -> None:
run_ffx_command(('target', 'echo'), target_id)
-class FfxEmulator(AbstractContextManager):
- """A helper for managing emulators."""
-
- def __init__(self,
- enable_graphics: bool,
- hardware_gpu: bool,
- product_bundle: Optional[str],
- with_network: bool,
- logs_dir: Optional[str] = None) -> None:
- if product_bundle:
- self._product_bundle = product_bundle
- else:
- target_cpu = get_host_arch()
- self._product_bundle = f'terminal.qemu-{target_cpu}'
-
- self._enable_graphics = enable_graphics
- self._hardware_gpu = hardware_gpu
- self._logs_dir = logs_dir
- self._with_network = with_network
- node_name_suffix = random.randint(1, 9999)
- self._node_name = f'fuchsia-emulator-{node_name_suffix}'
-
- # Set the download path parallel to Fuchsia SDK directory
- # permanently so that scripts can always find the product bundles.
- run_ffx_command(('config', 'set', 'pbms.storage.path',
- os.path.join(SDK_ROOT, os.pardir, 'images')))
-
- override_file = os.path.join(os.path.dirname(__file__), os.pardir,
- 'sdk_override.txt')
- self._scoped_pb_metadata = None
- if os.path.exists(override_file):
- with open(override_file) as f:
- pb_metadata = f.read().split('\n')
- pb_metadata.append('{sdk.root}/*.json')
- self._scoped_pb_metadata = ScopedFfxConfig(
- 'pbms.metadata', json.dumps((pb_metadata)))
-
- def __enter__(self) -> str:
- """Start the emulator.
-
- Returns:
- The node name of the emulator.
- """
-
- if self._scoped_pb_metadata:
- self._scoped_pb_metadata.__enter__()
- check_ssh_config_file()
- emu_command = [
- 'emu', 'start', self._product_bundle, '--name', self._node_name
- ]
- if not self._enable_graphics:
- emu_command.append('-H')
- if self._hardware_gpu:
- emu_command.append('--gpu')
- if self._logs_dir:
- emu_command.extend(
- ('-l', os.path.join(self._logs_dir, 'emulator_log')))
- if self._with_network:
- emu_command.extend(('--net', 'tap'))
-
- # TODO(https://crbug.com/1336776): remove when ffx has native support
- # for starting emulator on arm64 host.
- if get_host_arch() == 'arm64':
-
- arm64_qemu_dir = os.path.join(SDK_ROOT, 'tools', 'arm64',
- 'qemu_internal')
-
- # The arm64 emulator binaries are downloaded separately, so add
- # a symlink to the expected location inside the SDK.
- if not os.path.isdir(arm64_qemu_dir):
- os.symlink(
- os.path.join(SDK_ROOT, '..', '..', 'qemu-linux-arm64'),
- arm64_qemu_dir)
-
- # Add the arm64 emulator binaries to the SDK's manifest.json file.
- sdk_manifest = os.path.join(SDK_ROOT, 'meta', 'manifest.json')
- with open(sdk_manifest, 'r+') as f:
- data = json.load(f)
- for part in data['parts']:
- if part['meta'] == 'tools/x64/qemu_internal-meta.json':
- part['meta'] = 'tools/arm64/qemu_internal-meta.json'
- break
- f.seek(0)
- json.dump(data, f)
- f.truncate()
-
- # Generate a meta file for the arm64 emulator binaries using its
- # x64 counterpart.
- qemu_arm64_meta_file = os.path.join(SDK_ROOT, 'tools', 'arm64',
- 'qemu_internal-meta.json')
- qemu_x64_meta_file = os.path.join(SDK_ROOT, 'tools', 'x64',
- 'qemu_internal-meta.json')
- with open(qemu_x64_meta_file) as f:
- data = str(json.load(f))
- qemu_arm64_meta = data.replace(r'tools/x64', 'tools/arm64')
- with open(qemu_arm64_meta_file, "w+") as f:
- json.dump(ast.literal_eval(qemu_arm64_meta), f)
- emu_command.extend(['--engine', 'qemu'])
-
- with ScopedFfxConfig('emu.start.timeout', '90'):
- for _ in range(_EMU_COMMAND_RETRIES):
-
- # If the ffx daemon fails to establish a connection with
- # the emulator after 85 seconds, that means the emulator
- # failed to be brought up and a retry is needed.
- # TODO(fxb/103540): Remove retry when start up issue is fixed.
- try:
- run_ffx_command(emu_command, timeout=85)
- break
- except (subprocess.TimeoutExpired,
- subprocess.CalledProcessError):
- run_ffx_command(('emu', 'stop'))
- return self._node_name
-
- def __exit__(self, exc_type, exc_value, traceback) -> bool:
- """Shutdown the emulator."""
-
- # The emulator might have shut down unexpectedly, so this command
- # might fail.
- run_ffx_command(('emu', 'stop', self._node_name), check=False)
-
- if self._scoped_pb_metadata:
- self._scoped_pb_metadata.__exit__(exc_type, exc_value, traceback)
-
- # Do not suppress exceptions.
- return False
-
-
class FfxTestRunner(AbstractContextManager):
"""A context manager that manages a session for running a test via `ffx`.
@@ -347,3 +221,19 @@ class FfxTestRunner(AbstractContextManager):
"""
self._parse_test_outputs()
return self._debug_data_directory
+
+
+def run_symbolizer(symbol_paths: List[str], input_fd: IO,
+ output_fd: IO) -> subprocess.Popen:
+ """Runs symbolizer that symbolizes |input| and outputs to |output|."""
+
+ symbolize_cmd = ([
+ 'debug', 'symbolize', '--', '--omit-module-lines', '--build-id-dir',
+ os.path.join(SDK_ROOT, '.build-id')
+ ])
+ for path in symbol_paths:
+ symbolize_cmd.extend(['--ids-txt', path])
+ return run_continuous_ffx_command(symbolize_cmd,
+ stdin=input_fd,
+ stdout=output_fd,
+ stderr=subprocess.STDOUT)
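A hypothetical sketch of driving the new run_symbolizer() helper directly (not part of this commit; the file paths are placeholders, and build/fuchsia/test is assumed to be on sys.path with an SDK checkout present): it pipes a raw log through `ffx debug symbolize` and writes the symbolized output to another file.

    import sys

    sys.path.insert(0, 'build/fuchsia/test')

    from ffx_integration import run_symbolizer  # noqa: E402

    with open('raw_system.log') as raw_log, \
            open('symbolized.log', 'w') as out_log:
        # Each ids.txt supplies build-id to unstripped-binary mappings.
        proc = run_symbolizer(['out/fuchsia/ids.txt'], raw_log, out_log)
        proc.wait()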
diff --git a/build/fuchsia/test/flash_device.py b/build/fuchsia/test/flash_device.py
index 5e0b91653..b50272b0e 100755
--- a/build/fuchsia/test/flash_device.py
+++ b/build/fuchsia/test/flash_device.py
@@ -5,7 +5,6 @@
"""Implements commands for flashing a Fuchsia device."""
import argparse
-import json
import logging
import os
import subprocess
@@ -14,11 +13,18 @@ import time
from typing import Optional, Tuple
-from common import check_ssh_config_file, register_device_args, \
- run_ffx_command, SDK_ROOT
+import common
+from common import BootMode, boot_device, check_ssh_config_file, \
+ get_system_info, find_image_in_sdk, register_device_args
from compatible_utils import get_sdk_hash, get_ssh_keys, pave, \
- running_unattended, add_exec_to_file, get_host_arch, find_image_in_sdk
-from ffx_integration import ScopedFfxConfig
+ running_unattended, add_exec_to_file, get_host_arch
+from lockfile import lock
+
+# Flash-file lock. Used to restrict the number of flash operations per host.
+# The file lock is considered stale after 15 minutes.
+_FF_LOCK = os.path.join('/tmp', 'flash.lock')
+_FF_LOCK_STALE_SECS = 60 * 15
+_FF_LOCK_ACQ_TIMEOUT = _FF_LOCK_STALE_SECS
def _get_system_info(target: Optional[str]) -> Tuple[str, str]:
@@ -31,23 +37,14 @@ def _get_system_info(target: Optional[str]) -> Tuple[str, str]:
# TODO(b/242191374): Remove when devices in swarming are no longer booted
# into zedboot.
if running_unattended():
- with ScopedFfxConfig('discovery.zedboot.enabled', 'true'):
- run_ffx_command(('target', 'reboot'), target_id=target)
- run_ffx_command(('target', 'wait'), target)
-
- info_cmd = run_ffx_command(('target', 'show', '--json'),
- target_id=target,
- capture_output=True,
- check=False)
- if info_cmd.returncode == 0:
- info_json = json.loads(info_cmd.stdout.strip())
- for info in info_json:
- if info['title'] == 'Build':
- return (info['child'][1]['value'], info['child'][0]['value'])
+ boot_device(target, BootMode.REGULAR)
+ wait_cmd = common.run_ffx_command(('target', 'wait', '-t', '180'),
+ target,
+ check=False)
+ if wait_cmd.returncode != 0:
+ return ('', '')
- # If the information was not retrieved, return empty strings to indicate
- # unknown system info.
- return ('', '')
+ return get_system_info(target)
def update_required(os_check, system_image_dir: Optional[str],
@@ -63,12 +60,7 @@ def update_required(os_check, system_image_dir: Optional[str],
'System image directory does not exist. Assuming it\'s '
'a product-bundle name and dynamically searching for '
'image directory')
- # SDK_ROOT points to third_party/fuchsia-sdk/sdk, but we want the root
- # of the overall fuchsia-sdk package.
- sdk_root_parent = os.path.split(SDK_ROOT)[0]
- path = find_image_in_sdk(system_image_dir,
- product_bundle=True,
- sdk_root=sdk_root_parent)
+ path = find_image_in_sdk(system_image_dir)
if not path:
raise FileNotFoundError(
f'System image directory {system_image_dir} could not'
@@ -107,41 +99,54 @@ def _run_flash_command(system_image_dir: str, target_id: Optional[str]):
if running_unattended():
flash_cmd = [
os.path.join(system_image_dir, 'flash.sh'),
- '--ssh-key=%s' % get_ssh_keys(),
+ '--ssh-key=%s' % get_ssh_keys()
]
+ # Target ID could be the nodename or the Serial number.
if target_id:
flash_cmd.extend(('-s', target_id))
subprocess.run(flash_cmd, check=True, timeout=240)
return
manifest = os.path.join(system_image_dir, 'flash-manifest.manifest')
- run_ffx_command(('target', 'flash', manifest, '--no-bootloader-reboot'),
- target_id=target_id,
- configs=[
- 'fastboot.usb.disabled=true',
- 'ffx.fastboot.inline_target=true'
- ])
+ common.run_ffx_command(
+ ('target', 'flash', manifest, '--no-bootloader-reboot'),
+ target_id=target_id,
+ configs=[
+ 'fastboot.usb.disabled=true', 'ffx.fastboot.inline_target=true',
+ 'fastboot.reboot.reconnect_timeout=120'
+ ])
+
+
+def _remove_stale_flash_file_lock() -> None:
+ """Check if flash file lock is stale, and delete if so."""
+ try:
+ stat = os.stat(_FF_LOCK)
+ if time.time() - stat.st_mtime > _FF_LOCK_STALE_SECS:
+ os.remove(_FF_LOCK)
+ except FileNotFoundError:
+ logging.info('No lock file found - assuming it is up for grabs')
def flash(system_image_dir: str,
target: Optional[str],
serial_num: Optional[str] = None) -> None:
"""Flash the device."""
- with ScopedFfxConfig('fastboot.reboot.reconnect_timeout', '120'):
+ _remove_stale_flash_file_lock()
+ # Flash only with a file lock acquired.
+ # This prevents multiple fastboot binaries from flashing concurrently,
+ # which should increase the odds of flashing success.
+ with lock(_FF_LOCK, timeout=_FF_LOCK_ACQ_TIMEOUT):
if serial_num:
- with ScopedFfxConfig('discovery.zedboot.enabled', 'true'):
- run_ffx_command(('target', 'reboot', '-b'),
- target,
- check=False)
+ boot_device(target, BootMode.BOOTLOADER)
for _ in range(10):
time.sleep(10)
- if run_ffx_command(('target', 'list', serial_num),
- check=False).returncode == 0:
+ if common.run_ffx_command(('target', 'list', serial_num),
+ check=False).returncode == 0:
break
_run_flash_command(system_image_dir, serial_num)
else:
_run_flash_command(system_image_dir, target)
- run_ffx_command(('target', 'wait'), target)
+ common.run_ffx_command(('target', 'wait'), target)
def update(system_image_dir: str,
@@ -168,8 +173,19 @@ def update(system_image_dir: str,
if running_unattended():
assert target, ('Target ID must be specified on swarming when'
' paving.')
- pave(system_image_dir, target)
- time.sleep(120)
+ # TODO(crbug.com/1405525): We should check the device state
+ # before and after rebooting it to avoid unnecessary reboot or
+ # undesired state.
+ boot_device(target, BootMode.RECOVERY)
+ try:
+ pave(system_image_dir, target)
+ time.sleep(180)
+ except subprocess.TimeoutExpired:
+                # Fall back to flashing, which may still recover the device
+                # and make it usable.
+                # If flashing also fails, the device is unpaveable anyway and
+                # should be taken out of the fleet - this will do that.
+ flash(system_image_dir, target, serial_num)
else:
flash(system_image_dir, target, serial_num)
@@ -215,7 +231,7 @@ def main():
"""Stand-alone function for flashing a device."""
parser = argparse.ArgumentParser()
register_device_args(parser)
- register_update_args(parser)
+ register_update_args(parser, default_os_check='update', default_pave=False)
args = parser.parse_args()
update(args.system_image_dir, args.os_check, args.target_id,
args.serial_num, args.pave)
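For completeness, a hypothetical programmatic call into the reworked flash_device.update() (not part of this commit; the image path and node name are placeholders, and build/fuchsia/test is assumed to be on sys.path): with an os_check of 'check', the helper compares the device's (product, version) against the image directory and only reflashes or paves on a mismatch.

    import sys

    sys.path.insert(0, 'build/fuchsia/test')

    import flash_device  # noqa: E402

    # Positional arguments mirror flash_device.main(): image dir, os_check,
    # then target id; should_pave selects paving instead of flashing.
    flash_device.update('path/to/system_image_dir', 'check',
                        'fuchsia-node-name', should_pave=False)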
diff --git a/build/fuchsia/test/flash_device_unittests.py b/build/fuchsia/test/flash_device_unittests.py
index fb7e1267c..a1c72e3bd 100755
--- a/build/fuchsia/test/flash_device_unittests.py
+++ b/build/fuchsia/test/flash_device_unittests.py
@@ -5,9 +5,11 @@
"""File for testing flash_device.py."""
import os
+import subprocess
import unittest
import unittest.mock as mock
+import common
import flash_device
_TEST_IMAGE_DIR = 'test/image/dir'
@@ -15,6 +17,7 @@ _TEST_PRODUCT = 'test_product'
_TEST_VERSION = 'test.version'
+# pylint: disable=too-many-public-methods,protected-access
class FlashDeviceTest(unittest.TestCase):
"""Unittests for flash_device.py."""
@@ -22,24 +25,24 @@ class FlashDeviceTest(unittest.TestCase):
context_mock = mock.Mock()
context_mock.__enter__ = mock.Mock(return_value=None)
context_mock.__exit__ = mock.Mock(return_value=None)
- self._config_patcher = mock.patch('flash_device.ScopedFfxConfig',
- return_value=context_mock)
ffx_mock = mock.Mock()
ffx_mock.returncode = 0
- self._ffx_patcher = mock.patch('flash_device.run_ffx_command',
- return_value=ffx_mock)
- self._sdk_hash_patcher = mock.patch('flash_device.get_sdk_hash',
- return_value=(_TEST_PRODUCT,
- _TEST_VERSION))
- self._check_patcher = mock.patch('flash_device.check_ssh_config_file')
- self._config_mock = self._config_patcher.start()
- self._ffx_mock = self._ffx_patcher.start()
- self._sdk_hash_mock = self._sdk_hash_patcher.start()
- self._check_patcher_mock = self._check_patcher.start()
- self.addCleanup(self._config_mock.stop)
+ ffx_patcher = mock.patch('common.run_ffx_command',
+ return_value=ffx_mock)
+ sdk_hash_patcher = mock.patch('flash_device.get_sdk_hash',
+ return_value=(_TEST_PRODUCT,
+ _TEST_VERSION))
+ swarming_patcher = mock.patch('flash_device.running_unattended',
+ return_value=False)
+ check_patcher = mock.patch('flash_device.check_ssh_config_file')
+ self._ffx_mock = ffx_patcher.start()
+ self._sdk_hash_mock = sdk_hash_patcher.start()
+ self._check_patcher_mock = check_patcher.start()
+ self._swarming_mock = swarming_patcher.start()
self.addCleanup(self._ffx_mock.stop)
self.addCleanup(self._sdk_hash_mock.stop)
self.addCleanup(self._check_patcher_mock.stop)
+ self.addCleanup(self._swarming_mock.stop)
def test_update_required_on_ignore_returns_immediately(self) -> None:
"""Test |os_check|='ignore' skips all checks."""
@@ -69,23 +72,21 @@ class FlashDeviceTest(unittest.TestCase):
with mock.patch('os.path.exists', return_value=False), \
mock.patch('flash_device.find_image_in_sdk') as mock_find, \
mock.patch('flash_device._get_system_info'), \
- mock.patch('flash_device.SDK_ROOT', 'path/to/sdk/dir'), \
+ mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \
self.assertLogs():
mock_find.return_value = 'path/to/image/dir'
update_required, new_image_dir = flash_device.update_required(
'update', 'product-bundle', None)
self.assertTrue(update_required)
self.assertEqual(new_image_dir, 'path/to/image/dir')
- mock_find.assert_called_once_with('product-bundle',
- product_bundle=True,
- sdk_root='path/to/sdk')
+ mock_find.assert_called_once_with('product-bundle')
def test_update_required_raises_file_not_found_error(self) -> None:
"""Test |os_check|!='ignore' raises FileNotFoundError if no path."""
with mock.patch('os.path.exists', return_value=False), \
mock.patch('flash_device.find_image_in_sdk',
return_value=None), \
- mock.patch('flash_device.SDK_ROOT', 'path/to/sdk/dir'), \
+ mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \
self.assertLogs(), \
self.assertRaises(FileNotFoundError):
flash_device.update_required('update', 'product-bundle', None)
@@ -118,10 +119,10 @@ class FlashDeviceTest(unittest.TestCase):
"""Test update when |os_check| is 'check' and system info does not
match."""
+ self._swarming_mock.return_value = True
with mock.patch('os.path.exists', return_value=True), \
mock.patch('flash_device._add_exec_to_flash_binaries'), \
- mock.patch('flash_device.running_unattended',
- return_value=True), \
+ mock.patch('flash_device.boot_device') as mock_boot, \
mock.patch('flash_device.subprocess.run'):
self._ffx_mock.return_value.stdout = \
'[{"title": "Build", "child": [{"value": "wrong.version"}, ' \
@@ -130,7 +131,10 @@ class FlashDeviceTest(unittest.TestCase):
'check',
None,
should_pave=False)
- self.assertEqual(self._ffx_mock.call_count, 4)
+ # Regular boot is to check the versions.
+ mock_boot.assert_called_once_with(mock.ANY,
+ common.BootMode.REGULAR)
+ self.assertEqual(self._ffx_mock.call_count, 3)
def test_update_system_info_mismatch_adds_exec_to_flash_binaries(self
) -> None:
@@ -197,24 +201,45 @@ class FlashDeviceTest(unittest.TestCase):
with mock.patch('time.sleep'), \
mock.patch('os.path.exists', return_value=True), \
+ mock.patch('flash_device.boot_device') as mock_boot, \
mock.patch('flash_device._add_exec_to_flash_binaries'):
flash_device.update(_TEST_IMAGE_DIR,
'update',
None,
'test_serial',
should_pave=False)
- self.assertEqual(self._ffx_mock.call_count, 4)
+ mock_boot.assert_called_once_with(mock.ANY,
+ common.BootMode.BOOTLOADER)
+ self.assertEqual(self._ffx_mock.call_count, 3)
+
+ def test_reboot_failure(self) -> None:
+ """Test update when |serial_num| is specified."""
+ self._ffx_mock.return_value.returncode = 1
+ with mock.patch('time.sleep'), \
+ mock.patch('os.path.exists', return_value=True), \
+ mock.patch('flash_device.running_unattended',
+ return_value=True), \
+ mock.patch('flash_device.boot_device'):
+ required, _ = flash_device.update_required('check',
+ _TEST_IMAGE_DIR, None)
+ self.assertEqual(required, True)
# pylint: disable=no-self-use
def test_update_calls_paving_if_specified(self) -> None:
"""Test update calls pave if specified."""
with mock.patch('time.sleep'), \
mock.patch('os.path.exists', return_value=True), \
+ mock.patch('flash_device.running_unattended',
+ return_value=True), \
+ mock.patch('flash_device.boot_device') as mock_boot, \
mock.patch('flash_device.pave') as mock_pave:
flash_device.update(_TEST_IMAGE_DIR,
'update',
'some-target-id',
should_pave=True)
+
+ mock_boot.assert_called_once_with('some-target-id',
+ common.BootMode.RECOVERY)
mock_pave.assert_called_once_with(_TEST_IMAGE_DIR,
'some-target-id')
@@ -222,10 +247,11 @@ class FlashDeviceTest(unittest.TestCase):
def test_update_raises_error_if_unattended_with_no_target(self) -> None:
"""Test update calls pave if specified."""
+
+ self._swarming_mock.return_value = True
with mock.patch('time.sleep'), \
mock.patch('flash_device.pave'), \
- mock.patch('os.path.exists', return_value=True), \
- mock.patch('flash_device.running_unattended', return_value=True):
+ mock.patch('os.path.exists', return_value=True):
self.assertRaises(AssertionError,
flash_device.update,
_TEST_IMAGE_DIR,
@@ -236,18 +262,77 @@ class FlashDeviceTest(unittest.TestCase):
def test_update_on_swarming(self) -> None:
"""Test update on swarming bots."""
+ self._swarming_mock.return_value = True
with mock.patch('time.sleep'), \
mock.patch('os.path.exists', return_value=True), \
mock.patch('flash_device._add_exec_to_flash_binaries'), \
- mock.patch('flash_device.running_unattended',
- return_value = True), \
+ mock.patch('flash_device.boot_device') as mock_boot, \
mock.patch('subprocess.run'):
flash_device.update(_TEST_IMAGE_DIR,
'update',
None,
'test_serial',
should_pave=False)
- self.assertEqual(self._ffx_mock.call_count, 3)
+ mock_boot.assert_called_once_with(mock.ANY,
+ common.BootMode.BOOTLOADER)
+ self.assertEqual(self._ffx_mock.call_count, 2)
+
+ # pylint: disable=no-self-use
+ def test_update_with_pave_timeout_defaults_to_flash(self) -> None:
+ """Test update falls back to flash if pave fails."""
+ with mock.patch('time.sleep'), \
+ mock.patch('os.path.exists', return_value=True), \
+ mock.patch('flash_device.running_unattended',
+ return_value=True), \
+ mock.patch('flash_device.pave') as mock_pave, \
+ mock.patch('flash_device.boot_device'), \
+ mock.patch('flash_device.flash') as mock_flash:
+ mock_pave.side_effect = subprocess.TimeoutExpired(
+ cmd='/some/cmd',
+ timeout=0,
+ )
+ flash_device.update(_TEST_IMAGE_DIR,
+ 'update',
+ 'some-target-id',
+ should_pave=True)
+ mock_pave.assert_called_once_with(_TEST_IMAGE_DIR,
+ 'some-target-id')
+ mock_flash.assert_called_once_with(_TEST_IMAGE_DIR,
+ 'some-target-id', None)
+
+ def test_remove_stale_removes_stale_file_lock(self) -> None:
+ """Test remove_stale_flash_file_lock removes stale file lock."""
+ with mock.patch('time.time') as mock_time, \
+ mock.patch('os.remove') as mock_remove, \
+ mock.patch('os.stat') as mock_stat:
+ mock_time.return_value = 60 * 20
+ # Set st_mtime
+ mock_stat.return_value = os.stat_result((0, ) * 8 + (100, 0))
+ flash_device._remove_stale_flash_file_lock()
+ mock_stat.assert_called_once_with(flash_device._FF_LOCK)
+ mock_remove.assert_called_once_with(flash_device._FF_LOCK)
+
+ def test_remove_stale_does_not_remove_non_stale_file(self) -> None:
+ """Test remove_stale_flash_file_lock does not remove fresh file."""
+ with mock.patch('time.time') as mock_time, \
+ mock.patch('os.remove') as mock_remove, \
+ mock.patch('os.stat') as mock_stat:
+ mock_time.return_value = 60 * 10
+ # Set st_mtime
+ mock_stat.return_value = os.stat_result((0, ) * 8 + (100, 0))
+ flash_device._remove_stale_flash_file_lock()
+ mock_remove.assert_not_called()
+
+ def test_remove_stale_does_not_raise_file_not_found(self) -> None:
+ """Test remove_stale_flash_file_lock does not raise FileNotFound."""
+ with mock.patch('time.time'), \
+ mock.patch('os.remove'), \
+ mock.patch('os.stat') as mock_stat:
+ mock_stat.side_effect = FileNotFoundError
+ flash_device._remove_stale_flash_file_lock()
+ mock_stat.assert_called_once_with(flash_device._FF_LOCK)
+
+ # pylint: enable=no-self-use
def test_main(self) -> None:
"""Tests |main| function."""
@@ -258,6 +343,7 @@ class FlashDeviceTest(unittest.TestCase):
with mock.patch.dict(os.environ, {}):
flash_device.main()
self.assertEqual(self._ffx_mock.call_count, 0)
+# pylint: enable=too-many-public-methods,protected-access
if __name__ == '__main__':
diff --git a/build/fuchsia/test/lockfile.py b/build/fuchsia/test/lockfile.py
new file mode 100644
index 000000000..422cfe4c2
--- /dev/null
+++ b/build/fuchsia/test/lockfile.py
@@ -0,0 +1,79 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Exclusive filelocking for all supported platforms.
+
+Copied from third_party/depot_tools/lockfile.py.
+"""
+
+import contextlib
+import fcntl
+import logging
+import os
+import time
+
+
+class LockError(Exception):
+ """Error raised if timeout or lock (without timeout) fails."""
+
+
+def _open_file(lockfile):
+ open_flags = (os.O_CREAT | os.O_WRONLY)
+ return os.open(lockfile, open_flags, 0o644)
+
+
+def _close_file(file_descriptor):
+ os.close(file_descriptor)
+
+
+def _lock_file(file_descriptor):
+ fcntl.flock(file_descriptor, fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+
+def _try_lock(lockfile):
+ f = _open_file(lockfile)
+ try:
+ _lock_file(f)
+ except Exception:
+ _close_file(f)
+ raise
+ return lambda: _close_file(f)
+
+
+def _lock(path, timeout=0):
+ """_lock returns function to release the lock if locking was successful.
+
+ _lock also implements simple retry logic."""
+ elapsed = 0
+ while True:
+ try:
+ return _try_lock(path + '.locked')
+ except (OSError, IOError) as error:
+ if elapsed < timeout:
+ sleep_time = min(10, timeout - elapsed)
+ logging.info(
+ 'Could not create lockfile; will retry after sleep(%d).',
+ sleep_time)
+ elapsed += sleep_time
+ time.sleep(sleep_time)
+ continue
+ raise LockError("Error locking %s (err: %s)" %
+ (path, str(error))) from error
+
+
+@contextlib.contextmanager
+def lock(path, timeout=0):
+ """Get exclusive lock to path.
+
+ Usage:
+ import lockfile
+ with lockfile.lock(path, timeout):
+ # Do something
+ pass
+
+ """
+ release_fn = _lock(path, timeout)
+ try:
+ yield
+ finally:
+ release_fn()
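
A minimal usage sketch of this lockfile module; the lock path and timeout below are illustrative and not taken from this change:

import lockfile

try:
    # Retries with short sleeps for up to 60 seconds before giving up.
    with lockfile.lock('/tmp/fuchsia-flash.lock', timeout=60):
        pass  # Work that must not run concurrently in another process.
except lockfile.LockError:
    pass  # Another process held the lock for the entire timeout window.
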
diff --git a/build/fuchsia/test/log_manager.py b/build/fuchsia/test/log_manager.py
index eb22e3c1c..98b711d57 100755
--- a/build/fuchsia/test/log_manager.py
+++ b/build/fuchsia/test/log_manager.py
@@ -13,10 +13,10 @@ import time
from contextlib import AbstractContextManager
from typing import Iterable, Optional, TextIO
-from common import SDK_ROOT, read_package_paths, register_common_args, \
+from common import catch_sigterm, read_package_paths, register_common_args, \
register_device_args, run_continuous_ffx_command, \
run_ffx_command
-from ffx_integration import ScopedFfxConfig
+from ffx_integration import ScopedFfxConfig, run_symbolizer
class LogManager(AbstractContextManager):
@@ -111,7 +111,7 @@ def start_system_log(log_manager: LogManager,
system_log = sys.stdout
else:
system_log = log_manager.open_log_file('system_log')
- log_cmd = ['log', '--no-symbols']
+ log_cmd = ['log', '--raw']
if log_args:
log_cmd.extend(log_args)
if symbol_paths:
@@ -119,19 +119,8 @@ def start_system_log(log_manager: LogManager,
target_id,
stdout=subprocess.PIPE)
log_manager.add_log_process(log_proc)
- symbolize_cmd = ([
- 'debug', 'symbolize', '--', '--omit-module-lines',
- '--build-id-dir',
- os.path.join(SDK_ROOT, '.build-id')
- ])
- for symbol_path in symbol_paths:
- symbolize_cmd.extend(['--ids-txt', symbol_path])
log_manager.add_log_process(
- run_continuous_ffx_command(symbolize_cmd,
- target_id,
- stdin=log_proc.stdout,
- stdout=system_log,
- stderr=subprocess.STDOUT))
+ run_symbolizer(symbol_paths, log_proc.stdout, system_log))
else:
log_manager.add_log_process(
run_continuous_ffx_command(log_cmd, target_id, stdout=system_log))
@@ -142,6 +131,7 @@ def main():
Runs until the process is killed or interrupted (i.e. user presses CTRL-C).
"""
+ catch_sigterm()
parser = argparse.ArgumentParser()
register_common_args(parser)
register_device_args(parser)
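
run_symbolizer comes from ffx_integration.py, which is not part of this diff. Reconstructed from the inline command it replaces above, a hedged sketch of such a helper could look like the following; the name, signature, and default target handling are assumptions, not the actual implementation:

import os
import subprocess

from common import SDK_ROOT, run_continuous_ffx_command


def run_symbolizer(symbol_paths, input_fd, output_fd, target_id=None):
    # Pipe raw log output through 'ffx debug symbolize' with the SDK build-id
    # directory plus any per-package ids.txt files.
    symbolize_cmd = [
        'debug', 'symbolize', '--', '--omit-module-lines', '--build-id-dir',
        os.path.join(SDK_ROOT, '.build-id')
    ]
    for symbol_path in symbol_paths:
        symbolize_cmd.extend(['--ids-txt', symbol_path])
    return run_continuous_ffx_command(symbolize_cmd, target_id,
                                      stdin=input_fd, stdout=output_fd,
                                      stderr=subprocess.STDOUT)
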
diff --git a/build/fuchsia/test/log_manager_unittests.py b/build/fuchsia/test/log_manager_unittests.py
index 6cc2f98ac..66830a836 100755
--- a/build/fuchsia/test/log_manager_unittests.py
+++ b/build/fuchsia/test/log_manager_unittests.py
@@ -4,15 +4,12 @@
# found in the LICENSE file.
"""File for testing log_manager.py."""
-import os
import sys
import unittest
import unittest.mock as mock
import log_manager
-from common import SDK_ROOT
-
_LOGS_DIR = 'test_logs_dir'
@@ -54,7 +51,7 @@ class LogManagerTest(unittest.TestCase):
log = log_manager.LogManager(None)
log_manager.start_system_log(log, True, log_args=['test_log_args'])
self.assertEqual(mock_ffx.call_args_list[0][0][0],
- ['log', '--no-symbols', 'test_log_args'])
+ ['log', '--raw', 'test_log_args'])
self.assertEqual(mock_ffx.call_count, 1)
@mock.patch('log_manager.run_continuous_ffx_command')
@@ -62,20 +59,13 @@ class LogManagerTest(unittest.TestCase):
"""Test symbols are used when pkg_paths are set."""
log = log_manager.LogManager(_LOGS_DIR)
- with mock.patch('os.path.isfile', return_value=True):
- with mock.patch('builtins.open'):
- log_manager.start_system_log(log,
- False,
- pkg_paths=['test_pkg'])
- log.stop()
- self.assertEqual(mock_ffx.call_args_list[0][0][0],
- ['log', '--no-symbols'])
- self.assertEqual(mock_ffx.call_args_list[1][0][0], [
- 'debug', 'symbolize', '--', '--omit-module-lines',
- '--build-id-dir',
- os.path.join(SDK_ROOT, '.build-id'), '--ids-txt', 'ids.txt'
- ])
- self.assertEqual(mock_ffx.call_count, 2)
+ with mock.patch('os.path.isfile', return_value=True), \
+ mock.patch('builtins.open'), \
+ mock.patch('log_manager.run_symbolizer'):
+ log_manager.start_system_log(log, False, pkg_paths=['test_pkg'])
+ log.stop()
+ self.assertEqual(mock_ffx.call_count, 1)
+ self.assertEqual(mock_ffx.call_args_list[0][0][0], ['log', '--raw'])
def test_no_logging_dir_exception(self) -> None:
"""Tests empty LogManager throws an exception on |open_log_file|."""
diff --git a/build/fuchsia/test/run_executable_test.py b/build/fuchsia/test/run_executable_test.py
index c01a1b164..7c6772be7 100755
--- a/build/fuchsia/test/run_executable_test.py
+++ b/build/fuchsia/test/run_executable_test.py
@@ -13,11 +13,11 @@ import sys
from typing import List, Optional
-from common import SDK_ROOT, get_component_uri, get_host_arch, \
+from common import get_component_uri, get_host_arch, \
register_common_args, register_device_args, \
- register_log_args, run_ffx_command
+ register_log_args
from compatible_utils import map_filter_file_to_package_file
-from ffx_integration import FfxTestRunner
+from ffx_integration import FfxTestRunner, run_symbolizer
from test_runner import TestRunner
from test_server import setup_test_server
@@ -38,11 +38,11 @@ def _copy_custom_output_file(test_runner: FfxTestRunner, file: str,
def _copy_coverage_files(test_runner: FfxTestRunner, dest: str) -> None:
- """Copy debug data file from the device to the host."""
+ """Copy debug data file from the device to the host if it exists."""
coverage_dir = test_runner.get_debug_data_directory()
if not coverage_dir:
- logging.error(
+ logging.info(
'Failed to parse coverage data directory from test summary '
'output files. Not copying coverage files from the device.')
return
@@ -76,15 +76,16 @@ class ExecutableTestRunner(TestRunner):
test_args: List[str],
test_name: str,
target_id: Optional[str],
- code_coverage_dir: Optional[str],
+ code_coverage_dir: str,
logs_dir: Optional[str] = None) -> None:
super().__init__(out_dir, test_args, [test_name], target_id)
if not self._test_args:
self._test_args = []
self._test_name = test_name
- self._code_coverage_dir = code_coverage_dir
+ self._code_coverage_dir = os.path.basename(code_coverage_dir)
self._custom_artifact_directory = None
self._isolated_script_test_output = None
+ self._isolated_script_test_perf_output = None
self._logs_dir = logs_dir
self._test_launcher_summary_output = None
self._test_server = None
@@ -94,9 +95,9 @@ class ExecutableTestRunner(TestRunner):
parser.add_argument(
'--isolated-script-test-output',
help='If present, store test results on this path.')
- # This argument has been deprecated.
parser.add_argument('--isolated-script-test-perf-output',
- help=argparse.SUPPRESS)
+ help='If present, store chartjson results on this '
+ 'path.')
parser.add_argument(
'--test-launcher-shard-index',
type=int,
@@ -135,6 +136,12 @@ class ExecutableTestRunner(TestRunner):
child_args.append(
'--isolated-script-test-output=/custom_artifacts/%s' %
os.path.basename(self._isolated_script_test_output))
+ if args.isolated_script_test_perf_output:
+ self._isolated_script_test_perf_output = \
+ args.isolated_script_test_perf_output
+ child_args.append(
+ '--isolated-script-test-perf-output=/custom_artifacts/%s' %
+ os.path.basename(self._isolated_script_test_perf_output))
if args.test_launcher_shard_index is not None:
child_args.append('--test-launcher-shard-index=%d' %
args.test_launcher_shard_index)
@@ -180,9 +187,12 @@ class ExecutableTestRunner(TestRunner):
test_runner,
os.path.basename(self._isolated_script_test_output),
self._isolated_script_test_output)
- if self._code_coverage_dir:
- _copy_coverage_files(test_runner,
- os.path.basename(self._code_coverage_dir))
+ if self._isolated_script_test_perf_output:
+ _copy_custom_output_file(
+ test_runner,
+ os.path.basename(self._isolated_script_test_perf_output),
+ self._isolated_script_test_perf_output)
+ _copy_coverage_files(test_runner, self._code_coverage_dir)
def run_test(self) -> subprocess.Popen:
test_args = self._get_args()
@@ -190,20 +200,14 @@ class ExecutableTestRunner(TestRunner):
test_proc = test_runner.run_test(
get_component_uri(self._test_name), test_args, self._target_id)
- # Symbolize output from test process and print to terminal.
- symbolize_cmd = [
- 'debug', 'symbolize', '--', '--omit-module-lines',
- '--build-id-dir',
- os.path.join(SDK_ROOT, '.build-id')
- ]
+ symbol_paths = []
for pkg_path in self._package_deps.values():
- symbol_path = os.path.join(os.path.dirname(pkg_path),
- 'ids.txt')
- symbolize_cmd.extend(('--ids-txt', symbol_path))
- run_ffx_command(symbolize_cmd,
- stdin=test_proc.stdout,
- stdout=sys.stdout,
- stderr=subprocess.STDOUT)
+ symbol_paths.append(
+ os.path.join(os.path.dirname(pkg_path), 'ids.txt'))
+ # Symbolize output from test process and print to terminal.
+ symbolizer_proc = run_symbolizer(symbol_paths, test_proc.stdout,
+ sys.stdout)
+ symbolizer_proc.communicate()
if test_proc.wait() == 0:
logging.info('Process exited normally with status code 0.')
@@ -220,8 +224,6 @@ def create_executable_test_runner(runner_args: argparse.Namespace,
test_args: List[str]):
"""Helper for creating an ExecutableTestRunner."""
- if not runner_args.code_coverage:
- runner_args.code_coverage_dir = None
return ExecutableTestRunner(runner_args.out_dir, test_args,
runner_args.test_type, runner_args.target_id,
runner_args.code_coverage_dir,
@@ -232,15 +234,12 @@ def register_executable_test_args(parser: argparse.ArgumentParser) -> None:
"""Register common arguments for ExecutableTestRunner."""
test_args = parser.add_argument_group('test', 'arguments for test running')
- test_args.add_argument('--code-coverage',
- default=False,
- action='store_true',
- help='Gather code coverage information.')
test_args.add_argument('--code-coverage-dir',
default=os.getcwd(),
help='Directory to place code coverage '
- 'information. Only relevant when --code-coverage '
- 'is set to true. Defaults to current directory.')
+ 'information. Only relevant when the target was '
+ 'built with |fuchsia_code_coverage| set to true. '
+ 'Defaults to current directory.')
test_args.add_argument('--test-name',
dest='test_type',
help='Name of the test package (e.g. '
diff --git a/build/fuchsia/test/run_test.py b/build/fuchsia/test/run_test.py
index 7979024d9..5a2952ab6 100755
--- a/build/fuchsia/test/run_test.py
+++ b/build/fuchsia/test/run_test.py
@@ -12,7 +12,7 @@ from contextlib import ExitStack
from typing import List
from common import register_common_args, register_device_args, \
- register_log_args, resolve_packages, \
+ register_log_args, resolve_packages, run_ffx_command, \
set_ffx_isolate_dir
from compatible_utils import running_unattended
from ffx_integration import ScopedFfxConfig, test_connection
@@ -23,6 +23,7 @@ from run_blink_test import BlinkTestRunner
from run_executable_test import create_executable_test_runner, \
register_executable_test_args
from run_telemetry_test import TelemetryTestRunner
+from run_webpage_test import WebpageTestRunner
from serve_repo import register_serve_args, serve_repository
from start_emulator import create_emulator_from_args, register_emulator_args
from test_runner import TestRunner
@@ -39,6 +40,9 @@ def _get_test_runner(runner_args: argparse.Namespace,
if runner_args.test_type in ['gpu', 'perf']:
return TelemetryTestRunner(runner_args.test_type, runner_args.out_dir,
test_args, runner_args.target_id)
+ if runner_args.test_type in ['webpage']:
+ return WebpageTestRunner(runner_args.out_dir, test_args,
+ runner_args.target_id)
return create_executable_test_runner(runner_args, test_args)
@@ -78,8 +82,10 @@ def main():
if running_unattended():
set_ffx_isolate_dir(
stack.enter_context(tempfile.TemporaryDirectory()))
- stack.enter_context(
- ScopedFfxConfig('repository.server.listen', '"[::]:0"'))
+ run_ffx_command(('daemon', 'stop'), check=False)
+ if running_unattended():
+ stack.enter_context(
+ ScopedFfxConfig('repository.server.listen', '"[::]:0"'))
log_manager = stack.enter_context(LogManager(runner_args.logs_dir))
if runner_args.device:
update(runner_args.system_image_dir, runner_args.os_check,
diff --git a/build/fuchsia/test/run_webpage_test.py b/build/fuchsia/test/run_webpage_test.py
new file mode 100644
index 000000000..31fa0a32c
--- /dev/null
+++ b/build/fuchsia/test/run_webpage_test.py
@@ -0,0 +1,60 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implements commands for running webpage tests."""
+
+import argparse
+import logging
+import time
+
+from typing import List, Optional
+
+from common import catch_sigterm, run_continuous_ffx_command
+from test_runner import TestRunner
+
+
+class WebpageTestRunner(TestRunner):
+ """Test runner for running webpage tests."""
+
+ def __init__(self, out_dir: str, test_args: List[str],
+ target_id: Optional[str]) -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--browser',
+ choices=['web-engine-shell', 'chrome'],
+ help='The browser to use for the webpage test.')
+ args, _ = parser.parse_known_args(test_args)
+
+ if args.browser == 'web-engine-shell':
+ packages = ['web_engine_shell']
+ else:
+ packages = ['chrome']
+
+ super().__init__(out_dir, test_args, packages, target_id)
+
+ def run_test(self):
+ catch_sigterm()
+ browser_cmd = [
+ 'test',
+ 'run',
+ '-t',
+ '3600', # Keep the webpage running for an hour.
+ f'fuchsia-pkg://fuchsia.com/{self._packages[0]}#meta/'
+ f'{self._packages[0]}.cm'
+ ]
+ browser_cmd.extend(
+ ['--', '--web-engine-package-name=web_engine_with_webui'])
+ if self._test_args:
+ browser_cmd.extend(self._test_args)
+ logging.info('Starting %s', self._packages[0])
+ try:
+ browser_proc = run_continuous_ffx_command(browser_cmd)
+ while True:
+ time.sleep(10000)
+ except KeyboardInterrupt:
+ logging.info('Ctrl-C received; shutting down the webpage.')
+ browser_proc.kill()
+ except SystemExit:
+ logging.info('SIGTERM received; shutting down the webpage.')
+ browser_proc.kill()
+ return browser_proc
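
run_test.py (above) dispatches to this runner for --test-type=webpage; a minimal sketch of driving it directly, assuming web_engine_shell is already published to the package repository the target resolves from (the values below are illustrative):

from run_webpage_test import WebpageTestRunner

runner = WebpageTestRunner(out_dir='out/fuchsia',
                           test_args=['--browser=web-engine-shell'],
                           target_id=None)
# Blocks until Ctrl-C / SIGTERM, then kills the browser process.
runner.run_test()
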
diff --git a/build/fuchsia/test/start_emulator.py b/build/fuchsia/test/start_emulator.py
index 9c96d669d..cd16505f4 100755
--- a/build/fuchsia/test/start_emulator.py
+++ b/build/fuchsia/test/start_emulator.py
@@ -9,8 +9,10 @@ import logging
import sys
import time
-from common import register_log_args
-from ffx_integration import FfxEmulator
+from contextlib import AbstractContextManager
+
+from common import catch_sigterm, register_log_args
+from ffx_emulator import FfxEmulator
def register_emulator_args(parser: argparse.ArgumentParser,
@@ -42,22 +44,32 @@ def register_emulator_args(parser: argparse.ArgumentParser,
femu_args.add_argument('--with-network',
action='store_true',
help='Run emulator with emulated nic via tun/tap.')
+ femu_args.add_argument('--everlasting',
+ action='store_true',
+ help='Whether the emulator should be long-lived.')
-def create_emulator_from_args(args: argparse.Namespace) -> FfxEmulator:
+def create_emulator_from_args(
+ args: argparse.Namespace) -> AbstractContextManager:
"""Helper method for initializing an FfxEmulator class with parsed
arguments."""
- return FfxEmulator(args.enable_graphics, args.hardware_gpu,
- args.product_bundle, args.with_network, args.logs_dir)
+ return FfxEmulator(args)
def main():
"""Stand-alone function for starting an emulator."""
+
+ catch_sigterm()
+ logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser()
register_emulator_args(parser, True)
register_log_args(parser)
args = parser.parse_args()
- with create_emulator_from_args(args):
+ with create_emulator_from_args(args) as target_id:
+ logging.info(
+ 'Emulator successfully started. You can now run Chrome '
+ 'Fuchsia tests with --target-id=%s to target this emulator.',
+ target_id)
try:
while True:
time.sleep(10000)
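
A short sketch of using create_emulator_from_args outside of main(), mirroring the flow above (the flag list is illustrative):

import argparse

from start_emulator import create_emulator_from_args, register_emulator_args

parser = argparse.ArgumentParser()
register_emulator_args(parser, True)
args = parser.parse_args(['--everlasting'])

# FfxEmulator is a context manager; entering it starts the emulator and yields
# the node name that the test scripts accept via --target-id.
with create_emulator_from_args(args) as target_id:
    print('Emulator running as', target_id)
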
diff --git a/build/fuchsia/test_runner.py b/build/fuchsia/test_runner.py
deleted file mode 100755
index d0b012829..000000000
--- a/build/fuchsia/test_runner.py
+++ /dev/null
@@ -1,313 +0,0 @@
-#!/usr/bin/env vpython3
-#
-# Copyright 2018 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Deploys and runs a test package on a Fuchsia target."""
-
-import argparse
-import logging
-import os
-import shutil
-import sys
-
-import ffx_session
-from common_args import AddCommonArgs, AddTargetSpecificArgs, \
- ConfigureLogging, GetDeploymentTargetForArgs
-from net_test_server import SetupTestServer
-from run_test_package import RunTestPackage
-from runner_exceptions import HandleExceptionAndReturnExitCode
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'test')))
-from compatible_utils import map_filter_file_to_package_file
-
-DEFAULT_TEST_SERVER_CONCURRENCY = 4
-
-TEST_LLVM_PROFILE_DIR = 'llvm-profile'
-TEST_PERF_RESULT_FILE = 'test_perf_summary.json'
-TEST_RESULT_FILE = 'test_summary.json'
-
-TEST_REALM_NAME = 'chromium_tests'
-
-
-class CustomArtifactsTestOutputs():
- """A TestOutputs implementation for CFv2 tests, where tests emit files into
- /custom_artifacts that are retrieved from the device automatically via ffx."""
-
- def __init__(self, target):
- super(CustomArtifactsTestOutputs, self).__init__()
- self._target = target
- self._ffx_session_context = ffx_session.FfxSession(target._log_manager)
- self._ffx_session = None
-
- def __enter__(self):
- self._ffx_session = self._ffx_session_context.__enter__()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._ffx_session = None
- self._ffx_session_context.__exit__(exc_type, exc_val, exc_tb)
- return False
-
- def GetFfxSession(self):
- assert self._ffx_session
- return self._ffx_session
-
- def GetDevicePath(self, path):
- return '/custom_artifacts/' + path
-
- def GetOutputDirectory(self):
- return self._ffx_session.get_output_dir()
-
- def GetFile(self, glob, destination):
- """Places all files/directories matched by a glob into a destination."""
- directory = self._ffx_session.get_custom_artifact_directory()
- if not directory:
- logging.error(
- 'Failed to parse custom artifact directory from test summary output '
- 'files. Not copying %s from the device', glob)
- return
- shutil.copy(os.path.join(directory, glob), destination)
-
- def GetCoverageProfiles(self, destination):
- directory = self._ffx_session.get_debug_data_directory()
- if not directory:
- logging.error(
- 'Failed to parse debug data directory from test summary output '
- 'files. Not copying coverage profiles from the device')
- return
- coverage_dir = os.path.join(directory, TEST_LLVM_PROFILE_DIR)
- shutil.copytree(coverage_dir, destination, dirs_exist_ok=True)
-
-
-def AddTestExecutionArgs(arg_parser):
- test_args = arg_parser.add_argument_group('testing',
- 'Test execution arguments')
- test_args.add_argument('--gtest_filter',
- help='GTest filter to use in place of any default.')
- test_args.add_argument(
- '--gtest_repeat',
- help='GTest repeat value to use. This also disables the '
- 'test launcher timeout.')
- test_args.add_argument(
- '--test-launcher-retry-limit',
- help='Number of times that test suite will retry failing '
- 'tests. This is multiplicative with --gtest_repeat.')
- test_args.add_argument('--test-launcher-print-test-stdio',
- choices=['auto', 'always', 'never'],
- help='Controls when full test output is printed.'
- 'auto means to print it when the test failed.')
- test_args.add_argument('--test-launcher-shard-index',
- type=int,
- default=os.environ.get('GTEST_SHARD_INDEX'),
- help='Index of this instance amongst swarming shards.')
- test_args.add_argument('--test-launcher-total-shards',
- type=int,
- default=os.environ.get('GTEST_TOTAL_SHARDS'),
- help='Total number of swarming shards of this suite.')
- test_args.add_argument('--gtest_break_on_failure',
- action='store_true',
- default=False,
- help='Should GTest break on failure; useful with '
- '--gtest_repeat.')
- test_args.add_argument('--single-process-tests',
- action='store_true',
- default=False,
- help='Runs the tests and the launcher in the same '
- 'process. Useful for debugging.')
- test_args.add_argument('--test-launcher-batch-limit',
- type=int,
- help='Sets the limit of test batch to run in a single '
- 'process.')
- # --test-launcher-filter-file is specified relative to --out-dir,
- # so specifying type=os.path.* will break it.
- test_args.add_argument(
- '--test-launcher-filter-file',
- default=None,
- help='Filter file(s) passed to target test process. Use ";" to separate '
- 'multiple filter files ')
- test_args.add_argument('--test-launcher-jobs',
- type=int,
- help='Sets the number of parallel test jobs.')
- test_args.add_argument('--test-launcher-summary-output',
- help='Where the test launcher will output its json.')
- test_args.add_argument('--enable-test-server',
- action='store_true',
- default=False,
- help='Enable Chrome test server spawner.')
- test_args.add_argument(
- '--test-launcher-bot-mode',
- action='store_true',
- default=False,
- help='Informs the TestLauncher to that it should enable '
- 'special allowances for running on a test bot.')
- test_args.add_argument('--isolated-script-test-output',
- help='If present, store test results on this path.')
- test_args.add_argument(
- '--isolated-script-test-perf-output',
- help='If present, store chartjson results on this path.')
- test_args.add_argument(
- '--code-coverage',
- default=False,
- action='store_true',
- help='Gather code coverage information and place it in '
- 'the output directory.')
- test_args.add_argument('--code-coverage-dir',
- default=os.getcwd(),
- help='Directory to place code coverage information. '
- 'Only relevant when --code-coverage set to true. '
- 'Defaults to current directory.')
- test_args.add_argument('--gtest_also_run_disabled_tests',
- default=False,
- action='store_true',
- help='Run tests prefixed with DISABLED_')
- test_args.add_argument('--test-arg',
- dest='test_args',
- action='append',
- help='Argument for the test process.')
- test_args.add_argument('child_args',
- nargs='*',
- help='Arguments for the test process.')
- test_args.add_argument('--use-vulkan',
- help='\'native\', \'swiftshader\' or \'none\'.')
-
-
-def main():
- parser = argparse.ArgumentParser()
- AddTestExecutionArgs(parser)
- AddCommonArgs(parser)
- AddTargetSpecificArgs(parser)
- args = parser.parse_args()
-
- # Flag out_dir is required for tests launched with this script.
- if not args.out_dir:
- raise ValueError("out-dir must be specified.")
-
- ConfigureLogging(args)
-
- child_args = []
- if args.test_launcher_shard_index != None:
- child_args.append(
- '--test-launcher-shard-index=%d' % args.test_launcher_shard_index)
- if args.test_launcher_total_shards != None:
- child_args.append(
- '--test-launcher-total-shards=%d' % args.test_launcher_total_shards)
- if args.single_process_tests:
- child_args.append('--single-process-tests')
- if args.test_launcher_bot_mode:
- child_args.append('--test-launcher-bot-mode')
- if args.test_launcher_batch_limit:
- child_args.append('--test-launcher-batch-limit=%d' %
- args.test_launcher_batch_limit)
-
- # Only set --test-launcher-jobs if the caller specifies it, in general.
- # If the caller enables the test-server then we need to launch the right
- # number of instances to match the maximum number of parallel test jobs, so
- # in that case we set --test-launcher-jobs based on the number of CPU cores
- # specified for the emulator to use.
- test_concurrency = None
- if args.test_launcher_jobs:
- test_concurrency = args.test_launcher_jobs
- elif args.enable_test_server:
- if args.device == 'device':
- test_concurrency = DEFAULT_TEST_SERVER_CONCURRENCY
- else:
- test_concurrency = args.cpu_cores
- if test_concurrency:
- child_args.append('--test-launcher-jobs=%d' % test_concurrency)
- if args.test_launcher_print_test_stdio:
- child_args.append('--test-launcher-print-test-stdio=%s' %
- args.test_launcher_print_test_stdio)
-
- if args.gtest_filter:
- child_args.append('--gtest_filter=' + args.gtest_filter)
- if args.gtest_repeat:
- child_args.append('--gtest_repeat=' + args.gtest_repeat)
- child_args.append('--test-launcher-timeout=-1')
- if args.test_launcher_retry_limit:
- child_args.append(
- '--test-launcher-retry-limit=' + args.test_launcher_retry_limit)
- if args.gtest_break_on_failure:
- child_args.append('--gtest_break_on_failure')
- if args.gtest_also_run_disabled_tests:
- child_args.append('--gtest_also_run_disabled_tests')
- if args.test_args:
- child_args.extend(args.test_args)
-
- if args.child_args:
- child_args.extend(args.child_args)
-
- if args.use_vulkan:
- child_args.append('--use-vulkan=' + args.use_vulkan)
- elif args.target_cpu == 'x64':
- # TODO(crbug.com/1261646) Remove once Vulkan is enabled by default.
- child_args.append('--use-vulkan=native')
- else:
- # Use swiftshader on arm64 by default because most arm64 bots currently
- # don't support Vulkan emulation.
- child_args.append('--use-vulkan=swiftshader')
- child_args.append('--ozone-platform=headless')
-
- try:
- with GetDeploymentTargetForArgs(args) as target, \
- CustomArtifactsTestOutputs(target) as test_outputs:
- if args.test_launcher_summary_output:
- child_args.append('--test-launcher-summary-output=' +
- test_outputs.GetDevicePath(TEST_RESULT_FILE))
- if args.isolated_script_test_output:
- child_args.append('--isolated-script-test-output=' +
- test_outputs.GetDevicePath(TEST_RESULT_FILE))
- if args.isolated_script_test_perf_output:
- child_args.append('--isolated-script-test-perf-output=' +
- test_outputs.GetDevicePath(TEST_PERF_RESULT_FILE))
-
- target.Start()
- target.StartSystemLog(args.package)
-
- if args.test_launcher_filter_file:
- # TODO(crbug.com/1279803): Until one can send file to the device when
- # running a test, filter files must be read from the test package.
- test_launcher_filter_files = map(
- map_filter_file_to_package_file,
- args.test_launcher_filter_file.split(';'))
- child_args.append('--test-launcher-filter-file=' +
- ';'.join(test_launcher_filter_files))
-
- test_server = None
- if args.enable_test_server:
- assert test_concurrency
- (test_server,
- spawner_url_base) = SetupTestServer(target, test_concurrency)
- child_args.append('--remote-test-server-spawner-url-base=' +
- spawner_url_base)
-
- returncode = RunTestPackage(target, test_outputs.GetFfxSession(),
- args.package, args.package_name, child_args)
-
- if test_server:
- test_server.Stop()
-
- if args.code_coverage:
- test_outputs.GetCoverageProfiles(args.code_coverage_dir)
-
- if args.test_launcher_summary_output:
- test_outputs.GetFile(TEST_RESULT_FILE,
- args.test_launcher_summary_output)
-
- if args.isolated_script_test_output:
- test_outputs.GetFile(TEST_RESULT_FILE, args.isolated_script_test_output)
-
- if args.isolated_script_test_perf_output:
- test_outputs.GetFile(TEST_PERF_RESULT_FILE,
- args.isolated_script_test_perf_output)
-
- return returncode
-
- except:
- return HandleExceptionAndReturnExitCode()
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/fuchsia/update_images.py b/build/fuchsia/update_images.py
index 8c76121a4..5251f98e4 100755
--- a/build/fuchsia/update_images.py
+++ b/build/fuchsia/update_images.py
@@ -15,10 +15,11 @@ import subprocess
import sys
from typing import Dict, Optional
-from common import DIR_SOURCE_ROOT
-from common import GetHostOsFromPlatform
-from common import IMAGES_ROOT
-from common import MakeCleanDirectory
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
+
+from common import DIR_SRC_ROOT, IMAGES_ROOT, get_host_os, \
+ make_clean_directory
from gcs_download import DownloadAndUnpackFromCloudStorage
@@ -51,7 +52,7 @@ def VarLookup(local_scope):
def GetImageHashList(bucket):
"""Read filename entries from sdk-hash-files.list (one per line), substitute
{platform} in each entry if present, and read from each filename."""
- assert (GetHostOsFromPlatform() == 'linux')
+ assert (get_host_os() == 'linux')
filenames = [
line.strip() for line in ReadFile('sdk-hash-files.list').replace(
'{platform}', 'linux_internal').splitlines()
@@ -82,7 +83,7 @@ def GetImageHash(bucket):
if bucket == 'fuchsia-sdk':
hashes = GetImageHashList(bucket)
return max(hashes)
- deps_file = os.path.join(DIR_SOURCE_ROOT, 'DEPS')
+ deps_file = os.path.join(DIR_SRC_ROOT, 'DEPS')
return ParseDepsFile(deps_file)['vars']['fuchsia_version'].split(':')[1]
@@ -141,6 +142,7 @@ def DownloadBootImages(bucket, image_hash, boot_image_names, image_root_dir):
except subprocess.CalledProcessError as e:
logging.exception('Failed to download image %s from URL: %s',
image_to_download, images_tarball_url)
+ raise e
def _GetImageOverrideInfo() -> Optional[Dict[str, str]]:
@@ -159,22 +161,25 @@ def _GetImageOverrideInfo() -> Optional[Dict[str, str]]:
}
-def GetImageLocationInfo(default_bucket: str) -> Dict[str, str]:
+def GetImageLocationInfo(default_bucket: str,
+ allow_override: bool = True) -> Dict[str, str]:
"""Figures out where to pull the image from.
Defaults to the provided default bucket and generates the hash from defaults.
- If sdk_override.txt exists, it uses that bucket instead.
+ If sdk_override.txt exists (and is allowed), it uses that bucket instead.
Args:
default_bucket: a given default for what bucket to use
+ allow_override: allow SDK override to be used.
Returns:
A dictionary containing the bucket and image_hash
"""
- # if sdk_override.txt exists, use the image from that bucket
- override = _GetImageOverrideInfo()
- if override:
- return override
+ # if sdk_override.txt exists (and is allowed), use the image from that bucket.
+ if allow_override:
+ override = _GetImageOverrideInfo()
+ if override:
+ return override
# Use the bucket in sdk-bucket.txt if an entry exists.
# Otherwise use the default bucket.
@@ -207,6 +212,12 @@ def main():
'--image-root-dir',
default=IMAGES_ROOT,
help='Specify the root directory of the downloaded images. Optional')
+ parser.add_argument(
+ '--allow-override',
+ default=True,
+ type=bool,
+ help='Whether sdk_override.txt can be used for fetching the image, if '
+ 'it exists.')
args = parser.parse_args()
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
@@ -216,9 +227,9 @@ def main():
return 0
# Check whether there's Fuchsia support for this platform.
- GetHostOsFromPlatform()
+ get_host_os()
- image_info = GetImageLocationInfo(args.default_bucket)
+ image_info = GetImageLocationInfo(args.default_bucket, args.allow_override)
bucket = image_info['bucket']
image_hash = image_info['image_hash']
@@ -233,7 +244,7 @@ def main():
if current_signature != new_signature:
logging.info('Downloading Fuchsia images %s from bucket %s...', image_hash,
bucket)
- MakeCleanDirectory(args.image_root_dir)
+ make_clean_directory(args.image_root_dir)
try:
DownloadBootImages(bucket, image_hash, args.boot_images,
@@ -244,6 +255,9 @@ def main():
logging.exception("command '%s' failed with status %d.%s",
' '.join(e.cmd), e.returncode,
' Details: ' + e.output if e.output else '')
+ raise e
+ else:
+ logging.info('Signatures matched! Got %s', new_signature)
return 0
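
One caveat with the new --allow-override flag declared above: argparse's type=bool applies Python's bool() to the raw argument string, so any non-empty value (including 'False') parses as True. A small standalone illustration of that standard-library behavior, not part of this change:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--allow-override', default=True, type=bool)

print(parser.parse_args(['--allow-override', 'False']).allow_override)  # True
print(parser.parse_args(['--allow-override', '']).allow_override)       # False
print(parser.parse_args([]).allow_override)                             # True (default)
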
diff --git a/build/fuchsia/update_images_test.py b/build/fuchsia/update_images_test.py
index 49d7f2f9b..f5be774cd 100755
--- a/build/fuchsia/update_images_test.py
+++ b/build/fuchsia/update_images_test.py
@@ -76,6 +76,22 @@ class TestGetImageLocationInfo(unittest.TestCase):
actual = GetImageLocationInfo('my-bucket')
self.assertEqual(actual, override_info)
+ def testNoAllowOverride(self, mock_image_override, mock_override_bucket,
+ mock_image_hash):
+ override_info = {
+ 'bucket': 'override-bucket',
+ 'image_hash': 'override-hash',
+ }
+ mock_image_override.return_value = override_info
+ mock_override_bucket.return_value = None
+ mock_image_hash.return_value = 'image-hash'
+
+ actual = GetImageLocationInfo('my-bucket', allow_override=False)
+ self.assertEqual(actual, {
+ 'bucket': 'my-bucket',
+ 'image_hash': 'image-hash',
+ })
+
if __name__ == '__main__':
unittest.main()
diff --git a/build/fuchsia/update_product_bundles.py b/build/fuchsia/update_product_bundles.py
index 1cde6c4bc..79ad39709 100755
--- a/build/fuchsia/update_product_bundles.py
+++ b/build/fuchsia/update_product_bundles.py
@@ -15,9 +15,12 @@ import subprocess
import sys
from contextlib import ExitStack
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
+
import common
-import ffx_session
-import log_manager
+import ffx_integration
_PRODUCT_BUNDLES = [
'core.x64-dfv2',
@@ -26,6 +29,7 @@ _PRODUCT_BUNDLES = [
'workstation_eng.chromebook-x64',
'workstation_eng.chromebook-x64-dfv2',
'workstation_eng.qemu-x64',
+ 'workstation_eng.x64',
]
# TODO(crbug/1361089): Remove when the old scripts have been deprecated.
@@ -72,27 +76,23 @@ def get_hash_from_sdk():
return json.load(f)['id']
-def remove_repositories(repo_names_to_remove, ffx_runner):
+def remove_repositories(repo_names_to_remove):
"""Removes given repos from repo list.
Repo MUST be present in list to succeed.
Args:
repo_names_to_remove: List of repo names (as strings) to remove.
- ffx_runner: ffx_session.FfxRunner instance to run the command.
"""
for repo_name in repo_names_to_remove:
- ffx_runner.run_ffx(('repository', 'remove', repo_name), check=True)
+ common.run_ffx_command(('repository', 'remove', repo_name), check=True)
-def get_repositories(ffx_runner):
+def get_repositories():
"""Lists repositories that are available on disk.
Also prunes repositories that are listed, but do not have an actual packages
directory.
- Args:
- ffx_runner: An FfxRunner instance.
-
Returns:
List of dictionaries containing info about the repositories. They have the
following structure:
@@ -106,7 +106,9 @@ def get_repositories(ffx_runner):
"""
repos = json.loads(
- ffx_runner.run_ffx(('--machine', 'json', 'repository', 'list')).strip())
+ common.run_ffx_command(('--machine', 'json', 'repository', 'list'),
+ check=True,
+ capture_output=True).stdout.strip())
to_prune = set()
sdk_root_abspath = os.path.abspath(os.path.dirname(common.SDK_ROOT))
for repo in repos:
@@ -121,26 +123,23 @@ def get_repositories(ffx_runner):
repos = [repo for repo in repos if repo['name'] not in to_prune]
- remove_repositories(to_prune, ffx_runner)
+ remove_repositories(to_prune)
return repos
-def update_repositories_list(ffx_runner):
+def update_repositories_list():
"""Used to prune stale repositories."""
- get_repositories(ffx_runner)
+ get_repositories()
-def remove_product_bundle(product_bundle, ffx_runner):
+def remove_product_bundle(product_bundle):
"""Removes product-bundle given."""
- ffx_runner.run_ffx(('product-bundle', 'remove', '-f', product_bundle))
+ common.run_ffx_command(('product-bundle', 'remove', '-f', product_bundle))
-def get_product_bundle_urls(ffx_runner):
+def get_product_bundle_urls():
"""Retrieves URLs of available product-bundles.
- Args:
- ffx_runner: An FfxRunner instance.
-
Returns:
List of dictionaries of structure, indicating whether the product-bundle
has been downloaded.
@@ -150,8 +149,8 @@ def get_product_bundle_urls(ffx_runner):
}
"""
# TODO(fxb/115328): Replaces with JSON API when available.
- bundles = ffx_runner.run_ffx(('product-bundle', 'list'), check=True)
-
+ bundles = common.run_ffx_command(('product-bundle', 'list'),
+ capture_output=True).stdout.strip()
urls = [
line.strip() for line in bundles.splitlines() if 'gs://fuchsia' in line
]
@@ -165,40 +164,34 @@ def get_product_bundle_urls(ffx_runner):
return structured_urls
-def keep_product_bundles_by_sdk_version(sdk_version, ffx_runner):
+def keep_product_bundles_by_sdk_version(sdk_version):
"""Prunes product bundles not containing the sdk_version given."""
- urls = get_product_bundle_urls(ffx_runner)
+ urls = get_product_bundle_urls()
for url in urls:
if url['downloaded'] and sdk_version not in url['url']:
- remove_product_bundle(url['url'], ffx_runner)
+ remove_product_bundle(url['url'])
-def get_product_bundles(ffx_runner):
+def get_product_bundles():
"""Lists all downloaded product-bundles for the given SDK.
Cross-references the repositories with downloaded packages and the stated
downloaded product-bundles to validate whether or not a product-bundle is
present. Prunes invalid product-bundles with each call as well.
- Args:
- ffx_runner: An FfxRunner instance.
-
Returns:
List of strings of product-bundle names downloaded and that FFX is aware
of.
"""
downloaded_bundles = []
- for url in get_product_bundle_urls(ffx_runner):
+ for url in get_product_bundle_urls():
if url['downloaded']:
# The product is separated by a #
product = url['url'].split('#')
downloaded_bundles.append(product[1])
- # For each downloaded bundle, need to verify whether ffx repository believes
- # it exists.
- to_prune_bundles_index = []
- repos = get_repositories(ffx_runner)
+ repos = get_repositories()
# Some repo names do not match product-bundle names due to underscores.
# Normalize them both.
@@ -211,41 +204,40 @@ def get_product_bundles(ffx_runner):
if name.replace('-', '_') in repo_names:
return True
- remove_product_bundle(name, ffx_runner)
+ remove_product_bundle(name)
return False
return list(filter(bundle_is_active, downloaded_bundles))
-def download_product_bundle(product_bundle, ffx_runner):
+def download_product_bundle(product_bundle, download_config):
"""Download product bundles using the SDK."""
# This also updates the repository list, in case it is stale.
- update_repositories_list(ffx_runner)
+ update_repositories_list()
try:
- ffx_runner.run_ffx(('product-bundle', 'get', product_bundle))
+ common.run_ffx_command(
+ ('product-bundle', 'get', product_bundle, '--force-repo'),
+ configs=download_config)
except subprocess.CalledProcessError as cpe:
logging.error('Product bundle download has failed. ' +
_PRODUCT_BUNDLE_FIX_INSTRUCTIONS)
raise
-def get_current_signature(ffx_runner):
+def get_current_signature():
"""Determines the SDK version of the product-bundles associated with the SDK.
Parses this information from the URLs of the product-bundle.
- Args:
- ffx_runner: An FfxRunner instance.
-
Returns:
An SDK version string, or None if no product-bundle versions are downloaded.
"""
- product_bundles = get_product_bundles(ffx_runner)
+ product_bundles = get_product_bundles()
if not product_bundles:
- logging.warning('No product bundles - signature will default to None')
+ logging.info('No product bundles - signature will default to None')
return None
- product_bundle_urls = get_product_bundle_urls(ffx_runner)
+ product_bundle_urls = get_product_bundle_urls()
# Get the numbers, hope they're the same.
signatures = set()
@@ -277,7 +269,7 @@ def main():
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
# Check whether there's Fuchsia support for this platform.
- common.GetHostOsFromPlatform()
+ common.get_host_os()
new_product_bundles = convert_to_product_bundle(
args.product_bundles.split(','))
@@ -293,11 +285,10 @@ def main():
'found in the DEPS file.')
with ExitStack() as stack:
- ffx_runner = ffx_session.FfxRunner(log_manager.LogManager(None))
# Re-set the directory to which product bundles are downloaded so that
# these bundles are located inside the Chromium codebase.
- ffx_runner.run_ffx(
+ common.run_ffx_command(
('config', 'set', 'pbms.storage.path', common.IMAGES_ROOT))
logging.debug('Checking for override file')
@@ -305,21 +296,20 @@ def main():
# TODO(crbug/1380807): Remove when product bundles can be downloaded
# for custom SDKs without editing metadata
override_file = os.path.join(os.path.dirname(__file__), 'sdk_override.txt')
+ pb_metadata = None
if os.path.isfile(override_file):
with open(override_file) as f:
pb_metadata = f.read().strip().split('\n')
pb_metadata.append('{sdk.root}/*.json')
- stack.enter_context(
- ffx_runner.scoped_config('pbms.metadata', json.dumps((pb_metadata))))
logging.debug('Applied overrides')
logging.debug('Getting new SDK hash')
new_sdk_hash = get_hash_from_sdk()
- keep_product_bundles_by_sdk_version(new_sdk_hash, ffx_runner)
+ keep_product_bundles_by_sdk_version(new_sdk_hash)
logging.debug('Checking for current signature')
- curr_signature = get_current_signature(ffx_runner)
+ curr_signature = get_current_signature()
- current_images = get_product_bundles(ffx_runner)
+ current_images = get_product_bundles()
# If SDK versions match, remove the product bundles that are no longer
# needed and download missing ones.
@@ -329,34 +319,35 @@ def main():
for image in current_images:
if image not in new_product_bundles:
logging.debug('Removing no longer needed Fuchsia image %s' % image)
- remove_product_bundle(image, ffx_runner)
+ remove_product_bundle(image)
bundles_to_download = set(new_product_bundles) - \
set(current_images)
for bundle in bundles_to_download:
logging.debug('Downloading image: %s', image)
- download_product_bundle(bundle, ffx_runner)
+ download_product_bundle(bundle)
return 0
# If SDK versions do not match, remove all existing product bundles
# and download the ones required.
for pb in current_images:
- remove_product_bundle(pb, ffx_runner)
+ remove_product_bundle(pb)
logging.debug('Make clean images root')
- curr_subdir = []
- if os.path.exists(common.IMAGES_ROOT):
- curr_subdir = os.listdir(common.IMAGES_ROOT)
- common.MakeCleanDirectory(common.IMAGES_ROOT)
+ common.make_clean_directory(common.IMAGES_ROOT)
+ download_config = None
+ if pb_metadata:
+ download_config = [
+ '{"pbms":{"metadata": %s}}' % json.dumps((pb_metadata))
+ ]
for pb in new_product_bundles:
logging.debug('Downloading bundle: %s', pb)
- download_product_bundle(pb, ffx_runner)
+ download_product_bundle(pb, download_config)
- current_pb = get_product_bundles(ffx_runner)
+ current_pb = get_product_bundles()
- diff = set(current_pb) - set(new_product_bundles)
assert set(current_pb) == set(new_product_bundles), (
'Failed to download expected set of product-bundles. '
f'Expected {new_product_bundles}, got {current_pb}')
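
For reference, the download_config built above is a list holding a single JSON string; with a hypothetical sdk_override.txt entry it would expand to something like the following:

import json

pb_metadata = ['gs://some-bucket/path/product_bundles.json']  # hypothetical entry
pb_metadata.append('{sdk.root}/*.json')

download_config = ['{"pbms":{"metadata": %s}}' % json.dumps(pb_metadata)]
print(download_config[0])
# {"pbms":{"metadata": ["gs://some-bucket/path/product_bundles.json", "{sdk.root}/*.json"]}}
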
diff --git a/build/fuchsia/update_product_bundles_test.py b/build/fuchsia/update_product_bundles_test.py
index 8c65e553e..0ffc20cce 100755
--- a/build/fuchsia/update_product_bundles_test.py
+++ b/build/fuchsia/update_product_bundles_test.py
@@ -6,17 +6,29 @@
import io
import json
import os
+import sys
import unittest
from unittest import mock
from parameterized import parameterized
-import common
-import ffx_session
import update_product_bundles
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
+
+import common
+
class TestUpdateProductBundles(unittest.TestCase):
+ def setUp(self):
+ ffx_mock = mock.Mock()
+ ffx_mock.returncode = 0
+ self._ffx_patcher = mock.patch('common.run_ffx_command',
+ return_value=ffx_mock)
+ self._ffx_mock = self._ffx_patcher.start()
+ self.addCleanup(self._ffx_mock.stop)
+
def testConvertToProductBundleDefaultsUnknownImage(self):
self.assertEqual(
update_product_bundles.convert_to_product_bundle(['unknown-image']),
@@ -57,13 +69,11 @@ class TestUpdateProductBundles(unittest.TestCase):
self.assertRaises(RuntimeError, update_product_bundles.get_hash_from_sdk)
- def testRemoveRepositoriesRunsRemoveOnGivenRepos(self):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
-
- update_product_bundles.remove_repositories(['foo', 'bar', 'fizz', 'buzz'],
- ffx_runner)
+ @mock.patch('common.run_ffx_command')
+ def testRemoveRepositoriesRunsRemoveOnGivenRepos(self, ffx_mock):
+ update_product_bundles.remove_repositories(['foo', 'bar', 'fizz', 'buzz'])
- ffx_runner.run_ffx.assert_has_calls([
+ ffx_mock.assert_has_calls([
mock.call(('repository', 'remove', 'foo'), check=True),
mock.call(('repository', 'remove', 'bar'), check=True),
mock.call(('repository', 'remove', 'fizz'), check=True),
@@ -75,8 +85,7 @@ class TestUpdateProductBundles(unittest.TestCase):
def testGetRepositoriesPrunesReposThatDoNotExist(self, mock_abspath,
mock_exists):
with mock.patch('common.SDK_ROOT', 'some/path'):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
- ffx_runner.run_ffx.return_value = json.dumps([{
+ self._ffx_mock.return_value.stdout = json.dumps([{
"name": "terminal.qemu-x64",
"spec": {
"type": "pm",
@@ -92,34 +101,30 @@ class TestUpdateProductBundles(unittest.TestCase):
mock_exists.side_effect = [True, False]
mock_abspath.side_effect = lambda x: x
- self.assertEqual(update_product_bundles.get_repositories(ffx_runner),
- [{
- "name": "terminal.qemu-x64",
- "spec": {
- "type": "pm",
- "path": "some/path/that/exists"
- }
- }])
-
- ffx_runner.run_ffx.assert_has_calls([
- mock.call(('--machine', 'json', 'repository', 'list')),
+ self.assertEqual(update_product_bundles.get_repositories(), [{
+ "name": "terminal.qemu-x64",
+ "spec": {
+ "type": "pm",
+ "path": "some/path/that/exists"
+ }
+ }])
+
+ self._ffx_mock.assert_has_calls([
+ mock.call(('--machine', 'json', 'repository', 'list'),
+ capture_output=True,
+ check=True),
mock.call(('repository', 'remove', 'workstation-eng.chromebook-x64'),
check=True)
])
def testRemoveProductBundle(self):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
-
- update_product_bundles.remove_product_bundle('some-bundle-foo-bar',
- ffx_runner)
+ update_product_bundles.remove_product_bundle('some-bundle-foo-bar')
- ffx_runner.run_ffx.assert_called_once_with(
+ self._ffx_mock.assert_called_once_with(
('product-bundle', 'remove', '-f', 'some-bundle-foo-bar'))
def _InitFFXRunWithProductBundleList(self, sdk_version='10.20221114.2.1'):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
-
- ffx_runner.run_ffx.return_value = f"""
+ self._ffx_mock.return_value.stdout = f"""
gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.qemu-x64
gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64-dfv2
* gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64
@@ -129,11 +134,10 @@ class TestUpdateProductBundles(unittest.TestCase):
*No need to fetch with `ffx product-bundle get ...`.
"""
- return ffx_runner
def testGetProductBundleUrlsMarksDesiredAsDownloaded(self):
- urls = update_product_bundles.get_product_bundle_urls(
- self._InitFFXRunWithProductBundleList())
+ self._InitFFXRunWithProductBundleList()
+ urls = update_product_bundles.get_product_bundle_urls()
expected_urls = [{
'url':
'gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.qemu-x64',
@@ -164,7 +168,7 @@ class TestUpdateProductBundles(unittest.TestCase):
@mock.patch('update_product_bundles.get_repositories')
def testGetProductBundlesExtractsProductBundlesFromURLs(self, mock_get_repos):
- ffx_runner = self._InitFFXRunWithProductBundleList()
+ self._InitFFXRunWithProductBundleList()
mock_get_repos.return_value = [{
'name': 'workstation-eng.chromebook-x64'
}, {
@@ -174,7 +178,7 @@ class TestUpdateProductBundles(unittest.TestCase):
}]
self.assertEqual(
- set(update_product_bundles.get_product_bundles(ffx_runner)),
+ set(update_product_bundles.get_product_bundles()),
set([
'workstation_eng.chromebook-x64',
'terminal.qemu-x64',
@@ -184,7 +188,7 @@ class TestUpdateProductBundles(unittest.TestCase):
@mock.patch('update_product_bundles.get_repositories')
def testGetProductBundlesExtractsProductBundlesFromURLsFiltersMissingRepos(
self, mock_get_repos):
- ffx_runner = self._InitFFXRunWithProductBundleList()
+ self._InitFFXRunWithProductBundleList()
# This will be missing two repos from the bundle list:
# core and terminal.qemu-x64
@@ -196,33 +200,35 @@ class TestUpdateProductBundles(unittest.TestCase):
'name': 'terminal.qemu-arm64'
}]
- self.assertEqual(update_product_bundles.get_product_bundles(ffx_runner),
+ self.assertEqual(update_product_bundles.get_product_bundles(),
['workstation_eng.chromebook-x64'])
- ffx_runner.run_ffx.assert_has_calls([
+ self._ffx_mock.assert_has_calls([
mock.call(('product-bundle', 'remove', '-f', 'terminal.qemu-x64')),
mock.call(('product-bundle', 'remove', '-f', 'core.x64-dfv2')),
],
- any_order=True)
+ any_order=True)
+ @mock.patch('common.run_ffx_command')
@mock.patch('update_product_bundles.update_repositories_list')
def testDownloadProductBundleUpdatesRepoListBeforeCall(
- self, mock_update_repo):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
+ self, mock_update_repo, mock_ffx):
mock_sequence = mock.Mock()
mock_sequence.attach_mock(mock_update_repo, 'update_repo_list')
- mock_sequence.attach_mock(ffx_runner.run_ffx, 'run_ffx')
+ mock_sequence.attach_mock(mock_ffx, 'run_ffx_command')
- update_product_bundles.download_product_bundle('some-bundle', ffx_runner)
+ update_product_bundles.download_product_bundle('some-bundle', None)
mock_sequence.assert_has_calls([
- mock.call.update_repo_list(ffx_runner),
- mock.call.run_ffx(('product-bundle', 'get', 'some-bundle'))
+ mock.call.update_repo_list(),
+ mock.call.run_ffx_command(
+ ('product-bundle', 'get', 'some-bundle', '--force-repo'),
+ configs=None)
])
+ @mock.patch('common.run_ffx_command')
@mock.patch('update_product_bundles.get_product_bundle_urls')
def testFilterProductBundleURLsRemovesBundlesWithoutGivenString(
- self, mock_get_urls):
- ffx_runner = mock.create_autospec(ffx_session.FfxRunner, instance=True)
+ self, mock_get_urls, mock_ffx):
mock_get_urls.return_value = [
{
'url': 'some-url-has-buzz',
@@ -237,27 +243,25 @@ class TestUpdateProductBundles(unittest.TestCase):
'downloaded': False,
},
]
- update_product_bundles.keep_product_bundles_by_sdk_version(
- 'buzz', ffx_runner)
- ffx_runner.run_ffx.assert_called_once_with(
+ update_product_bundles.keep_product_bundles_by_sdk_version('buzz')
+ mock_ffx.assert_called_once_with(
('product-bundle', 'remove', '-f', 'some-url-to-remove-has-foo'))
@mock.patch('update_product_bundles.get_repositories')
def testGetCurrentSignatureReturnsNoneIfNoProductBundles(
self, mock_get_repos):
- ffx_runner = self._InitFFXRunWithProductBundleList()
+ self._InitFFXRunWithProductBundleList()
# Forces no product-bundles
mock_get_repos.return_value = []
# Mutes logs
with self.assertLogs():
- self.assertIsNone(
- update_product_bundles.get_current_signature(ffx_runner))
+ self.assertIsNone(update_product_bundles.get_current_signature())
@mock.patch('update_product_bundles.get_repositories')
def testGetCurrentSignatureParsesVersionCorrectly(self, mock_get_repos):
- ffx_runner = self._InitFFXRunWithProductBundleList()
+ self._InitFFXRunWithProductBundleList()
mock_get_repos.return_value = [{
'name': 'workstation-eng.chromebook-x64'
}, {
@@ -265,20 +269,19 @@ class TestUpdateProductBundles(unittest.TestCase):
}]
self.assertEqual('10.20221114.2.1',
- update_product_bundles.get_current_signature(ffx_runner))
+ update_product_bundles.get_current_signature())
@mock.patch('update_product_bundles.get_repositories')
def testGetCurrentSignatureParsesCustomArtifactsCorrectlys(
self, mock_get_repos):
- ffx_runner = self._InitFFXRunWithProductBundleList(sdk_version='51390009')
+ self._InitFFXRunWithProductBundleList(sdk_version='51390009')
mock_get_repos.return_value = [{
'name': 'workstation-eng.chromebook-x64'
}, {
'name': 'terminal.qemu-x64'
}]
- self.assertEqual('51390009',
- update_product_bundles.get_current_signature(ffx_runner))
+ self.assertEqual('51390009', update_product_bundles.get_current_signature())
if __name__ == '__main__':
diff --git a/build/fuchsia/update_sdk.py b/build/fuchsia/update_sdk.py
index f2e669291..2b30a9c3c 100755
--- a/build/fuchsia/update_sdk.py
+++ b/build/fuchsia/update_sdk.py
@@ -14,12 +14,13 @@ import subprocess
import sys
from typing import Optional
-from common import GetHostOsFromPlatform
-from common import MakeCleanDirectory
-from common import SDK_ROOT
-
from gcs_download import DownloadAndUnpackFromCloudStorage
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'test')))
+
+from common import SDK_ROOT, get_host_arch, get_host_os, make_clean_directory
+
def _GetHostArch():
host_arch = platform.machine()
@@ -54,7 +55,7 @@ def GetSDKOverrideGCSPath(path: Optional[str] = None) -> Optional[str]:
def _GetTarballPath(gcs_tarball_prefix: str) -> str:
"""Get the full path to the sdk tarball on GCS"""
- platform = GetHostOsFromPlatform()
+ platform = get_host_os()
arch = _GetHostArch()
return f'{gcs_tarball_prefix}/{platform}-{arch}/gn.tar.gz'
@@ -73,7 +74,7 @@ def main():
# Exit if there's no SDK support for this platform.
try:
- host_plat = GetHostOsFromPlatform()
+ host_plat = get_host_os()
except:
logging.warning('Fuchsia SDK is not supported on this platform.')
return 0
@@ -98,7 +99,7 @@ def main():
# Always re-download the SDK.
logging.info('Downloading GN SDK from GCS...')
- MakeCleanDirectory(SDK_ROOT)
+ make_clean_directory(SDK_ROOT)
DownloadAndUnpackFromCloudStorage(_GetTarballPath(gcs_tarball_prefix),
SDK_ROOT)
return 0
diff --git a/build/fuchsia/update_sdk_test.py b/build/fuchsia/update_sdk_test.py
index 7d8bcc7b9..5def6796d 100755
--- a/build/fuchsia/update_sdk_test.py
+++ b/build/fuchsia/update_sdk_test.py
@@ -55,7 +55,7 @@ class TestGetSDKOverrideGCSPath(unittest.TestCase):
@mock.patch('update_sdk._GetHostArch')
-@mock.patch('update_sdk.GetHostOsFromPlatform')
+@mock.patch('update_sdk.get_host_os')
class TestGetTarballPath(unittest.TestCase):
def testGetTarballPath(self, mock_get_host_os, mock_host_arch):
mock_get_host_os.return_value = 'linux'
diff --git a/build/get_landmines.py b/build/get_landmines.py
index dead8542b..6155d71de 100755
--- a/build/get_landmines.py
+++ b/build/get_landmines.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,7 +8,6 @@ This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
-from __future__ import print_function
import sys
@@ -78,6 +77,9 @@ def print_landmines():
print('The Great Blink mv for source files (crbug.com/768828)')
if host_os() == 'linux':
print('Clobber to workaround buggy .ninja_deps cycle (crbug.com/934404)')
+ print('Clobber to flush stale generated files. See crbug.com/1406628')
+ print('Clobber to flush old .ninja_log files for updating ninja. '
+ 'See crbug.com/1406628#c14')
def main():
diff --git a/build/get_symlink_targets.py b/build/get_symlink_targets.py
index 33362c0f0..850bbae70 100755
--- a/build/get_symlink_targets.py
+++ b/build/get_symlink_targets.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2019 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
index 29035e7e8..414f6952f 100644
--- a/build/gn_run_binary.py
+++ b/build/gn_run_binary.py
@@ -8,7 +8,6 @@ Run with:
python gn_run_binary.py <binary_name> [args ...]
"""
-from __future__ import print_function
import os
import subprocess
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
deleted file mode 100755
index 56257c140..000000000
--- a/build/install-build-deps-android.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-# Copyright 2012 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Script to install everything needed to build chromium on android, including
-# items requiring sudo privileges.
-# See https://www.chromium.org/developers/how-tos/android-build-instructions
-
-args="$@"
-
-if ! uname -m | egrep -q "i686|x86_64"; then
- echo "Only x86 architectures are currently supported" >&2
- exit
-fi
-
-# Exit if any commands fail.
-set -e
-
-lsb_release=$(lsb_release --codename --short)
-
-# Install first the default Linux build deps.
-"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
- --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
-
-# Fix deps
-sudo apt-get -f install
-
-# common
-sudo apt-get -y install lib32z1 lighttpd xvfb x11-utils
-
-# Some binaries in the Android SDK require 32-bit libraries on the host.
-# See https://developer.android.com/sdk/installing/index.html?pkg=tools
-sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
-
-echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
index 428671fc8..33ba6a33c 100755
--- a/build/install-build-deps.sh
+++ b/build/install-build-deps.sh
@@ -5,13 +5,16 @@
# found in the LICENSE file.
# Script to install everything needed to build chromium (well, ideally, anyway)
+# including items requiring sudo privileges.
# See https://chromium.googlesource.com/chromium/src/+/main/docs/linux/build_instructions.md
+# and https://chromium.googlesource.com/chromium/src/+/HEAD/docs/android_build_instructions.md
usage() {
echo "Usage: $0 [--options]"
echo "Options:"
echo "--[no-]syms: enable or disable installation of debugging symbols"
echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+ echo "--[no-]android: enable or disable installation of android dependencies"
echo "--[no-]arm: enable or disable installation of arm cross toolchain"
echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
"fonts"
@@ -58,6 +61,7 @@ package_exists() {
do_inst_arm=0
do_inst_nacl=0
+do_inst_android=0
while [ "$1" != "" ]
do
@@ -65,6 +69,8 @@ do
--syms) do_inst_syms=1;;
--no-syms) do_inst_syms=0;;
--lib32) do_inst_lib32=1;;
+ --android) do_inst_android=1;;
+ --no-android) do_inst_android=0;;
--arm) do_inst_arm=1;;
--no-arm) do_inst_arm=0;;
--chromeos-fonts) do_inst_chromeos_fonts=1;;
@@ -88,6 +94,10 @@ if [ "$do_inst_arm" = "1" ]; then
do_inst_lib32=1
fi
+if [ "$do_inst_android" = "1" ]; then
+ do_inst_lib32=1
+fi
+
# Check for lsb_release command in $PATH
if ! which lsb_release > /dev/null; then
echo "ERROR: lsb_release not found in \$PATH" >&2
@@ -97,14 +107,12 @@ fi
distro_codename=$(lsb_release --codename --short)
distro_id=$(lsb_release --id --short)
-# TODO(crbug.com/1199405): Remove 16.04 (xenial).
-supported_codenames="(xenial|bionic|focal|jammy)"
+supported_codenames="(bionic|focal|jammy)"
supported_ids="(Debian)"
if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
if [[ ! $distro_codename =~ $supported_codenames &&
! $distro_id =~ $supported_ids ]]; then
echo -e "ERROR: The only supported distros are\n" \
- "\tUbuntu 16.04 LTS (xenial with EoL April 2024)\n" \
"\tUbuntu 18.04 LTS (bionic with EoL April 2028)\n" \
"\tUbuntu 20.04 LTS (focal with EoL April 2030)\n" \
"\tUbuntu 22.04 LTS (jammy with EoL April 2032)\n" \
@@ -112,6 +120,7 @@ if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
exit 1
fi
+# Check system architecture
if ! uname -m | egrep -q "i686|x86_64"; then
echo "Only x86 architectures are currently supported" >&2
exit
@@ -183,6 +192,7 @@ dev_list="\
libspeechd-dev
libsqlite3-dev
libssl-dev
+ libsystemd-dev
libudev-dev
libva-dev
libwww-perl
@@ -191,6 +201,7 @@ dev_list="\
libxss-dev
libxt-dev
libxtst-dev
+ lighttpd
locales
openbox
p7zip
@@ -227,23 +238,28 @@ chromeos_lib_list="libpulse0 libbz2-1.0"
# List of required run-time libraries
common_lib_list="\
+ lib32z1
libasound2
libatk1.0-0
libatspi2.0-0
libc6
libcairo2
libcap2
+ libcgi-session-perl
libcups2
libdrm2
+ libegl1
libevdev2
libexpat1
libfontconfig1
libfreetype6
libgbm1
libglib2.0-0
+ libgl1
libgtk-3-0
libpam0g
libpango-1.0-0
+ libpangocairo-1.0-0
libpci3
libpcre3
libpixman-1-0
@@ -251,6 +267,7 @@ common_lib_list="\
libstdc++6
libsqlite3-0
libuuid1
+ libwayland-egl1
libwayland-egl1-mesa
libx11-6
libx11-xcb1
@@ -267,6 +284,8 @@ common_lib_list="\
libxrandr2
libxrender1
libxtst6
+ x11-utils
+ xvfb
zlib1g
"
@@ -284,51 +303,34 @@ lib_list="\
$chromeos_lib_list
"
-# this can be moved into the lib list without a guard when xenial is deprecated
-if package_exists libgl1; then
- lib_list="${lib_list} libgl1"
-fi
-if package_exists libegl1; then
- lib_list="${lib_list} libegl1"
-fi
-if package_exists libwayland-egl1; then
- lib_list="${lib_list} libwayland-egl1"
-fi
-if package_exists libpangocairo-1.0-0; then
- lib_list="${lib_list} libpangocairo-1.0-0"
-fi
-if package_exists libgl1:i386; then
- lib_list="${lib_list} libgl1:i386"
-fi
-if package_exists libegl1:i386; then
- lib_list="${lib_list} libegl1:i386"
-fi
-if package_exists libwayland-egl1:i386; then
- lib_list="${lib_list} libwayland-egl1:i386"
-fi
-if package_exists libpangocairo-1.0-0:i386; then
- lib_list="${lib_list} libpangocairo-1.0-0:i386"
-fi
-
# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
lib32_list="linux-libc-dev:i386 libpci3:i386"
# 32-bit libraries needed for a 32-bit build
+# includes some 32-bit libraries required by the Android SDK
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
lib32_list="$lib32_list
libasound2:i386
libatk-bridge2.0-0:i386
libatk1.0-0:i386
libatspi2.0-0:i386
libdbus-1-3:i386
+ libegl1:i386
+ libgl1:i386
libglib2.0-0:i386
+ libncurses5:i386
libnss3:i386
libpango-1.0-0:i386
+ libpangocairo-1.0-0:i386
+ libstdc++6:i386
+ libwayland-egl1:i386
libx11-xcb1:i386
libxcomposite1:i386
libxdamage1:i386
libxkbcommon0:i386
libxrandr2:i386
libxtst6:i386
+ zlib1g:i386
"
# Packages that have been removed from this script. Regardless of configuration
@@ -342,6 +344,15 @@ backwards_compatible_list="\
fonts-thai-tlwg
fonts-tlwg-garuda
g++
+ g++-4.8-multilib-arm-linux-gnueabihf
+ gcc-4.8-multilib-arm-linux-gnueabihf
+ g++-9-multilib-arm-linux-gnueabihf
+ gcc-9-multilib-arm-linux-gnueabihf
+ gcc-arm-linux-gnueabihf
+ g++-10-multilib-arm-linux-gnueabihf
+ gcc-10-multilib-arm-linux-gnueabihf
+ g++-10-arm-linux-gnueabihf
+ gcc-10-arm-linux-gnueabihf
git-svn
language-pack-da
language-pack-fr
@@ -358,16 +369,26 @@ backwards_compatible_list="\
libexif12
libexif12:i386
libgbm-dev
+ libgbm-dev-lts-trusty
+ libgbm-dev-lts-xenial
libgconf-2-4:i386
libgconf2-dev
libgl1-mesa-dev
+ libgl1-mesa-dev-lts-trusty
+ libgl1-mesa-dev-lts-xenial
libgl1-mesa-glx:i386
+ libgl1-mesa-glx-lts-trusty:i386
+ libgl1-mesa-glx-lts-xenial:i386
libgles2-mesa-dev
+ libgles2-mesa-dev-lts-trusty
+ libgles2-mesa-dev-lts-xenial
libgtk-3-0:i386
libgtk2.0-0
libgtk2.0-0:i386
libgtk2.0-dev
mesa-common-dev
+ mesa-common-dev-lts-trusty
+ mesa-common-dev-lts-xenial
msttcorefonts
python-dev
python-setuptools
@@ -429,17 +450,6 @@ else
backwards_compatible_list="${backwards_compatible_list} php5-cgi libapache2-mod-php5"
fi
-case $distro_codename in
- xenial)
- backwards_compatible_list+=" \
- libgbm-dev-lts-xenial
- libgl1-mesa-dev-lts-xenial
- libgl1-mesa-glx-lts-xenial:i386
- libgles2-mesa-dev-lts-xenial
- mesa-common-dev-lts-xenial"
- ;;
-esac
-
# arm cross toolchain packages needed to build chrome on armhf
arm_list="libc6-dev-armhf-cross
linux-libc-dev-armhf-cross
@@ -447,7 +457,7 @@ arm_list="libc6-dev-armhf-cross
# Work around for dependency issue Ubuntu: http://crbug.com/435056
case $distro_codename in
- xenial|bionic)
+ bionic)
arm_list+=" g++-5-multilib-arm-linux-gnueabihf
gcc-5-multilib-arm-linux-gnueabihf
gcc-arm-linux-gnueabihf"
@@ -655,6 +665,12 @@ else
lib32_list=
fi
+if [ "$do_inst_android" = "1" ]; then
+ echo "Including Android dependencies."
+else
+ echo "Skipping Android dependencies."
+fi
+
if [ "$do_inst_arm" = "1" ]; then
echo "Including ARM cross toolchain."
else
diff --git a/build/ios/PRESUBMIT.py b/build/ios/PRESUBMIT.py
new file mode 100644
index 000000000..0c7a35580
--- /dev/null
+++ b/build/ios/PRESUBMIT.py
@@ -0,0 +1,20 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+PRESUBMIT_VERSION = '2.0.0'
+
+USE_PYTHON3 = True
+
+TEST_PATTERNS = [r'.+_test.py$']
+
+
+def CheckUnitTests(input_api, output_api):
+ # Runs all unit tests under the build/ios folder.
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api,
+ output_api,
+ '.',
+ files_to_check=TEST_PATTERNS,
+ run_on_python2=False,
+ skip_shebang_check=True)
diff --git a/build/ios/presubmit_support.py b/build/ios/presubmit_support.py
new file mode 100644
index 000000000..773d1586e
--- /dev/null
+++ b/build/ios/presubmit_support.py
@@ -0,0 +1,39 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Presubmit helpers for ios
+
+See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into depot_tools.
+"""
+
+from . import update_bundle_filelist
+
+
+def CheckBundleData(input_api, output_api, base, globroot='//'):
+ root = input_api.change.RepositoryRoot()
+ filelist = input_api.os_path.join(input_api.PresubmitLocalPath(),
+ base + '.filelist')
+ globlist = input_api.os_path.join(input_api.PresubmitLocalPath(),
+ base + '.globlist')
+ if globroot.startswith('//'):
+ globroot = input_api.os_path.join(input_api.change.RepositoryRoot(),
+ globroot[2:])
+ else:
+ globroot = input_api.os_path.join(input_api.PresubmitLocalPath(), globroot)
+ if update_bundle_filelist.process_filelist(filelist,
+ globlist,
+ globroot,
+ check=True,
+ verbose=input_api.verbose) == 0:
+ return []
+ else:
+ script = input_api.os_path.join(input_api.change.RepositoryRoot(), 'build',
+ 'ios', 'update_bundle_filelist.py')
+
+ return [
+ output_api.PresubmitError(
+ 'Filelist needs to be re-generated. Please run \'python3 %s %s %s '
+ '%s\' and include the changes in this CL' %
+ (script, filelist, globlist, globroot))
+ ]
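Editor's note: CheckBundleData() above is designed to be called from a directory-local PRESUBMIT.py. The sketch below is illustrative only; the 'resources' base name and the import-path handling are assumptions, not part of this change.

```
# Hypothetical PRESUBMIT.py in a directory that owns resources.filelist and
# resources.globlist (the base name 'resources' is made up for illustration).
PRESUBMIT_VERSION = '2.0.0'
USE_PYTHON3 = True


def CheckResourcesBundleData(input_api, output_api):
  # Assumes //build/ios is importable from the presubmit environment; real
  # callers may need to adjust sys.path first.
  from build.ios import presubmit_support
  return presubmit_support.CheckBundleData(input_api, output_api, 'resources')
```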
diff --git a/build/ios/presubmit_support_test.py b/build/ios/presubmit_support_test.py
new file mode 100755
index 000000000..6bbc6024e
--- /dev/null
+++ b/build/ios/presubmit_support_test.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import os.path
+import sys
+import tempfile
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+
+from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi
+from build.ios import presubmit_support
+
+_TEMP_FILELIST_CONTENTS = """# Copyright %d The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+""" % (datetime.datetime.now().year)
+
+_TEMP_GLOBLIST_CONTENTS = """**
+-*.globlist
+-*.filelist
+"""
+
+
+class BundleDataPresubmit(unittest.TestCase):
+ def setUp(self):
+ self.mock_input_api = MockInputApi()
+ self.mock_input_api.change.RepositoryRoot = lambda: os.path.join(
+ os.path.dirname(__file__), '..', '..')
+ self.mock_input_api.PresubmitLocalPath = lambda: os.path.dirname(__file__)
+ self.mock_output_api = MockOutputApi()
+
+ def testBasic(self):
+ """
+        Checks that a glob can be expanded to build a file list and that,
+        if it matches the existing file list, no error is reported.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/basic', '.')
+ self.assertEqual([], results)
+
+ def testExclusion(self):
+ """
+ Check that globs can be used to exclude files from file lists.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/exclusions', '.')
+ self.assertEqual([], results)
+
+ def testDifferentLocalPath(self):
+ """
+ Checks the case where the presubmit directory is not the same as the
+ globroot, but it is still local (i.e., not relative to the repository
+ root)
+ """
+ results = presubmit_support.CheckBundleData(
+ self.mock_input_api, self.mock_output_api,
+ 'test_data/different_local_path', 'test_data')
+ self.assertEqual([], results)
+
+ def testRepositoryRelative(self):
+ """
+ Checks the case where globs are relative to the repository root.
+ """
+ results = presubmit_support.CheckBundleData(
+ self.mock_input_api, self.mock_output_api,
+ 'test_data/repository_relative')
+ self.assertEqual([], results)
+
+ def testMissingFilesInFilelist(self):
+ """
+ Checks that we do indeed return an error if the filelist is missing a
+ file. In this case, all of the test .filelist and .globlist files are
+ excluded.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/missing', '.')
+ self.assertEqual(1, len(results))
+
+ def testExtraFilesInFilelist(self):
+ """
+ Checks the case where extra files have been included in the file list.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/extra', '.')
+ self.assertEqual(1, len(results))
+
+ def testOrderInsensitive(self):
+ """
+ Checks that we do not trigger an error for cases where the file list is
+ correct, but in a different order than the globlist expansion.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/reorder', '.')
+ self.assertEqual([], results)
+
+ def testUnexpectedHeader(self):
+ """
+        Checks that an unexpected header in a file list causes an error.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/comment', '.')
+ self.assertEqual(1, len(results))
+
+ def testUntrackedFiles(self):
+ """
+ Checks that the untracked files are correctly ignored.
+ """
+ with tempfile.TemporaryDirectory() as temp_dir:
+ with open(os.path.join(temp_dir, 'untracked.filelist'), 'w') as f:
+ f.write(_TEMP_FILELIST_CONTENTS)
+ with open(os.path.join(temp_dir, 'untracked.globlist'), 'w') as f:
+ f.write(_TEMP_GLOBLIST_CONTENTS)
+ with open(os.path.join(temp_dir, 'untracked.txt'), 'w') as f:
+ f.write('Hello, World!')
+ path = os.path.join(temp_dir, 'untracked')
+ self.mock_input_api.change.RepositoryRoot = lambda: temp_dir
+ self.mock_input_api.PresubmitLocalPath = lambda: temp_dir
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'untracked')
+ self.assertEqual([], results)
+
+ def testExcludeDuplicates(self):
+ """
+ Checks that duplicate filenames are not added to a filelist.
+ """
+ results = presubmit_support.CheckBundleData(self.mock_input_api,
+ self.mock_output_api,
+ 'test_data/duplicates', '.')
+ self.assertEqual([], results)
+
+ def testCheckOutsideGloblistDir(self):
+ """
+ Checks that including files outside the globlist directory is an error.
+ """
+ results = presubmit_support.CheckBundleData(
+ self.mock_input_api, self.mock_output_api,
+ 'test_data/outside_globlist_dir', '.')
+ self.assertEqual(1, len(results))
+
+ def testCheckIgnoreOutsideGloblistDir(self):
+ """
+ Checks that files outside the globlist directory can be ignored.
+ """
+ results = presubmit_support.CheckBundleData(
+ self.mock_input_api, self.mock_output_api,
+ 'test_data/ignore_outside_globlist_dir', '.')
+ self.assertEqual([], results)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/build/ios/test_data/bar.html b/build/ios/test_data/bar.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/build/ios/test_data/bar.html
diff --git a/build/ios/test_data/basic.filelist b/build/ios/test_data/basic.filelist
new file mode 100644
index 000000000..496dcbda1
--- /dev/null
+++ b/build/ios/test_data/basic.filelist
@@ -0,0 +1,7 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/basic.globlist b/build/ios/test_data/basic.globlist
new file mode 100644
index 000000000..b4d7d66aa
--- /dev/null
+++ b/build/ios/test_data/basic.globlist
@@ -0,0 +1,5 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/subdirectory/*
diff --git a/build/ios/test_data/comment.filelist b/build/ios/test_data/comment.filelist
new file mode 100644
index 000000000..0f6c30fcd
--- /dev/null
+++ b/build/ios/test_data/comment.filelist
@@ -0,0 +1,2 @@
+# This comment is an unexpected header.
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/comment.globlist b/build/ios/test_data/comment.globlist
new file mode 100644
index 000000000..93c82c612
--- /dev/null
+++ b/build/ios/test_data/comment.globlist
@@ -0,0 +1,7 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Some comment followed by an empty line.
+
+test_data/subdirectory/*
diff --git a/build/ios/test_data/different_local_path.filelist b/build/ios/test_data/different_local_path.filelist
new file mode 100644
index 000000000..a45d180b2
--- /dev/null
+++ b/build/ios/test_data/different_local_path.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+bar.html
+foo.css
+subdirectory/baz.txt
diff --git a/build/ios/test_data/different_local_path.globlist b/build/ios/test_data/different_local_path.globlist
new file mode 100644
index 000000000..a17a781de
--- /dev/null
+++ b/build/ios/test_data/different_local_path.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+**
+-**list
diff --git a/build/ios/test_data/duplicates.filelist b/build/ios/test_data/duplicates.filelist
new file mode 100644
index 000000000..496dcbda1
--- /dev/null
+++ b/build/ios/test_data/duplicates.filelist
@@ -0,0 +1,7 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/duplicates.globlist b/build/ios/test_data/duplicates.globlist
new file mode 100644
index 000000000..79bf591da
--- /dev/null
+++ b/build/ios/test_data/duplicates.globlist
@@ -0,0 +1,7 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/subdirectory/*
+# This duplicate glob should have no effect on the resulting filelist.
+test_data/subdirectory/*
diff --git a/build/ios/test_data/exclusions.filelist b/build/ios/test_data/exclusions.filelist
new file mode 100644
index 000000000..d9e69f187
--- /dev/null
+++ b/build/ios/test_data/exclusions.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/bar.html
+test_data/foo.css
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/exclusions.globlist b/build/ios/test_data/exclusions.globlist
new file mode 100644
index 000000000..92c241a70
--- /dev/null
+++ b/build/ios/test_data/exclusions.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/**
+-test_data/**list
diff --git a/build/ios/test_data/extra.filelist b/build/ios/test_data/extra.filelist
new file mode 100644
index 000000000..3597a457d
--- /dev/null
+++ b/build/ios/test_data/extra.filelist
@@ -0,0 +1,8 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/bar.html
+test_data/foo.css
diff --git a/build/ios/test_data/extra.globlist b/build/ios/test_data/extra.globlist
new file mode 100644
index 000000000..9fe758f1f
--- /dev/null
+++ b/build/ios/test_data/extra.globlist
@@ -0,0 +1,5 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/*.css
diff --git a/build/ios/test_data/foo.css b/build/ios/test_data/foo.css
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/build/ios/test_data/foo.css
diff --git a/build/ios/test_data/ignore_outside_globlist_dir.filelist b/build/ios/test_data/ignore_outside_globlist_dir.filelist
new file mode 100644
index 000000000..a306b7ea0
--- /dev/null
+++ b/build/ios/test_data/ignore_outside_globlist_dir.filelist
@@ -0,0 +1,8 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+presubmit_support_test.py
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/ignore_outside_globlist_dir.globlist b/build/ios/test_data/ignore_outside_globlist_dir.globlist
new file mode 100644
index 000000000..471a0c46f
--- /dev/null
+++ b/build/ios/test_data/ignore_outside_globlist_dir.globlist
@@ -0,0 +1,8 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/subdirectory/*
+# push(ignore-relative)
+presubmit_support_test.py
+# pop(ignore-relative)
diff --git a/build/ios/test_data/missing.filelist b/build/ios/test_data/missing.filelist
new file mode 100644
index 000000000..d9e69f187
--- /dev/null
+++ b/build/ios/test_data/missing.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/bar.html
+test_data/foo.css
+test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/missing.globlist b/build/ios/test_data/missing.globlist
new file mode 100644
index 000000000..267b25246
--- /dev/null
+++ b/build/ios/test_data/missing.globlist
@@ -0,0 +1,8 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This should cover every file in test_data/ and its subdirectories (including
+# test files).
+
+test_data/**
diff --git a/build/ios/test_data/outside_globlist_dir.filelist b/build/ios/test_data/outside_globlist_dir.filelist
new file mode 100644
index 000000000..a81d5ad73
--- /dev/null
+++ b/build/ios/test_data/outside_globlist_dir.filelist
@@ -0,0 +1,8 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/subdirectory/baz.txt
+presubmit_support_test.py
diff --git a/build/ios/test_data/outside_globlist_dir.globlist b/build/ios/test_data/outside_globlist_dir.globlist
new file mode 100644
index 000000000..31bb073bc
--- /dev/null
+++ b/build/ios/test_data/outside_globlist_dir.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/subdirectory/*
+presubmit_support_test.py
diff --git a/build/ios/test_data/reorder.filelist b/build/ios/test_data/reorder.filelist
new file mode 100644
index 000000000..58921bc4f
--- /dev/null
+++ b/build/ios/test_data/reorder.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+test_data/subdirectory/baz.txt
+test_data/foo.css
+test_data/bar.html
diff --git a/build/ios/test_data/reorder.globlist b/build/ios/test_data/reorder.globlist
new file mode 100644
index 000000000..92c241a70
--- /dev/null
+++ b/build/ios/test_data/reorder.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/**
+-test_data/**list
diff --git a/build/ios/test_data/repository_relative.filelist b/build/ios/test_data/repository_relative.filelist
new file mode 100644
index 000000000..796087b1d
--- /dev/null
+++ b/build/ios/test_data/repository_relative.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+//build/ios/test_data/bar.html
+//build/ios/test_data/foo.css
+//build/ios/test_data/subdirectory/baz.txt
diff --git a/build/ios/test_data/repository_relative.globlist b/build/ios/test_data/repository_relative.globlist
new file mode 100644
index 000000000..b7c42100a
--- /dev/null
+++ b/build/ios/test_data/repository_relative.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+//build/ios/test_data/**
+-//build/ios/test_data/**list
diff --git a/build/ios/test_data/subdirectory/baz.txt b/build/ios/test_data/subdirectory/baz.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/build/ios/test_data/subdirectory/baz.txt
diff --git a/build/ios/update_bundle_filelist.py b/build/ios/update_bundle_filelist.py
new file mode 100755
index 000000000..2e21205c3
--- /dev/null
+++ b/build/ios/update_bundle_filelist.py
@@ -0,0 +1,318 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Updates .filelist files using data from corresponding .globlist files (or
+checks whether they are up to date).
+
+bundle_data targets require an explicit source list, but maintaining these large
+lists can be cumbersome. This script aims to simplify the process of updating
+these lists by either expanding globs to update file lists or checking that an
+existing file list matches such an expansion (i.e., during presubmit).
+
+The .globlist file contains a list of globs that will be expanded to either
+compare or replace a corresponding .filelist. It is possible to exclude items
+from the file list with globs as well. These lines are prefixed with '-' and are
+processed in order, so be sure that exclusions follow inclusions in the list of
+globs. Comments and empty lines are permitted in .globlist files; comments are
+prefixed with '#'.
+
+By convention, the base name of the .globlist and .filelist files matches the
+label of their corresponding bundle_data from the .gn file. In order to ensure
+that these filelists don't get stale, there should also be a PRESUBMIT.py
+which uses this script to check that the list is up to date.
+
+By default, the script will update the file list to match the expanded globs.
+"""
+
+import argparse
+import datetime
+import difflib
+import glob
+import os.path
+import re
+import subprocess
+import sys
+
+# Character to set colors in terminal. Taken, along with the printing routine
+# below, from update_deps.py.
+TERMINAL_ERROR_COLOR = '\033[91m'
+TERMINAL_RESET_COLOR = '\033[0m'
+
+_HEADER = """# Copyright %d The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+""" % (datetime.datetime.now().year)
+
+_HEADER_PATTERN = re.compile(r"""# Copyright [0-9]+ The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+""")
+
+_HEADER_HEIGHT = 6
+
+_START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# push(ignore-relative)'
+_STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# pop(ignore-relative)'
+
+
+def parse_filelist(filelist_name):
+ try:
+ with open(filelist_name) as filelist:
+ unfiltered = [l for l in filelist]
+ header = ''.join(unfiltered[:_HEADER_HEIGHT])
+ files = sorted(l.strip() for l in unfiltered[_HEADER_HEIGHT:])
+ return (files, header)
+ except Exception as e:
+ print_error(f'Could not read file list: {filelist_name}', f'{type(e)}: {e}')
+    return ([], '')
+
+
+def get_git_command_name():
+ if sys.platform.startswith('win'):
+ return 'git.bat'
+ return 'git'
+
+
+def get_tracked_files(directory, globroot, repository_root_relative, verbose):
+ try:
+ git_cmd = get_git_command_name()
+ with subprocess.Popen([git_cmd, 'ls-files', '--error-unmatch', directory],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=globroot) as p:
+ output = p.communicate()
+ if p.returncode != 0:
+ if verbose:
+ print_error(
+ f'Could not gather a list of tracked files in {directory}',
+ f'{output[1]}')
+ return set()
+
+ files = [f.decode('utf-8') for f in output[0].splitlines()]
+
+ # Need paths to be relative to directory in order to match expansions.
+ # This should happen naturally due to cwd above, but we need to take
+ # special care if relative to the repository root.
+ if repository_root_relative:
+ files = ['//' + f for f in files]
+
+ # Handle Windows backslashes
+ files = [f.replace('\\', '/') for f in files]
+
+ return set(files)
+
+ except Exception as e:
+ if verbose:
+ print_error(f'Could not gather a list of tracked files in {directory}',
+ f'{type(e)}: {e}')
+ return set()
+
+
+def combine_potentially_repository_root_relative_paths(a, b):
+ if b.startswith('//'):
+ # If b is relative to the repository root, os.path will consider it absolute
+ # and os.path.join will fail. In this case, we can simply concatenate the
+ # paths.
+ return (a + b, True)
+ else:
+ return (os.path.join(a, b), False)
+
+
+def parse_and_expand_globlist(globlist_name, glob_root):
+ # The following expects glob_root not to end in a trailing slash.
+ if glob_root.endswith('/'):
+ glob_root = glob_root[:-1]
+
+ check_expansions_outside_globlist_dir = True
+ globlist_dir = os.path.dirname(globlist_name)
+
+ with open(globlist_name) as globlist:
+ # Paths in |files| and |to_check| must use unix separators. Using a set
+ # ensures no unwanted duplicates. The files in |to_check| must be in the
+ # globroot or a subdirectory.
+ files = set()
+ to_check = set()
+ for g in globlist:
+ g = g.strip()
+
+ # Ignore blank lines
+ if not g:
+ continue
+
+ # Toggle error checking.
+ if g == _START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR:
+ check_expansions_outside_globlist_dir = False
+ elif g == _STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR:
+ check_expansions_outside_globlist_dir = True
+
+ # Ignore comments.
+ if not g or g.startswith('#'):
+ continue
+
+ # Exclusions are prefixed with '-'.
+ is_exclusion = g.startswith('-')
+ if is_exclusion:
+ g = g[1:]
+
+ (combined,
+ root_relative) = combine_potentially_repository_root_relative_paths(
+ glob_root, g)
+
+ prefix_size = len(glob_root)
+ if not root_relative:
+ # We need to account for the separator.
+ prefix_size += 1
+
+ expansion = glob.glob(combined, recursive=True)
+
+ # Filter out directories.
+ expansion = [f for f in expansion if os.path.isfile(f)]
+
+ if check_expansions_outside_globlist_dir:
+ for f in expansion:
+ relative = os.path.relpath(f, globlist_dir)
+ if relative.startswith('..'):
+ raise Exception(f'Globlist expansion outside globlist dir: {f}')
+
+ # Make relative to |glob_root|.
+ expansion = [f[prefix_size:] for f in expansion]
+
+ # Handle Windows backslashes
+ expansion = [f.replace('\\', '/') for f in expansion]
+
+ # Since paths in |expansion| only use unix separators, it is safe to
+ # compare for both the purpose of exclusion and addition.
+ if is_exclusion:
+ files = files.difference(expansion)
+ else:
+ files = files.union(expansion)
+
+ # Return a sorted list.
+ return sorted(files)
+
+
+def compare_lists(a, b):
+ differ = difflib.Differ()
+ full_diff = differ.compare(a, b)
+ lines = [d for d in full_diff if not d.startswith(' ')]
+ additions = [l[2:] for l in lines if l.startswith('+ ')]
+ removals = [l[2:] for l in lines if l.startswith('- ')]
+ return (additions, removals)
+
+
+def write_filelist(filelist_name, files, header):
+ try:
+ with open(filelist_name, 'w', encoding='utf-8', newline='') as filelist:
+ if not _HEADER_PATTERN.search(header):
+ header = _HEADER
+ filelist.write(header)
+ for file in files:
+ filelist.write(f'{file}\n')
+ except Exception as e:
+ print_error(f'Could not write file list: {filelist_name}',
+ f'{type(e)}: {e}')
+ return []
+
+
+def process_filelist(filelist, globlist, globroot, check=False, verbose=False):
+ files_from_globlist = []
+ try:
+ files_from_globlist = parse_and_expand_globlist(globlist, globroot)
+ except Exception as e:
+ if verbose:
+ print_error(f'Could not read glob list: {globlist}', f'{type(e)}: {e}')
+ return 1
+
+ (files, header) = parse_filelist(filelist)
+
+ (additions, removals) = compare_lists(files, files_from_globlist)
+ to_ignore = set()
+
+ # Ignore additions of untracked files.
+ if additions:
+ directories = set([os.path.dirname(f) for f in additions])
+ tracked_files = set()
+ for d in directories:
+ (combined,
+ root_relative) = combine_potentially_repository_root_relative_paths(
+ globroot, d)
+ relative = os.path.relpath(combined, globroot)
+ tracked_files = tracked_files.union(
+ get_tracked_files(relative, globroot, root_relative, verbose))
+ to_ignore = set(additions).difference(tracked_files)
+ additions = [f for f in additions if f in tracked_files]
+
+ files_from_globlist = [f for f in files_from_globlist if f not in to_ignore]
+
+ if check:
+ if not _HEADER_PATTERN.search(header):
+ if verbose:
+ print_error(f'Unexpected header for {filelist}', f'{header}')
+ return 1
+ if not additions and not removals:
+ return 0
+ if verbose:
+ pretty_additions = ['+ ' + f for f in additions]
+ pretty_removals = ['- ' + f for f in removals]
+ pretty_diff = '\n'.join(pretty_additions + pretty_removals)
+ print_error('File list does not match glob expansion', f'{pretty_diff}')
+ return 1
+ else:
+ write_filelist(filelist, files_from_globlist, header)
+ return 0
+
+
+def main(args):
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
+ parser.add_argument('filelist', help='Contains one file per line')
+ parser.add_argument('globlist',
+ help='Contains globs that, when expanded, '
+ 'should match the filelist. Use '
+ '--help for details on syntax')
+ parser.add_argument('globroot',
+ help='Directory from which globs are relative')
+ parser.add_argument('-c',
+ '--check',
+ action='store_true',
+ help='Prevents modifying the file list')
+ parser.add_argument('-v',
+ '--verbose',
+ action='store_true',
+ help='Use this to print details on differences')
+  args = parser.parse_args(args)
+ return process_filelist(args.filelist,
+ args.globlist,
+ args.globroot,
+ check=args.check,
+ verbose=args.verbose)
+
+
+def print_error(error_message, error_info):
+ """ Print the `error_message` with additional `error_info` """
+ color_start, color_end = adapted_color_for_output(TERMINAL_ERROR_COLOR,
+ TERMINAL_RESET_COLOR)
+
+ error_message = color_start + 'ERROR: ' + error_message + color_end
+ if len(error_info) > 0:
+ error_message = error_message + '\n' + error_info
+ print(error_message, file=sys.stderr)
+
+
+def adapted_color_for_output(color_start, color_end):
+ """ Returns a the `color_start`, `color_end` tuple if the output is a
+ terminal, or empty strings otherwise """
+ if not sys.stdout.isatty():
+ return '', ''
+ return color_start, color_end
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
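Editor's note: as a quick illustration of the API above, the snippet below exercises process_filelist() against the test data added in this change; driving the module this way (rather than via its command line) is just a sketch.

```
# Sketch: verify (check=True) or regenerate (check=False) a file list from its
# globlist. Paths assume the current working directory is build/ios/.
import update_bundle_filelist

status = update_bundle_filelist.process_filelist(
    'test_data/basic.filelist',   # file list to check or rewrite
    'test_data/basic.globlist',   # globs defining the expected contents
    '.',                          # globroot: directory the globs are relative to
    check=True,                   # True: report mismatches; False: rewrite
    verbose=True)
print('up to date' if status == 0 else 'needs regeneration')
```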
diff --git a/build/lacros/README.md b/build/lacros/README.md
new file mode 100644
index 000000000..be0a243f0
--- /dev/null
+++ b/build/lacros/README.md
@@ -0,0 +1,11 @@
+This folder contains code for running lacros in tests.
+
+This includes:
+* test_runner.py
+  Runs linux-lacros related tests.
+
+* mojo_connection_lacros_launcher
+  Script for launching lacros for debugging.
+
+* lacros_resource_sizes.py
+  Script for monitoring the lacros binary size, used by builders.
diff --git a/build/lacros/test_runner.py b/build/lacros/test_runner.py
index 13cc24e43..7786b3261 100755
--- a/build/lacros/test_runner.py
+++ b/build/lacros/test_runner.py
@@ -63,14 +63,10 @@ _SRC_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
sys.path.append(os.path.join(_SRC_ROOT, 'third_party', 'depot_tools'))
-# Base GS URL to store prebuilt ash-chrome.
-_GS_URL_BASE = 'gs://ash-chromium-on-linux-prebuilts/x86_64'
-# Latest file version.
-_GS_URL_LATEST_FILE = _GS_URL_BASE + '/latest/ash-chromium.txt'
+# The cipd path for prebuilt ash chrome.
+_ASH_CIPD_PATH = 'chromium/testing/linux-ash-chromium/x86_64/ash.zip'
-# GS path to the zipped ash-chrome build with any given version.
-_GS_ASH_CHROME_PATH = 'ash-chromium.zip'
# Directory to cache downloaded ash-chrome versions to avoid re-downloading.
_PREBUILT_ASH_CHROME_DIR = os.path.join(os.path.dirname(__file__),
@@ -170,28 +166,71 @@ def _remove_unused_ash_chrome_versions(version_to_skip):
'past %d days', p, days)
shutil.rmtree(p)
-def _GsutilCopyWithRetry(gs_path, local_name, retry_times=3):
- """Gsutil copy with retry.
+
+def _GetLatestVersionOfAshChrome():
+ '''Get the latest ash chrome version.
+
+  Get the package version info using the canary ref.
+
+ Returns:
+ A string with the chrome version.
+
+ Raises:
+ RuntimeError: if we can not get the version.
+ '''
+ cp = subprocess.run(
+ ['cipd', 'describe', _ASH_CIPD_PATH, '-version', 'canary'],
+ capture_output=True)
+ assert (cp.returncode == 0)
+ groups = re.search(r'version:(?P<version>[\d\.]+)', str(cp.stdout))
+ if not groups:
+ raise RuntimeError('Can not find the version. Error message: %s' %
+ cp.stdout)
+ return groups.group('version')
+
+
+def _DownloadAshChromeFromCipd(path, version):
+ '''Download the ash chrome with the requested version.
Args:
- gs_path: The gs path for remote location.
- local_name: The local file name.
- retry_times: The total try times if the gsutil call fails.
+ path: string for the downloaded ash chrome folder.
+ version: string for the ash chrome version.
+ '''
+ with tempfile.TemporaryDirectory() as temp_dir:
+ ensure_file_path = os.path.join(temp_dir, 'ensure_file.txt')
+ f = open(ensure_file_path, 'w+')
+ f.write(_ASH_CIPD_PATH + ' version:' + version)
+ f.close()
+ subprocess.run(
+ ['cipd', 'ensure', '-ensure-file', ensure_file_path, '-root', path])
+
+
+def _DoubleCheckDownloadedAshChrome(path, version):
+  '''Check that the downloaded ash is the expected version.
+
+ Double check by running the chrome binary with --version.
+
+ Args:
+ path: string for the downloaded ash chrome folder.
+ version: string for the expected ash chrome version.
Raises:
- RuntimeError: If failed to download the specified version, for example,
- if the version is not present on gcs.
- """
- import download_from_google_storage
- gsutil = download_from_google_storage.Gsutil(
- download_from_google_storage.GSUTIL_DEFAULT_PATH)
- exit_code = 1
- retry = 0
- while exit_code and retry < retry_times:
- retry += 1
- exit_code = gsutil.call('cp', gs_path, local_name)
- if exit_code:
- raise RuntimeError('Failed to download: "%s"' % gs_path)
+ RuntimeError if no test_ash_chrome binary can be found.
+ '''
+ test_ash_chrome = os.path.join(path, 'test_ash_chrome')
+ if not os.path.exists(test_ash_chrome):
+ raise RuntimeError('Can not find test_ash_chrome binary under %s' % path)
+ cp = subprocess.run([test_ash_chrome, '--version'], capture_output=True)
+ assert (cp.returncode == 0)
+ if str(cp.stdout).find(version) == -1:
+ logging.warning(
+ 'The downloaded ash chrome version is %s, but the '
+ 'expected ash chrome is %s. There is a version mismatch. Please '
+ 'file a bug to OS>Lacros so someone can take a look.' %
+ (cp.stdout, version))
def _DownloadAshChromeIfNecessary(version):
@@ -219,38 +258,11 @@ def _DownloadAshChromeIfNecessary(version):
shutil.rmtree(ash_chrome_dir, ignore_errors=True)
os.makedirs(ash_chrome_dir)
- with tempfile.NamedTemporaryFile() as tmp:
- logging.info('Ash-chrome version: %s', version)
- gs_path = _GS_URL_BASE + '/' + version + '/' + _GS_ASH_CHROME_PATH
- _GsutilCopyWithRetry(gs_path, tmp.name)
-
- # https://bugs.python.org/issue15795. ZipFile doesn't preserve permissions.
- # And in order to workaround the issue, this function is created and used
- # instead of ZipFile.extractall().
- # The solution is copied from:
- # https://stackoverflow.com/questions/42326428/zipfile-in-python-file-permission
- def ExtractFile(zf, info, extract_dir):
- zf.extract(info.filename, path=extract_dir)
- perm = info.external_attr >> 16
- os.chmod(os.path.join(extract_dir, info.filename), perm)
-
- with zipfile.ZipFile(tmp.name, 'r') as zf:
- # Extra all files instead of just 'chrome' binary because 'chrome' needs
- # other resources and libraries to run.
- for info in zf.infolist():
- ExtractFile(zf, info, ash_chrome_dir)
-
+ _DownloadAshChromeFromCipd(ash_chrome_dir, version)
+ _DoubleCheckDownloadedAshChrome(ash_chrome_dir, version)
_remove_unused_ash_chrome_versions(version)
-def _GetLatestVersionOfAshChrome():
- """Returns the latest version of uploaded ash-chrome."""
- with tempfile.NamedTemporaryFile() as tmp:
- _GsutilCopyWithRetry(_GS_URL_LATEST_FILE, tmp.name)
- with open(tmp.name, 'r') as f:
- return f.read().strip()
-
-
def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file,
enable_mojo_crosapi, ash_ready_file):
"""Waits for Ash-Chrome to be up and running and returns a boolean indicator.
@@ -490,8 +502,10 @@ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome
'--user-data-dir=%s' % tmp_ash_data_dir_name,
'--enable-wayland-server',
'--no-startup-window',
+ '--disable-input-event-activation-protection',
'--disable-lacros-keep-alive',
'--disable-login-lacros-opening',
+ '--enable-field-trial-config',
'--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly',
'--ash-ready-file-path=%s' % ash_ready_file,
'--wayland-server-socket=%s' % ash_wayland_socket_name,
@@ -595,6 +609,7 @@ lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome
if enable_mojo_crosapi:
forward_args.append(lacros_mojo_socket_arg)
+ forward_args.append('--ash-chrome-path=' + ash_chrome_file)
test_env = os.environ.copy()
test_env['WAYLAND_DISPLAY'] = ash_wayland_socket_name
test_env['EGL_PLATFORM'] = 'surfaceless'
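Editor's note: the new CIPD-based helpers above chain together roughly as follows. This is an illustrative sketch (it assumes the cipd binary is on PATH), not an additional code path in this change.

```
# Sketch: resolve the latest canary ash-chrome version from CIPD, download it
# into the local cache if needed, and double-check the binary's version.
version = _GetLatestVersionOfAshChrome()   # queries the 'canary' ref via cipd
_DownloadAshChromeIfNecessary(version)     # cipd ensure + version sanity check
```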
diff --git a/build/lacros/test_runner_test.py b/build/lacros/test_runner_test.py
index bfb59b3b2..77f7325f2 100755
--- a/build/lacros/test_runner_test.py
+++ b/build/lacros/test_runner_test.py
@@ -103,8 +103,10 @@ class TestRunnerTest(unittest.TestCase):
'--user-data-dir=/tmp/ash-data',
'--enable-wayland-server',
'--no-startup-window',
+ '--disable-input-event-activation-protection',
'--disable-lacros-keep-alive',
'--disable-login-lacros-opening',
+ '--enable-field-trial-config',
'--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly',
'--ash-ready-file-path=/tmp/ash-data/ash_ready.txt',
'--wayland-server-socket=wayland-exo',
@@ -124,6 +126,7 @@ class TestRunnerTest(unittest.TestCase):
command,
'--test-launcher-filter-file=/a/b/filter',
'--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock',
+ '--ash-chrome-path=' + ash_chrome_args[0],
], test_args)
else:
self.assertListEqual(test_args[:len(command_parts)], command_parts)
diff --git a/build/landmines.py b/build/landmines.py
index 2078b2472..844ee3808 100755
--- a/build/landmines.py
+++ b/build/landmines.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/locale_tool.py b/build/locale_tool.py
index 8eda9cd7e..c9fd395b4 100755
--- a/build/locale_tool.py
+++ b/build/locale_tool.py
@@ -28,7 +28,6 @@ inside its <outputs> section that breaks the script. The check will fail, and
trying to fix it too, but at least the file will not be modified.
"""
-from __future__ import print_function
import argparse
import json
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
index 50c1e3c1d..3dcc4d5d3 100755
--- a/build/mac/find_sdk.py
+++ b/build/mac/find_sdk.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -21,7 +21,6 @@ Sample Output:
10.14
"""
-from __future__ import print_function
import os
import plistlib
diff --git a/build/mac/should_use_hermetic_xcode.py b/build/mac/should_use_hermetic_xcode.py
index 9ef0c2918..e4cea4a5a 100755
--- a/build/mac/should_use_hermetic_xcode.py
+++ b/build/mac/should_use_hermetic_xcode.py
@@ -15,7 +15,6 @@ Usage:
python should_use_hermetic_xcode.py <target_os>
"""
-from __future__ import print_function
import argparse
import os
diff --git a/build/nocompile.gni b/build/nocompile.gni
index 093cf66eb..942ad9ece 100644
--- a/build/nocompile.gni
+++ b/build/nocompile.gni
@@ -63,6 +63,10 @@ import("//build/config/python.gni")
import("//build/toolchain/toolchain.gni")
import("//testing/test.gni")
+if (is_mac) {
+ import("//build/config/mac/mac_sdk.gni")
+}
+
declare_args() {
# TODO(crbug.com/105388): make sure no-compile test is not flaky.
enable_nocompile_tests = (is_linux || is_chromeos || is_apple) && is_clang &&
@@ -110,8 +114,11 @@ if (enable_nocompile_tests) {
"-Wno-implicit-int-float-conversion",
]
- if (is_apple && host_os != "mac") {
- args += [ "--target=x86_64-apple-macos" ]
+ if (is_mac && host_os != "mac") {
+ args += [
+ "--target=x86_64-apple-macos",
+ "-mmacos-version-min=$mac_deployment_target",
+ ]
}
# Iterate over any extra include dirs and append them to the command line.
diff --git a/build/private_code_test/BUILD.gn b/build/private_code_test/BUILD.gn
new file mode 100644
index 000000000..8fcdd5407
--- /dev/null
+++ b/build/private_code_test/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+import("//build_overrides/build.gni")
+import("private_code_test.gni")
+
+action("private_paths") {
+ script = "list_gclient_deps.py"
+ outputs = [ "$target_gen_dir/private_paths.txt" ]
+ args = [
+ "--source-filter",
+ "chrome-internal",
+ "--output",
+ rebase_path(outputs[0], root_build_dir),
+ ]
+ inputs = [ "//../.gclient_entries" ]
+}
+
+# --collect-inputs-only requires a source_set dep or !is_component_build.
+if (!is_component_build) {
+ action("private_code_test_gclient_deps") {
+ script = "list_gclient_deps.py"
+ outputs = [ "$target_gen_dir/test_private_paths.txt" ]
+ args = [
+ "--source-filter",
+ "v8.git",
+ "--output",
+ rebase_path(outputs[0], root_build_dir),
+ ]
+ inputs = [ "//../.gclient_entries" ]
+ }
+
+ shared_library("private_code_test_inputs") {
+ deps = [ "//v8" ]
+ ldflags = [ "--collect-inputs-only" ]
+ }
+
+ # Test that ensures the checker fails when it is supposed to.
+ private_code_test("private_code_failure_test") {
+ linker_inputs_dep = ":private_code_test_inputs"
+ private_paths_dep = ":private_code_test_gclient_deps"
+ private_paths_file = "$target_gen_dir/test_private_paths.txt"
+ expect_failure = true
+ }
+}
diff --git a/build/private_code_test/README.md b/build/private_code_test/README.md
new file mode 100644
index 000000000..75329b02a
--- /dev/null
+++ b/build/private_code_test/README.md
@@ -0,0 +1,36 @@
+# Private Code Test
+
+This directory provides a mechanism for testing that native code does not link
+in object files from unwanted directories. The test finds all linker inputs and
+checks that none live inside a list of internal paths.
+
+Original bug: https://bugs.chromium.org/p/chromium/issues/detail?id=1266989
+
+## Determining Internal Directories
+
+This is done by parsing the `.gclient_entries` file for all paths coming from
+https://chrome-internal.googlesource.com. I chose this approach since it is
+simple.
+
+The main alternative I found was to use `gclient flatten`. Example output:
+
+```
+ # src -> src-internal
+ "src-internal": {
+ "url": "https://chrome-internal.googlesource.com/chrome/src-internal.git@c649c6a155fe65c3730e2d663d7d2058d33bf1f9",
+ "condition": 'checkout_src_internal',
+ },
+```
+
+* Paths could be found in this way by looking for `checkout_src_internal`
+ within `condition`, and by looking for the comment line for `recurse_deps`
+ that went through an internal repo.
+
+## Determining Linker Inputs
+
+This is done by performing a custom link step with a linker that just records
+inputs. This seemed like the simplest approach.
+
+Two alternatives:
+1) Dump paths found in debug information.
+2) Scan a linker map file for input paths.
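Editor's note: a condensed sketch of the .gclient_entries parsing described above (the actual implementation is list_gclient_deps.py below); the default file name and filter string here are illustrative.

```
# Sketch: .gclient_entries is a Python fragment defining an `entries` dict that
# maps checkout paths to repo URLs; "private" paths are those whose URL matches
# the chrome-internal filter.
import pathlib


def internal_paths(entries_file='.gclient_entries',
                   source_filter='chrome-internal'):
  scope = {}
  exec(pathlib.Path(entries_file).read_text(), scope)
  return sorted(path for path, url in scope['entries'].items()
                if url and source_filter in url)
```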
diff --git a/build/private_code_test/list_gclient_deps.py b/build/private_code_test/list_gclient_deps.py
new file mode 100755
index 000000000..6a34fc4ab
--- /dev/null
+++ b/build/private_code_test/list_gclient_deps.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import pathlib
+import sys
+
+_REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
+_ENTRIES_FILE = _REPO_ROOT / '.gclient_entries'
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--source-filter', required=True)
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args()
+
+ source_filter = args.source_filter
+
+  # Ninja validates that the file exists since it's marked as an input.
+  text = ''
+  try:
+    text = _ENTRIES_FILE.read_text()
+ result = {}
+ exec(text, result)
+ entries = result['entries']
+ private_dirs = sorted(d for d, s in entries.items()
+ if s and source_filter in s)
+ except Exception as e:
+ # Make the test fail rather than the compile step so that failures here do
+ # not prevent other bot functionality.
+ private_dirs = [
+ '# ERROR parsing .gclient_entries',
+ str(e), '', 'File was:', text
+ ]
+
+ pathlib.Path(args.output).write_text('\n'.join(private_dirs) + '\n')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/private_code_test/private_code_test.gni b/build/private_code_test/private_code_test.gni
new file mode 100644
index 000000000..6ce82f032
--- /dev/null
+++ b/build/private_code_test/private_code_test.gni
@@ -0,0 +1,63 @@
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//testing/test.gni")
+
+template("private_code_test") {
+ isolated_script_test(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ ])
+ script = "//build/private_code_test/private_code_test.py"
+ _linker_inputs_dep = invoker.linker_inputs_dep
+ if (shlib_prefix != "") {
+ _so_name = shlib_prefix + get_label_info(_linker_inputs_dep, "name")
+ _so_name = string_replace(_so_name,
+ "${shlib_prefix}${shlib_prefix}",
+ shlib_prefix)
+ }
+ _dir = get_label_info(_linker_inputs_dep, "root_out_dir")
+ if (is_android) {
+ _dir += "/lib.unstripped"
+ }
+ _linker_inputs_file = "$_dir/${_so_name}$shlib_extension"
+ if (defined(invoker.private_paths_dep)) {
+ _private_paths_dep = invoker.private_paths_dep
+ _private_paths_file = invoker.private_paths_file
+ } else {
+ _private_paths_dep =
+ "//build/private_code_test:private_paths($default_toolchain)"
+ _private_paths_file =
+ get_label_info(_private_paths_dep, "target_gen_dir") +
+ "/private_paths.txt"
+ }
+
+ data_deps = [
+ _linker_inputs_dep,
+ _private_paths_dep,
+ ]
+ args = [
+ "--linker-inputs",
+ "@WrappedPath(" + rebase_path(_linker_inputs_file, root_build_dir) + ")",
+ "--private-paths-file",
+ "@WrappedPath(" + rebase_path(_private_paths_file, root_build_dir) + ")",
+ "--root-out-dir",
+ rebase_path(get_label_info(_linker_inputs_dep, "root_out_dir"),
+ root_build_dir),
+ ]
+ if (defined(invoker.allowed_violations)) {
+ foreach(_glob, invoker.allowed_violations) {
+ args += [
+ "--allow-violation",
+ _glob,
+ ]
+ }
+ }
+ if (defined(invoker.expect_failure) && invoker.expect_failure) {
+ args += [ "--expect-failure" ]
+ }
+ }
+}
diff --git a/build/private_code_test/private_code_test.py b/build/private_code_test/private_code_test.py
new file mode 100755
index 000000000..a164741a4
--- /dev/null
+++ b/build/private_code_test/private_code_test.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests that no linker inputs are from private paths."""
+
+import argparse
+import fnmatch
+import os
+import pathlib
+import sys
+
+_DIR_SRC_ROOT = pathlib.Path(__file__).resolve().parents[2]
+
+
+def _print_paths(paths, limit):
+ for path in paths[:limit]:
+ print(path)
+ if len(paths) > limit:
+ print(f'... and {len(paths) - limit} more.')
+ print()
+
+
+def _apply_allowlist(found, globs):
+ ignored_paths = []
+ new_found = []
+ for path in found:
+ for pattern in globs:
+ if fnmatch.fnmatch(path, pattern):
+ ignored_paths.append(path)
+ break
+ else:
+ new_found.append(path)
+ return new_found, ignored_paths
+
+
+def _find_private_paths(linker_inputs, private_paths, root_out_dir):
+ seen = set()
+ found = []
+ for linker_input in linker_inputs:
+ dirname = os.path.dirname(linker_input)
+ if dirname in seen:
+ continue
+
+ to_check = dirname
+ # Strip ../ prefix.
+ if to_check.startswith('..'):
+ to_check = os.path.relpath(to_check, _DIR_SRC_ROOT)
+ else:
+ if root_out_dir:
+ # Strip secondary toolchain subdir
+ to_check = to_check[len(root_out_dir) + 1:]
+ # Strip top-level dir (e.g. "obj", "gen").
+ parts = to_check.split(os.path.sep, 1)
+ if len(parts) == 1:
+ continue
+ to_check = parts[1]
+
+ if any(to_check.startswith(p) for p in private_paths):
+ found.append(linker_input)
+ else:
+ seen.add(dirname)
+ return found
+
+
+def _read_private_paths(path):
+ text = pathlib.Path(path).read_text()
+
+ # Check if .gclient_entries was not valid. https://crbug.com/1427829
+  if text.startswith('# ERROR'):
+ sys.stderr.write(text)
+ sys.exit(1)
+
+ # Remove src/ prefix from paths.
+ # We care only about paths within src/ since GN cannot reference files
+ # outside of // (and what would the obj/ path for them look like?).
+ ret = [p[4:] for p in text.splitlines() if p.startswith('src/')]
+ if not ret:
+    sys.stderr.write(f'No src/ paths found in {path}\n')
+    sys.stderr.write('This test should not be run on public bots.\n')
+    sys.stderr.write('File contents:\n')
+ sys.stderr.write(text)
+ sys.exit(1)
+
+ return ret
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--linker-inputs',
+ required=True,
+ help='Path to file containing one linker input per line, '
+ 'relative to --root-out-dir')
+ parser.add_argument('--private-paths-file',
+ required=True,
+ help='Path to file containing list of paths that are '
+ 'considered private, relative gclient root.')
+ parser.add_argument('--root-out-dir',
+ required=True,
+ help='See --linker-inputs.')
+ parser.add_argument('--allow-violation',
+ action='append',
+ help='globs of private paths to allow.')
+ parser.add_argument('--expect-failure',
+ action='store_true',
+ help='Invert exit code.')
+ args = parser.parse_args()
+
+ private_paths = _read_private_paths(args.private_paths_file)
+ linker_inputs = pathlib.Path(args.linker_inputs).read_text().splitlines()
+
+ root_out_dir = args.root_out_dir
+ if root_out_dir == '.':
+ root_out_dir = ''
+
+ found = _find_private_paths(linker_inputs, private_paths, root_out_dir)
+
+ if args.allow_violation:
+ found, ignored_paths = _apply_allowlist(found, args.allow_violation)
+ if ignored_paths:
+      print(f'Ignoring {len(ignored_paths)} allowlisted private paths:')
+ _print_paths(sorted(ignored_paths), 10)
+
+ if found:
+ limit = 10 if args.expect_failure else 1000
+ print(f'Found {len(found)} private paths being linked into public code:')
+ _print_paths(found, limit)
+ elif args.expect_failure:
+ print('Expected to find a private path, but none were found.')
+
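+  # Exit successfully only when the presence of violations matches
+  # --expect-failure.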
+ sys.exit(0 if bool(found) == args.expect_failure else 1)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/protoc_java.py b/build/protoc_java.py
index 3c0b313fa..8f25e3a5e 100755
--- a/build/protoc_java.py
+++ b/build/protoc_java.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -15,7 +15,6 @@ It performs the following steps:
4. Creates a new stamp file.
"""
-from __future__ import print_function
import argparse
import os
@@ -23,6 +22,9 @@ import shutil
import subprocess
import sys
+import action_helpers
+import zip_helpers
+
sys.path.append(os.path.join(os.path.dirname(__file__), 'android', 'gyp'))
from util import build_utils
@@ -42,7 +44,7 @@ def _EnforceJavaPackage(proto_srcs):
def main(argv):
parser = argparse.ArgumentParser()
- build_utils.AddDepfileOption(parser)
+ action_helpers.add_depfile_arg(parser)
parser.add_argument('--protoc', required=True, help='Path to protoc binary.')
parser.add_argument('--plugin', help='Path to plugin executable')
parser.add_argument('--proto-path',
@@ -92,12 +94,13 @@ def main(argv):
build_utils.DeleteDirectory(options.java_out_dir)
shutil.copytree(temp_dir, options.java_out_dir)
else:
- build_utils.ZipDir(options.srcjar, temp_dir)
+ with action_helpers.atomic_output(options.srcjar) as f:
+ zip_helpers.zip_directory(f, temp_dir)
if options.depfile:
assert options.srcjar
deps = options.protos + [options.protoc]
- build_utils.WriteDepfile(options.depfile, options.srcjar, deps)
+ action_helpers.write_depfile(options.depfile, options.srcjar, deps)
if options.stamp:
build_utils.Touch(options.stamp)
diff --git a/build/protoc_java.pydeps b/build/protoc_java.pydeps
index c3ed2be81..467907f86 100644
--- a/build/protoc_java.pydeps
+++ b/build/protoc_java.pydeps
@@ -1,6 +1,8 @@
# Generated by running:
# build/print_python_deps.py --root build --output build/protoc_java.pydeps build/protoc_java.py
+action_helpers.py
android/gyp/util/__init__.py
android/gyp/util/build_utils.py
gn_helpers.py
protoc_java.py
+zip_helpers.py
diff --git a/build/redirect_stdout.py b/build/redirect_stdout.py
index eb7d36350..16494fa6c 100644
--- a/build/redirect_stdout.py
+++ b/build/redirect_stdout.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import os
import subprocess
diff --git a/build/rm.py b/build/rm.py
index 5f4aea106..11e8a6439 100755
--- a/build/rm.py
+++ b/build/rm.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,7 +8,6 @@
This module works much like the rm posix command.
"""
-from __future__ import print_function
import argparse
import os
diff --git a/build/rust/BUILD.gn b/build/rust/BUILD.gn
index 29f8a1aea..01831d158 100644
--- a/build/rust/BUILD.gn
+++ b/build/rust/BUILD.gn
@@ -5,11 +5,6 @@
import("//build/buildflag_header.gni")
import("//build/config/rust.gni")
-buildflag_header("rust_buildflags") {
- header = "rust_buildflags.h"
- flags = [ "TOOLCHAIN_HAS_RUST=$toolchain_has_rust" ]
-}
-
if (toolchain_has_rust) {
config("edition_2021") {
rustflags = [ "--edition=2021" ]
@@ -46,10 +41,13 @@ if (toolchain_has_rust) {
# on any first-party Rust target. But in this case, it's conceivable
# that pure-C++ targets will not depend on any 1p Rust code so we'll add
# the Rust stdlib explicitly.
- deps = [
- ":cxx_rustdeps",
- "//build/rust/std",
- ]
+ deps = [ ":cxx_rustdeps" ]
+
+ if (use_local_std_by_default) {
+ deps += [ "//build/rust/std:link_local_std" ]
+ } else {
+ deps += [ "//build/rust/std:link_prebuilt_std" ]
+ }
}
# The required dependencies for cxx-generated bindings, that must be included
@@ -57,13 +55,28 @@ if (toolchain_has_rust) {
group("cxx_rustdeps") {
public_deps = [ "//third_party/rust/cxx/v1:lib" ]
}
+}
- # Enables code behind #[cfg(test)]. This should only be used for targets where
- # testonly=true.
- config("test") {
- rustflags = [
- "--cfg",
- "test",
- ]
- }
+# Enables code behind #[cfg(test)]. This should only be used for targets where
+# testonly=true.
+config("test") {
+ rustflags = [
+ "--cfg",
+ "test",
+ ]
+}
+
+# TODO(crbug.com/gn/104): GN rust_proc_macro targets are missing this
+# command line flag, for the proc_macro crate which is provided by rustc for
+# compiling proc-macros.
+config("proc_macro_extern") {
+ rustflags = [
+ "--extern",
+ "proc_macro",
+ ]
+}
+
+# Forbids unsafe code in crates with this config.
+config("forbid_unsafe") {
+ rustflags = [ "-Funsafe_code" ]
}
diff --git a/build/rust/analyze.gni b/build/rust/analyze.gni
new file mode 100644
index 000000000..36c06112d
--- /dev/null
+++ b/build/rust/analyze.gni
@@ -0,0 +1,79 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compute_inputs_for_analyze.gni")
+import("//build/config/rust.gni")
+
+if (compute_inputs_for_analyze) {
+ template("analyze_rust") {
+ _target_name = target_name
+ assert(defined(invoker.crate_root))
+
+ action("${_target_name}_collect_sources") {
+ forward_variables_from(invoker,
+ "*",
+ TESTONLY_AND_VISIBILITY + [
+ "inputs",
+ "script",
+ "sources",
+ "depfile",
+ "outputs",
+ "args",
+ ])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/rust/collect_rust_sources.py"
+ depfile = "${target_gen_dir}/${target_name}.verify.d"
+ outputs = [ depfile ]
+
+ args = [
+ "--generate-depfile",
+ "${rust_sysroot}/bin/rustc",
+ rebase_path(crate_root, root_build_dir),
+ rebase_path(depfile, root_build_dir),
+ "{{rustflags}}",
+ ]
+ }
+
+ action(_target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ # Constructs a depfile of all rust sources in the crate.
+ deps = [ ":${_target_name}_collect_sources" ]
+
+ # This target is reached once during `gn gen` and then again during
+ # `gn analyze`.
+ #
+ # 1. When doing `gn gen`, the ':${_target_name}_collect_sources'
+ # target generates a depfile containing all the rust sources of
+ # the crate. The exec_script() below runs first, and it produces an
+ # empty result.
+ # 2. When doing `gn analyze`, the exec_script() reads the depfile that
+ # was written during `gn gen` and puts each Rust file in the crate
+ # into `inputs`.
+ depfile_path = []
+ foreach(d, get_target_outputs(":${_target_name}_collect_sources")) {
+ depfile_path += [ rebase_path(d, root_build_dir) ]
+ }
+
+ # Here we read the depfile from `gn gen` when doing `gn analyze`, and
+ # add all the rust files in the crate to `inputs`. This ensures that
+ # analyze considers them as affecting tests that depend on the crate.
+ rust_srcs = exec_script("//build/rust/collect_rust_sources.py",
+ [ "--read-depfile" ] + depfile_path,
+ "list lines")
+ inputs = []
+ foreach(s, rust_srcs) {
+ inputs += [ rebase_path(s, "//", root_build_dir) ]
+ }
+ script = "//build/rust/collect_rust_sources.py"
+ args = [
+ "--stamp",
+ rebase_path("${target_gen_dir}/${target_name}.verify.stamp",
+ root_build_dir),
+ ]
+ outputs = [ "${target_gen_dir}/${target_name}.verify.stamp" ]
+ }
+ }
+}
diff --git a/build/rust/cargo_crate.gni b/build/rust/cargo_crate.gni
index c42059826..9793de7c0 100644
--- a/build/rust/cargo_crate.gni
+++ b/build/rust/cargo_crate.gni
@@ -24,6 +24,7 @@ import("//build/rust/rust_target.gni")
# crate_root
# epoch
# deps
+# aliased_deps
# features
# build_native_rust_unit_tests
# edition
@@ -75,9 +76,9 @@ import("//build/rust/rust_target.gni")
# command line tool help)
template("cargo_crate") {
- orig_target_name = target_name
+ _orig_target_name = target_name
- _crate_name = orig_target_name
+ _crate_name = _orig_target_name
if (defined(invoker.crate_name)) {
_crate_name = invoker.crate_name
}
@@ -98,32 +99,16 @@ template("cargo_crate") {
_rustenv = invoker.rustenv
}
if (defined(invoker.cargo_pkg_authors)) {
- _rustenv += [ string_join("=",
- [
- "CARGO_PKG_AUTHORS",
- invoker.cargo_pkg_authors,
- ]) ]
+ _rustenv += [ "CARGO_PKG_AUTHORS=${invoker.cargo_pkg_authors}" ]
}
if (defined(invoker.cargo_pkg_version)) {
- _rustenv += [ string_join("=",
- [
- "CARGO_PKG_VERSION",
- invoker.cargo_pkg_version,
- ]) ]
+ _rustenv += [ "CARGO_PKG_VERSION=${invoker.cargo_pkg_version}" ]
}
if (defined(invoker.cargo_pkg_name)) {
- _rustenv += [ string_join("=",
- [
- "CARGO_PKG_NAME",
- invoker.cargo_pkg_name,
- ]) ]
+ _rustenv += [ "CARGO_PKG_NAME=${invoker.cargo_pkg_name}" ]
}
if (defined(invoker.cargo_pkg_description)) {
- _rustenv += [ string_join("=",
- [
- "CARGO_PKG_DESCRIPTION",
- invoker.cargo_pkg_description,
- ]) ]
+ _rustenv += [ "CARGO_PKG_DESCRIPTION=${invoker.cargo_pkg_description}" ]
}
# The main target, either a Rust source set or an executable.
@@ -136,7 +121,6 @@ template("cargo_crate") {
"build_sources",
"build_script_inputs",
"build_script_outputs",
- "output_dir",
"unit_test_target",
"target_type",
"configs",
@@ -149,6 +133,11 @@ template("cargo_crate") {
if (defined(invoker.crate_type)) {
crate_type = invoker.crate_type
}
+
+ # TODO(crbug.com/1422745): don't default to true. This requires changes to
+ # third_party.toml and gnrt when generating third-party build targets.
+ allow_unsafe = true
+
if (!defined(rustflags)) {
rustflags = []
}
@@ -167,29 +156,32 @@ template("cargo_crate") {
build_native_rust_unit_tests = true
}
- # We may generate multiple build rules for the same Cargo crate, as they may
- # have multiple build configurations: for use from deps, build-deps or
- # dev-deps. But that would try to build multiple crates with the same name,
- # colliding on the libcratename.rlib outputs. So we specify an output_dir
- # for Cargo crates which includes the GN target name to disambiguate them.
- output_dir = "${target_out_dir}/${orig_target_name}"
-
# The unit tests for each target, if generated, should be unique as well.
# a) It needs to be unique even if multiple build targets have the same
# `crate_name`, but different target names.
# b) It needs to be unique even if multiple build targets have the same
# `crate_name` and target name, but different epochs.
_unit_test_unique_target_name = ""
- if (_crate_name != orig_target_name) {
- _unit_test_unique_target_name = "${orig_target_name}_"
+ if (_crate_name != _orig_target_name) {
+ _unit_test_unique_target_name = "${_orig_target_name}_"
}
_unit_test_unique_epoch = ""
if (defined(invoker.epoch)) {
_epoch_str = string_replace(invoker.epoch, ".", "_")
_unit_test_unique_epoch = "v${_epoch_str}_"
}
+ if (defined(output_dir) && output_dir != "") {
+ unit_test_output_dir = output_dir
+ }
unit_test_target = "${_unit_test_unique_target_name}${_crate_name}_${_unit_test_unique_epoch}unittests"
+ if ((!defined(output_dir) || output_dir == "") && crate_type == "rlib") {
+ # Cargo crate rlibs can be compiled differently for tests, and must not
+ # collide with the production outputs. This does *not* override the
+ # unit_test_output_dir, which is set above, as that target is not an rlib.
+ output_dir = "$target_out_dir/$_orig_target_name"
+ }
+
if (defined(invoker.build_root)) {
# Uh-oh, we have a build script
if (!defined(deps)) {
@@ -202,11 +194,14 @@ template("cargo_crate") {
test_deps = invoker.dev_deps
}
- # Re-compute the directory the build script used as its `--out-dir`. This
- # logic needs to match that in `action("${build_script_name}_output")`.
+ # Another copy of the `_build_script_env_out_dir` from the
+ # `${build_script_name}_output` target, which is where the OUT_DIR will
+ # point when running the build script exe. See the other definition of
+ # `_build_script_target_out_dir` for how this is constructed.
_build_script_target_out_dir =
get_label_info(":${build_script_name}_output", "target_out_dir")
- _build_script_out_dir = "$_build_script_target_out_dir/$orig_target_name"
+ _build_script_env_out_dir =
+ "$_build_script_target_out_dir/$_orig_target_name"
# This... is a bit weird. We generate a file called cargo_flags.rs which
# does not actually contain Rust code, but instead some flags to add
@@ -214,7 +209,7 @@ template("cargo_crate") {
# we can include it in the 'sources' line and thus have dependency
# calculation done correctly. data_deps won't work because targets don't
# require them to be present until runtime.
- flags_file = "$_build_script_out_dir/cargo_flags.rs"
+ flags_file = "$_build_script_env_out_dir/cargo_flags.rs"
rustflags += [ "@" + rebase_path(flags_file, root_build_dir) ]
sources += [ flags_file ]
if (defined(invoker.build_script_outputs)) {
@@ -224,17 +219,15 @@ template("cargo_crate") {
# about the dependency either way.
foreach(extra_source,
filter_include(invoker.build_script_outputs, [ "*.rs" ])) {
- sources += [ "$_build_script_out_dir/$extra_source" ]
+ sources += [ "$_build_script_env_out_dir/$extra_source" ]
}
inputs = []
foreach(extra_source,
filter_exclude(invoker.build_script_outputs, [ "*.rs" ])) {
- inputs += [ "$_build_script_out_dir/$extra_source" ]
+ inputs += [ "$_build_script_env_out_dir/$extra_source" ]
}
}
deps += [ ":${build_script_name}_output" ]
- } else {
- not_needed([ "orig_target_name" ])
}
}
@@ -242,30 +235,39 @@ template("cargo_crate") {
# Extra targets required to make build script work
action("${build_script_name}_output") {
script = rebase_path("//build/rust/run_build_script.py")
- build_script_target = ":${build_script_name}($host_toolchain)"
+ build_script_target =
+ ":${build_script_name}($host_toolchain_no_sanitizers)"
deps = [ build_script_target ]
- _build_script_exe_dir =
- get_label_info(build_script_target, "root_out_dir")
- build_script = "$_build_script_exe_dir/${build_script_name}"
+ # The build script output is always in the name-specific output dir. It
+ # may be built with a different toolchain when cross-compiling (the host
+ # toolchain) so we must find the path relative to that.
+ _build_script_target_out_dir =
+ get_label_info(build_script_target, "target_out_dir")
+ _build_script_exe =
+ "$_build_script_target_out_dir/$_orig_target_name/$build_script_name"
if (is_win) {
- build_script = "${build_script}.exe"
+ _build_script_exe = "${_build_script_exe}.exe"
}
- _build_script_out_dir = "$target_out_dir/$orig_target_name"
- flags_file = "$_build_script_out_dir/cargo_flags.rs"
+ # Unlike the location of _build_script_exe, the OUT_DIR when running the
+ # exe will always be in the target toolchain's output directory, under a
+ # unique directory for the GN target name to avoid collisions from a
+ # single BUILD.gn with multiple build scripts.
+ _build_script_env_out_dir = "$target_out_dir/$_orig_target_name"
+ _flags_file = "$_build_script_env_out_dir/cargo_flags.rs"
- inputs = [ build_script ]
- outputs = [ flags_file ]
+ inputs = [ _build_script_exe ]
+ outputs = [ _flags_file ]
args = [
"--build-script",
- rebase_path(build_script, root_build_dir),
+ rebase_path(_build_script_exe, root_build_dir),
"--output",
- rebase_path(flags_file, root_build_dir),
+ rebase_path(_flags_file, root_build_dir),
"--rust-prefix",
rebase_path("${rust_sysroot}/bin"),
"--out-dir",
- rebase_path(_build_script_out_dir, root_build_dir),
+ rebase_path(_build_script_env_out_dir, root_build_dir),
"--src-dir",
rebase_path(get_path_info(invoker.build_root, "dir"), root_build_dir),
]
@@ -283,7 +285,7 @@ template("cargo_crate") {
args += [ "--generated-files" ]
args += invoker.build_script_outputs
foreach(generated_file, invoker.build_script_outputs) {
- outputs += [ "$_build_script_out_dir/$generated_file" ]
+ outputs += [ "$_build_script_env_out_dir/$generated_file" ]
}
}
if (_rustenv != []) {
@@ -295,7 +297,7 @@ template("cargo_crate") {
}
}
- if (current_toolchain == host_toolchain) {
+ if (current_toolchain == host_toolchain_no_sanitizers) {
rust_target(build_script_name) {
target_type = "executable"
sources = invoker.build_sources
@@ -303,6 +305,11 @@ template("cargo_crate") {
if (defined(invoker.build_deps)) {
deps = invoker.build_deps
}
+
+ # An rlib's build script may be built differently for tests and for
+ # production, so they must be in a name specific to the GN target. The
+ # ${build_script_name}_output target looks for the exe in this location.
+ output_dir = "$target_out_dir/$_orig_target_name"
rustenv = _rustenv
forward_variables_from(invoker,
[
@@ -323,6 +330,11 @@ template("cargo_crate") {
"build_script_outputs",
])
}
+ } else {
+ not_needed([
+ "_name_specific_output_dir",
+ "_orig_target_name",
+ ])
}
}
diff --git a/build/rust/clanglibs/BUILD.gn b/build/rust/clanglibs/BUILD.gn
index eb30c9890..79b6aaaca 100644
--- a/build/rust/clanglibs/BUILD.gn
+++ b/build/rust/clanglibs/BUILD.gn
@@ -9,7 +9,7 @@
import("//build/rust/rust_static_library.gni")
-_clang_libs_dir = "//third_party/llvm-build/Release+Asserts/lib"
+_clang_libs_dir = "//third_party/rust-toolchain/lib/llvmlib/lib"
if (toolchain_has_rust) {
# This requires you to configure your .gclient file with
@@ -17,6 +17,10 @@ if (toolchain_has_rust) {
# "checkout_clang_libs": True,
# }
rust_static_library("clanglibs") {
+ # Clang libs are used to build developer tools that run on a developer
+ # machine.
+ assert(!is_android)
+
crate_root = get_label_info(":find_clanglibs", "target_gen_dir") + "/lib.rs"
sources = [ crate_root ]
deps = [ ":find_clanglibs" ]
diff --git a/build/rust/clanglibs/find_clanglibs.py b/build/rust/clanglibs/find_clanglibs.py
index cb2034efe..8373b2c6b 100755
--- a/build/rust/clanglibs/find_clanglibs.py
+++ b/build/rust/clanglibs/find_clanglibs.py
@@ -48,9 +48,13 @@ def main():
basename = m.group(1)
output.write("#[link(name=\"{}\")]\n".format(basename))
full_path = os.path.join(args.clang_libs_dir, f)
- depfile.write(" {}\n".format(full_path))
- output.write("#[link(name=\"stdc++\")]\n")
- output.write("#[link(name=\"z\")]\n")
+ depfile.write(" {} \\\n".format(full_path))
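+  # The clang libs are built against libstdc++ on Linux and libc++ elsewhere;
+  # zlib is also needed on Linux and macOS.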
+ if sys.platform.startswith('linux'):
+ output.write("#[link(name=\"stdc++\")]\n")
+ else:
+ output.write("#[link(name=\"c++\")]\n")
+ if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ output.write("#[link(name=\"z\")]\n")
output.write("extern {}\n")
diff --git a/build/rust/collect_rust_sources.py b/build/rust/collect_rust_sources.py
new file mode 100755
index 000000000..48f2f1f52
--- /dev/null
+++ b/build/rust/collect_rust_sources.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+'''Finds all Rust files in a crate and writes the result to a depfile. It is
+then invoked again to read that depfile back and print just the source files.
+It is also used to write a stamp file at the same location as the depfile.'''
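+#
+# The three modes, mirroring how //build/rust/analyze.gni invokes this script
+# (remaining arguments are positional):
+#   collect_rust_sources.py --generate-depfile <rustc> <crate_root> <depfile> \
+#       <rustflags...>
+#   collect_rust_sources.py --read-depfile <depfile>
+#   collect_rust_sources.py --stamp <stampfile>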
+
+import argparse
+import re
+import subprocess
+import sys
+
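+# Matches the path portion (everything before the last ':') of a depfile line.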
+FILE_REGEX = re.compile('^(.*):')
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Collect Rust sources for a crate')
+ parser.add_argument('--stamp',
+ action='store_true',
+ help='Generate a stamp file')
+ parser.add_argument('--generate-depfile',
+ action='store_true',
+ help='Generate a depfile')
+ parser.add_argument('--read-depfile',
+ action='store_true',
+ help='Read the previously generated depfile')
+ args, rest = parser.parse_known_args()
+
+  if args.stamp:
+ stampfile = rest[0]
+ with open(stampfile, "w") as f:
+ f.write("stamp")
+  elif args.generate_depfile:
+ rustc = rest[0]
+ crate_root = rest[1]
+ depfile = rest[2]
+ rustflags = rest[3:]
+
+ rustc_args = [
+ "--emit=dep-info=" + depfile, "-Zdep-info-omit-d-target", crate_root
+ ]
+ subprocess.check_call([rustc] + rustc_args + rustflags)
+  elif args.read_depfile:
+ depfile = rest[0]
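+    # The depfile is written during `gn gen`, so it may not exist yet the
+    # first time this runs; in that case there is nothing to print.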
+ try:
+ with open(depfile, "r") as f:
+        files = [FILE_REGEX.match(line) for line in f.readlines()]
+        for m in files:
+          if m:
+            print(m.group(1))
+ except:
+ pass
+ else:
+ print("ERROR: Unknown action")
+ parser.print_help()
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/build/rust/run_bindgen.py b/build/rust/run_bindgen.py
index da84f4bd0..8f80a3c8d 100755
--- a/build/rust/run_bindgen.py
+++ b/build/rust/run_bindgen.py
@@ -9,18 +9,18 @@ import os
import subprocess
import sys
-# Set up path to be able to import build_utils.
+# Set up path to be able to import action_helpers.
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir,
- os.pardir, 'build', 'android', 'gyp'))
-from util import build_utils
+ os.pardir, 'build'))
+import action_helpers
from filter_clang_args import filter_clang_args
def atomic_copy(in_path, out_path):
with open(in_path, 'rb') as input:
- with build_utils.AtomicOutput(out_path, only_if_changed=True) as output:
+ with action_helpers.atomic_output(out_path) as output:
content = input.read()
output.write(content)
@@ -39,7 +39,9 @@ def main():
parser.add_argument("--depfile",
help="depfile to output with header dependencies")
parser.add_argument("--output", help="output .rs bindings", required=True)
- parser.add_argument("--ld-library-path", help="LD_LIBRARY_PATH to set")
+ parser.add_argument("--ld-library-path",
+ help="LD_LIBRARY_PATH (or DYLD_LIBRARY_PATH on Mac) to "
+ "set")
parser.add_argument("-I", "--include", help="include path", action="append")
parser.add_argument(
"clangargs",
@@ -53,6 +55,9 @@ def main():
# Bindgen settings we use for Chromium
genargs.append('--no-layout-tests')
genargs.append('--size_t-is-usize')
+ # TODO(danakj): We need to point bindgen to
+ # //third_party/rust-toolchain/bin/rustfmt.
+ genargs.append('--no-rustfmt-bindings')
genargs += ['--rust-target', 'nightly']
if args.depfile:
@@ -65,7 +70,10 @@ def main():
genargs.extend(filter_clang_args(args.clangargs))
env = os.environ
if args.ld_library_path:
- env["LD_LIBRARY_PATH"] = args.ld_library_path
+ if sys.platform == 'darwin':
+ env["DYLD_LIBRARY_PATH"] = args.ld_library_path
+ else:
+ env["LD_LIBRARY_PATH"] = args.ld_library_path
returncode = subprocess.run([args.exe, *genargs], env=env).returncode
if returncode != 0:
# Make sure we don't emit anything if bindgen failed.
diff --git a/build/rust/run_build_script.py b/build/rust/run_build_script.py
index f184edcb0..0db5cb56f 100755
--- a/build/rust/run_build_script.py
+++ b/build/rust/run_build_script.py
@@ -24,21 +24,21 @@
# That's it. We don't even support the other standard cargo:rustc-
# output messages.
+import argparse
+import io
import os
+import platform
+import re
+import subprocess
import sys
+import tempfile
-# Set up path to be able to import build_utils
+# Set up path to be able to import action_helpers
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir,
- os.pardir, 'build', 'android', 'gyp'))
-from util import build_utils
+ os.pardir, 'build'))
+import action_helpers
-import argparse
-import io
-import subprocess
-import re
-import platform
-import tempfile
RUSTC_VERSION_LINE = re.compile(r"(\w+): (.*)")
@@ -142,7 +142,7 @@ def main():
# AtomicOutput will ensure we only write to the file on disk if what we
# give to write() is different than what's currently on disk.
- with build_utils.AtomicOutput(args.output) as output:
+ with action_helpers.atomic_output(args.output) as output:
output.write(flags.encode("utf-8"))
# Copy any generated code out of the temporary directory,
@@ -155,7 +155,7 @@ def main():
if not os.path.exists(out_dir):
os.makedirs(out_dir)
with open(in_path, 'rb') as input:
- with build_utils.AtomicOutput(out_path) as output:
+ with action_helpers.atomic_output(out_path) as output:
content = input.read()
output.write(content)
diff --git a/build/rust/run_rs_bindings_from_cc.py b/build/rust/run_rs_bindings_from_cc.py
index 6885f3348..0b6ed4aa8 100755
--- a/build/rust/run_rs_bindings_from_cc.py
+++ b/build/rust/run_rs_bindings_from_cc.py
@@ -10,13 +10,10 @@ import os
import subprocess
import sys
-# Set up path to be able to import build_utils.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
CHROMIUM_SRC_DIR = os.path.relpath(os.path.join(THIS_DIR, os.pardir, os.pardir))
sys.path.append(THIS_DIR)
-sys.path.append(os.path.join(CHROMIUM_SRC_DIR, 'build', 'android', 'gyp'))
from run_bindgen import filter_clang_args
-from util import build_utils
RUST_TOOLCHAIN_DIR = os.path.join(CHROMIUM_SRC_DIR, "third_party",
"rust-toolchain")
diff --git a/build/rust/rust_bindgen.gni b/build/rust/rust_bindgen.gni
index caad79e81..bd5aec81a 100644
--- a/build/rust/rust_bindgen.gni
+++ b/build/rust/rust_bindgen.gni
@@ -7,6 +7,24 @@ import("//build/config/rust.gni")
import("//build/config/sysroot.gni")
import("//build/rust/rust_static_library.gni")
+if (is_win) {
+ import("//build/toolchain/win/win_toolchain_data.gni")
+}
+
+_rustc_base_path = rust_sysroot
+
+# TODO(danakj): When we're using the Android prebuilt toolchain, there's no
+# bindgen present. bindgen runs on the host platform, so using the Linux one
+# will work.
+if (!use_chromium_rust_toolchain) {
+ _rustc_base_path = "//third_party/rust-toolchain"
+}
+
+_bindgen_path = "${_rustc_base_path}/bin/bindgen"
+if (is_win) {
+ _bindgen_path = "${_bindgen_path}.exe"
+}
+
# Template to build Rust/C bindings with bindgen.
#
# This template expands to a static_library containing the Rust side of the
@@ -30,60 +48,47 @@ import("//build/rust/rust_static_library.gni")
template("rust_bindgen") {
assert(defined(invoker.header),
"Must specify the C header file to make bindings for.")
- _target_name = target_name
- _testonly = false
- if (defined(invoker.testonly)) {
- _testonly = invoker.testonly
- }
- if (defined(invoker.visibility)) {
- _visibility = invoker.visibility
- }
- _deps = []
- if (defined(invoker.deps)) {
- _deps += invoker.deps
- }
-
- action(_target_name) {
- testonly = _testonly
- if (defined(_visibility)) {
- visibility = _visibility
- }
+ action(target_name) {
+ # bindgen relies on knowing the {{defines}} and {{include_dirs}} required
+ # to build the C++ headers which it's parsing. These are passed to the
+ # script's args and are populated using deps and configs.
+ forward_variables_from(invoker,
+ TESTONLY_AND_VISIBILITY + [
+ "deps",
+ "configs",
+ ])
sources = [ invoker.header ]
- # Several important compiler flags come from default_compiler_configs
- configs = default_compiler_configs
- if (defined(invoker.configs)) {
- configs += invoker.configs
+ if (!defined(configs)) {
+ configs = []
}
- bindgen_target = "//third_party/rust/bindgen/v0_60:bindgen($host_toolchain)"
-
- bindgen_obj_dir = get_label_info(bindgen_target, "root_out_dir")
- bindgen_executable = "${bindgen_obj_dir}/bindgen"
- if (is_win) {
- bindgen_executable = "${bindgen_executable}.exe"
- }
+ # Several important compiler flags come from default_compiler_configs
+ configs += default_compiler_configs
output_dir = "$target_gen_dir"
out_gen_rs = "$output_dir/${target_name}.rs"
script = rebase_path("//build/rust/run_bindgen.py")
- inputs = [ bindgen_executable ]
+ inputs = [ _bindgen_path ]
depfile = "$target_out_dir/${target_name}.d"
outputs = [ out_gen_rs ]
- deps = [ bindgen_target ]
-
- # bindgen relies on knowing the {{defines}} and {{include_dirs}} required
- # to build the C++ headers which it's parsing. These are passed to the
- # script's args and are populated using deps and configs.
- deps += _deps
+ lib_path = ""
+ if (is_linux) {
+ # Linux clang, and clang libs, use a shared libstdc++, which we must
+ # point to.
+ clang_ld_path = rebase_path(clang_base_path + "/lib", root_build_dir)
+ lib_path += "${clang_ld_path}:"
+ }
+ rust_ld_path = rebase_path(_rustc_base_path + "/lib", root_build_dir)
+ lib_path += "${rust_ld_path}"
args = [
"--exe",
- rebase_path(bindgen_executable),
+ rebase_path(_bindgen_path),
"--header",
rebase_path(invoker.header, root_build_dir),
"--depfile",
@@ -91,7 +96,7 @@ template("rust_bindgen") {
"--output",
rebase_path(out_gen_rs, root_build_dir),
"--ld-library-path",
- rebase_path(clang_base_path + "/lib", root_build_dir),
+ lib_path,
]
args += [
@@ -100,22 +105,66 @@ template("rust_bindgen") {
"{{include_dirs}}",
"{{cflags}}",
"{{cflags_c}}",
-
- # This path contains important C headers (e.g. stddef.h) and {{cflags}}
- # does not include it. Normally this path is implicitly added by clang but
- # it does not happen for libclang.
- #
- # Add it last so includes from deps and configs take precedence.
- "-isystem" + rebase_path(
- clang_base_path + "/lib/clang/" + clang_version + "/include",
- root_build_dir),
-
- # Passes C comments through as rustdoc attributes.
- "-fparse-all-comments",
-
- # Default configs include "-fvisibility=hidden", and for some reason this
- # causes bindgen not to emit function bindings. Override it.
- "-fvisibility=default",
]
+
+  # Clang ships with some headers, which are installed alongside the binary,
+ # and which clang itself finds by default, but libclang does not (see also
+ # https://reviews.llvm.org/D95396 which would resolve this but was reverted).
+ clang_headers = rebase_path(
+ clang_base_path + "/lib/clang/" + clang_version + "/include",
+ root_build_dir)
+ if (is_win) {
+ args += [ "-imsvc" + clang_headers ]
+ } else {
+ args += [ "-isystem" + clang_headers ]
+ }
+
+ if (is_win) {
+ # On Windows we fall back to using system headers from a sysroot from
+ # depot_tools. This is negotiated by python scripts and the result is
+ # available in //build/toolchain/win/win_toolchain_data.gni. From there
+ # we get the `include_flags_imsvc` which point to the system headers.
+ if (host_cpu == "x86") {
+ win_toolchain_data = win_toolchain_data_x86
+ } else if (host_cpu == "x64") {
+ win_toolchain_data = win_toolchain_data_x64
+ } else if (host_cpu == "arm64") {
+ win_toolchain_data = win_toolchain_data_arm64
+ } else {
+ error("Unsupported host_cpu, add it to win_toolchain_data.gni")
+ }
+ args += [ "${win_toolchain_data.include_flags_imsvc}" ]
+ }
+
+ # Passes C comments through as rustdoc attributes.
+ if (is_win) {
+ args += [ "/clang:-fparse-all-comments" ]
+ } else {
+ args += [ "-fparse-all-comments" ]
+ }
+
+ # Default configs include "-fvisibility=hidden", and for some reason this
+ # causes bindgen not to emit function bindings. Override it.
+ if (!is_win) {
+ args += [ "-fvisibility=default" ]
+ }
+
+ if (is_win) {
+ # We pass MSVC style flags to clang on Windows, and libclang needs to be
+ # told explicitly to accept them.
+ args += [ "--driver-mode=cl" ]
+
+ # On Windows, libclang adds arguments that it then fails to understand.
+ # -fno-spell-checking
+ # -fallow-editor-placeholders
+ # These should not cause bindgen to fail.
+ args += [ "-Wno-unknown-argument" ]
+
+ # Replace these two arguments with a version that clang-cl can parse.
+ args += [
+ "/clang:-fno-spell-checking",
+ "/clang:-fallow-editor-placeholders",
+ ]
+ }
}
}
diff --git a/build/rust/rust_static_library.gni b/build/rust/rust_static_library.gni
index 6abea12b4..94f2442d1 100644
--- a/build/rust/rust_static_library.gni
+++ b/build/rust/rust_static_library.gni
@@ -41,6 +41,9 @@ import("//build/rust/rust_target.gni")
# Edition of the Rust language to be used.
# Options are "2015", "2018" and "2021". Defaults to "2021".
#
+# allow_unsafe (optional)
+# Set to true to allow unsafe code in this target. Defaults to false.
+#
# configs (optional)
# A list of config labels (in the GN meaning) applying to this target.
#
@@ -61,6 +64,12 @@ import("//build/rust/rust_target.gni")
# List of GN targets on which this crate's tests depend, in addition
# to deps.
#
+# is_gtest_unittests (optional)
+# Should only be set to true for rlibs of gtest unit tests. This ensures
+# all objects in the rlib are linked into the final target, rather than
+# pruning dead code, so that the tests themselves are not discarded by the
+# linker.
+#
# mutually_dependent_target (optional)
# mutually_dependent_public_deps (optional)
# These is for use by the mixed_target() template.
@@ -142,7 +151,19 @@ import("//build/rust/rust_target.gni")
# in Fuchsia.
template("rust_static_library") {
exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ]
- rust_target(target_name) {
+ _target_name = target_name
+
+ # For Gtest unittests, we must avoid dropping the object files even when
+ # there's no edges into them, as the tests register themselves through
+ # static initializers. For C++ this is done by not putting them in a library
+ # at all, but Rust must be compiled into an rlib.
+ #
+ # On AIX there's no flag to avoid pruning unit tests in this library from
+ # the linking step.
+ _whole_archive = defined(invoker.is_gtest_unittests) &&
+ invoker.is_gtest_unittests && current_os != "aix"
+
+ rust_target(_target_name) {
forward_variables_from(invoker, "*", exclude_forwards)
forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
if (defined(invoker.configs)) {
@@ -150,6 +171,16 @@ template("rust_static_library") {
library_configs = invoker.configs
}
target_type = "rust_library"
+
+ if (_whole_archive && !is_win) {
+ all_dependent_configs = [ ":${_target_name}_whole_archive" ]
+ }
+ }
+
+ if (_whole_archive && !is_win) {
+ config("${_target_name}_whole_archive") {
+ ldflags = [ "-LinkWrapper,add-whole-archive=${_target_name}" ]
+ }
}
}
diff --git a/build/rust/rust_target.gni b/build/rust/rust_target.gni
index 34670781c..73b23ec92 100644
--- a/build/rust/rust_target.gni
+++ b/build/rust/rust_target.gni
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/config/rust.gni")
+import("//build/rust/analyze.gni")
import("//build/rust/rust_unit_test.gni")
# The //build directory is re-used for non-Chromium products. We do not support
@@ -44,9 +45,20 @@ template("rust_target") {
}
if (defined(invoker.output_dir) && invoker.output_dir != "") {
- _out_dir = invoker.output_dir
+ # This is where the build target (.exe, .rlib, etc) goes.
+ _output_dir = invoker.output_dir
+
+ # This is where the OUT_DIR environment variable points to when running a
+ # build script and when compiling the build target, for consuming generated
+ # files.
+ _env_out_dir = invoker.output_dir
} else {
- _out_dir = target_out_dir
+ _env_out_dir = target_out_dir
+ }
+
+ _allow_unsafe = false
+ if (defined(invoker.allow_unsafe)) {
+ _allow_unsafe = invoker.allow_unsafe
}
if (defined(invoker.generate_crate_root) && invoker.generate_crate_root) {
@@ -94,6 +106,11 @@ template("rust_target") {
_visibility = invoker.visibility
}
+ _use_local_std = use_local_std_by_default
+ if (defined(invoker.use_local_std)) {
+ _use_local_std = invoker.use_local_std
+ }
+
_rustflags = []
if (defined(invoker.rustflags)) {
_rustflags += invoker.rustflags
@@ -107,15 +124,17 @@ template("rust_target") {
if (defined(invoker.edition)) {
_edition = invoker.edition
}
- _configs = [ string_join("",
- [
- "//build/rust:edition_",
- _edition,
- ]) ]
+ _configs = [ "//build/rust:edition_${_edition}" ]
+ _test_configs = []
if (invoker.target_type == "executable") {
if (defined(invoker.executable_configs)) {
_configs += invoker.executable_configs
}
+ } else if (invoker.target_type == "rust_proc_macro") {
+ if (defined(invoker.proc_macro_configs)) {
+ _configs += invoker.proc_macro_configs
+ _test_configs += [ "//build/rust:proc_macro_extern" ]
+ }
} else {
if (defined(invoker.library_configs)) {
_configs += invoker.library_configs
@@ -123,15 +142,7 @@ template("rust_target") {
}
_forward_to_host_toolchain = false
if (invoker.target_type == "rust_proc_macro") {
- # TODO(crbug.com/gn/104): GN rust_proc_macro targets are missing this
- # command line flag, for the proc_macro crate which is provided by rustc for
- # compiling proc-macros.
- _rustflags += [
- "--extern",
- "proc_macro",
- ]
-
- if (current_toolchain != host_toolchain) {
+ if (current_toolchain != host_toolchain_no_sanitizers) {
_forward_to_host_toolchain = true
}
_main_target_suffix = "${target_name}__proc_macro"
@@ -147,6 +158,12 @@ template("rust_target") {
if (defined(invoker.public_deps)) {
_public_deps += invoker.public_deps
}
+ if (defined(invoker.aliased_deps)) {
+ _aliased_deps = invoker.aliased_deps
+ } else {
+ _aliased_deps = {
+ }
+ }
_build_unit_tests = false
if (defined(invoker.build_native_rust_unit_tests)) {
@@ -165,7 +182,7 @@ template("rust_target") {
"outside the Chromium build.")
_cxx_bindings = invoker.cxx_bindings
}
- _rustenv = [ "OUT_DIR=" + rebase_path(_out_dir) ]
+ _rustenv = [ "OUT_DIR=" + rebase_path(_env_out_dir) ]
if (defined(invoker.rustenv)) {
_rustenv += invoker.rustenv
}
@@ -192,24 +209,28 @@ template("rust_target") {
if (defined(_visibility)) {
visibility = _visibility
}
- public_deps =
- [ ":${_target_name}${_main_target_suffix}($host_toolchain)" ]
+ public_deps = [
+ ":${_target_name}${_main_target_suffix}($host_toolchain_no_sanitizers)",
+ ]
}
not_needed(invoker, "*")
not_needed([
+ "_allow_unsafe",
"_build_unit_tests",
"_crate_root",
"_crate_name",
"_cxx_bindings",
"_deps",
+ "_aliased_deps",
"_metadata",
"_out_dir",
"_public_deps",
"_rustenv",
+ "_rustflags",
"_support_use_from_cpp",
- "_test_deps",
"_testonly",
+ "_use_local_std",
"_visibility",
])
} else {
@@ -231,15 +252,26 @@ template("rust_target") {
# target that depends on a rust target directly may need access to Cxx
# as well, which means it must appear in public_deps.
public_deps += [ "//build/rust:cxx_cppdeps" ]
- } else {
+
+ # cxx_cppdeps pulls in the default libstd, so make sure the default was
+ # not overridden.
+ assert(
+ _use_local_std == use_local_std_by_default,
+ "Rust targets with cxx bindings cannot override the default libstd")
+ } else if (!defined(invoker.no_std) || !invoker.no_std) {
# If C++ depends on and links in the library, we need to make sure C++
# links in the Rust stdlib. This is orthogonal to if the library exports
# bindings for C++ to use.
- deps = [ "//build/rust/std" ]
+ if (_use_local_std) {
+ deps = [ "//build/rust/std:link_local_std" ]
+ } else {
+ deps = [ "//build/rust/std:link_prebuilt_std" ]
+ }
}
}
_rust_deps = _deps
+ _rust_aliased_deps = _aliased_deps
_rust_public_deps = _public_deps
_cxx_deps = _deps
@@ -253,6 +285,16 @@ template("rust_target") {
_rust_public_deps += [ ":${_target_name}_cxx_generated" ]
}
+ if (!defined(invoker.no_std) || !invoker.no_std) {
+ if (_use_local_std) {
+ _rust_deps += [ "//build/rust/std:local_std_for_rustc" ]
+ } else {
+ _rust_deps += [ "//build/rust/std:prebuilt_std_for_rustc" ]
+ }
+ } else {
+ not_needed([ "_use_local_std" ])
+ }
+
# You must go through the groups above to get to these targets.
_visibility = []
_visibility = [ ":${_target_name}" ]
@@ -263,11 +305,12 @@ template("rust_target") {
TESTONLY_AND_VISIBILITY + [
"features",
"deps",
+ "aliased_deps",
"public_deps",
"rustflags",
"rustenv",
"configs",
- "output_dir",
+ "unit_test_output_dir",
"unit_test_target",
"test_inputs",
])
@@ -279,22 +322,38 @@ template("rust_target") {
configs = []
configs = _configs
deps = _rust_deps
+ aliased_deps = _rust_aliased_deps
public_deps = _rust_public_deps
rustflags = _rustflags
- rustflags += [ string_join("",
- [
- "-Cmetadata=",
- _metadata,
- ]) ]
+ rustflags += [ "-Cmetadata=${_metadata}" ]
rustenv = _rustenv
# The Rust tool() declarations, like C++ ones, use the output_name and
# output_dir, so that GN targets can override these if needed. Here we
# give them their default values, or allow them to be overridden.
- output_dir = _out_dir
+ if (defined(_output_dir)) {
+ output_dir = _output_dir
+ }
if (!defined(output_name) || output_name == "") {
output_name = crate_name
}
+
+ if (compute_inputs_for_analyze) {
+ deps += [ ":${_target_name}_analyze" ]
+ }
+
+ if (!_allow_unsafe) {
+ configs += [ "//build/rust:forbid_unsafe" ]
+ }
+ }
+
+ if (compute_inputs_for_analyze) {
+ # Find and depend on all rust files in the crate for the purpose of `gn
+ # analyze`.
+ analyze_rust("${_target_name}_analyze") {
+ forward_variables_from(invoker, "*", [ "crate_root" ])
+ crate_root = _crate_root
+ }
}
if (_cxx_bindings != []) {
@@ -332,8 +391,12 @@ template("rust_target") {
testonly = true
crate_root = _crate_root
rustflags = _rustflags
- output_dir = _out_dir
+ env_out_dir = _env_out_dir
+ if (defined(invoker.unit_test_output_dir)) {
+ output_dir = invoker.unit_test_output_dir
+ }
deps = _rust_deps + _public_deps
+ aliased_deps = _rust_aliased_deps
public_deps = [ ":${_target_name}" ]
if (defined(invoker.test_deps)) {
deps += invoker.test_deps
@@ -348,14 +411,22 @@ template("rust_target") {
if (defined(invoker.executable_configs)) {
configs = []
configs = invoker.executable_configs
+ } else if (!defined(configs)) {
+ configs = []
}
+ configs += _test_configs
rustenv = _rustenv
+
+ if (!_allow_unsafe) {
+ configs += [ "//build/rust:forbid_unsafe" ]
+ }
}
} else {
not_needed([
"_crate_root",
"_crate_name",
"_metadata",
+ "_test_configs",
])
}
}
@@ -364,4 +435,5 @@ template("rust_target") {
set_defaults("rust_target") {
executable_configs = default_executable_configs
library_configs = default_compiler_configs
+ proc_macro_configs = default_rust_proc_macro_configs
}
diff --git a/build/rust/rust_unit_test.gni b/build/rust/rust_unit_test.gni
index 879cb84a9..8212498c7 100644
--- a/build/rust/rust_unit_test.gni
+++ b/build/rust/rust_unit_test.gni
@@ -15,6 +15,7 @@ import("//build/rust/rust_unit_tests_group.gni")
#
# sources
# edition (optional)
+# allow_unsafe (optional)
# configs (optional)
# deps (optional)
# crate_root (optional)
@@ -62,11 +63,7 @@ template("rust_unit_test") {
if (defined(invoker.edition)) {
_edition = invoker.edition
}
- _configs += [ string_join("",
- [
- "//build/rust:edition_",
- _edition,
- ]) ]
+ _configs += [ "//build/rust:edition_${_edition}" ]
# We require that all source files are listed, even though this is
# not a requirement for rustc. The reason is to ensure that tools
@@ -80,6 +77,15 @@ template("rust_unit_test") {
deps = [ ":$_exe_target_name" ]
}
+ # The OUT_DIR for a crate's tests should point to the same OUT_DIR that the
+ # library it's testing used. The `env_out_dir` variable can be used to specify
+ # that directory.
+ if (defined(invoker.env_out_dir)) {
+ _env_out_dir = invoker.env_out_dir
+ } else {
+ _env_out_dir = target_out_dir
+ }
+
# TODO(crbug.com/1229320): Arrange to run test executables on try bots.
# TODO(crbug.com/gn/146): Allow Rust executables to depend on C/C++ source
# sets.
@@ -90,21 +96,15 @@ template("rust_unit_test") {
forward_variables_from(invoker,
"*",
[
+ "allow_unsafe",
"edition",
"features",
"rustflags",
"configs",
- "output_name",
"crate_name",
"crate_root",
+ "env_out_dir",
])
-
- if (defined(output_dir) && output_dir != "") {
- _out_dir = output_dir
- } else {
- _out_dir = target_out_dir
- }
-
if (!defined(output_name) || output_name == "") {
output_name = _crate_name
}
@@ -122,7 +122,8 @@ template("rust_unit_test") {
if (!defined(rustenv)) {
rustenv = []
}
- rustenv += [ "OUT_DIR=" + rebase_path(_out_dir) ]
+
+ rustenv += [ "OUT_DIR=" + rebase_path(_env_out_dir) ]
metadata = {
# Consumed by "rust_unit_tests_group" gni template.
rust_unit_test_executables = [ _crate_name ]
diff --git a/build/rust/rustc_wrapper.py b/build/rust/rustc_wrapper.py
index 726352c4f..212ad44d0 100755
--- a/build/rust/rustc_wrapper.py
+++ b/build/rust/rustc_wrapper.py
@@ -11,16 +11,17 @@ import os
import sys
import re
-# Set up path to be able to import build_utils
+# Set up path to be able to import action_helpers.
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir,
- os.pardir, 'build', 'android', 'gyp'))
-from util import build_utils
+ os.pardir, 'build'))
+import action_helpers
-# This script wraps rustc for (currently) three reasons:
+# This script wraps rustc for (currently) these reasons:
# * To work around some ldflags escaping performed by ninja/gn
# * To remove dependencies on some environment variables from the .d file.
# * To enable use of .rsp files.
+# * To work around two gn bugs on Windows
#
# LDFLAGS ESCAPING
#
@@ -55,6 +56,11 @@ from util import build_utils
# the adjustment. This works around a gn issue:
# TODO(https://bugs.chromium.org/p/gn/issues/detail?id=249): fix this
#
+# WORKAROUND WINDOWS BUGS:
+#
+# On Windows platforms, this temporarily works around some issues in gn.
+# See comments inline, linking to the relevant gn fixes.
+#
# Usage:
# rustc_wrapper.py --rustc <path to rustc> --depfile <path to .d file>
# -- <normal rustc args> LDFLAGS {{ldflags}} RUSTENV {{rustenv}}
@@ -76,6 +82,13 @@ from util import build_utils
# script.
+# Works like the str.removesuffix() built-in added in Python 3.9, stripping
+# ".lib" from "-l<name>.lib" arguments (e.g. "-lfoo.lib" -> "-lfoo").
+def remove_lib_suffix_from_l_args(text):
+ if text.startswith("-l") and text.endswith(".lib"):
+ return text[:-len(".lib")]
+ return text
+
+
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--rustc', required=True, type=pathlib.Path)
@@ -93,12 +106,21 @@ def main():
ldflags = remaining_args[ldflags_separator + 1:rustenv_separator]
rustenv = remaining_args[rustenv_separator + 1:]
+ is_windows = os.name == 'nt'
+
rustc_args.extend(["-Clink-arg=%s" % arg for arg in ldflags])
# Workaround for https://bugs.chromium.org/p/gn/issues/detail?id=249
if args.rsp:
with open(args.rsp) as rspfile:
rsp_args = [l.rstrip() for l in rspfile.read().split(' ') if l.rstrip()]
+ if is_windows:
+ # Work around for hard-coded string in gn; full fix will come from
+ # https://gn-review.googlesource.com/c/gn/+/12460
+ rsp_args = [arg for arg in rsp_args if not arg.endswith("-Bdynamic")]
+ # Work around for "-l<foo>.lib", where ".lib" suffix is undesirable.
+ # Full fix will come from https://gn-review.googlesource.com/c/gn/+/12480
+ rsp_args = [remove_lib_suffix_from_l_args(arg) for arg in rsp_args]
with open(args.rsp, 'w') as rspfile:
rspfile.write("\n".join(rsp_args))
rustc_args.append(f'@{args.rsp}')
@@ -127,7 +149,7 @@ def main():
else:
replacement_lines.append(line)
if dirty: # we made a change, let's write out the file
- with build_utils.AtomicOutput(args.depfile) as output:
+ with action_helpers.atomic_output(args.depfile) as output:
output.write("\n".join(replacement_lines).encode("utf-8"))
diff --git a/build/rust/std/BUILD.gn b/build/rust/std/BUILD.gn
index 54b8e9855..40e282369 100644
--- a/build/rust/std/BUILD.gn
+++ b/build/rust/std/BUILD.gn
@@ -55,7 +55,6 @@ if (toolchain_has_rust) {
"object",
"panic_abort",
"panic_unwind",
- "proc_macro",
"rustc_demangle",
"std_detect",
"test",
@@ -63,56 +62,72 @@ if (toolchain_has_rust) {
"unwind",
]
+ # rlibs explicitly ignored when copying prebuilt sysroot libraries.
+ # find_std_rlibs.py rightfully errors out if an unexpected prebuilt lib is
+ # encountered, since it usually indicates we missed something. This ignore
+ # list is also passed to it. This has no effect on the local std build.
+ ignore_stdlib_files = []
+
+ # proc_macro is special: we only run proc macros on the host, so we only want
+ # it for our host toolchain.
+ if (current_toolchain == host_toolchain_no_sanitizers) {
+ # Directs the local_std_for_rustc target to depend on proc_macro, and
+ # includes proc_macro in the prebuilts copied in find_stdlib. Otherwise it
+ # is not built or copied.
+ stdlib_files += [ "proc_macro" ]
+ } else {
+ # Explicitly ignore it from the prebuilts. Nothing needs to be done for the
+ # local std build.
+ ignore_stdlib_files += [ "proc_macro" ]
+ }
+
# Different Rust toolchains may add or remove files relative to the above
# list. That can be specified in gn args for anyone using (for instance)
# nightly or some other experimental toolchain, prior to it becoming official.
stdlib_files -= removed_rust_stdlib_libs
stdlib_files += added_rust_stdlib_libs
- if (!use_unverified_rust_toolchain) {
- # rlib files which are distributed alongside Rust's prebuilt stdlib, but we
- # don't need to pass to the C++ linker because they're used for specialized
- # purposes.
- skip_stdlib_files = [
- "profiler_builtins",
- "rustc_std_workspace_alloc",
- "rustc_std_workspace_core",
- "rustc_std_workspace_std",
- ]
- }
+ # rlib files which are distributed alongside Rust's prebuilt stdlib, but we
+ # don't need to pass to the C++ linker because they're used for specialized
+ # purposes.
+ skip_stdlib_files = [
+ "profiler_builtins",
+ "rustc_std_workspace_alloc",
+ "rustc_std_workspace_core",
+ "rustc_std_workspace_std",
+ ]
action("find_stdlib") {
- # Specifics of what we're doing here.
- #
- # We are using prebuilt Rust rlibs supplied along with the toolchain.
- # The Rust standard library consists of rlibs with roughly all the names
- # above.
+ # Collect prebuilt Rust libraries from toolchain package and copy to a known
+ # location.
#
- # However, their filenames are not predictable, and therefore we can't
- # have ninja rules which depend upon them. (gn offers a facility to
- # build rules dynamically, but it's frowned upon because a script needs
- # to run each time).
+ # The Rust toolchain contains prebuilt rlibs for the standard library and
+ # its dependencies. However, they have unstable names: an unpredictable
+ # metadata hash is appended to the known crate name.
#
- # Instead therefore we copy these unpredictable .rlib paths to apredictable
- # location. That's what this script does. Furthermore, it generates a
- # .d file in order to teach Ninja that it only needs to do this copying
- # once, unless the source .rlibs change.
+ # We must depend on these rlibs explicitly when rustc is not in charge of
+ # linking. However, it is difficult to construct GN rules to do so when the
+ # names can't be known statically.
#
- # The script accepts the list of known libraries and will raise an
- # exception if the list on disk differs. (Either 'Found stdlib rlib
- # that wasn't expected' or 'We failed to find all expected stdlib
- # rlibs').
+ # This action copies the prebuilt rlibs to a known location, removing the
+ # metadata part of the name. In the process it verifies we have all the
+ # libraries we expect and none that we don't. A depfile is generated so this
+ # step is re-run when any libraries change. The action script additionally
+ # verifies rustc matches the expected version, which is unrelated but this
+ # is a convenient place to do so.
#
- # The script does one final job, which is to check that the rustc
- # version matches that in the gn arg 'rustc_version'. This is
- # technically orthogonal to the stdlib-finding job that we do here,
- # but it's something we want to be sure of running during any
- # typical Rust build, and this target happens to be depended upon
- # almost everywhere, so it's a good fit.
+ # The action refers to `stdlib_files`, `skip_stdlib_files`, and the
+ # associated //build/config/rust.gni vars `removed_rust_stdlib_libs` and
+ # `added_rust_stdlib_libs` for which rlib files to expect.
+ # `extra_sysroot_libs` is also used to copy non-std libs, if any.
script = "find_std_rlibs.py"
depfile = "$target_out_dir/stdlib.d"
out_libdir = rebase_path(target_out_dir, root_build_dir)
out_depfile = rebase_path(depfile, root_build_dir)
+
+ # For the rustc sysroot we must include even the rlibs we don't pass to the
+ # C++ linker.
+ all_stdlibs_to_copy = stdlib_files + skip_stdlib_files
args = [
"--rust-bin-dir",
rebase_path("${rust_sysroot}/bin", root_build_dir),
@@ -127,37 +142,152 @@ if (toolchain_has_rust) {
"--depfile-target",
stdlib_files[0],
- "--expected-rustc-version",
- rustc_version,
+ # Create a dependency on the rustc version so this action is re-run when
+ # it changes. This argument is not actually read by the script.
+ "--rustc-revision",
+ rustc_revision,
]
+
if (!use_unverified_rust_toolchain) {
args += [
"--stdlibs",
- string_join(",", stdlib_files),
- "--skip",
- string_join(",", skip_stdlib_files),
+ string_join(",", all_stdlibs_to_copy),
]
+
+ if (ignore_stdlib_files != []) {
+ args += [
+ "--ignore-stdlibs",
+ string_join(",", ignore_stdlib_files),
+ ]
+ }
}
- if (rust_abi_target != "") {
+
+ if (extra_sysroot_libs != []) {
args += [
- "--target",
- rust_abi_target,
+ "--extra-libs",
+ string_join(",", extra_sysroot_libs),
]
}
+ args += [
+ "--target",
+ rust_abi_target,
+ ]
+
outputs = []
- foreach(lib, stdlib_files) {
+ foreach(lib, all_stdlibs_to_copy) {
outputs += [ "$target_out_dir/lib$lib.rlib" ]
}
+ foreach(lib, extra_sysroot_libs) {
+ outputs += [ "$target_out_dir/$lib" ]
+ }
}
- config("rust_stdlib_config") {
+ # Construct sysroots for rustc invocations to better control what libraries
+ # are linked. We have two: one with copied prebuilt libraries, and one with
+ # our locally-built std. Both reside in root_out_dir: we must only have one of
+ # each per GN toolchain anyway.
+
+ prebuilt_rustc_sysroot = "$root_out_dir/prebuilt_rustc_sysroot"
+ local_rustc_sysroot = "$root_out_dir/local_rustc_sysroot"
+ sysroot_lib_subdir = "lib/rustlib/$rust_abi_target/lib"
+
+ copy("prebuilt_rustc_sysroot") {
+ deps = [ ":find_stdlib" ]
+ sources = get_target_outputs(":find_stdlib")
+ outputs =
+ [ "$prebuilt_rustc_sysroot/$sysroot_lib_subdir/{{source_file_part}}" ]
+ }
+
+ if (local_libstd_supported) {
+ # All std targets starting with core build with our sysroot. It starts empty
+ # and is incrementally built. The directory must exist at the start.
+ generated_file("empty_sysroot_for_std_build") {
+ outputs = [ "$local_rustc_sysroot/$sysroot_lib_subdir/.empty" ]
+ contents = ""
+ }
+
+ config("local_stdlib_for_rustc") {
+ sysroot = rebase_path(local_rustc_sysroot, root_build_dir)
+ rustflags = [ "--sysroot=$sysroot" ]
+ }
+
+ # Target to be depended on by std build targets. Creates the initially empty
+ # sysroot.
+ group("std_build_deps") {
+ deps = [ ":empty_sysroot_for_std_build" ]
+ public_configs = [ ":local_stdlib_for_rustc" ]
+ }
+ } else {
+ not_needed([ "local_rustc_sysroot" ])
+ }
+
+ config("prebuilt_stdlib_for_rustc") {
+ # Match the output directory of :prebuilt_rustc_sysroot
+ sysroot = rebase_path(prebuilt_rustc_sysroot, root_build_dir)
+ rustflags = [ "--sysroot=$sysroot" ]
+ }
+
+ if (local_libstd_supported) {
+ # Use the sysroot generated by :local_rustc_sysroot, which transitively builds
+ # std. Only for use in specific tests for now.
+ group("local_std_for_rustc") {
+ assert(
+ enable_rust,
+ "Some C++ target is including Rust code even though enable_rust=false")
+ all_dependent_configs = [ ":local_stdlib_for_rustc" ]
+
+ deps = []
+ foreach(libname, stdlib_files + skip_stdlib_files) {
+ deps += [ "rules:$libname" ]
+ }
+ }
+ }
+
+ # Use the sysroot generated by :prebuilt_rustc_sysroot. Almost all Rust targets should depend
+ # on this.
+ group("prebuilt_std_for_rustc") {
+ assert(
+ enable_rust,
+ "Some C++ target is including Rust code even though enable_rust=false")
+ all_dependent_configs = [ ":prebuilt_stdlib_for_rustc" ]
+ deps = [ ":prebuilt_rustc_sysroot" ]
+ }
+
+ config("prebuilt_rust_stdlib_config") {
ldflags = []
- out_libdir = rebase_path(target_out_dir, root_build_dir)
+ lib_dir = rebase_path("$prebuilt_rustc_sysroot/$sysroot_lib_subdir",
+ root_build_dir)
+ foreach(lib, stdlib_files) {
+ this_file = "$lib_dir/lib$lib.rlib"
+ ldflags += [ this_file ]
+ }
+ if (is_win) {
+ # Our C++ builds already link against a wide variety of Windows API import libraries,
+ # but the Rust stdlib requires a few extra.
+ ldflags += [
+ "bcrypt.lib",
+ "userenv.lib",
+ ]
+ }
+ }
+
+ config("local_rust_stdlib_config") {
+ ldflags = []
+ lib_dir =
+ rebase_path("$local_rustc_sysroot/$sysroot_lib_subdir", root_build_dir)
foreach(lib, stdlib_files) {
- this_file = "$out_libdir/lib$lib.rlib"
+ this_file = "$lib_dir/lib$lib.rlib"
ldflags += [ this_file ]
}
+ if (is_win) {
+ # Our C++ builds already link against a wide variety of Windows API import libraries,
+ # but the Rust stdlib requires a few extra.
+ ldflags += [
+ "bcrypt.lib",
+ "userenv.lib",
+ ]
+ }
}
source_set("remap_alloc") {
@@ -167,14 +297,31 @@ if (toolchain_has_rust) {
]
}
- group("std") {
+ # TODO(crbug.com/1368806): rework this so when using locally-built std, we
+ # don't link the prebuilt std as well.
+
+ # Provides std libs to non-rustc linkers.
+ group("link_prebuilt_std") {
assert(
enable_rust,
"Some C++ target is including Rust code even though enable_rust=false")
- all_dependent_configs = [ ":rust_stdlib_config" ]
+ all_dependent_configs = [ ":prebuilt_rust_stdlib_config" ]
deps = [
- ":find_stdlib",
+ ":prebuilt_rustc_sysroot",
":remap_alloc",
]
}
+
+ if (local_libstd_supported) {
+ group("link_local_std") {
+ assert(
+ enable_rust,
+ "Some C++ target is including Rust code even though enable_rust=false")
+ all_dependent_configs = [ ":local_rust_stdlib_config" ]
+ deps = [
+ ":local_std_for_rustc",
+ ":remap_alloc",
+ ]
+ }
+ }
}
diff --git a/build/rust/std/fake_root/.cargo/config.toml b/build/rust/std/fake_root/.cargo/config.toml
new file mode 100644
index 000000000..72e14991c
--- /dev/null
+++ b/build/rust/std/fake_root/.cargo/config.toml
@@ -0,0 +1,5 @@
+[source.crates-io]
+replace-with = 'vendored-sources'
+
+[source.vendored-sources]
+directory = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor'
diff --git a/build/rust/std/fake_root/Cargo.toml b/build/rust/std/fake_root/Cargo.toml
new file mode 100644
index 000000000..55f3a079a
--- /dev/null
+++ b/build/rust/std/fake_root/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "fake_root"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+test = { path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/test" }
+
+[dependencies.std]
+path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/std"
+features = ["backtrace", "profiler"]
+
+[patch.crates-io]
+rustc-std-workspace-core = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core' }
+rustc-std-workspace-alloc = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc' }
+rustc-std-workspace-std = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std' }
diff --git a/build/rust/std/fake_root/README.md b/build/rust/std/fake_root/README.md
new file mode 100644
index 000000000..754a4b6b9
--- /dev/null
+++ b/build/rust/std/fake_root/README.md
@@ -0,0 +1,2 @@
+This package is used to discover the libstd deps using `cargo metadata`. gnrt
+uses it when generating libstd GN bindings.
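
As the README says, gnrt only needs this package as a `cargo metadata` target. A hedged Python sketch of that discovery step is below; the cargo invocation and paths are illustrative assumptions, not gnrt's actual implementation.

#!/usr/bin/env python3
# Illustrative sketch (not gnrt itself): list the packages cargo resolves for
# the fake_root package, i.e. the dependency set of std + test.
import json
import subprocess

def libstd_deps(fake_root_dir):
    out = subprocess.run(
        ["cargo", "metadata", "--format-version", "1", "--offline"],
        cwd=fake_root_dir, capture_output=True, text=True, check=True)
    metadata = json.loads(out.stdout)
    # Every resolved package except the fake_root placeholder itself.
    return sorted(p["name"] for p in metadata["packages"]
                  if p["name"] != "fake_root")

if __name__ == "__main__":
    print(libstd_deps("build/rust/std/fake_root"))
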
diff --git a/build/rust/std/fake_root/src/main.rs b/build/rust/std/fake_root/src/main.rs
new file mode 100644
index 000000000..2c54a5228
--- /dev/null
+++ b/build/rust/std/fake_root/src/main.rs
@@ -0,0 +1,3 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
diff --git a/build/rust/std/find_std_rlibs.py b/build/rust/std/find_std_rlibs.py
index 7d84d84f9..85ab477a9 100755
--- a/build/rust/std/find_std_rlibs.py
+++ b/build/rust/std/find_std_rlibs.py
@@ -35,11 +35,13 @@ def main():
required=True)
parser.add_argument("--stdlibs",
help="Expected list of standard library libraries")
- parser.add_argument("--skip-stdlibs",
- help="Standard library files to skip",
- default="")
- parser.add_argument("--expected-rustc-version",
- help="The string we expect to be reported by 'rustc -V'")
+ parser.add_argument("--ignore-stdlibs",
+ help="List of sysroot libraries to ignore")
+ parser.add_argument("--extra-libs",
+ help="List of extra non-libstd sysroot libraries")
+ parser.add_argument("--rustc-revision",
+ help="Not used, just passed from GN to add a dependency"
+ " on the rustc version.")
args = parser.parse_args()
# Expected rlibs by concise name (the crate name, plus a disambiguating suffix
@@ -54,24 +56,19 @@ def main():
rlibs_expected.add(name)
else:
rlibs_expected.add(f"{name}-{version}")
+ ignore_rlibs = set()
+ if args.ignore_stdlibs is not None:
+ ignore_rlibs = set(args.ignore_stdlibs.split(','))
else:
rlibs_expected = None
- rlibs_to_skip = set(args.skip_stdlibs.split(','))
-
- # First, ask rustc to confirm it's the version expected.
- rustc = os.path.join(args.rust_bin_dir, "rustc")
- if args.expected_rustc_version:
- proc = subprocess.run([rustc, "-V"], capture_output=True, text=True)
- proc.check_returncode()
- rustc_version = proc.stdout.rstrip()
- if rustc_version != args.expected_rustc_version:
- raise Exception("gn arguments state that the rustc_version is %s "
- "but it was actually %s. Please adjust your "
- "gn arguments to match." %
- (args.expected_rustc_version, rustc_version))
+ extra_libs = set()
+ if args.extra_libs:
+ for lib in args.extra_libs.split(','):
+ extra_libs.add(lib)
# Ask rustc where to find the stdlib for this target.
+ rustc = os.path.join(args.rust_bin_dir, "rustc")
rustc_args = [rustc, "--print", "target-libdir"]
if args.target:
rustc_args.extend(["--target", args.target])
@@ -90,6 +87,15 @@ def main():
depfile.write(
"%s:" % (os.path.join(args.output, "lib%s.rlib" % args.depfile_target)))
+ def copy_file(infile, outfile):
+ depfile.write(f" {infile}")
+ if (not os.path.exists(outfile)
+ or os.stat(infile).st_mtime != os.stat(outfile).st_mtime):
+ if os.path.exists(outfile):
+ st = os.stat(outfile)
+ os.chmod(outfile, st.st_mode | stat.S_IWUSR)
+ shutil.copy(infile, outfile)
+
# Each rlib is named "lib<crate_name>-<metadata>.rlib". The metadata
# disambiguates multiple crates of the same name. We want to throw away the
# metadata and use stable names. To do so, we replace the metadata bit with
@@ -116,8 +122,6 @@ def main():
# that, and it would prevent us having the predictable filenames
# which we need for statically computable gn dependency rules.
(crate_name, metadata) = RLIB_NAME_REGEX.match(f).group(1, 2)
- if crate_name in rlibs_to_skip:
- continue
# Use the number of times we've seen this name to disambiguate the output
# filenames. Since we sort the input filenames including the metadata,
@@ -135,19 +139,20 @@ def main():
output_filename = f"lib{concise_name}.rlib"
if rlibs_expected is not None:
+ if concise_name in ignore_rlibs:
+ continue
if concise_name not in rlibs_expected:
raise Exception("Found stdlib rlib that wasn't expected: %s" % f)
rlibs_expected.remove(concise_name)
infile = os.path.join(rustlib_dir, f)
outfile = os.path.join(args.output, output_filename)
- depfile.write(" %s" % infile)
- if (not os.path.exists(outfile)
- or os.stat(infile).st_mtime != os.stat(outfile).st_mtime):
- if os.path.exists(outfile):
- st = os.stat(outfile)
- os.chmod(outfile, st.st_mode | stat.S_IWUSR)
- shutil.copy(infile, outfile)
+ copy_file(infile, outfile)
+
+ for f in extra_libs:
+ infile = os.path.join(rustlib_dir, f)
+ outfile = os.path.join(args.output, f)
+ copy_file(infile, outfile)
depfile.write("\n")
if rlibs_expected:
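
The renaming and copying logic above boils down to: strip the metadata hash from "lib<crate>-<metadata>.rlib" to get a stable output name, record the input in the depfile, and only copy when timestamps differ. A simplified Python sketch follows; the regex here is an assumption (the script's own RLIB_NAME_REGEX is defined elsewhere) and copy2 is used purely so the mtime comparison stays meaningful.

#!/usr/bin/env python3
# Illustrative sketch, separate from find_std_rlibs.py: stable-rename an rlib
# and copy it only when it looks out of date.
import os
import re
import shutil

RLIB_RE = re.compile(r"^lib(.+)-([0-9a-f]+)\.rlib$")  # simplified assumption

def stable_copy(rustlib_dir, out_dir, filename):
    match = RLIB_RE.match(filename)
    if not match:
        return None  # not an rlib with a metadata suffix
    crate_name = match.group(1)
    infile = os.path.join(rustlib_dir, filename)
    outfile = os.path.join(out_dir, f"lib{crate_name}.rlib")
    if (not os.path.exists(outfile)
        or os.stat(infile).st_mtime != os.stat(outfile).st_mtime):
        shutil.copy2(infile, outfile)  # copy2 preserves timestamps
    return outfile
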
diff --git a/build/rust/std/gnrt_config.toml b/build/rust/std/gnrt_config.toml
new file mode 100644
index 000000000..cb6cca533
--- /dev/null
+++ b/build/rust/std/gnrt_config.toml
@@ -0,0 +1,53 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Provides per-crate and overall configuration options to gnrt.
+
+[all]
+# force-unstable-if-unmarked prevents normal crates from inadvertently using
+# symbols from std-internal dependencies in the sysroot. This is normally passed
+# during an x.py build, but we have to do it manually.
+rustflags = ['-Zforce-unstable-if-unmarked']
+
+# Override the GN output dir. We direct std targets to output directly to the
+# sysroot we'll use later. This must stay in sync with `local_rustc_sysroot` in
+# //build/rust/std/BUILD.gn
+output_dir = '$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/'
+
+# This target provides setup needed for building std.
+extra_gn_deps = ['//build/rust/std:std_build_deps']
+
+[crate.libc]
+# Requires:
+# * cfg(libc_align) for new enough rustc, which is normally provided by build.rs
+# but we don't run build scripts for std crates.
+# * cfg(libc_priv_mod_use) is required for the below to work properly.
+# * cfg(libc_core_cvoid) to use the same ffi c_void definition as libcore.
+#
+# See https://github.com/rust-lang/libc/blob/master/build.rs
+cfg = ['libc_align', 'libc_priv_mod_use', 'libc_core_cvoid']
+
+[crate.std]
+# Requires:
+# * cfg(backtrace_in_libstd) because it directly includes .rs files from the
+# backtrace code rather than including it as a dependency. backtrace's
+# implementation has special-purpose code to handle this.
+# * STD_ENV_ARCH is referenced in architecture-dependent code. Note this is the
+# target arch, and as such `$rust_target_arch` is passed literally to GN. This
+# variable is set at build time in build/config/rust.gni
+#
+# See https://github.com/rust-lang/rust/blob/master/library/std/build.rs
+cfg = ['backtrace_in_libstd']
+env = ['STD_ENV_ARCH=$rust_target_arch']
+
+[crate.test]
+# Requires:
+# * CFG_DISABLE_UNSTABLE_FEATURES=0 to match how it's built by x.py.
+env = ['CFG_DISABLE_UNSTABLE_FEATURES=0']
+
+# test only depends on proc_macro as an internal detail of the Rust build, so
+# it's implicitly included with std/test. However, we list the std crates and
+# construct the sysroot explicitly. We don't need this, and we don't even want
+# it during cross-compiles (since we will only build host proc_macro crates).
+exclude_deps_in_gn = ['proc_macro']
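
Each [crate.*] stanza above turns into extra --cfg rustflags and rustenv entries on the corresponding generated target in //build/rust/std/rules/BUILD.gn (visible further down in this patch for libc, std and test). A small Python sketch of that mapping, assuming Python 3.11+ for the stdlib tomllib parser; this is an illustration, not gnrt's real code path.

#!/usr/bin/env python3
# Illustrative sketch: derive per-crate rustflags/env from gnrt_config.toml.
import tomllib  # Python 3.11+

def crate_flags(config_path, crate):
    with open(config_path, "rb") as f:
        cfg = tomllib.load(f)
    section = cfg.get("crate", {}).get(crate, {})
    rustflags = list(cfg.get("all", {}).get("rustflags", []))
    rustflags += [f"--cfg={c}" for c in section.get("cfg", [])]
    return rustflags, section.get("env", [])

# For "libc" this yields -Zforce-unstable-if-unmarked plus the three
# --cfg=libc_* flags seen on the generated cargo_crate("libc") target.
print(crate_flags("build/rust/std/gnrt_config.toml", "libc"))
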
diff --git a/build/rust/std/remap_alloc.cc b/build/rust/std/remap_alloc.cc
index 42e189c97..ceac7567b 100644
--- a/build/rust/std/remap_alloc.cc
+++ b/build/rust/std/remap_alloc.cc
@@ -42,7 +42,11 @@
// do that here, or we could build a crate with a #[global_allocator] and
// redirect these symbols to that crate instead. The advantage of the latter
// is that it would work equally well for those cases where rustc is doing
-// the final linking.
+// the final linking. At present this is not necessary, because
+// PartitionAlloc-Everywhere already handles the malloc calls that result
+// from passing through this code. We might want to call into PartitionAlloc
+// directly in the future if we wanted Rust allocations to live in a
+// different partition.
//
// They're weak symbols, because this file will sometimes end up in targets
// which are linked by rustc, and thus we would otherwise get duplicate
diff --git a/build/rust/std/rules/BUILD.gn b/build/rust/std/rules/BUILD.gn
new file mode 100644
index 000000000..e7f4f97d6
--- /dev/null
+++ b/build/rust/std/rules/BUILD.gn
@@ -0,0 +1,865 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/cargo_crate.gni")
+
+cargo_crate("addr2line") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.17.0"
+ cargo_pkg_name = "addr2line"
+ cargo_pkg_description =
+ "A cross-platform symbolication library written in Rust, using `gimli`"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":gimli",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("adler") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "1.0.2"
+ cargo_pkg_authors = "Jonas Schievink <jonasschievink@gmail.com>"
+ cargo_pkg_name = "adler"
+ cargo_pkg_description =
+ "A simple clean-room implementation of the Adler-32 checksum"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("alloc") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "alloc"
+ cargo_pkg_description = "The Rust core allocation and collections library"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":core",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("cfg_if") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs" ]
+ edition = "2018"
+ cargo_pkg_version = "1.0.0"
+ cargo_pkg_authors = "Alex Crichton <alex@alexcrichton.com>"
+ cargo_pkg_name = "cfg-if"
+ cargo_pkg_description = "A macro to ergonomically define an item depending on a large number of #[cfg] parameters. Structured like an if-else chain, the first matching branch is the item that gets emitted."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("compiler_builtins") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.87/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.87/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.1.87"
+ cargo_pkg_authors = "Jorge Aparicio <japaricious@gmail.com>"
+ cargo_pkg_name = "compiler_builtins"
+ cargo_pkg_description = "Compiler intrinsics used by the Rust compiler. Also available for other targets if necessary!"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "compiler-builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("core") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [
+ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs",
+ ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "core"
+ cargo_pkg_description = "The Rust Core Library"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [ "//build/rust/std:std_build_deps" ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("getopts") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.2.21"
+ cargo_pkg_authors = "The Rust Project Developers"
+ cargo_pkg_name = "getopts"
+ cargo_pkg_description = "getopts-like option parsing."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":rustc_std_workspace_core",
+ ":rustc_std_workspace_std",
+ ":unicode_width",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ std = ":rustc_std_workspace_std__rlib"
+ }
+ features = [
+ "core",
+ "rustc-dep-of-std",
+ "std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("gimli") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs" ]
+ edition = "2018"
+ cargo_pkg_version = "0.26.2"
+ cargo_pkg_name = "gimli"
+ cargo_pkg_description =
+ "A library for reading and writing the DWARF debugging format."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "compiler_builtins",
+ "core",
+ "read",
+ "read-core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("hashbrown") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.12.3"
+ cargo_pkg_authors = "Amanieu d'Antras <amanieu@gmail.com>"
+ cargo_pkg_name = "hashbrown"
+ cargo_pkg_description = "A Rust port of Google's SwissTable hash map"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "compiler_builtins",
+ "core",
+ "nightly",
+ "rustc-dep-of-std",
+ "rustc-internal-api",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("libc") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.139/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.139/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.2.139"
+ cargo_pkg_authors = "The Rust Project Developers"
+ cargo_pkg_name = "libc"
+ cargo_pkg_description = "Raw FFI bindings to platform libraries like libc."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ features = [
+ "align",
+ "rustc-dep-of-std",
+ "rustc-std-workspace-core",
+ ]
+ rustflags = [
+ "--cfg=libc_align",
+ "--cfg=libc_priv_mod_use",
+ "--cfg=libc_core_cvoid",
+ "-Zforce-unstable-if-unmarked",
+ ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("memchr") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs" ]
+ edition = "2018"
+ cargo_pkg_version = "2.5.0"
+ cargo_pkg_authors = "Andrew Gallant <jamslam@gmail.com>, bluss"
+ cargo_pkg_name = "memchr"
+ cargo_pkg_description = "Safe interface to memchr."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("miniz_oxide") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs" ]
+ edition = "2018"
+ cargo_pkg_version = "0.5.3"
+ cargo_pkg_authors = "Frommi <daniil.liferenko@gmail.com>, oyvindln <oyvindln@users.noreply.github.com>"
+ cargo_pkg_name = "miniz_oxide"
+ cargo_pkg_description = "DEFLATE compression and decompression library rewritten in Rust based on miniz"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":adler",
+ ":compiler_builtins",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("object") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs" ]
+ edition = "2018"
+ cargo_pkg_version = "0.29.0"
+ cargo_pkg_name = "object"
+ cargo_pkg_description =
+ "A unified interface for reading and writing object file formats."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":memchr",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "archive",
+ "coff",
+ "compiler_builtins",
+ "core",
+ "elf",
+ "macho",
+ "pe",
+ "read_core",
+ "rustc-dep-of-std",
+ "unaligned",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("panic_abort") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "panic_abort"
+ cargo_pkg_description = "Implementation of Rust panics via process aborts"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":alloc",
+ ":cfg_if",
+ ":compiler_builtins",
+ ":core",
+ ":libc",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("panic_unwind") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "panic_unwind"
+ cargo_pkg_description = "Implementation of Rust panics via stack unwinding"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":alloc",
+ ":cfg_if",
+ ":compiler_builtins",
+ ":core",
+ ":libc",
+ ":unwind",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("proc_macro") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "proc_macro"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":core",
+ ":std",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("profiler_builtins") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "profiler_builtins"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":core",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("rustc_demangle") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.1.21"
+ cargo_pkg_authors = "Alex Crichton <alex@alexcrichton.com>"
+ cargo_pkg_name = "rustc-demangle"
+ cargo_pkg_description = "Rust compiler symbol demangling."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("rustc_std_workspace_alloc") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "1.99.0"
+ cargo_pkg_name = "rustc-std-workspace-alloc"
+ cargo_pkg_description = "Hack for the compiler's own build system"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":alloc",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("rustc_std_workspace_core") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "1.99.0"
+ cargo_pkg_name = "rustc-std-workspace-core"
+ cargo_pkg_description = "Hack for the compiler's own build system"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":core",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("rustc_std_workspace_std") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "1.99.0"
+ cargo_pkg_name = "rustc-std-workspace-std"
+ cargo_pkg_description = "Hack for the compiler's own build system"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":std",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("std") {
+ crate_type = "rlib"
+ crate_root =
+ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [
+ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs",
+ ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "std"
+ cargo_pkg_description = "The Rust Standard Library"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":addr2line",
+ ":alloc",
+ ":cfg_if",
+ ":compiler_builtins",
+ ":core",
+ ":hashbrown",
+ ":libc",
+ ":miniz_oxide",
+ ":object",
+ ":panic_abort",
+ ":panic_unwind",
+ ":profiler_builtins",
+ ":rustc_demangle",
+ ":std_detect",
+ ":unwind",
+ "//build/rust/std:std_build_deps",
+ ]
+ features = [
+ "addr2line",
+ "backtrace",
+ "gimli-symbolize",
+ "miniz_oxide",
+ "object",
+ "panic_unwind",
+ "profiler",
+ "profiler_builtins",
+ "std_detect_dlsym_getauxval",
+ "std_detect_file_io",
+ ]
+ rustenv = [ "STD_ENV_ARCH=$rust_target_arch" ]
+ rustflags = [
+ "--cfg=backtrace_in_libstd",
+ "-Zforce-unstable-if-unmarked",
+ ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("std_detect") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.1.5"
+ cargo_pkg_authors = "Alex Crichton <alex@alexcrichton.com>, Andrew Gallant <jamslam@gmail.com>, Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
+ cargo_pkg_name = "std_detect"
+ cargo_pkg_description =
+ "`std::detect` - Rust's standard library run-time CPU feature detection."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":cfg_if",
+ ":compiler_builtins",
+ ":libc",
+ ":rustc_std_workspace_alloc",
+ ":rustc_std_workspace_core",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ alloc = ":rustc_std_workspace_alloc__rlib"
+ core = ":rustc_std_workspace_core__rlib"
+ }
+ features = [
+ "alloc",
+ "compiler_builtins",
+ "core",
+ "libc",
+ "rustc-dep-of-std",
+ "std_detect_dlsym_getauxval",
+ "std_detect_file_io",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("test") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [
+ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs",
+ ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "test"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":core",
+ ":getopts",
+ ":panic_abort",
+ ":panic_unwind",
+ ":std",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustenv = [ "CFG_DISABLE_UNSTABLE_FEATURES=0" ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("unicode_width") {
+ crate_type = "rlib"
+ crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs" ]
+ edition = "2015"
+ cargo_pkg_version = "0.1.10"
+ cargo_pkg_authors =
+ "kwantam <kwantam@gmail.com>, Manish Goregaokar <manishsmail@gmail.com>"
+ cargo_pkg_name = "unicode-width"
+ cargo_pkg_description = "Determine displayed width of `char` and `str` types according to Unicode Standard Annex #11 rules."
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":compiler_builtins",
+ ":rustc_std_workspace_core",
+ ":rustc_std_workspace_std",
+ "//build/rust/std:std_build_deps",
+ ]
+ aliased_deps = {
+ core = ":rustc_std_workspace_core__rlib"
+ std = ":rustc_std_workspace_std__rlib"
+ }
+ features = [
+ "compiler_builtins",
+ "core",
+ "rustc-dep-of-std",
+ "std",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("unwind") {
+ crate_type = "rlib"
+ crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs"
+ no_std = true
+
+ # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false
+ sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs" ]
+ edition = "2021"
+ cargo_pkg_version = "0.0.0"
+ cargo_pkg_name = "unwind"
+ library_configs -= [ "//build/config/compiler:chromium_code" ]
+ library_configs += [ "//build/config/compiler:no_chromium_code" ]
+ executable_configs -= [ "//build/config/compiler:chromium_code" ]
+ executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+ deps = [
+ ":cfg_if",
+ ":compiler_builtins",
+ ":core",
+ ":libc",
+ "//build/rust/std:std_build_deps",
+ ]
+ rustflags = [ "-Zforce-unstable-if-unmarked" ]
+ output_dir =
+ "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
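
Every generated crate above writes its rlib into $root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/, which is exactly the layout rustc expects from a sysroot. A hedged Python sketch of how a compile would then point at that sysroot; the out dir and target triple are placeholders.

#!/usr/bin/env python3
# Illustrative sketch: compile a file against the locally built std by passing
# --sysroot, analogous to what the local_stdlib_for_rustc config does in GN.
import subprocess

def rustc_with_local_std(out_dir, target, source):
    cmd = [
        "rustc",
        f"--sysroot={out_dir}/local_rustc_sysroot",
        "--target", target,
        source,
    ]
    # Fails unless the sysroot has actually been populated for `target`.
    return subprocess.run(cmd, check=False)

if __name__ == "__main__":
    rustc_with_local_std("out/Release", "x86_64-unknown-linux-gnu", "main.rs")
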
diff --git a/build/rust/tests/BUILD.gn b/build/rust/tests/BUILD.gn
index 64840c409..9ecdd7f88 100644
--- a/build/rust/tests/BUILD.gn
+++ b/build/rust/tests/BUILD.gn
@@ -5,9 +5,13 @@
import("//build/config/rust.gni")
import("//build/rust/rust_unit_tests_group.gni")
+if (toolchain_has_rust) {
+ target_has_exe = !is_android
+}
+
+# Build some minimal binaries to exercise the Rust toolchain
+# only if that toolchain is enabled in gn args.
group("tests") {
- # Build some minimal binaries to exercise the Rust toolchain
- # only if that toolchain is enabled in gn args.
testonly = true
deps = [ ":deps" ]
@@ -23,27 +27,42 @@ group("deps") {
# All the rest require Rust.
if (toolchain_has_rust) {
deps += [
+ "test_aliased_deps",
"test_cpp_including_rust",
"test_rlib_crate:target1",
"test_rlib_crate:target2",
"test_rust_static_library",
"test_serde_json_lenient",
+
+ # TODO(https://crbug.com/1329611): Enable the additional target below
+ # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the
+ # meantime see the instructions in
+ # `//build/rust/run_rs_bindings_from_cc.py`.
+ #"test_rs_bindings_from_cc:test_rs_bindings_from_cc",
]
- if (rustc_can_link) {
+ if (target_has_exe) {
deps += [
"bindgen_test",
+ "test_aliased_deps:test_aliased_deps_exe",
+ "test_bin_crate",
+ "test_rlib_crate:test_rlib_crate_associated_bin",
+ "test_rust_exe",
+ "test_rust_multiple_dep_versions_exe",
"test_rust_shared_library",
+ "test_simple_rust_exe",
]
}
if (can_build_rust_unit_tests) {
deps += [
"bindgen_test:bindgen_test_lib_unittests",
+ "test_aliased_deps:test_aliased_deps_unittests",
"test_cpp_including_rust:test_cpp_including_rust_unittests",
"test_rlib_crate:target1_test_rlib_crate_v0_2_unittests",
"test_rlib_crate:target2_test_rlib_crate_v0_2_unittests",
"test_rust_exe:test_rust_exe_unittests",
"test_rust_multiple_dep_versions_exe/v1:test_lib_v1_unittests",
"test_rust_multiple_dep_versions_exe/v2:test_lib_v2_unittests",
+ "test_rust_shared_library:test_rust_shared_library_unittests",
"test_rust_static_library:test_rust_static_library_unittests",
"test_rust_static_library_non_standard_arrangement:foo_tests",
"test_rust_unittests",
@@ -54,38 +73,32 @@ group("deps") {
# `//build/rust/run_rs_bindings_from_cc.py`.
#"test_rs_bindings_from_cc:test_rs_bindings_from_cc_unittests",
]
+ if (current_toolchain == host_toolchain_no_sanitizers) {
+ # Build these proc macro tests only on toolchains where we'd build the
+ # proc macro itself.
+ deps += [ "test_proc_macro_crate:test_proc_macro_crate_v0_2_unittests" ]
+ }
+ }
- # TODO(crbug.com/1297592): The bot isolate does not end up including any
- # .so files so the tests fail:
- #
- # error while loading shared libraries: libtest_rust_shared_library.so:
- # cannot open shared object file: No such file or directory
- if (false) {
- deps +=
- [ "test_rust_shared_library:test_rust_shared_library_unittests" ]
+ if (local_libstd_supported) {
+ deps += [ "test_local_std" ]
+ if (target_has_exe) {
+ deps += [ "test_local_std:test_local_std_exe" ]
+ }
+ if (can_build_rust_unit_tests) {
+ deps += [ "test_local_std:test_local_std_unittests" ]
}
}
- if (rustc_can_link) {
- deps += [
- "test_bin_crate",
- "test_rlib_crate:test_rlib_crate_associated_bin",
- "test_rust_exe",
- "test_rust_multiple_dep_versions_exe",
- "test_simple_rust_exe",
- "//third_party/rust/bindgen/v0_60:bindgen",
- # TODO(https://crbug.com/1329611): Enable the additional target below
- # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the
- # meantime see the instructions in
- # `//build/rust/run_rs_bindings_from_cc.py`.
- #"test_rs_bindings_from_cc:test_rs_bindings_from_cc",
- ]
+ if (is_win) {
+ deps += [ "test_control_flow_guard" ]
}
}
}
if (can_build_rust_unit_tests) {
- # A group covering all native Rust unit tests under //build/rust directory.
+ # Generates a script that will run all the native Rust unit tests, in order
+ # to have them all part of a single test step on infra bots.
rust_unit_tests_group("build_rust_tests") {
deps = [ ":deps" ]
}
diff --git a/build/rust/tests/bindgen_test/BUILD.gn b/build/rust/tests/bindgen_test/BUILD.gn
index ce158e998..ce0fbc435 100644
--- a/build/rust/tests/bindgen_test/BUILD.gn
+++ b/build/rust/tests/bindgen_test/BUILD.gn
@@ -27,6 +27,7 @@ rust_bindgen("c_lib_bindgen") {
}
rust_static_library("bindgen_test_lib") {
+ allow_unsafe = true
deps = [
":c_lib",
":c_lib_bindgen",
diff --git a/build/rust/tests/bindgen_test/src/lib.rs b/build/rust/tests/bindgen_test/src/lib.rs
index dfd231de2..c8672e06a 100644
--- a/build/rust/tests/bindgen_test/src/lib.rs
+++ b/build/rust/tests/bindgen_test/src/lib.rs
@@ -4,6 +4,7 @@
mod c_ffi {
#![allow(dead_code)]
+ #![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
include!(env!("BINDGEN_RS_FILE"));
diff --git a/build/rust/tests/test_aliased_deps/BUILD.gn b/build/rust/tests/test_aliased_deps/BUILD.gn
new file mode 100644
index 000000000..45ad73f44
--- /dev/null
+++ b/build/rust/tests/test_aliased_deps/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/rust.gni")
+import("//build/rust/rust_executable.gni")
+import("//build/rust/rust_static_library.gni")
+
+rust_executable("test_aliased_deps_exe") {
+ crate_root = "main.rs"
+ sources = [ crate_root ]
+ deps = [ ":test_aliased_deps" ]
+}
+
+rust_static_library("test_aliased_deps") {
+ crate_root = "lib.rs"
+ sources = [ crate_root ]
+ deps = [ ":real_name" ]
+ aliased_deps = {
+ # Unfortunately we have to know the `__rlib` suffix that
+ # `rust_static_library()` appends to the actual rlib target.
+ other_name = ":real_name__rlib"
+ }
+ build_native_rust_unit_tests = true
+}
+
+rust_static_library("real_name") {
+ crate_root = "real_name.rs"
+ sources = [ crate_root ]
+}
diff --git a/build/rust/tests/test_aliased_deps/lib.rs b/build/rust/tests/test_aliased_deps/lib.rs
new file mode 100644
index 000000000..dcaa3431c
--- /dev/null
+++ b/build/rust/tests/test_aliased_deps/lib.rs
@@ -0,0 +1,11 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub use other_name;
+
+#[cfg(test)]
+#[test]
+fn test_add_from_renamed_dep() {
+ assert_eq!(other_name::add(2, 3), 5);
+}
diff --git a/build/config/fuchsia/test/web_instance_host.shard.test-cml b/build/rust/tests/test_aliased_deps/main.rs
index a46a956e6..8f33abece 100644
--- a/build/config/fuchsia/test/web_instance_host.shard.test-cml
+++ b/build/rust/tests/test_aliased_deps/main.rs
@@ -1,12 +1,7 @@
// Copyright 2022 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-{
- use: [
- {
- protocol: [
- "fuchsia.sys.Environment",
- ],
- },
- ],
+
+fn main() {
+ test_aliased_deps::other_name::hello_world();
}
diff --git a/build/rust/tests/test_aliased_deps/real_name.rs b/build/rust/tests/test_aliased_deps/real_name.rs
new file mode 100644
index 000000000..15f084f3f
--- /dev/null
+++ b/build/rust/tests/test_aliased_deps/real_name.rs
@@ -0,0 +1,11 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub fn hello_world() {
+ println!("hello world");
+}
+
+pub fn add(a: u32, b: u32) -> u32 {
+ a + b
+}
diff --git a/build/rust/tests/test_control_flow_guard/BUILD.gn b/build/rust/tests/test_control_flow_guard/BUILD.gn
new file mode 100644
index 000000000..33401219f
--- /dev/null
+++ b/build/rust/tests/test_control_flow_guard/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_executable.gni")
+
+rust_executable("test_control_flow_guard") {
+ allow_unsafe = true
+ crate_root = "test_control_flow_guard.rs"
+ sources = [ crate_root ]
+}
diff --git a/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs b/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs
new file mode 100644
index 000000000..d303d3dc8
--- /dev/null
+++ b/build/rust/tests/test_control_flow_guard/test_control_flow_guard.rs
@@ -0,0 +1,43 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//! To test that CFG is working, build this executable on Windows and run it
+//! as:
+//!
+//! `out\Release\cdb\cdb.exe -G -g -o .\out\Release\test_control_flow_guard.exe`
+//!
+//! Which should print:
+//! ```
+//! (a2d4.bcd8): Security check failure or stack buffer overrun - code c0000409
+//! (!!! second chance !!!)
+//! Subcode: 0xa FAST_FAIL_GUARD_ICALL_CHECK_FAILURE
+//! ```
+//!
+//! If cdb.exe is not present, first run `ninja -C out\Release cdb\cdb.exe`.
+
+use std::arch::asm;
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+const NOP_INSTRUCTION_SIZE: usize = 1;
+#[cfg(target_arch = "aarch64")]
+const NOP_INSTRUCTION_SIZE: usize = 4;
+
+#[inline(never)]
+fn nop_sled() {
+ unsafe { asm!("nop", "nop", "ret",) }
+}
+
+#[inline(never)]
+fn indirect_call(func: fn()) {
+ func();
+}
+
+fn main() {
+ let fptr =
+ unsafe { std::mem::transmute::<usize, fn()>(nop_sled as usize + NOP_INSTRUCTION_SIZE) };
+ // Generates a FAST_FAIL_GUARD_ICALL_CHECK_FAILURE if CFG triggers.
+ indirect_call(fptr);
+ // Should only reach here if CFG is disabled.
+ eprintln!("failed");
+}
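
The doc comment above describes a manual cdb run to confirm CFG kills the misaligned indirect call. A small Python wrapper of that exact invocation is sketched below; the out-dir layout and the assumption that cdb's console output contains the fast-fail subcode text are illustrative, not part of the test.

#!/usr/bin/env python3
# Illustrative sketch: run test_control_flow_guard under cdb and check for the
# documented FAST_FAIL_GUARD_ICALL_CHECK_FAILURE subcode in the output.
import subprocess

def cfg_enforced(out_dir=r"out\Release"):
    proc = subprocess.run(
        [rf"{out_dir}\cdb\cdb.exe", "-G", "-g", "-o",
         rf"{out_dir}\test_control_flow_guard.exe"],
        capture_output=True, text=True)
    return "FAST_FAIL_GUARD_ICALL_CHECK_FAILURE" in (proc.stdout + proc.stderr)

if __name__ == "__main__":
    print("CFG enforced:", cfg_enforced())
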
diff --git a/build/rust/tests/test_local_std/BUILD.gn b/build/rust/tests/test_local_std/BUILD.gn
new file mode 100644
index 000000000..499aebdd7
--- /dev/null
+++ b/build/rust/tests/test_local_std/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/rust.gni")
+import("//build/rust/rust_executable.gni")
+import("//build/rust/rust_static_library.gni")
+
+assert(local_libstd_supported)
+
+rust_static_library("test_local_std") {
+ sources = [ "lib.rs" ]
+ crate_root = "lib.rs"
+ build_native_rust_unit_tests = true
+ use_local_std = true
+}
+
+rust_executable("test_local_std_exe") {
+ sources = [ "main.rs" ]
+ crate_root = "main.rs"
+ deps = [ ":test_local_std" ]
+ use_local_std = true
+}
diff --git a/build/rust/tests/test_local_std/lib.rs b/build/rust/tests/test_local_std/lib.rs
new file mode 100644
index 000000000..6328cf415
--- /dev/null
+++ b/build/rust/tests/test_local_std/lib.rs
@@ -0,0 +1,8 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#[test]
+fn test_test() {
+ assert_eq!(1, 1);
+}
diff --git a/build/rust/tests/test_local_std/main.rs b/build/rust/tests/test_local_std/main.rs
new file mode 100644
index 000000000..746e0216e
--- /dev/null
+++ b/build/rust/tests/test_local_std/main.rs
@@ -0,0 +1,7 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+fn main() {
+ println!("hello world");
+}
diff --git a/build/rust/tests/test_rlib_crate/BUILD.gn b/build/rust/tests/test_rlib_crate/BUILD.gn
index f25a11188..9410316a2 100644
--- a/build/rust/tests/test_rlib_crate/BUILD.gn
+++ b/build/rust/tests/test_rlib_crate/BUILD.gn
@@ -37,21 +37,19 @@ cargo_crate("target2") {
rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ]
}
-if (rustc_can_link) {
- # Exists to test the case that a single crate has both a library
- # and a binary, to ensure that shared build products (e.g. the
- # build script) don't conflict.
- cargo_crate("test_rlib_crate_associated_bin") {
- crate_root = "crate/src/main.rs"
- crate_type = "bin"
- sources = [ "crate/src/main.rs" ]
- build_sources = [ "crate/build.rs" ]
- build_root = "crate/build.rs"
- features = [
- "my-feature_a",
- "my-feature_b",
- ]
- rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ]
- deps = [ ":target1" ]
- }
+# Exists to test the case where a single crate has both a library
+# and a binary, to ensure that shared build products (e.g. the
+# build script) don't conflict.
+cargo_crate("test_rlib_crate_associated_bin") {
+ crate_root = "crate/src/main.rs"
+ crate_type = "bin"
+ sources = [ "crate/src/main.rs" ]
+ build_sources = [ "crate/build.rs" ]
+ build_root = "crate/build.rs"
+ features = [
+ "my-feature_a",
+ "my-feature_b",
+ ]
+ rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ]
+ deps = [ ":target1" ]
}
diff --git a/build/rust/tests/test_rust_shared_library/BUILD.gn b/build/rust/tests/test_rust_shared_library/BUILD.gn
index 3aac2934e..f2396c761 100644
--- a/build/rust/tests/test_rust_shared_library/BUILD.gn
+++ b/build/rust/tests/test_rust_shared_library/BUILD.gn
@@ -4,10 +4,9 @@
import("//build/rust/rust_shared_library.gni")
-if (rustc_can_link) {
- rust_shared_library("test_rust_shared_library") {
- sources = [ "src/lib.rs" ]
- cxx_bindings = [ "src/lib.rs" ]
- build_native_rust_unit_tests = true
- }
+rust_shared_library("test_rust_shared_library") {
+ allow_unsafe = true
+ sources = [ "src/lib.rs" ]
+ cxx_bindings = [ "src/lib.rs" ]
+ build_native_rust_unit_tests = true
}
diff --git a/build/rust/tests/test_rust_shared_library/src/lib.rs b/build/rust/tests/test_rust_shared_library/src/lib.rs
index 54788b675..eabfa274a 100644
--- a/build/rust/tests/test_rust_shared_library/src/lib.rs
+++ b/build/rust/tests/test_rust_shared_library/src/lib.rs
@@ -2,6 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+// Requires this allow since cxx generates unsafe code.
+//
+// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code.
+#[allow(unsafe_op_in_unsafe_fn)]
#[cxx::bridge]
mod ffi {
pub struct SomeStruct {
diff --git a/build/rust/tests/test_rust_static_library/BUILD.gn b/build/rust/tests/test_rust_static_library/BUILD.gn
index 39ccbb4a1..28a48cbc1 100644
--- a/build/rust/tests/test_rust_static_library/BUILD.gn
+++ b/build/rust/tests/test_rust_static_library/BUILD.gn
@@ -5,6 +5,7 @@
import("//build/rust/rust_static_library.gni")
rust_static_library("test_rust_static_library") {
+ allow_unsafe = true
sources = [ "src/lib.rs" ]
cxx_bindings = [ "src/lib.rs" ]
build_native_rust_unit_tests = true
diff --git a/build/rust/tests/test_rust_static_library/src/lib.rs b/build/rust/tests/test_rust_static_library/src/lib.rs
index 9fd9fe7b9..1fcabe3f2 100644
--- a/build/rust/tests/test_rust_static_library/src/lib.rs
+++ b/build/rust/tests/test_rust_static_library/src/lib.rs
@@ -2,6 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+// Requires this allow since cxx generates unsafe code.
+//
+// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code.
+#[allow(unsafe_op_in_unsafe_fn)]
#[cxx::bridge]
mod ffi {
pub struct SomeStruct {
diff --git a/build/rust/tests/test_serde_json_lenient/BUILD.gn b/build/rust/tests/test_serde_json_lenient/BUILD.gn
index 0f0da7a6f..dbc954e08 100644
--- a/build/rust/tests/test_serde_json_lenient/BUILD.gn
+++ b/build/rust/tests/test_serde_json_lenient/BUILD.gn
@@ -7,6 +7,7 @@ import("//testing/test.gni")
rust_static_library("test_serde_json_lenient_rs") {
crate_root = "lib.rs"
+ allow_unsafe = true
sources = [ "lib.rs" ]
cxx_bindings = [ "lib.rs" ]
deps = [ "//third_party/rust/serde_json_lenient/v0_1:lib" ]
diff --git a/build/rust/tests/test_simple_rust_exe/BUILD.gn b/build/rust/tests/test_simple_rust_exe/BUILD.gn
index 59a707310..a800720fb 100644
--- a/build/rust/tests/test_simple_rust_exe/BUILD.gn
+++ b/build/rust/tests/test_simple_rust_exe/BUILD.gn
@@ -8,4 +8,5 @@
# //build/rust/rust_executable.gni.
executable("test_simple_rust_exe") {
crate_root = "main.rs"
+ deps = [ "//build/rust/std:local_std_for_rustc" ]
}
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
index 10cd761b2..dceab4b50 100644
--- a/build/sanitizers/lsan_suppressions.cc
+++ b/build/sanitizers/lsan_suppressions.cc
@@ -69,9 +69,6 @@ char kLSanDefaultSuppressions[] =
// impossible, i.e. when enabling leak detection for the first time for a
// test target with pre-existing leaks.
- // https://crbug.com/755670
- "leak:third_party/yasm/\n"
-
// v8 leaks caused by weak ref not call
"leak:blink::DOMWrapperWorld::Create\n"
"leak:blink::ScriptState::Create\n"
@@ -93,7 +90,7 @@ char kLSanDefaultSuppressions[] =
// Suppress leak in SurfaceDrawContext. crbug.com/1265033
"leak:skgpu::v1::SurfaceDrawContext::drawGlyphRunList\n"
// Suppress leak in BluetoothServerSocket. crbug.com/1278970
- "leak:location::nearby::chrome::BluetoothServerSocket::"
+ "leak:nearby::chrome::BluetoothServerSocket::"
"BluetoothServerSocket\n"
// Suppress leak in NearbyConnectionBrokerImpl. crbug.com/1279578
"leak:ash::secure_channel::NearbyConnectionBrokerImpl\n"
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
index 36eb6d769..d90546efc 100644
--- a/build/sanitizers/tsan_suppressions.cc
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -30,35 +30,13 @@ char kTSanDefaultSuppressions[] =
// Intentional race in ToolsSanityTest.DataRace in base_unittests.
"race:base/tools_sanity_unittest.cc\n"
- // Data race on WatchdogCounter [test-only].
- "race:base/threading/watchdog_unittest.cc\n"
-
// Data race caused by swapping out the network change notifier with a mock
// [test-only]. http://crbug.com/927330.
"race:content/browser/net_info_browsertest.cc\n"
- // http://crbug.com/120808
- "race:base/threading/watchdog.cc\n"
-
- // http://crbug.com/157586
- "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
-
// http://crbug.com/244856
"race:libpulsecommon*.so\n"
- // http://crbug.com/258479
- "race:g_trace_state\n"
-
- // http://crbug.com/268924
- "race:base::g_power_monitor\n"
- "race:base::PowerMonitor::PowerMonitor\n"
- "race:base::PowerMonitor::AddObserver\n"
- "race:base::PowerMonitor::RemoveObserver\n"
- "race:base::PowerMonitor::IsOnBatteryPower\n"
-
- // http://crbug.com/308590
- "race:CustomThreadWatcher::~CustomThreadWatcher\n"
-
// http://crbug.com/476529
"deadlock:cc::VideoLayerImpl::WillDraw\n"
@@ -74,14 +52,6 @@ char kTSanDefaultSuppressions[] =
// http://crbug.com/380554
"deadlock:g_type_add_interface_static\n"
- // http://crbug.com/397022
- "deadlock:"
- "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
- "TestBody\n"
-
- // http://crbug.com/415472
- "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
-
// Lock inversion in third party code, won't fix.
// https://crbug.com/455638
"deadlock:dbus::Bus::ShutdownAndBlock\n"
@@ -89,9 +59,6 @@ char kTSanDefaultSuppressions[] =
// https://crbug.com/459429
"race:randomnessPid\n"
- // http://crbug.com/633145
- "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
-
// http://crbug.com/691029
"deadlock:libGLX.so*\n"
@@ -105,17 +72,6 @@ char kTSanDefaultSuppressions[] =
// http://crbug.com/927330
"race:net::(anonymous namespace)::g_network_change_notifier\n"
- // https://crbug.com/977085
- "race:vp3_update_thread_context\n"
-
- // Benign data race in libjpeg-turbo, won't fix
- // (https://github.com/libjpeg-turbo/libjpeg-turbo/issues/87).
- // https://crbug.com/1056011
- "race:third_party/libjpeg_turbo/simd/x86_64/jsimd.c\n"
-
- // https://crbug.com/1158622
- "race:absl::synchronization_internal::Waiter::Post\n"
-
// Harmless data races, see WTF::StringImpl::Release code comments.
"race:scoped_refptr<WTF::StringImpl>::AddRef\n"
"race:scoped_refptr<WTF::StringImpl>::Release\n"
@@ -124,6 +80,16 @@ char kTSanDefaultSuppressions[] =
// ipcz::BlockAllocator::Allocate().
"race:ipcz::BlockAllocator::Allocate\n"
+ // https://crbug.com/1405439
+ "race:*::perfetto_track_event::internal::g_category_state_storage\n"
+ "race:perfetto::DataSource*::static_state_\n"
+ "race:perfetto::*::ResetForTesting\n"
+
+ // In V8 each global safepoint might lock isolate mutexes in a different
+ // order. This is allowed in this context as it is always guarded by a
+ // single global mutex.
+ "deadlock:GlobalSafepoint::EnterGlobalSafepointScope\n"
+
// End of suppressions.
; // Please keep this semicolon.
diff --git a/build/shim_headers.gni b/build/shim_headers.gni
index 104b2d782..3bef6c0de 100644
--- a/build/shim_headers.gni
+++ b/build/shim_headers.gni
@@ -28,8 +28,10 @@ template("shim_headers") {
}
args += invoker.headers
- outputs = process_file_template(invoker.headers,
- "${shim_headers_path}/{{source_file_part}}")
+ outputs = []
+ foreach(h, invoker.headers) {
+      outputs += [ shim_headers_path + "/" + rebase_path(invoker.root_path, "//") + "/" + h ]
+ }
}
group(target_name) {
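For illustration, a hedged sketch of what the new per-header output computation yields; the target name, root_path, and headers below are hypothetical and not part of this change.

# Illustrative invocation only; names and paths are hypothetical.
shim_headers("zlib_shim") {
  root_path = "//third_party/zlib"
  headers = [
    "zlib.h",
    "zconf.h",
  ]
}

# With the foreach above, outputs now keep the rebased root_path, e.g.
#   $shim_headers_path/third_party/zlib/zlib.h
#   $shim_headers_path/third_party/zlib/zconf.h
# whereas the old process_file_template() form kept only the file part:
#   $shim_headers_path/zlib.h
#   $shim_headers_path/zconf.h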
diff --git a/build/skia_gold_common/PRESUBMIT.py b/build/skia_gold_common/PRESUBMIT.py
index ba2fa89dc..f3cc772d8 100644
--- a/build/skia_gold_common/PRESUBMIT.py
+++ b/build/skia_gold_common/PRESUBMIT.py
@@ -9,31 +9,31 @@ for more details on the presubmit API built into depot_tools.
USE_PYTHON3 = True
+PRESUBMIT_VERSION = '2.0.0'
-def CommonChecks(input_api, output_api):
- output = []
+
+def _GetSkiaGoldEnv(input_api):
+ """Gets the common environment for running Skia Gold tests."""
build_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..')
skia_gold_env = dict(input_api.environ)
skia_gold_env.update({
'PYTHONPATH': build_path,
'PYTHONDONTWRITEBYTECODE': '1',
})
- output.extend(
- input_api.canned_checks.RunUnitTestsInDirectory(
- input_api,
- output_api,
- input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'],
- env=skia_gold_env,
- run_on_python2=False,
- skip_shebang_check=True))
- output.extend(
- input_api.canned_checks.RunPylint(input_api, output_api, version='2.7'))
- return output
+ return skia_gold_env
-def CheckChangeOnUpload(input_api, output_api):
- return CommonChecks(input_api, output_api)
+def CheckSkiaGoldCommonUnittests(input_api, output_api):
+ """Runs the unittests for the build/skia_gold_common/ directory."""
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api,
+ output_api,
+ input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'],
+ env=_GetSkiaGoldEnv(input_api),
+ run_on_python2=False,
+ skip_shebang_check=True)
-def CheckChangeOnCommit(input_api, output_api):
- return CommonChecks(input_api, output_api)
+def CheckPylint(input_api, output_api):
+ """Runs pylint on all directory content and subdirectories."""
+ return input_api.canned_checks.RunPylint(input_api, output_api, version='2.7')
diff --git a/build/symlink.gni b/build/symlink.gni
index 96b917789..e71128643 100644
--- a/build/symlink.gni
+++ b/build/symlink.gni
@@ -22,6 +22,9 @@ template("symlink") {
rebase_path(invoker.source, get_path_info(invoker.output, "dir")),
rebase_path(invoker.output, root_build_dir),
]
+ if (defined(invoker.touch) && invoker.touch) {
+ args += [ "--touch=" + rebase_path(invoker.source, root_build_dir) ]
+ }
}
}
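As a usage sketch (the target name and paths are hypothetical, not taken from this patch), the new optional `touch` flag forwards `--touch=<source>` to symlink.py:

# Hypothetical usage; target name and paths are illustrative only.
symlink("stamp_link") {
  source = "$root_build_dir/gen/tool.stamp"
  output = "$root_build_dir/tool.stamp.link"

  # Passes --touch=<source> so symlink.py creates the (possibly missing)
  # source file, now including its parent directories, before linking.
  touch = true
}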
diff --git a/build/symlink.py b/build/symlink.py
index 38a610204..ad938072d 100755
--- a/build/symlink.py
+++ b/build/symlink.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -83,6 +83,7 @@ def Main(argv):
if options.touch:
+ os.makedirs(os.path.dirname(options.touch), exist_ok=True)
with open(options.touch, 'w'):
pass
diff --git a/build/toolchain/apple/filter_libtool.py b/build/toolchain/apple/filter_libtool.py
index 6b065270e..269093bbb 100644
--- a/build/toolchain/apple/filter_libtool.py
+++ b/build/toolchain/apple/filter_libtool.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import os
import re
diff --git a/build/toolchain/apple/get_tool_mtime.py b/build/toolchain/apple/get_tool_mtime.py
index 15e1aebe6..4ce19e1cc 100644
--- a/build/toolchain/apple/get_tool_mtime.py
+++ b/build/toolchain/apple/get_tool_mtime.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import os
import sys
diff --git a/build/toolchain/apple/linker_driver.py b/build/toolchain/apple/linker_driver.py
index 9827770b8..415a9fd21 100755
--- a/build/toolchain/apple/linker_driver.py
+++ b/build/toolchain/apple/linker_driver.py
@@ -11,6 +11,11 @@ import subprocess
import sys
import tempfile
+# Add the parent directory to sys.path so `whole_archive` can be imported.
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
+
+import whole_archive
+
# Prefix for all custom linker driver arguments.
LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
# Linker action to create a directory and pass it to the linker as
@@ -63,19 +68,8 @@ OBJECT_PATH_LTO = 'object_path_lto'
# -Wcrl,strippath,<strip_path>
# Sets the path to the strip to run with -Wcrl,strip, in which case
# `xcrun` is not used to invoke it.
-# TODO(crbug.com/1337780): Remove 'persist' when iOS no longer builds universal
-# binaries.
-# -Wcrl,object_path_lto,<persist>(optional)
-# Creates temporary directory for LTO object files. If `persist` is passed
-# the directory is named after the linker output and it's expected that a
-# later invocation will pass `-Wcrl,clean_objects,<output_name>` to clean it
-# up. Otherwise, the temp directory is deleted at the end of this run.
-# TODO(crbug.com/1337780): Remove this action when iOS no longer builds
-# universal binaries.
-# -Wcrl,clean_objects,<arguments,...>
-# Cleans up LTO object file directories for the targets in arguments.
-# For each argument, looks for and deletes a directory at
-# "{argument}.lto_objects"
+# -Wcrl,object_path_lto
+# Creates temporary directory for LTO object files.
class LinkerDriver(object):
@@ -101,7 +95,6 @@ class LinkerDriver(object):
('unstripped,', self.run_save_unstripped),
('strippath,', self.set_strip_path),
('strip,', self.run_strip),
- ('clean_objects', self.run_clean_objects),
]
# Linker driver actions can modify the these values.
@@ -113,10 +106,6 @@ class LinkerDriver(object):
self._linker_output = None
# The temporary directory for intermediate LTO object files. If it
# exists, it will clean itself up on script exit.
- self._object_path_lto_temp = None
- # The path for intermediate LTO object files. This is either the name
- # of `self._object_path_lto_temp` or a named directory that will be
- # cleaned up in a future invocation.
self._object_path_lto = None
def run(self):
@@ -140,11 +129,17 @@ class LinkerDriver(object):
if self._object_path_lto is not None:
compiler_driver_args.append('-Wl,-object_path_lto,{}'.format(
- self._object_path_lto))
+ self._object_path_lto.name))
if self._get_linker_output() is None:
raise ValueError(
'Could not find path to linker output (-o or --output)')
+ # We want to link rlibs as --whole-archive if they are part of a unit
+    # test target. This is determined by the switch
+ # `-LinkWrapper,add-whole-archive`.
+ compiler_driver_args = whole_archive.wrap_with_whole_archive(
+ compiler_driver_args)
+
linker_driver_outputs = [self._get_linker_output()]
try:
@@ -199,17 +194,8 @@ class LinkerDriver(object):
# TODO(lgrey): Remove if/when we start running `dsymutil`
# through the clang driver. See https://crbug.com/1324104
if sub_arg == OBJECT_PATH_LTO:
- self._object_path_lto_temp = tempfile.TemporaryDirectory(
+ self._object_path_lto = tempfile.TemporaryDirectory(
dir=os.getcwd())
- self._object_path_lto = self._object_path_lto_temp.name
- return (OBJECT_PATH_LTO, lambda: [])
- elif sub_arg.startswith(OBJECT_PATH_LTO):
- assert sub_arg[len(OBJECT_PATH_LTO):] == ',persist'
- output = self._get_linker_output()
- assert output
- self._object_path_lto = output + '.lto_objects'
- _remove_path(self._object_path_lto)
- os.mkdir(self._object_path_lto)
return (OBJECT_PATH_LTO, lambda: [])
for driver_action in self._actions:
@@ -354,25 +340,6 @@ class LinkerDriver(object):
self._strip_cmd = [strip_path]
return []
- def run_clean_objects(self, args_string):
- """Linker driver action for -Wcrl,clean_objects,<arguments>.
-
- For each argument, looks for a directory called "${argument}.lto_objects
- and deletes it.
-
- Args:
- arguments: string, Comma-separated prefixes of LTO object
- directories to clean up
-
- Returns:
- No output
- """
- for output in args_string.lstrip(',').split(','):
- name = output + '.lto_objects'
- assert os.path.isdir(name)
- shutil.rmtree(name)
- return []
-
def _find_tools_paths(full_args):
"""Finds all paths where the script should look for additional tools."""
diff --git a/build/toolchain/apple/toolchain.gni b/build/toolchain/apple/toolchain.gni
index 7f65f4921..d7f2bd5a2 100644
--- a/build/toolchain/apple/toolchain.gni
+++ b/build/toolchain/apple/toolchain.gni
@@ -85,8 +85,11 @@ tool_versions =
"trim scope")
# Shared toolchain definition. Invocations should set current_os to set the
-# build args in this definition.
-template("apple_toolchain") {
+# build args in this definition. This is titled "single_apple_toolchain"
+# because it makes exactly one toolchain. Callers will normally want to
+# invoke "apple_toolchain" instead, which may make an additional toolchain
+# without sanitizers.
+template("single_apple_toolchain") {
toolchain(target_name) {
# When invoking this toolchain not as the default one, these args will be
# passed to the build. They are ignored when this is the default toolchain.
@@ -102,8 +105,9 @@ template("apple_toolchain") {
# set on those toolchains.
host_toolchain = host_toolchain
- # Respect the global setting for whether rustc can make binaries.
- rustc_can_link = rustc_can_link
+ # Similarly for the host toolchain which can be used to make .dylibs
+ # that will successfully load into prebuilt tools.
+ host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
}
# When the invoker has explicitly overridden use_goma or cc_wrapper in the
@@ -219,18 +223,8 @@ template("apple_toolchain") {
_strippath = invoker.bin_path + "strip"
_installnametoolpath = invoker.bin_path + "install_name_tool"
linker_driver += " -Wcrl,strippath,${_strippath} -Wcrl,installnametoolpath,${_installnametoolpath}"
-
- # On iOS, the final applications are assembled using lipo (to support fat
- # builds). The correct flags are passed to the linker_driver.py script
- # directly during the lipo call. The test is against the target_os because
- # there is no need to create .dSYMs for targets compiled for the host.
- if (defined(invoker.strip_with_lipo) && invoker.strip_with_lipo) {
- _enable_dsyms = false
- _save_unstripped_output = false
- } else {
- _enable_dsyms = enable_dsyms
- _save_unstripped_output = save_unstripped_output
- }
+ _enable_dsyms = enable_dsyms
+ _save_unstripped_output = save_unstripped_output
# Make these apply to all tools below.
lib_switch = "-l"
@@ -272,67 +266,132 @@ template("apple_toolchain") {
rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir)
rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py")
- # Ideally, we'd add -Clink-args=\"{{ldflags}}\" to each of the Rust
- # tools below which may link (i.e. rust_bin, rust_cdylib, rust_macro).
- # However, it seems -fuse-ld=lld causes difficulties.
-
tool("rust_staticlib") {
- rust_outfile = "{{output_dir}}/{{target_output_name}}.a"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
+ libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$libname.rsp"
+ depfile = "$libname.d"
+
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(STATICLIB) {{output}}"
+ outputs = [ libname ]
+
+ # TODO(danakj): When `!toolchain_uses_lld` do we need to specify a path
+ # to libtool like the "alink" rule?
+
rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"$_cxx\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"$_cxx\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
}
tool("rust_rlib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.rlib"
- depfile = "{{output}}.d"
+ # We must always prefix with `lib` even if the library already starts
+ # with that prefix or else our stdlib is unable to find libc.rlib (or
+ # actually liblibc.rlib).
+ rlibname =
+ "{{output_dir}}/lib{{target_output_name}}{{output_extension}}"
+ depfile = "$rlibname.d"
# Do not use rsp files in this (common) case because they occupy the
# ninja main thread, and {{rlibs}} have shorter command lines than
# fully linked targets.
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"$_cxx\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+
+ default_output_extension = ".rlib"
+
+ # This is prefixed unconditionally in `rlibname`.
+ # output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST {{output}}"
+ outputs = [ rlibname ]
+
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"$_cxx\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
}
- if (toolchain_args.rustc_can_link) {
- tool("rust_bin") {
- rust_outfile = "{{root_out_dir}}/{{target_output_name}}"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ tool("rust_bin") {
+ exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$exename.rsp"
+ depfile = "$exename.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
- tool("rust_cdylib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.dylib"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ # TODO(danakj): solink can generate TOC files for re-exporting library
+ # symbols, and we should do the same here.
- tool("rust_macro") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.dylib"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(BIN) {{output}}"
+ outputs = [ exename ]
+
+ # TODO(danakj): Support dsym_switch like C++ targets.
+ # link_command += dsym_switch
+ # if (_enable_dsyms) {
+ # outputs += dsym_output
+ # }
+ # if (_save_unstripped_output) {
+ # outputs += [ _unstripped_output ]
+ # }
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+ }
+
+ tool("rust_cdylib") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$dllname.rsp"
+ depfile = "$dllname.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # TODO(danakj): solink can generate TOC files for re-exporting library
+ # symbols, and we should do the same here.
+
+ default_output_extension = ".dylib"
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(CDYLIB) {{output}}"
+ outputs = [ dllname ]
+
+ # TODO(danakj): Support dsym_switch like C++ targets.
+ # link_command += dsym_switch
+ # if (_enable_dsyms) {
+ # outputs += dsym_output
+ # }
+ # if (_save_unstripped_output) {
+ # outputs += [ _unstripped_output ]
+ # }
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+ }
+
+ tool("rust_macro") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$dllname.rsp"
+ depfile = "$dllname.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # TODO(danakj): solink can generate TOC files for re-exporting library
+ # symbols, and we should do the same here.
+
+ default_output_extension = ".dylib"
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(MACRO) {{output}}"
+ outputs = [ dllname ]
+
+ # TODO(danakj): Support dsym_switch like C++ targets.
+ # link_command += dsym_switch
+ # if (_enable_dsyms) {
+ # outputs += dsym_output
+ # }
+ # if (_save_unstripped_output) {
+ # outputs += [ _unstripped_output ]
+ # }
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
}
}
@@ -696,7 +755,6 @@ template("apple_toolchain") {
"-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " +
"-header-path {{target_gen_dir}}/{{target_output_name}}.h " +
"-depfile {{target_out_dir}}/{{module_name}}.d " +
- "-depfile-filter {{target_gen_dir}}/{{module_name}}.swiftmodule " +
"-bridge-header {{bridge_header}} $_extra_flags " +
"{{swiftflags}} {{include_dirs}} {{module_dirs}} {{inputs}}"
}
@@ -733,3 +791,41 @@ template("apple_toolchain") {
}
}
}
+
+# Makes a single Apple toolchain, or possibly two if we need a
+# sanitizer-free equivalent.
+template("apple_toolchain") {
+ single_apple_toolchain(target_name) {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "visibility",
+ "test_only",
+ ])
+
+  # No need to forward visibility and test_only as they apply to targets, not
+  # toolchains, but presubmit checks require that we explicitly exclude them.
+ }
+
+ if (using_sanitizer) {
+ # Make an additional toolchain with no sanitizers.
+ single_apple_toolchain("${target_name}_no_sanitizers") {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "toolchain_args",
+ "visibility",
+ "test_only",
+ ])
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker.toolchain_args, "*")
+ toolchain_disables_sanitizers = true
+ }
+ }
+ }
+}
diff --git a/build/toolchain/clang_code_coverage_wrapper.py b/build/toolchain/clang_code_coverage_wrapper.py
index de92ca1ae..5c9090114 100755
--- a/build/toolchain/clang_code_coverage_wrapper.py
+++ b/build/toolchain/clang_code_coverage_wrapper.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -46,7 +46,6 @@ Example usage:
--files-to-instrument=coverage_instrumentation_input.txt
"""
-from __future__ import print_function
import argparse
import os
diff --git a/build/toolchain/gcc_link_wrapper.py b/build/toolchain/gcc_link_wrapper.py
index 60176daa0..5c08a7e4a 100755
--- a/build/toolchain/gcc_link_wrapper.py
+++ b/build/toolchain/gcc_link_wrapper.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/toolchain/gcc_solink_wrapper.py b/build/toolchain/gcc_solink_wrapper.py
index 193f4fe19..03ef04261 100755
--- a/build/toolchain/gcc_solink_wrapper.py
+++ b/build/toolchain/gcc_solink_wrapper.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
index 7abfd87f1..3ab89c97d 100644
--- a/build/toolchain/gcc_toolchain.gni
+++ b/build/toolchain/gcc_toolchain.gni
@@ -107,7 +107,11 @@ if (enable_resource_allowlist_generation) {
# Location of the strip executable. When specified, strip will be run on
# all shared libraries and executables as they are built. The pre-stripped
# artifacts will be put in lib.unstripped/ and exe.unstripped/.
-template("gcc_toolchain") {
+#
+# Callers will normally want to invoke "gcc_toolchain" instead, which makes
+# a toolchain just like this but may additionally create an extra toolchain
+# without sanitizers for host-side tools.
+template("single_gcc_toolchain") {
toolchain(target_name) {
assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
@@ -145,14 +149,13 @@ template("gcc_toolchain") {
# set on those toolchains.
host_toolchain = host_toolchain
+ # The same applies to the toolchain we use to build Rust procedural
+ # macros, which is probably the same but might have sanitizers disabled.
+ host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
+
if (!defined(invoker_toolchain_args.v8_current_cpu)) {
v8_current_cpu = invoker_toolchain_args.current_cpu
}
-
- # Whether rustc can successfully link binaries. (If not, rust code
- # can still be linked by our C++ toolchain). See
- # //build/config/rust.gni
- rustc_can_link = current_os != "android"
}
# When the invoker has explicitly overridden use_remoteexec, use_goma or
@@ -237,7 +240,7 @@ template("gcc_toolchain") {
# number of jobs, but this results in too low a value on
# Chrome OS builders. So we pass in an explicit value.
link_prefix =
- "$python_path " +
+ "\"$python_path\" " +
rebase_path("//tools/clang/scripts/remote_ld.py", root_build_dir) +
" --wrapper ${goma_path} --jobs 200 -- "
} else {
@@ -288,7 +291,8 @@ template("gcc_toolchain") {
rebase_path(toolchain_coverage_instrumentation_input_file,
root_build_dir)
}
- compiler_prefix = "$python_path ${_coverage_wrapper} " + compiler_prefix
+ compiler_prefix =
+ "\"$python_path\" ${_coverage_wrapper} " + compiler_prefix
}
cc = compiler_prefix + invoker.cc
@@ -420,7 +424,7 @@ template("gcc_toolchain") {
if (host_os == "win") {
tool_wrapper_path =
rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
- command = "cmd /c $python_path $tool_wrapper_path delete-file {{output}} && $command"
+ command = "cmd /s /c \"\"$python_path\" $tool_wrapper_path delete-file {{output}} && $command\""
} else {
command = "rm -f {{output}} && $command"
}
@@ -461,7 +465,7 @@ template("gcc_toolchain") {
# -soname flag is not available on aix ld
soname_flag = "-Wl,-soname=\"$soname\""
}
- link_command = "$ld -shared $soname_flag {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\""
+ link_command = "$ld -shared $soname_flag {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\" {{rlibs}}"
# Generate a map file to be used for binary size analysis.
# Map file adds ~10% to the link time on a z620.
@@ -491,16 +495,16 @@ template("gcc_toolchain") {
# (skipped on Aix)
solink_extra_flags = "--partitioned-library"
}
- command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command $solink_extra_flags"
+ command = "\"$python_path\" \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command $solink_extra_flags"
if (target_cpu == "mipsel" && is_component_build && is_android) {
- rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group -Wl,--whole-archive {{rlibs}} -Wl,--no-whole-archive"
+ rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group"
} else if (current_os == "aix") {
# --whole-archive, --no-whole-archive flags are not available on the aix
# ld.
- rspfile_content = "{{inputs}} {{solibs}} {{libs}} {{rlibs}}"
+ rspfile_content = "{{inputs}} {{solibs}} {{libs}}"
} else {
- rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--whole-archive {{rlibs}} -Wl,--no-whole-archive"
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
}
description = "SOLINK $sofile"
@@ -567,7 +571,8 @@ template("gcc_toolchain") {
whole_archive_flag = ""
no_whole_archive_flag = ""
if (current_os != "aix") {
- # -soname, --whole-archive, --no-whole-archive flags are not available on aix ld
+ # -soname, --whole-archive, --no-whole-archive flags are not available
+ # on aix ld
soname_flag = "-Wl,-soname=\"$soname\""
whole_archive_flag = "-Wl,--whole-archive"
no_whole_archive_flag = "-Wl,--no-whole-archive"
@@ -578,7 +583,7 @@ template("gcc_toolchain") {
strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
command += " && " + strip_command
}
- rspfile_content = "$whole_archive_flag {{inputs}} {{solibs}} $no_whole_archive_flag {{libs}} $whole_archive_flag {{rlibs}} $no_whole_archive_flag"
+ rspfile_content = "$whole_archive_flag {{inputs}} {{solibs}} $no_whole_archive_flag {{libs}} {{rlibs}}"
description = "SOLINK_MODULE $sofile"
@@ -627,11 +632,12 @@ template("gcc_toolchain") {
start_group_flag = ""
end_group_flag = ""
if (current_os != "aix") {
- # the "--start-group .. --end-group" feature isn't available on the aix ld.
+ # the "--start-group .. --end-group" feature isn't available on the aix
+ # ld.
start_group_flag = "-Wl,--start-group"
end_group_flag = "-Wl,--end-group "
}
- link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag {{libs}}"
+ link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag {{libs}} {{rlibs}}"
# Generate a map file to be used for binary size analysis.
# Map file adds ~10% to the link time on a z620.
@@ -649,30 +655,11 @@ template("gcc_toolchain") {
link_wrapper =
rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
- command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command"
+ command = "\"$python_path\" \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command"
description = "LINK $outfile"
- whole_archive_flag = ""
- no_whole_archive_flag = ""
- if (current_os != "aix") {
- # --whole-archive, --no-whole-archive flags are not available on aix ld
- whole_archive_flag = "-Wl,--whole-archive"
- no_whole_archive_flag = "-Wl,--no-whole-archive"
- }
-
- # TODO(crbug.com/1296284): The {{rlibs}} contains the object files from
- # the build target's source files. They should be treated as peers of the
- # C++ object files in {{inputs}}, however an rlib is linked like a static
- # library, which means object files can be dropped if the linker see them
- # as being unused. This then drops test files, for example.
- #
- # However the rust stdlib is also present in the {{rlibs}} and should not
- # be equally treated as the target's .o files. The linked bug should
- # address this by moving the std library to be linked as a library and not
- # as a set of rlibs.
- rspfile_content =
- "{{inputs}} $whole_archive_flag {{rlibs}} $no_whole_archive_flag "
+ rspfile_content = "{{inputs}}"
outputs = [ outfile ]
if (outfile != unstripped_outfile) {
outputs += [ unstripped_outfile ]
@@ -727,62 +714,105 @@ template("gcc_toolchain") {
# RSP manipulation due to https://bugs.chromium.org/p/gn/issues/detail?id=249
tool("rust_staticlib") {
- rust_outfile = "{{output_dir}}/{{target_output_name}}.a"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
+ libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$libname.rsp"
+ depfile = "$libname.d"
+
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(STATICLIB) {{output}}"
+ outputs = [ libname ]
+
rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
}
tool("rust_rlib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.rlib"
- depfile = "{{output}}.d"
+ # We must always prefix with `lib` even if the library already starts
+ # with that prefix or else our stdlib is unable to find libc.rlib (or
+ # actually liblibc.rlib).
+ rlibname =
+ "{{output_dir}}/lib{{target_output_name}}{{output_extension}}"
+ depfile = "$rlibname.d"
# Do not use rsp files in this (common) case because they occupy the
# ninja main thread, and {{rlibs}} have shorter command lines than
# fully linked targets.
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+
+ default_output_extension = ".rlib"
+
+ # This is prefixed unconditionally in `rlibname`.
+ # output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST {{output}}"
+ outputs = [ rlibname ]
+
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
-
- if (toolchain_args.rustc_can_link) {
- tool("rust_bin") {
- rust_outfile = "{{root_out_dir}}/{{target_output_name}}"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ }
- tool("rust_cdylib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.so"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ tool("rust_bin") {
+ exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ depfile = "$exename.d"
+ rspfile = "$exename.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
- tool("rust_macro") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.so"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ default_output_extension = default_executable_extension
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(BIN) {{output}}"
+ outputs = [ exename ]
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+ }
+
+ # Rustc attempts to link `libgcc` unconditionally on Android, but it's a
+ # mistake to do so. We get our unwinding from `libunwind`, which we link
+ # in explicitly ourselves, and we have no `libgcc` present. To avoid a
+ # missing library, we point rustc to an empty `libgcc.a` file. This allows
+ # rustc to link .so files targeting android: http://crbug.com/1278030
+ dummy_libgcc = ""
+ if (is_android) {
+ dummy_libgcc_path =
+ rebase_path("//build/android/dummy_libgcc", root_build_dir)
+ dummy_libgcc = "-L$dummy_libgcc_path"
+ }
+
+ tool("rust_cdylib") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ depfile = "$dllname.d"
+ rspfile = "$dllname.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ default_output_extension = default_shlib_extension
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+ description = "RUST(CDYLIB) {{output}}"
+ outputs = [ dllname ]
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} ${dummy_libgcc} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+ }
+
+ tool("rust_macro") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ depfile = "$dllname.d"
+ rspfile = "$dllname.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ default_output_extension = default_shlib_extension
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+ description = "RUST(MACRO) {{output}}"
+ outputs = [ dllname ]
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
}
}
@@ -794,6 +824,44 @@ template("gcc_toolchain") {
}
}
+# Makes a single GCC toolchain, or possibly two if we need
+# an equivalent toolchain without sanitizers.
+template("gcc_toolchain") {
+ single_gcc_toolchain(target_name) {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "visibility",
+ "test_only",
+ ])
+
+  # No need to forward visibility and test_only as they apply to targets, not
+  # toolchains, but presubmit checks require that we explicitly exclude them.
+ }
+
+ if (using_sanitizer) {
+ # Make an additional toolchain with no sanitizers.
+ single_gcc_toolchain("${target_name}_no_sanitizers") {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "toolchain_args",
+ "visibility",
+ "test_only",
+ ])
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker.toolchain_args, "*")
+ toolchain_disables_sanitizers = true
+ }
+ }
+ }
+}
+
# This is a shorthand for gcc_toolchain instances based on the Chromium-built
# version of Clang. Only the toolchain_cpu and toolchain_os variables need to
# be specified by the invoker, and optionally toolprefix if it's a
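To make the wrapper's effect concrete, here is a hedged, abridged sketch; the tool values and cpu/os args are illustrative, and a real invocation supplies the full set of required parameters.

# Abridged, illustrative invocation; most required values omitted.
gcc_toolchain("clang_x64") {
  cc = "clang"
  cxx = "clang++"
  ar = "llvm-ar"
  ld = cxx
  toolchain_args = {
    current_cpu = "x64"
    current_os = "linux"
  }
}
# With sanitizers enabled (using_sanitizer = true), this single call now
# defines both ":clang_x64" and ":clang_x64_no_sanitizers"; the latter
# forces toolchain_disables_sanitizers = true in its toolchain_args so
# host-side shared objects can be loaded by prebuilt, unsanitized tools.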
diff --git a/build/toolchain/get_cpu_count.py b/build/toolchain/get_cpu_count.py
index c68cd25c8..f7cf9511d 100644
--- a/build/toolchain/get_cpu_count.py
+++ b/build/toolchain/get_cpu_count.py
@@ -4,7 +4,6 @@
# This script shows cpu count to specify capacity of action pool.
-from __future__ import print_function
import multiprocessing
import sys
diff --git a/build/toolchain/ios/BUILD.gn b/build/toolchain/ios/BUILD.gn
index cfcbdc660..5623a84f5 100644
--- a/build/toolchain/ios/BUILD.gn
+++ b/build/toolchain/ios/BUILD.gn
@@ -18,7 +18,6 @@ template("ios_toolchain") {
deployment_target = ios_deployment_target
target_environment = target_environment
bin_path = ios_bin_path
- strip_with_lipo = true
toolchain_args = {
forward_variables_from(invoker.toolchain_args, "*")
@@ -41,14 +40,6 @@ ios_toolchain("ios_clang_arm64_13_4") {
}
}
-ios_toolchain("ios_clang_arm64_fat_arm") {
- toolchain_args = {
- current_cpu = "arm"
- is_fat_secondary_toolchain = true
- primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_arm64"
- }
-}
-
ios_toolchain("ios_clang_arm") {
toolchain_args = {
current_cpu = "arm"
@@ -61,14 +52,6 @@ ios_toolchain("ios_clang_x64") {
}
}
-ios_toolchain("ios_clang_x64_fat_arm64") {
- toolchain_args = {
- current_cpu = "arm64"
- is_fat_secondary_toolchain = true
- primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_x64"
- }
-}
-
ios_toolchain("ios_clang_x64_13_4") {
toolchain_args = {
current_cpu = "x64"
@@ -76,23 +59,6 @@ ios_toolchain("ios_clang_x64_13_4") {
}
}
-ios_toolchain("ios_clang_x64_13_4_fat_arm64") {
- toolchain_args = {
- current_cpu = "arm64"
- is_fat_secondary_toolchain = true
- primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_x64_13_4"
- ios_deployment_target = "13.4"
- }
-}
-
-ios_toolchain("ios_clang_x64_fat_x86") {
- toolchain_args = {
- current_cpu = "x86"
- is_fat_secondary_toolchain = true
- primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_x64"
- }
-}
-
ios_toolchain("ios_clang_x86") {
toolchain_args = {
current_cpu = "x86"
diff --git a/build/toolchain/ios/swiftc.py b/build/toolchain/ios/swiftc.py
index 8c7779f8e..e77e0b1ff 100644
--- a/build/toolchain/ios/swiftc.py
+++ b/build/toolchain/ios/swiftc.py
@@ -242,12 +242,8 @@ def compile_module(module, sources, settings, extras, tmpdir):
path = os.path.relpath(path, out_dir)
depfile_content[key].add(path)
- if not settings.depfile_filter:
- keys = depfile_content.keys()
- else:
- keys = (key for key in settings.depfile_filter if key in depfile_content)
-
with open(settings.depfile, 'w') as depfile:
+ keys = sorted(depfile_content.keys())
for key in sorted(keys):
depfile.write('%s : %s\n' % (key, ' '.join(sorted(depfile_content[key]))))
@@ -279,9 +275,6 @@ def main(args):
parser.add_argument('-depfile', help='path to the generated depfile')
parser.add_argument('-swift-version',
help='version of Swift language to support')
- parser.add_argument('-depfile-filter',
- action='append',
- help='limit depfile to those files')
parser.add_argument('-target',
action='store',
help='generate code for the given target <triple>')
diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn
index f559f5272..3d6bb5659 100644
--- a/build/toolchain/linux/BUILD.gn
+++ b/build/toolchain/linux/BUILD.gn
@@ -181,6 +181,7 @@ clang_v8_toolchain("clang_x64_v8_loong64") {
# In a LaCrOS build, this toolchain is intended to be used as an alternate
# toolchain to build Ash-Chrome in a subdirectory.
+# This is a test-only toolchain.
clang_toolchain("ash_clang_x64") {
toolchain_args = {
# This turns the toolchain into the "Linux ChromeOS" build
@@ -192,11 +193,17 @@ clang_toolchain("ash_clang_x64") {
also_build_ash_chrome = false
chromeos_is_browser_only = false
use_clang_coverage = false
+
+      # Never build ash with asan. It is too slow for builders and causes
+      # tests to be flaky.
+ is_asan = false
+ is_lsan = false
}
}
# In an ash build, this toolchain is intended to be used as an alternate
# toolchain to build lacros-Chrome in a subdirectory.
+# This is a test-only toolchain.
clang_toolchain("lacros_clang_x64") {
toolchain_args = {
# This turns the toolchain into the "Lacros" build
@@ -209,7 +216,6 @@ clang_toolchain("lacros_clang_x64") {
chromeos_is_browser_only = true
use_clang_coverage = false
dcheck_always_on = false
- symbol_level = 1
}
}
diff --git a/build/toolchain/nacl_toolchain.gni b/build/toolchain/nacl_toolchain.gni
index 1a5bf7f4a..bebdbd80c 100644
--- a/build/toolchain/nacl_toolchain.gni
+++ b/build/toolchain/nacl_toolchain.gni
@@ -64,6 +64,10 @@ template("nacl_toolchain") {
rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux.cfg"
}
}
+
+ if (use_remoteexec_links) {
+ rbe_link_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux_link.cfg"
+ }
}
}
}
diff --git a/build/toolchain/rbe.gni b/build/toolchain/rbe.gni
index 2519f22b1..cd86e22c0 100644
--- a/build/toolchain/rbe.gni
+++ b/build/toolchain/rbe.gni
@@ -3,13 +3,13 @@
# The directory where the re-client tooling binaries are.
rbe_bin_dir = rebase_path("//buildtools/reclient", root_build_dir)
-# Execution root - this should be the root of the source tree.
-# This is defined here instead of in the config file because
-# this will vary depending on where the user has placed the
-# chromium source on their system.
-rbe_exec_root = rebase_path("//")
-
declare_args() {
+ # Execution root - this should be the root of the source tree.
+ # This is defined here instead of in the config file because
+ # this will vary depending on where the user has placed the
+ # chromium source on their system.
+ rbe_exec_root = rebase_path("//")
+
# Set to true to enable remote compilation using reclient.
use_remoteexec = false
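Since rbe_exec_root now lives in declare_args(), it can be overridden per checkout from args.gn; a hypothetical fragment (the path is illustrative):

# Hypothetical args.gn fragment; the checkout path is illustrative.
use_remoteexec = true
rbe_exec_root = "/work/chromium/src"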
diff --git a/build/toolchain/toolchain.gni b/build/toolchain/toolchain.gni
index 844bc8e54..d32d7d0e9 100644
--- a/build/toolchain/toolchain.gni
+++ b/build/toolchain/toolchain.gni
@@ -19,6 +19,11 @@ declare_args() {
# Used for binary size analysis.
generate_linker_map = is_android && is_official_build
+
+ # Whether this toolchain should avoid building any sanitizer support
+ # because it's a host toolchain where we aim to make shared objects that may
+ # be loaded by prebuilt binaries without sanitizer support.
+ toolchain_disables_sanitizers = false
}
if (generate_linker_map) {
@@ -33,7 +38,7 @@ if (generate_linker_map) {
}
declare_args() {
- clang_version = "16"
+ clang_version = "17"
}
# Extension for shared library files (including leading dot).
@@ -79,12 +84,16 @@ if (host_os == "win") {
stamp_command = "cmd /c type nul > \"{{output}}\""
copy_command =
- "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+ "\"$python_path\" $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
} else {
stamp_command = "touch {{output}}"
copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
}
+if (!defined(toolchain_disables_sanitizers)) {
+ toolchain_disables_sanitizers = false
+}
+
# This variable is true if the current toolchain is one of the target
# toolchains, i.e. a toolchain which is being used to build the main Chrome
# binary. This generally means "not the host toolchain", but in the case where
diff --git a/build/toolchain/whole_archive.py b/build/toolchain/whole_archive.py
new file mode 100644
index 000000000..6b5fe6065
--- /dev/null
+++ b/build/toolchain/whole_archive.py
@@ -0,0 +1,51 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+
+def wrap_with_whole_archive(command):
+ """Modify and return `command` such that -LinkWrapper,add-whole-archive=X
+ becomes a linking inclusion X (-lX) but wrapped in whole-archive
+ modifiers."""
+
+ # We want to link rlibs as --whole-archive if they are part of a unit test
+  # target. This is determined by the switch `-LinkWrapper,add-whole-archive`.
+ #
+ # TODO(danakj): If the linking command line gets too large we could move
+ # {{rlibs}} into the rsp file, but then this script needs to modify the rsp
+ # file instead of the command line.
+ def extract_libname(s):
+ m = re.match(r'-LinkWrapper,add-whole-archive=(.+)', s)
+ return m.group(1) + ".rlib"
+
+  # The set of libraries we want to apply `--whole-archive` to.
+ whole_archive_libs = [
+ extract_libname(x) for x in command
+ if x.startswith("-LinkWrapper,add-whole-archive=")
+ ]
+
+ # Remove the arguments meant for consumption by this LinkWrapper script.
+ command = [x for x in command if not x.startswith("-LinkWrapper,")]
+
+ def has_any_suffix(string, suffixes):
+ for suffix in suffixes:
+ if string.endswith(suffix):
+ return True
+ return False
+
+ def wrap_libs_with(command, libnames, before, after):
+ out = []
+ for arg in command:
+      # The arg is a full path to a library; we check whether the library
+      # name (a suffix of the full arg) is one of `libnames`.
+ if has_any_suffix(arg, libnames):
+ out.extend([before, arg, after])
+ else:
+ out.append(arg)
+ return out
+
+ # Apply --whole-archive to the libraries that desire it.
+ return wrap_libs_with(command, whole_archive_libs, "-Wl,--whole-archive",
+ "-Wl,--no-whole-archive")
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
index 612938527..9ef257a25 100644
--- a/build/toolchain/win/BUILD.gn
+++ b/build/toolchain/win/BUILD.gn
@@ -4,6 +4,7 @@
import("//build/config/win/visual_studio_version.gni")
import("//build/toolchain/win/toolchain.gni")
+import("//build/toolchain/win/win_toolchain_data.gni")
assert(is_win, "Should only be running on Windows")
@@ -63,19 +64,15 @@ win_toolchains("nacl_win64") {
# WinUWP toolchains. Only define these when targeting them.
if (target_os == "winuwp") {
- assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
- target_cpu == "arm64")
- store_cpu_toolchain_data =
- exec_script("//build/toolchain/win/setup_toolchain.py",
- [
- visual_studio_path,
- windows_sdk_path,
- visual_studio_runtime_dirs,
- target_os,
- target_cpu,
- "environment.store_" + target_cpu,
- ],
- "scope")
+ if (target_cpu == "x86") {
+ store_cpu_toolchain_data = win_toolchain_data_x86
+ } else if (target_cpu == "x64") {
+ store_cpu_toolchain_data = win_toolchain_data_x64
+ } else if (target_cpu == "arm64") {
+ store_cpu_toolchain_data = win_toolchain_data_arm64
+ } else {
+ error("Unsupported target_cpu, add it to win_toolchain_data.gni")
+ }
msvc_toolchain("uwp_" + target_cpu) {
environment = "environment.store_" + target_cpu
diff --git a/build/toolchain/win/midl.py b/build/toolchain/win/midl.py
index 1921402c3..9c0d0a4cc 100644
--- a/build/toolchain/win/midl.py
+++ b/build/toolchain/win/midl.py
@@ -3,7 +3,6 @@
# found in the LICENSE file.
from __future__ import division
-from __future__ import print_function
import array
import difflib
diff --git a/build/toolchain/win/ml.py b/build/toolchain/win/ml.py
index 733c10c4d..8cc2c9e1e 100755
--- a/build/toolchain/win/ml.py
+++ b/build/toolchain/win/ml.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/toolchain/win/rc/rc.py b/build/toolchain/win/rc/rc.py
index 2e5ec6b26..a650506a1 100755
--- a/build/toolchain/win/rc/rc.py
+++ b/build/toolchain/win/rc/rc.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -16,7 +16,6 @@ options:
/nologo Ignored (rc.py doesn't print a logo by default).
/showIncludes Print referenced header and resource files."""
-from __future__ import print_function
from collections import namedtuple
import codecs
import os
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
index 112690a7f..d2f5798ce 100644
--- a/build/toolchain/win/setup_toolchain.py
+++ b/build/toolchain/win/setup_toolchain.py
@@ -10,7 +10,6 @@
# win tool. The script assumes that the root build directory is the current dir
# and the files will be written to the current directory.
-from __future__ import print_function
import errno
import json
@@ -23,6 +22,8 @@ sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
import gn_helpers
SCRIPT_DIR = os.path.dirname(__file__)
+SDK_VERSION = '10.0.22621.0'
+
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
@@ -184,7 +185,7 @@ def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store):
# Explicitly specifying the SDK version to build with to avoid accidentally
# building with a new and untested SDK. This should stay in sync with the
# packaged toolchain in build/vs_toolchain.py.
- args.append('10.0.20348.0')
+ args.append(SDK_VERSION)
variables = _LoadEnvFromBat(args)
return _ExtractImportantEnvironment(variables)
diff --git a/build/toolchain/win/tool_wrapper.py b/build/toolchain/win/tool_wrapper.py
index c562390a0..47bbfe2a0 100644
--- a/build/toolchain/win/tool_wrapper.py
+++ b/build/toolchain/win/tool_wrapper.py
@@ -8,7 +8,6 @@ This file is copied to the build directory as part of toolchain setup and
is used to set up calls to tools used by the build that need wrappers.
"""
-from __future__ import print_function
import os
import re
diff --git a/build/toolchain/win/toolchain.gni b/build/toolchain/win/toolchain.gni
index 275cfcf9c..968a4a20e 100644
--- a/build/toolchain/win/toolchain.gni
+++ b/build/toolchain/win/toolchain.gni
@@ -11,6 +11,7 @@ import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/rbe.gni")
import("//build/toolchain/toolchain.gni")
+import("//build/toolchain/win/win_toolchain_data.gni")
assert(is_win, "Should only be running on Windows")
@@ -26,13 +27,17 @@ if (host_os == "win") {
_clang_bin_path = rebase_path("$clang_base_path/bin", root_build_dir)
+# Makes a single MSVC toolchain. Callers should normally invoke
+# "msvc_toolchain" instead, which might make an additional toolchain
+# available without sanitizers if required.
+#
# Parameters:
# environment: File name of environment file.
#
# You would also define a toolchain_args variable with at least these set:
# current_cpu: current_cpu to pass as a build arg
# current_os: current_os to pass as a build arg
-template("msvc_toolchain") {
+template("single_msvc_toolchain") {
toolchain(target_name) {
# When invoking this toolchain not as the default one, these args will be
# passed to the build. They are ignored when this is the default toolchain.
@@ -43,8 +48,8 @@ template("msvc_toolchain") {
# This value needs to be passed through unchanged.
host_toolchain = host_toolchain
- # Respect the global setting for whether rustc can make binaries.
- rustc_can_link = rustc_can_link
+ # This value needs to be passed through unchanged.
+ host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
}
if (defined(toolchain_args.is_clang)) {
@@ -149,7 +154,7 @@ template("msvc_toolchain") {
# (it needs %LIB% set to find libraries), and to work around its bugs.
# Note trailing space:
linker_wrapper =
- "$python_path $_tool_wrapper_path link-wrapper $env False "
+ "\"$python_path\" $_tool_wrapper_path link-wrapper $env False "
sys_lib_flags = ""
}
@@ -180,7 +185,7 @@ template("msvc_toolchain") {
rebase_path(toolchain_coverage_instrumentation_input_file,
root_build_dir)
}
- coverage_wrapper = "$python_path " + coverage_wrapper + " "
+ coverage_wrapper = "\"$python_path\" " + coverage_wrapper + " "
} else {
coverage_wrapper = ""
}
@@ -230,7 +235,7 @@ template("msvc_toolchain") {
}
tool("rc") {
- command = "$python_path $_tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+ command = "\"$python_path\" $_tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
depsformat = "msvc"
outputs = [ "$object_subdir/{{source_name_part}}.res" ]
description = "RC {{output}}"
@@ -284,17 +289,17 @@ template("msvc_toolchain") {
# It's lld only because the script zaps obj Timestamp which
# link.exe /incremental looks at.
ml_py = rebase_path("//build/toolchain/win/ml.py", root_build_dir)
- ml = "$python_path $ml_py $ml"
+ ml = "\"$python_path\" $ml_py $ml"
}
}
if (toolchain_args.current_cpu != "arm64" || toolchain_is_clang) {
# TODO(thakis): Stop using asm-wrapper when using clang.
- command = "$python_path $_tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
+ command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
} else {
# armasm64.exe does not support definitions passed via the command
# line. (Fortunately, they're not needed for compiling the V8
# snapshot, which is the only time this assembler is required.)
- command = "$python_path $_tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}"
+ command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}"
}
description = "ASM {{output}}"
@@ -303,72 +308,140 @@ template("msvc_toolchain") {
if (toolchain_has_rust) {
rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py")
-
- # TODO(https://crbug.com/1271215)
- # Other toolchains (Linux, Mac) pass {{ldflags}} to rustc.
- # This is necessary for some types of build such as
- # LTO and sanitizers. However, such ldflags are not yet fully
- # compatible with the arguments passed from rustc to the linker,
- # so we'll do this at a later stage.
rustc = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir)
rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir)
+ rustc_windows_args = " -Clinker=$link $rustc_common_args"
+
tool("rust_staticlib") {
- rust_outfile = "{{output_dir}}/{{target_output_name}}.lib"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
+ libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$libname.rsp"
+ depfile = "$libname.d"
+
+ default_output_extension = ".lib"
+ output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(STATICLIB) {{output}}"
+ outputs = [ libname ]
+
rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
}
tool("rust_rlib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.rlib"
- depfile = "{{output}}.d"
+ # We must always prefix with `lib` even if the library already starts
+ # with that prefix or else our stdlib is unable to find libc.rlib (or
+ # actually liblibc.rlib).
+ rlibname =
+ "{{output_dir}}/lib{{target_output_name}}{{output_extension}}"
+ depfile = "$rlibname.d"
# Do not use rsp files in this (common) case because they occupy the
# ninja main thread, and {{rlibs}} have shorter command lines than
# fully linked targets.
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile {{rustdeps}} {{externs}} LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
+
+ default_output_extension = ".rlib"
+
+ # This is prefixed unconditionally in `rlibname`.
+ # output_prefix = "lib"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST {{output}}"
+ outputs = [ rlibname ]
+
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $rlibname {{rustdeps}} {{externs}} LDFLAGS RUSTENV {{rustenv}}"
rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
}
- if (toolchain_args.rustc_can_link) {
- tool("rust_bin") {
- rust_outfile = "{{root_out_dir}}/{{target_output_name}}.exe"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ tool("rust_bin") {
+ exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ pdbname = "$exename.pdb"
+ rspfile = "$exename.rsp"
+ depfile = "$exename.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ default_output_extension = ".exe"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(BIN) {{output}}"
+ outputs = [
+ # The first entry here is used for dependency tracking.
+ exename,
+ pdbname,
+ ]
+ runtime_outputs = outputs
- tool("rust_cdylib") {
- rust_outfile = "{{output_dir}}/lib{{target_output_name}}.dll"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+ }
- tool("rust_macro") {
- rust_outfile = "{{output_dir}}/{{target_output_name}}.dll"
- depfile = "{{output}}.d"
- rspfile = "$rust_outfile.rsp"
- rspfile_content = "{{rustdeps}} {{externs}}"
- command = "$python_path \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_common_args --emit=dep-info=$depfile,link -o $rust_outfile LDFLAGS RUSTENV {{rustenv}}"
- description = "RUST $rust_outfile"
- rust_sysroot = rust_sysroot_relative_to_out
- outputs = [ rust_outfile ]
- }
+ tool("rust_cdylib") {
+ # E.g. "foo.dll":
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ libname = "$dllname.lib" # e.g. foo.dll.lib
+ pdbname = "$dllname.pdb"
+ rspfile = "$dllname.rsp"
+ depfile = "$dllname.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(CDYLIB) {{output}}"
+ outputs = [
+ # The first entry here is used for dependency tracking. Dylibs are
+ # linked into other targets and that linking must be done through
+ # the .lib file, not the .dll file. So the .lib file is the primary
+ # output here.
+ libname,
+ dllname,
+ pdbname,
+ ]
+ runtime_outputs = [
+ dllname,
+ pdbname,
+ ]
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname /IMPLIB:$libname RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+
+      # Since the above command only updates the .lib file when it changes,
+ # ask Ninja to check if the timestamp actually changed to know if
+ # downstream dependencies should be recompiled.
+ restat = true
+ }
+
+ tool("rust_macro") {
+ # E.g. "foo.dll":
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ pdbname = "$dllname.pdb"
+ rspfile = "$dllname.rsp"
+ depfile = "$dllname.d"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "RUST(MACRO) {{output}}"
+ outputs = [
+ # The first entry here is used for dependency tracking. Proc macros
+        # are consumed as dlls directly, loaded at runtime, so the dll is the
+ # primary output here. If we make a .lib file the primary output, we
+ # end up trying to load the .lib file as a procmacro which fails.
+ #
+ # Since depending on a macro target for linking would fail (it would
+        # try to link the primary .dll target), we omit the .lib here entirely.
+ dllname,
+ pdbname,
+ ]
+ runtime_outputs = outputs
+
+ rspfile_content = "{{rustdeps}} {{externs}}"
+ command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}"
+ rust_sysroot = rust_sysroot_relative_to_out
+
+      # Since the above command only updates its output when it changes,
+ # ask Ninja to check if the timestamp actually changed to know if
+ # downstream dependencies should be recompiled.
+ restat = true
}
}
@@ -493,20 +566,57 @@ template("msvc_toolchain") {
}
}
+# Makes a single MSVC toolchain, or possibly two if we
+# need an additional toolchain without sanitizers enabled.
+template("msvc_toolchain") {
+ single_msvc_toolchain(target_name) {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "visibility",
+ "test_only",
+ ])
+
+    # No need to forward visibility and test_only as they apply to targets, not
+    # toolchains, but presubmit checks require that we explicitly exclude them.
+ }
+
+ if (using_sanitizer) {
+ # Make an additional toolchain with no sanitizers.
+ single_msvc_toolchain("${target_name}_no_sanitizers") {
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ forward_variables_from(invoker,
+ "*",
+ [
+ "toolchain_args",
+ "visibility",
+ "test_only",
+ ])
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker.toolchain_args, "*")
+ toolchain_disables_sanitizers = true
+ }
+ }
+ }
+}
+
template("win_toolchains") {
assert(defined(invoker.toolchain_arch))
toolchain_arch = invoker.toolchain_arch
- win_toolchain_data = exec_script("//build/toolchain/win/setup_toolchain.py",
- [
- visual_studio_path,
- windows_sdk_path,
- visual_studio_runtime_dirs,
- "win",
- toolchain_arch,
- "environment." + toolchain_arch,
- ],
- "scope")
+ if (toolchain_arch == "x86") {
+ win_toolchain_data = win_toolchain_data_x86
+ } else if (toolchain_arch == "x64") {
+ win_toolchain_data = win_toolchain_data_x64
+ } else if (toolchain_arch == "arm64") {
+ win_toolchain_data = win_toolchain_data_arm64
+ } else {
+ error("Unsupported toolchain_arch, add it to win_toolchain_data.gni")
+ }
# The toolchain using MSVC only makes sense when not doing cross builds.
# Chromium exclusively uses the win_clang_ toolchain below, but V8 and
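The "$python_path" quoting added to the linker, rc, asm, and rustc command lines above matters when the Python interpreter lives under a path containing spaces. A minimal sketch of the failure mode, using shell-style splitting and a made-up path:

# Illustrative only: why toolchain.gni now wraps $python_path in quotes.
# The interpreter path below is hypothetical.
import shlex

python_path = 'C:/Program Files/depot_tools/python3'
tool_wrapper = 'build/toolchain/win/tool_wrapper.py'

unquoted = f'{python_path} {tool_wrapper} link-wrapper environment.x64 False'
quoted = f'"{python_path}" {tool_wrapper} link-wrapper environment.x64 False'

# Without quotes the command splits inside the interpreter path, so the build
# would try to run "C:/Program" with "Files/depot_tools/python3" as an argument.
print(shlex.split(unquoted)[0])  # C:/Program
print(shlex.split(quoted)[0])    # C:/Program Files/depot_tools/python3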
diff --git a/build/toolchain/win/win_toolchain_data.gni b/build/toolchain/win/win_toolchain_data.gni
new file mode 100644
index 000000000..505d0ce50
--- /dev/null
+++ b/build/toolchain/win/win_toolchain_data.gni
@@ -0,0 +1,43 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+
+declare_args() {
+ win_toolchain_data_x86 =
+ exec_script("//build/toolchain/win/setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ "x86",
+ "environment.x86",
+ ],
+ "scope")
+
+ win_toolchain_data_x64 =
+ exec_script("//build/toolchain/win/setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ "x64",
+ "environment.x64",
+ ],
+ "scope")
+
+ win_toolchain_data_arm64 =
+ exec_script("//build/toolchain/win/setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ "arm64",
+ "environment.arm64",
+ ],
+ "scope")
+}
diff --git a/build/toolchain/wrapper_utils.py b/build/toolchain/wrapper_utils.py
index 31f71a2ac..f01e159fd 100644
--- a/build/toolchain/wrapper_utils.py
+++ b/build/toolchain/wrapper_utils.py
@@ -13,6 +13,8 @@ import shutil
import sys
import threading
+import whole_archive
+
_BAT_PREFIX = 'cmd /c call '
@@ -70,6 +72,10 @@ def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
elif map_file:
command.append('-Wl,-Map,' + map_file)
+ # We want to link rlibs as --whole-archive if they are part of a unit test
+  # target. This is determined by the switch `-LinkWrapper,add-whole-archive`.
+ command = whole_archive.wrap_with_whole_archive(command)
+
result = subprocess.call(command, env=env)
if tmp_map_path and result == 0:
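The whole_archive module imported above is not part of this diff, so only its intent is visible here: rlibs belonging to a unit-test target should be linked inside a --whole-archive span so the linker keeps objects that nothing references directly. A rough sketch of that idea, with a hypothetical flag spelling (the real grammar lives in build/toolchain/whole_archive.py):

# Illustrative sketch only; the actual parsing is done by whole_archive.py,
# which this diff does not show. The '-LinkWrapper,add-whole-archive=' form
# below is an assumption, not the real switch grammar.
def wrap_with_whole_archive_sketch(command):
  out = []
  for arg in command:
    if arg.startswith('-LinkWrapper,add-whole-archive='):
      rlibs = arg.split('=', 1)[1].split(',')
      out.append('-Wl,--whole-archive')
      out.extend(rlibs)
      out.append('-Wl,--no-whole-archive')
    else:
      out.append(arg)
  return out

# The test rlib gets force-included; everything else passes through unchanged.
print(wrap_with_whole_archive_sketch(
    ['clang++', '-LinkWrapper,add-whole-archive=obj/libfoo_unittests.rlib',
     'obj/libbar.rlib', '-o', 'foo_unittests']))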
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
index 524ffa034..564068ddb 100644
--- a/build/util/LASTCHANGE
+++ b/build/util/LASTCHANGE
@@ -1,2 +1,2 @@
-LASTCHANGE=7946990e1bb93eade39cc6383d2ff19939f9f4fb-refs/branch-heads/5481_123@{#5}
-LASTCHANGE_YEAR=2023
+LASTCHANGE=0
+LASTCHANGE_YEAR=1970
diff --git a/build/util/LASTCHANGE.committime b/build/util/LASTCHANGE.committime
index 8a7eac9b3..c22708346 100644
--- a/build/util/LASTCHANGE.committime
+++ b/build/util/LASTCHANGE.committime
@@ -1 +1 @@
-1677005917 \ No newline at end of file
+0 \ No newline at end of file
diff --git a/build/util/action_remote.py b/build/util/action_remote.py
index 1e9517ddf..ea2e13244 100755
--- a/build/util/action_remote.py
+++ b/build/util/action_remote.py
@@ -14,6 +14,10 @@ import subprocess
import sys
from enum import Enum
+_THIS_DIR = os.path.realpath(os.path.dirname(__file__))
+_SRC_DIR = os.path.dirname(os.path.dirname(_THIS_DIR))
+_MOJOM_DIR = os.path.join(_SRC_DIR, 'mojo', 'public', 'tools', 'mojom')
+
class CustomProcessor(Enum):
mojom_parser = 'mojom_parser'
@@ -22,16 +26,20 @@ class CustomProcessor(Enum):
return self.value
-def _process_build_metadata_json(bm_file, exec_root, working_dir, output_root,
- re_outputs, processed_inputs):
+def _process_build_metadata_json(bm_file, input_roots, output_root, re_outputs,
+ processed_inputs):
"""Recursively find mojom_parser inputs from a build_metadata file."""
+ # Import Mojo-specific dep here so non-Mojo remote actions don't need it.
+ if _MOJOM_DIR not in sys.path:
+ sys.path.insert(0, _MOJOM_DIR)
+ from mojom_parser import RebaseAbsolutePath
+
if bm_file in processed_inputs:
return
processed_inputs.add(bm_file)
bm_dir = os.path.dirname(bm_file)
- wd_rel = os.path.relpath(working_dir, exec_root)
with open(bm_file) as f:
bm = json.load(f)
@@ -41,8 +49,9 @@ def _process_build_metadata_json(bm_file, exec_root, working_dir, output_root,
src = os.path.normpath(os.path.join(bm_dir, s))
if src not in processed_inputs and os.path.exists(src):
processed_inputs.add(src)
- src_module = os.path.normpath(
- os.path.join(output_root, os.path.join(wd_rel, src + "-module")))
+ src_module = os.path.join(
+ output_root,
+ RebaseAbsolutePath(os.path.abspath(src), input_roots) + "-module")
if src_module in re_outputs:
continue
if src_module not in processed_inputs and os.path.exists(src_module):
@@ -51,8 +60,8 @@ def _process_build_metadata_json(bm_file, exec_root, working_dir, output_root,
# Recurse into build_metadata deps.
for d in bm["deps"]:
dep = os.path.normpath(os.path.join(bm_dir, d))
- _process_build_metadata_json(dep, exec_root, working_dir, output_root,
- re_outputs, processed_inputs)
+ _process_build_metadata_json(dep, input_roots, output_root, re_outputs,
+ processed_inputs)
def _get_mojom_parser_inputs(exec_root, output_files, extra_args):
@@ -67,12 +76,17 @@ def _get_mojom_parser_inputs(exec_root, output_files, extra_args):
argparser = argparse.ArgumentParser()
argparser.add_argument('--check-imports', dest='check_imports', required=True)
argparser.add_argument('--output-root', dest='output_root', required=True)
+ argparser.add_argument('--input-root',
+ default=[],
+ action='append',
+ dest='input_root_paths')
mojom_parser_args, _ = argparser.parse_known_args(args=extra_args)
+ input_roots = list(map(os.path.abspath, mojom_parser_args.input_root_paths))
+ output_root = os.path.abspath(mojom_parser_args.output_root)
processed_inputs = set()
- _process_build_metadata_json(mojom_parser_args.check_imports, exec_root,
- os.getcwd(), mojom_parser_args.output_root,
- output_files, processed_inputs)
+ _process_build_metadata_json(mojom_parser_args.check_imports, input_roots,
+ output_root, output_files, processed_inputs)
# Rebase paths onto rewrapper exec root.
return map(lambda dep: os.path.normpath(os.path.relpath(dep, exec_root)),
diff --git a/build/util/android_chrome_version.py b/build/util/android_chrome_version.py
index 910e304ba..876f48546 100755
--- a/build/util/android_chrome_version.py
+++ b/build/util/android_chrome_version.py
@@ -55,6 +55,7 @@ _PACKAGE_NAMES = {
'MONOCHROME': 20,
'TRICHROME': 30,
'TRICHROME_BETA': 40,
+ 'TRICHROME_AUTO': 50,
'WEBVIEW_STABLE': 0,
'WEBVIEW_BETA': 10,
'WEBVIEW_DEV': 20,
@@ -101,14 +102,18 @@ _APKS = {
('TRICHROME_32', 'TRICHROME', '32'),
('TRICHROME_32_64', 'TRICHROME', '32_64'),
('TRICHROME_64_32', 'TRICHROME', '64_32'),
+ ('TRICHROME_64_32_HIGH', 'TRICHROME', '64_32_high'),
('TRICHROME_64', 'TRICHROME', '64'),
- ('TRICHROME_64_HIGH', 'TRICHROME', '64_high'),
+ ('TRICHROME_64_HIGH', 'TRICHROME', '64_32_high'), # Deprecated.
+ ('TRICHROME_AUTO_64_32', 'TRICHROME_AUTO', '64_32'),
('TRICHROME_BETA', 'TRICHROME_BETA', '32_64'),
('TRICHROME_32_BETA', 'TRICHROME_BETA', '32'),
('TRICHROME_32_64_BETA', 'TRICHROME_BETA', '32_64'),
('TRICHROME_64_32_BETA', 'TRICHROME_BETA', '64_32'),
+ ('TRICHROME_64_32_HIGH_BETA', 'TRICHROME_BETA', '64_32_high'),
('TRICHROME_64_BETA', 'TRICHROME_BETA', '64'),
- ('TRICHROME_64_HIGH_BETA', 'TRICHROME_BETA', '64_high'),
+ # Deprecated
+ ('TRICHROME_64_HIGH_BETA', 'TRICHROME_BETA', '64_32_high'),
('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32_64'),
('WEBVIEW_BETA', 'WEBVIEW_BETA', '32_64'),
('WEBVIEW_DEV', 'WEBVIEW_DEV', '32_64'),
@@ -176,7 +181,7 @@ _ABIS_TO_DIGIT_MASK = {
'32_64': 3,
'64_32': 4,
'64': 5,
- '64_high': 9,
+ '64_32_high': 9,
},
'intel': {
'32': 1,
@@ -291,7 +296,7 @@ def GenerateVersionCodes(version_values, arch, is_next_build):
version_codes = {}
for apk, package, abis in _APKS[bitness]:
- if abis == '64_high' and arch != 'arm64':
+ if abis == '64_32_high' and arch != 'arm64':
continue
abi_part = _ABIS_TO_DIGIT_MASK[mfg][abis]
package_part = _PACKAGE_NAMES[package]
diff --git a/build/util/android_chrome_version_test.py b/build/util/android_chrome_version_test.py
index 496b8cfbe..f1f750530 100644
--- a/build/util/android_chrome_version_test.py
+++ b/build/util/android_chrome_version_test.py
@@ -169,9 +169,11 @@ class _VersionTest(unittest.TestCase):
arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE']
arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE']
arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
- arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
- arch_trichrome_64_high_version_code = output[
+ arch_trichrome_64_32_high_version_code = output[
'TRICHROME_64_HIGH_VERSION_CODE']
+ arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+ arch_trichrome_auto_64_32_version_code = output[
+ 'TRICHROME_AUTO_64_32_VERSION_CODE']
self.assertEqual(arch_monochrome_32_version_code, '484400020')
self.assertEqual(arch_monochrome_32_64_version_code, '484400023')
@@ -182,8 +184,9 @@ class _VersionTest(unittest.TestCase):
self.assertEqual(arch_trichrome_32_64_version_code, '484400033')
self.assertEqual(arch_trichrome_version_code, '484400033')
self.assertEqual(arch_trichrome_64_32_version_code, '484400034')
+ self.assertEqual(arch_trichrome_64_32_high_version_code, '484400039')
self.assertEqual(arch_trichrome_64_version_code, '484400035')
- self.assertEqual(arch_trichrome_64_high_version_code, '484400039')
+ self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400054')
def testGenerateVersionCodesAndroidArchX64(self):
"""Assert it handles different architectures correctly.
@@ -219,6 +222,8 @@ class _VersionTest(unittest.TestCase):
arch_trichrome_version_code = output['TRICHROME_VERSION_CODE']
arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+ arch_trichrome_auto_64_32_version_code = output[
+ 'TRICHROME_AUTO_64_32_VERSION_CODE']
self.assertEqual(arch_monochrome_32_version_code, '484400021')
self.assertEqual(arch_monochrome_32_64_version_code, '484400026')
@@ -230,6 +235,7 @@ class _VersionTest(unittest.TestCase):
self.assertEqual(arch_trichrome_version_code, '484400036')
self.assertEqual(arch_trichrome_64_32_version_code, '484400037')
self.assertEqual(arch_trichrome_64_version_code, '484400038')
+ self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400057')
def testGenerateVersionCodesAndroidArchOrderArm(self):
"""Assert it handles different architectures correctly.
@@ -421,14 +427,24 @@ class _VersionCodeTest(unittest.TestCase):
self.assertEqual(abi, 'arm_64_32')
self.assertEqual(is_next_build, False)
- def testArm_64BuildTranslate(self):
- """Test for a build with Trichrome and arm_64_high."""
+ def testArm_Auto_64_32Translate(self):
+ """Test for an auto build with Trichrome and arm_64_32."""
+ build, patch, package, abi, is_next_build = TranslateVersionCode(
+ '499900054')
+ self.assertEqual(build, 4999)
+ self.assertEqual(patch, 0)
+ self.assertEqual(package, 'TRICHROME_AUTO')
+ self.assertEqual(abi, 'arm_64_32')
+ self.assertEqual(is_next_build, False)
+
+ def testArm_64_32HighTranslate(self):
+ """Test for a build with Trichrome and arm_64_32_high."""
build, patch, package, abi, is_next_build = TranslateVersionCode(
'534613739')
self.assertEqual(build, 5346)
self.assertEqual(patch, 137)
self.assertEqual(package, 'TRICHROME')
- self.assertEqual(abi, 'arm_64_high')
+ self.assertEqual(abi, 'arm_64_32_high')
self.assertEqual(is_next_build, False)
def testX86_64Translate(self):
@@ -461,6 +477,16 @@ class _VersionCodeTest(unittest.TestCase):
self.assertEqual(abi, 'x86_64_32')
self.assertEqual(is_next_build, False)
+ def testX86_Auto_64_32Translate(self):
+ """Test for an auto build with x86_64_32."""
+ build, patch, package, abi, is_next_build = TranslateVersionCode(
+ '499900057')
+ self.assertEqual(build, 4999)
+ self.assertEqual(patch, 0)
+ self.assertEqual(package, 'TRICHROME_AUTO')
+ self.assertEqual(abi, 'x86_64_32')
+ self.assertEqual(is_next_build, False)
+
def testWebviewTranslate(self):
"""Test for a build with Webview."""
build, patch, package, abi, is_next_build = TranslateVersionCode(
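The version codes asserted above follow a fixed 9-digit layout: the 4-digit build, the zero-padded 3-digit patch, then a two-digit suffix formed by adding the _PACKAGE_NAMES digit to the _ABIS_TO_DIGIT_MASK digit. A small worked check of the new TRICHROME_AUTO and 64_32_high values (a sketch of the layout the tests assert, not the production code path):

# Reproduces the BBBBPPPSS layout checked by the tests above.
PACKAGE_DIGITS = {'TRICHROME': 30, 'TRICHROME_AUTO': 50}  # subset of _PACKAGE_NAMES
ARM_ABI_DIGITS = {'64_32': 4, '64_32_high': 9}  # subset of _ABIS_TO_DIGIT_MASK['arm']

def version_code(build, patch, package, abi):
  return '%d%03d%02d' % (build, patch,
                         PACKAGE_DIGITS[package] + ARM_ABI_DIGITS[abi])

# Matches the new TranslateVersionCode('499900054') expectation:
assert version_code(4999, 0, 'TRICHROME_AUTO', '64_32') == '499900054'
# Matches the arm_64_32_high case ('534613739'):
assert version_code(5346, 137, 'TRICHROME', '64_32_high') == '534613739'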
diff --git a/build/util/generate_wrapper.gni b/build/util/generate_wrapper.gni
index 92badac7f..e2ceccc9e 100644
--- a/build/util/generate_wrapper.gni
+++ b/build/util/generate_wrapper.gni
@@ -41,43 +41,28 @@
# wrapper_script = "$root_build_dir/bin/run_sample_build_product"
# }
template("generate_wrapper") {
- _generator_script = "//build/util/generate_wrapper.py"
- if (defined(invoker.generator_script)) {
- _generator_script = invoker.generator_script
- }
- _executable_to_wrap = invoker.executable
- _wrapper_script = invoker.wrapper_script
- if (is_win) {
- _wrapper_script += ".bat"
- }
- if (defined(invoker.executable_args)) {
- _wrapped_arguments = invoker.executable_args
- } else {
- _wrapped_arguments = []
- }
-
action(target_name) {
- forward_variables_from(invoker,
- TESTONLY_AND_VISIBILITY + [
- "assert_no_deps",
- "data",
- "data_deps",
- "deps",
- "public_deps",
- "sources",
- ])
- script = _generator_script
- if (!defined(data)) {
- data = []
+ if (defined(invoker.generator_script)) {
+ script = invoker.generator_script
+ } else {
+ script = "//build/util/generate_wrapper.py"
}
- data += [
+ _wrapper_script = invoker.wrapper_script
+ if (is_win) {
+ _wrapper_script += ".bat"
+ }
+
+ data = [
_wrapper_script,
"//.vpython3",
]
+ if (defined(invoker.data)) {
+ data += invoker.data
+ }
outputs = [ _wrapper_script ]
_rebased_executable_to_wrap =
- rebase_path(_executable_to_wrap, root_build_dir)
+ rebase_path(invoker.executable, root_build_dir)
_rebased_wrapper_script = rebase_path(_wrapper_script, root_build_dir)
if (is_win) {
_script_language = "batch"
@@ -95,11 +80,19 @@ template("generate_wrapper") {
_script_language,
]
- args += [ "--" ]
- args += _wrapped_arguments
-
- if (defined(invoker.write_runtime_deps)) {
- write_runtime_deps = invoker.write_runtime_deps
+ if (defined(invoker.executable_args)) {
+ args += [ "--" ] + invoker.executable_args
}
+
+ forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+ forward_variables_from(invoker,
+ "*",
+ TESTONLY_AND_VISIBILITY + [
+ "data",
+ "executable",
+ "executable_args",
+ "generator_script",
+ "wrapper_script",
+ ])
}
}
diff --git a/build/util/java_action.py b/build/util/java_action.py
index a19e06d8a..6382dc23b 100755
--- a/build/util/java_action.py
+++ b/build/util/java_action.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
index 2caf94506..98a6360b4 100755
--- a/build/util/lastchange.py
+++ b/build/util/lastchange.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2012 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -6,7 +6,6 @@
"""
lastchange.py -- Chromium revision fetching utility.
"""
-from __future__ import print_function
import argparse
import collections
diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py
index a5327796b..8246e206c 100644
--- a/build/util/lib/common/perf_tests_results_helper.py
+++ b/build/util/lib/common/perf_tests_results_helper.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import re
import sys
diff --git a/build/util/lib/common/unittest_util_test.py b/build/util/lib/common/unittest_util_test.py
index 472982bf3..e775e1a5e 100755
--- a/build/util/lib/common/unittest_util_test.py
+++ b/build/util/lib/common/unittest_util_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/util/lib/results/DIR_METADATA b/build/util/lib/results/DIR_METADATA
new file mode 100644
index 000000000..aea61c0bf
--- /dev/null
+++ b/build/util/lib/results/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Infra>Client>Chrome"
+}
diff --git a/build/util/lib/results/result_sink.py b/build/util/lib/results/result_sink.py
index 003fa39b9..3996b65d8 100644
--- a/build/util/lib/results/result_sink.py
+++ b/build/util/lib/results/result_sink.py
@@ -77,6 +77,7 @@ class ResultSinkClient(object):
duration,
test_log,
test_file,
+ variant=None,
artifacts=None,
failure_reason=None,
html_artifact=None):
@@ -91,6 +92,9 @@ class ResultSinkClient(object):
duration: An int representing time in ms.
test_log: A string representing the test's output.
test_file: A string representing the file location of the test.
+      variant: An optional dict of variant key-value pairs sent by the test
+          runner as an additional variant, which can override or add to the
+          variants passed to the `rdb stream` command.
artifacts: An optional dict of artifacts to attach to the test.
failure_reason: An optional string with the reason why the test failed.
Should be None if the test did not fail.
@@ -128,6 +132,9 @@ class ResultSinkClient(object):
}
}
+ if variant:
+ tr['variant'] = {'def': variant}
+
artifacts = artifacts or {}
tr['summaryHtml'] = html_artifact if html_artifact else ''
if test_log:
diff --git a/build/util/lib/results/result_sink_test.py b/build/util/lib/results/result_sink_test.py
index c6dd8209b..7d6567777 100755
--- a/build/util/lib/results/result_sink_test.py
+++ b/build/util/lib/results/result_sink_test.py
@@ -115,6 +115,24 @@ class ClientTest(unittest.TestCase):
'some-test')
self.assertIsNotNone(data['testResults'][0]['summaryHtml'])
+ @mock.patch('requests.Session.post')
+ def testPostWithVariant(self, mock_post):
+ self.client.Post('some-test',
+ result_types.PASS,
+ 0,
+ 'some-test-log',
+ None,
+ variant={
+ 'key1': 'value1',
+ 'key2': 'value2'
+ })
+ data = json.loads(mock_post.call_args[1]['data'])
+ self.assertEqual(data['testResults'][0]['variant'],
+ {'def': {
+ 'key1': 'value1',
+ 'key2': 'value2'
+ }})
+
if __name__ == '__main__':
unittest.main()
diff --git a/build/util/version.py b/build/util/version.py
index bf7a59eab..9bf51cd35 100755
--- a/build/util/version.py
+++ b/build/util/version.py
@@ -7,7 +7,6 @@
version.py -- Chromium version string substitution utility.
"""
-from __future__ import print_function
import argparse
import os
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
index d426fcd84..a9cd6f03d 100755
--- a/build/vs_toolchain.py
+++ b/build/vs_toolchain.py
@@ -1,15 +1,13 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import collections
import glob
import json
import os
-import pipes
import platform
import re
import shutil
@@ -19,8 +17,7 @@ import sys
from gn_helpers import ToGNString
-# VS 2019 16.61 with 10.0.20348.0 SDK, 10.0.19041 version of Debuggers
-# with ARM64 libraries and UWP support.
+# VS 2022 17.4 with 10.0.22621.0 SDK with ARM64 libraries and UWP support.
# See go/chromium-msvc-toolchain for instructions about how to update the
# toolchain.
#
@@ -29,11 +26,21 @@ from gn_helpers import ToGNString
#
# * //base/win/windows_version.cc NTDDI preprocessor check
# Triggers a compiler error if the available SDK is older than the minimum.
+# * SDK_VERSION in this file
+# Must match the packaged/required SDK version.
+# * SDK_VERSION in build/toolchain/win/setup_toolchain.py.
# * //build/config/win/BUILD.gn NTDDI_VERSION value
# Affects the availability of APIs in the toolchain headers.
# * //docs/windows_build_instructions.md mentions of VS or Windows SDK.
# Keeps the document consistent with the toolchain version.
-TOOLCHAIN_HASH = '1023ce2e82'
+# * //tools/win/setenv.py
+# Add/remove VS versions when upgrading to a new VS version.
+# * MSVC_TOOLSET_VERSION in this file
+# Maps between Visual Studio version and MSVC toolset
+# * MSVS_VERSIONS in this file
+# Records the packaged and default version of Visual Studio
+TOOLCHAIN_HASH = '27370823e7'
+SDK_VERSION = '10.0.22621.0'
script_dir = os.path.dirname(os.path.realpath(__file__))
json_data_file = os.path.join(script_dir, 'win_toolchain.json')
@@ -42,8 +49,8 @@ json_data_file = os.path.join(script_dir, 'win_toolchain.json')
# The first version is assumed by this script to be the one that is packaged,
# which makes a difference for the arm64 runtime.
MSVS_VERSIONS = collections.OrderedDict([
- ('2019', '16.0'), # Default and packaged version of Visual Studio.
- ('2022', '17.0'),
+ ('2022', '17.0'), # Default and packaged version of Visual Studio.
+ ('2019', '16.0'),
('2017', '15.0'),
])
@@ -301,47 +308,28 @@ def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix):
target = os.path.join(target_dir, dll)
source = os.path.join(source_dir, dll)
_CopyRuntimeImpl(target, source)
- # Copy the UCRT files from the Windows SDK. This location includes the
- # api-ms-win-crt-*.dll files that are not found in the Windows directory.
- # These files are needed for component builds. If WINDOWSSDKDIR is not set
- # use the default SDK path. This will be the case when
- # DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
- win_sdk_dir = os.path.normpath(
- os.environ.get('WINDOWSSDKDIR',
- os.path.expandvars('%ProgramFiles(x86)%'
- '\\Windows Kits\\10')))
- # ARM64 doesn't have a redist for the ucrt DLLs because they are always
- # present in the OS.
- if target_cpu != 'arm64':
- # Starting with the 10.0.17763 SDK the ucrt files are in a version-named
- # directory - this handles both cases.
- redist_dir = os.path.join(win_sdk_dir, 'Redist')
- version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
- if len(version_dirs) > 0:
- _SortByHighestVersionNumberFirst(version_dirs)
- redist_dir = version_dirs[0]
- ucrt_dll_dirs = os.path.join(redist_dir, 'ucrt', 'DLLs', target_cpu)
- ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
- assert len(ucrt_files) > 0
- for ucrt_src_file in ucrt_files:
- file_part = os.path.basename(ucrt_src_file)
- ucrt_dst_file = os.path.join(target_dir, file_part)
- _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
- # We must copy ucrtbase.dll for x64/x86, and ucrtbased.dll for all CPU types.
- if target_cpu != 'arm64' or not suffix.startswith('.'):
- if not suffix.startswith('.'):
- # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/
- # ucrt/.
- sdk_bin_root = os.path.join(win_sdk_dir, 'bin')
- sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*'))
- # Select the most recent SDK if there are multiple versions installed.
- _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs)
- for directory in sdk_bin_sub_dirs:
- sdk_redist_root_version = os.path.join(sdk_bin_root, directory)
- if not os.path.isdir(sdk_redist_root_version):
- continue
- source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt')
- break
+ # We must copy ucrtbased.dll for all CPU types. The rest of the Universal CRT
+ # is installed as part of the OS in Windows 10 and beyond.
+ if not suffix.startswith('.'):
+ win_sdk_dir = os.path.normpath(
+ os.environ.get(
+ 'WINDOWSSDKDIR',
+ os.path.expandvars('%ProgramFiles(x86)%'
+ '\\Windows Kits\\10')))
+ # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/
+ # ucrt/.
+ sdk_bin_root = os.path.join(win_sdk_dir, 'bin')
+ sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*'))
+ # Select the most recent SDK if there are multiple versions installed.
+ _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs)
+ for directory in sdk_bin_sub_dirs:
+ sdk_redist_root_version = os.path.join(sdk_bin_root, directory)
+ if not os.path.isdir(sdk_redist_root_version):
+ continue
+ source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt')
+ if not os.path.isdir(source_dir):
+ continue
+ break
_CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
os.path.join(source_dir, 'ucrtbase' + suffix))
@@ -410,20 +398,33 @@ def CopyDlls(target_dir, configuration, target_cpu):
if configuration == 'Debug':
_CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
_CopyDebugger(target_dir, target_cpu)
+ if target_cpu == 'arm64':
+ target_dir = os.path.join(target_dir, 'win_clang_x64')
+ target_cpu = 'x64'
+ runtime_dir = x64_runtime
+ os.makedirs(target_dir, exist_ok=True)
+ _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
+ if configuration == 'Debug':
+ _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
+ _CopyDebugger(target_dir, target_cpu)
def _CopyDebugger(target_dir, target_cpu):
- """Copy dbghelp.dll and dbgcore.dll into the requested directory as needed.
+ """Copy dbghelp.dll, dbgcore.dll, and msdia140.dll into the requested
+ directory.
target_cpu is one of 'x86', 'x64' or 'arm64'.
dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
from the SDK directory avoids using the system copy of dbghelp.dll which then
- ensures compatibility with recent debug information formats, such as VS
- 2017 /debug:fastlink PDBs.
+ ensures compatibility with recent debug information formats, such as
+  large-page PDBs. Note that for these DLLs to be deployed to swarming bots, they
+ also need to be listed in group("runtime_libs").
dbgcore.dll is needed when using some functions from dbghelp.dll (like
MinidumpWriteDump).
+
+ msdia140.dll is needed for tools like symupload.exe and dump_syms.exe.
"""
win_sdk_dir = SetEnvironmentAndGetSDKDir()
if not win_sdk_dir:
@@ -432,10 +433,6 @@ def _CopyDebugger(target_dir, target_cpu):
# List of debug files that should be copied, the first element of the tuple is
# the name of the file and the second indicates if it's optional.
debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)]
- # The UCRT is not a redistributable component on arm64.
- if target_cpu != 'arm64':
- debug_files.extend([('api-ms-win-downlevel-kernel32-l2-1-0.dll', False),
- ('api-ms-win-eventing-provider-l1-1-0.dll', False)])
for debug_file, is_optional in debug_files:
full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
if not os.path.exists(full_path):
@@ -443,12 +440,18 @@ def _CopyDebugger(target_dir, target_cpu):
continue
else:
raise Exception('%s not found in "%s"\r\nYou must install '
- 'Windows 10 SDK version 10.0.20348.0 including the '
+ 'Windows 10 SDK version %s including the '
'"Debugging Tools for Windows" feature.' %
- (debug_file, full_path))
+ (debug_file, full_path, SDK_VERSION))
target_path = os.path.join(target_dir, debug_file)
_CopyRuntimeImpl(target_path, full_path)
+ # The x64 version of msdia140.dll is always used because symupload and
+ # dump_syms are always built as x64 binaries.
+ dia_path = os.path.join(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH']),
+ 'DIA SDK', 'bin', 'amd64', 'msdia140.dll')
+ _CopyRuntimeImpl(os.path.join(target_dir, 'msdia140.dll'), dia_path)
+
def _GetDesiredVsToolchainHashes():
"""Load a list of SHA1s corresponding to the toolchains that we want installed
@@ -558,11 +561,13 @@ def GetToolchainDir():
win_sdk_dir = SetEnvironmentAndGetSDKDir()
print('''vs_path = %s
+sdk_version = %s
sdk_path = %s
vs_version = %s
wdk_dir = %s
runtime_dirs = %s
-''' % (ToGNString(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])),
+''' % (ToGNString(NormalizePath(
+ os.environ['GYP_MSVS_OVERRIDE_PATH'])), ToGNString(SDK_VERSION),
ToGNString(win_sdk_dir), ToGNString(GetVisualStudioVersion()),
ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))),
ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None']))))
diff --git a/build/win/BUILD.gn b/build/win/BUILD.gn
index ed5548083..864581851 100644
--- a/build/win/BUILD.gn
+++ b/build/win/BUILD.gn
@@ -36,54 +36,6 @@ if (is_win) {
"$root_out_dir/cdb/winxp/exts.dll",
"$root_out_dir/cdb/winxp/ntsdexts.dll",
]
- if (current_cpu != "arm64") {
- # The UCRT files are not redistributable for ARM64 Win32.
- outputs += [
- "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
- "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-downlevel-kernel32-l2-1-0.dll",
- "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll",
- "$root_out_dir/cdb/ucrtbase.dll",
- ]
- }
args = [
rebase_path("$root_out_dir/cdb", root_out_dir),
current_cpu,
@@ -91,7 +43,11 @@ if (is_win) {
}
group("runtime_libs") {
- data = []
+ # These are needed for any tests that need to decode stacks.
+ data = [
+ "$root_out_dir/dbghelp.dll",
+ "$root_out_dir/dbgcore.dll",
+ ]
if (is_component_build) {
# Copy the VS runtime DLLs into the isolate so that they don't have to be
# preinstalled on the target machine. The debug runtimes have a "d" at
@@ -120,56 +76,6 @@ if (is_win) {
"ASan is only supported in 64-bit builds on Windows.")
data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ]
}
- if (current_cpu != "arm64") {
- data += [
- # Universal Windows 10 CRT files
- "$root_out_dir/api-ms-win-core-console-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-datetime-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-debug-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-errorhandling-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-file-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-file-l1-2-0.dll",
- "$root_out_dir/api-ms-win-core-file-l2-1-0.dll",
- "$root_out_dir/api-ms-win-core-handle-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-heap-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-interlocked-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-libraryloader-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-localization-l1-2-0.dll",
- "$root_out_dir/api-ms-win-core-memory-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-namedpipe-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-processenvironment-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-processthreads-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-processthreads-l1-1-1.dll",
- "$root_out_dir/api-ms-win-core-profile-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-rtlsupport-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-string-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-synch-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-synch-l1-2-0.dll",
- "$root_out_dir/api-ms-win-core-sysinfo-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-timezone-l1-1-0.dll",
- "$root_out_dir/api-ms-win-core-util-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-conio-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-convert-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-environment-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-filesystem-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-heap-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-locale-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-math-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-multibyte-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-private-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-process-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-runtime-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-stdio-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll",
- "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll",
- "$root_out_dir/api-ms-win-downlevel-kernel32-l2-1-0.dll",
- "$root_out_dir/api-ms-win-eventing-provider-l1-1-0.dll",
- ]
- if (!is_debug) {
- data += [ "$root_out_dir/ucrtbase.dll" ]
- }
- }
}
}
}
diff --git a/build/win/copy_cdb_to_output.py b/build/win/copy_cdb_to_output.py
index 0076103d5..5d1244033 100755
--- a/build/win/copy_cdb_to_output.py
+++ b/build/win/copy_cdb_to_output.py
@@ -1,9 +1,8 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import glob
import hashlib
@@ -101,17 +100,6 @@ def _CopyCDBToOutput(output_dir, target_arch):
_CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
_CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
_CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
- if src_arch in ['x64', 'x86']:
- # Copy all UCRT files from the debuggers directory, for compatibility with
- # the Windows 10 18362 SDK (one UCRT file) and later versions (two UCRT
- # files). The new file is api-ms-win-downlevel-kernel32-l2-1-0.dll and
- # should be added to the copy_cdb_to_output outputs when we require a newer
- # SDK.
- for file in glob.glob(os.path.join(src_dir, 'api-ms-win*.dll')):
- _CopyImpl(os.path.split(file)[1], output_dir, src_dir)
- _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir)
- for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')):
- _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir)
return 0
diff --git a/build/win/gn_meta_sln.py b/build/win/gn_meta_sln.py
index 33a03501d..2165a1301 100644
--- a/build/win/gn_meta_sln.py
+++ b/build/win/gn_meta_sln.py
@@ -6,7 +6,6 @@
# Helper utility to combine GN-generated Visual Studio projects into
# a single meta-solution.
-from __future__ import print_function
import os
import glob
diff --git a/build/win/message_compiler.py b/build/win/message_compiler.py
index 5d579a5b5..9759c1fd1 100644
--- a/build/win/message_compiler.py
+++ b/build/win/message_compiler.py
@@ -6,7 +6,6 @@
#
# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
-from __future__ import print_function
import difflib
import distutils.dir_util
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
index ff40cd63e..7dd8e1d8b 100755
--- a/build/win/reorder-imports.py
+++ b/build/win/reorder-imports.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
index 85469e5bd..dfc3c3322 100755
--- a/build/win/use_ansi_codes.py
+++ b/build/win/use_ansi_codes.py
@@ -1,10 +1,9 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints if the the terminal is likely to understand ANSI codes."""
-from __future__ import print_function
import os
diff --git a/build/write_build_date_header.py b/build/write_build_date_header.py
deleted file mode 100755
index 3b7941788..000000000
--- a/build/write_build_date_header.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The Chromium Authors
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Takes a timestamp and writes it in as readable text to a .h file."""
-
-import argparse
-import datetime
-import os
-import sys
-
-
-def main():
- argument_parser = argparse.ArgumentParser()
- argument_parser.add_argument('output_file', help='The file to write to')
- argument_parser.add_argument('timestamp')
- args = argument_parser.parse_args()
-
- date = datetime.datetime.utcfromtimestamp(int(args.timestamp))
- output = ('// Generated by //build/write_build_date_header.py\n'
- '#ifndef BUILD_DATE\n'
- '#define BUILD_DATE "{:%b %d %Y %H:%M:%S}"\n'
- '#endif // BUILD_DATE\n'.format(date))
-
- current_contents = ''
- if os.path.isfile(args.output_file):
- with open(args.output_file, 'r') as current_file:
- current_contents = current_file.read()
-
- if current_contents != output:
- with open(args.output_file, 'w') as output_file:
- output_file.write(output)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/write_buildflag_header.py b/build/write_buildflag_header.py
index 44da8a66f..89a073761 100755
--- a/build/write_buildflag_header.py
+++ b/build/write_buildflag_header.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
diff --git a/build/zip_helpers.py b/build/zip_helpers.py
new file mode 100644
index 000000000..4e36b36d7
--- /dev/null
+++ b/build/zip_helpers.py
@@ -0,0 +1,222 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper functions for dealing with .zip files."""
+
+import os
+import pathlib
+import posixpath
+import stat
+import time
+import zipfile
+
+_FIXED_ZIP_HEADER_LEN = 30
+
+
+def _set_alignment(zip_obj, zip_info, alignment):
+ """Sets a ZipInfo's extra field such that the file will be aligned.
+
+ Args:
+ zip_obj: The ZipFile object that is being written.
+ zip_info: The ZipInfo object about to be written.
+ alignment: The amount of alignment (e.g. 4, or 4*1024).
+ """
+ header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename)
+ pos = zip_obj.fp.tell() + header_size
+ padding_needed = (alignment - (pos % alignment)) % alignment
+
+ # Python writes |extra| to both the local file header and the central
+ # directory's file header. Android's zipalign tool writes only to the
+ # local file header, so there is more overhead in using Python to align.
+ zip_info.extra = b'\0' * padding_needed
+
+
+def _hermetic_date_time(timestamp=None):
+ if not timestamp:
+ return (2001, 1, 1, 0, 0, 0)
+ utc_time = time.gmtime(timestamp)
+ return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour,
+ utc_time.tm_min, utc_time.tm_sec)
+
+
+def add_to_zip_hermetic(zip_file,
+ zip_path,
+ *,
+ src_path=None,
+ data=None,
+ compress=None,
+ alignment=None,
+ timestamp=None):
+ """Adds a file to the given ZipFile with a hard-coded modified time.
+
+ Args:
+ zip_file: ZipFile instance to add the file to.
+ zip_path: Destination path within the zip file (or ZipInfo instance).
+ src_path: Path of the source file. Mutually exclusive with |data|.
+ data: File data as a string.
+ compress: Whether to enable compression. Default is taken from ZipFile
+ constructor.
+ alignment: If set, align the data of the entry to this many bytes.
+ timestamp: The last modification date and time for the archive member.
+ """
+ assert (src_path is None) != (data is None), (
+ '|src_path| and |data| are mutually exclusive.')
+ if isinstance(zip_path, zipfile.ZipInfo):
+ zipinfo = zip_path
+ zip_path = zipinfo.filename
+ else:
+ zipinfo = zipfile.ZipInfo(filename=zip_path)
+ zipinfo.external_attr = 0o644 << 16
+
+ zipinfo.date_time = _hermetic_date_time(timestamp)
+
+ if alignment:
+ _set_alignment(zip_file, zipinfo, alignment)
+
+ # Filenames can contain backslashes, but it is more likely that we've
+ # forgotten to use forward slashes as a directory separator.
+ assert '\\' not in zip_path, 'zip_path should not contain \\: ' + zip_path
+ assert not posixpath.isabs(zip_path), 'Absolute zip path: ' + zip_path
+ assert not zip_path.startswith('..'), 'Should not start with ..: ' + zip_path
+ assert posixpath.normpath(zip_path) == zip_path, (
+ f'Non-canonical zip_path: {zip_path} vs: {posixpath.normpath(zip_path)}')
+ assert zip_path not in zip_file.namelist(), (
+ 'Tried to add a duplicate zip entry: ' + zip_path)
+
+ if src_path and os.path.islink(src_path):
+ zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink
+ zip_file.writestr(zipinfo, os.readlink(src_path))
+ return
+
+ # Maintain the executable bit.
+ if src_path:
+ st = os.stat(src_path)
+ for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+ if st.st_mode & mode:
+ zipinfo.external_attr |= mode << 16
+
+ if src_path:
+ with open(src_path, 'rb') as f:
+ data = f.read()
+
+ # zipfile will deflate even when it makes the file bigger. To avoid
+ # growing files, disable compression at an arbitrary cut off point.
+ if len(data) < 16:
+ compress = False
+
+  # Passing compress_type=None explicitly to writestr() would mean ZIP_STORED
+  # rather than the default given to the ZipFile constructor, so look it up.
+ compress_type = zip_file.compression
+ if compress is not None:
+ compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ zip_file.writestr(zipinfo, data, compress_type)
+
+
+def add_files_to_zip(inputs,
+ output,
+ *,
+ base_dir=None,
+ compress=None,
+ zip_prefix_path=None,
+ timestamp=None):
+ """Creates a zip file from a list of files.
+
+ Args:
+ inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+ output: Path, fileobj, or ZipFile instance to add files to.
+ base_dir: Prefix to strip from inputs.
+    compress: Whether to compress the added entries.
+ zip_prefix_path: Path prepended to file path in zip file.
+ timestamp: Unix timestamp to use for files in the archive.
+ """
+ if base_dir is None:
+ base_dir = '.'
+ input_tuples = []
+ for tup in inputs:
+ if isinstance(tup, str):
+ src_path = tup
+ zip_path = os.path.relpath(src_path, base_dir)
+ # Zip files always use / as path separator.
+ if os.path.sep != posixpath.sep:
+ zip_path = str(pathlib.Path(zip_path).as_posix())
+ tup = (zip_path, src_path)
+ input_tuples.append(tup)
+
+ # Sort by zip path to ensure stable zip ordering.
+ input_tuples.sort(key=lambda tup: tup[0])
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
+ for zip_path, fs_path in input_tuples:
+ if zip_prefix_path:
+ zip_path = posixpath.join(zip_prefix_path, zip_path)
+ add_to_zip_hermetic(out_zip,
+ zip_path,
+ src_path=fs_path,
+ compress=compress,
+ timestamp=timestamp)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
+
+
+def zip_directory(output, base_dir, **kwargs):
+ """Zips all files in the given directory."""
+ inputs = []
+ for root, _, files in os.walk(base_dir):
+ for f in files:
+ inputs.append(os.path.join(root, f))
+
+ add_files_to_zip(inputs, output, base_dir=base_dir, **kwargs)
+
+
+def merge_zips(output, input_zips, path_transform=None, compress=None):
+ """Combines all files from |input_zips| into |output|.
+
+ Args:
+ output: Path, fileobj, or ZipFile instance to add files to.
+ input_zips: Iterable of paths to zip files to merge.
+ path_transform: Called for each entry path. Returns a new path, or None to
+ skip the file.
+ compress: Overrides compression setting from origin zip entries.
+ """
+ assert not isinstance(input_zips, str) # Easy mistake to make.
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ # Include paths in the existing zip here to avoid adding duplicate files.
+ added_names = set(out_zip.namelist())
+
+ try:
+ for in_file in input_zips:
+ with zipfile.ZipFile(in_file, 'r') as in_zip:
+ for info in in_zip.infolist():
+ # Ignore directories.
+ if info.filename[-1] == '/':
+ continue
+ if path_transform:
+ dst_name = path_transform(info.filename)
+ if dst_name is None:
+ continue
+ else:
+ dst_name = info.filename
+
+ # TODO(agrieve): Fail if duplicate entry is not identical.
+ already_added = dst_name in added_names
+ if not already_added:
+ if compress is not None:
+ compress_entry = compress
+ else:
+ compress_entry = info.compress_type != zipfile.ZIP_STORED
+ add_to_zip_hermetic(out_zip,
+ dst_name,
+ data=in_zip.read(info),
+ compress=compress_entry)
+ added_names.add(dst_name)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
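Taken together, the new helpers produce deterministic archives: entries default to a fixed 2001-01-01 timestamp, are sorted by zip path, and can optionally be aligned or merged from other zips. A short usage sketch (file names are invented for illustration, and it assumes build/ is on sys.path so zip_helpers is importable):

# Usage sketch for the new build/zip_helpers.py; all paths are hypothetical.
import zipfile
import zip_helpers

# Zip two generated files, compressed, with hermetic timestamps.
zip_helpers.add_files_to_zip(['out/gen/a.txt', 'out/gen/b.txt'],
                             'out/example.zip',
                             base_dir='out/gen',
                             compress=True)

# Merge another archive into it, skipping anything under tmp/.
with zipfile.ZipFile('out/example.zip', 'a') as z:
  zip_helpers.merge_zips(
      z, ['out/other.zip'],
      path_transform=lambda name: None if name.startswith('tmp/') else name)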