aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorandroid-build-team Robot <android-build-team-robot@google.com>2020-04-28 20:24:56 +0000
committerandroid-build-team Robot <android-build-team-robot@google.com>2020-04-28 20:24:56 +0000
commit656c7c0fa0d9b776c019f5423e01a9a28979d2bd (patch)
tree220a95d346d58fa5090d8033991bcfef2ad03d99
parent091406baac88deeb2ef9c71d5047bd9daf3915ef (diff)
parentccead650bb8e050400f55dc44b60f989b1f8e117 (diff)
downloadtoolchain-utils-android10-android13-mainline-tzdata-release.tar.gz
Change-Id: I3464301d9b41d48e11dfd361a411d60a1bd5a429
-rw-r--r--.gitignore3
-rw-r--r--.style.yapf3
-rw-r--r--OWNERS10
-rw-r--r--OWNERS.toolchain12
-rw-r--r--PRESUBMIT.cfg2
-rw-r--r--README10
-rw-r--r--README.md18
-rw-r--r--afdo_metadata/README.md21
-rw-r--r--afdo_metadata/chrome_afdo.json14
-rw-r--r--afdo_metadata/kernel_afdo.json11
-rwxr-xr-xafdo_redaction/redact_profile.py236
-rwxr-xr-xafdo_redaction/redact_profile_test.py134
-rwxr-xr-xafdo_redaction/remove_indirect_calls.py81
-rwxr-xr-xafdo_redaction/remove_indirect_calls_test.py67
-rw-r--r--afdo_tools/bisection/README.md71
-rwxr-xr-xafdo_tools/bisection/afdo_prof_analysis.py438
-rwxr-xr-xafdo_tools/bisection/afdo_prof_analysis_e2e_test.py281
-rwxr-xr-xafdo_tools/bisection/afdo_prof_analysis_test.py154
-rwxr-xr-xafdo_tools/bisection/e2e_external.sh29
-rwxr-xr-xafdo_tools/bisection/problemstatus_external.sh3
-rwxr-xr-xafdo_tools/bisection/state_assumption_external.sh40
-rwxr-xr-xafdo_tools/bisection/state_assumption_interrupt.sh38
-rwxr-xr-xafdo_tools/generate_afdo_from_tryjob.py165
-rwxr-xr-xafdo_tools/run_afdo_tryjob.py171
-rwxr-xr-xafe_lock_machine.py661
-rw-r--r--android_bench_suite/README.md49
-rw-r--r--android_bench_suite/README.txt41
-rw-r--r--android_bench_suite/autotest.diff8
-rwxr-xr-xandroid_bench_suite/fix_skia_results.py2
-rwxr-xr-xandroid_bench_suite/gen_json.py2
-rwxr-xr-xandroid_bench_suite/run.py4
-rwxr-xr-xandroid_merge_from_upstream.sh82
-rwxr-xr-xauto_delete_nightly_test_data.py14
-rwxr-xr-xautomation/clients/android.py2
-rwxr-xr-xautomation/clients/chromeos.py2
-rwxr-xr-xautomation/clients/crosstool.py2
-rwxr-xr-xautomation/clients/dejagnu_compiler.py2
-rwxr-xr-xautomation/clients/nightly.py2
-rwxr-xr-xautomation/clients/output_test.py2
-rwxr-xr-xautomation/clients/pwd_test.py2
-rwxr-xr-xautomation/clients/report/validate_failures.py2
-rwxr-xr-xautomation/common/command_executer_test.py2
-rwxr-xr-xautomation/common/machine_test.py2
-rwxr-xr-xautomation/server/machine_manager_test.py2
-rwxr-xr-xautomation/server/monitor/manage.py2
-rwxr-xr-xautomation/server/server.py2
-rwxr-xr-xautomation/server/server_test.py2
-rw-r--r--bestflags/README (renamed from bestflags/README.md)6
-rw-r--r--bestflags/examples/omnetpp/README (renamed from bestflags/examples/omnetpp/README.md)25
-rwxr-xr-xbin/tc_pyformat36
-rw-r--r--binary_search_tool/README.bisect217
-rw-r--r--binary_search_tool/README.bisect.md248
-rw-r--r--binary_search_tool/README.pass_bisect.md83
-rw-r--r--binary_search_tool/README.testing (renamed from binary_search_tool/README.testing.md)76
-rw-r--r--binary_search_tool/android/README.android186
-rw-r--r--binary_search_tool/android/README.android.md209
-rwxr-xr-xbinary_search_tool/android/switch_to_bad.sh2
-rwxr-xr-xbinary_search_tool/android/switch_to_good.sh2
-rwxr-xr-xbinary_search_tool/binary_search_state.py67
-rw-r--r--binary_search_tool/bisect_driver.py7
-rw-r--r--binary_search_tool/common.py16
-rw-r--r--binary_search_tool/cros_pkg/README.cros_pkg_triage185
-rw-r--r--binary_search_tool/cros_pkg/README.cros_pkg_triage.md193
-rw-r--r--binary_search_tool/ndk/README84
-rw-r--r--binary_search_tool/ndk/README.md89
-rw-r--r--binary_search_tool/sysroot_wrapper/README28
-rw-r--r--binary_search_tool/sysroot_wrapper/README.md35
-rwxr-xr-xbinary_search_tool/test/binary_search_tool_tester.py152
-rwxr-xr-xbinary_search_tool/test/cmd_script.py71
-rw-r--r--binary_search_tool/test/cmd_script_no_support.py23
-rwxr-xr-xbinary_search_tool/test/gen_obj.py2
-rwxr-xr-xbinary_search_tool/test/generate_cmd.py25
-rwxr-xr-xbuild_tc.py3
-rwxr-xr-xbuild_tool.py5
-rwxr-xr-xbuildbot_test_llvm.py80
-rwxr-xr-xbuildbot_test_toolchains.py37
-rwxr-xr-xchromiumos_image_diff.py38
-rw-r--r--compiler_wrapper/README.md36
-rw-r--r--compiler_wrapper/android_config_test.go133
-rw-r--r--compiler_wrapper/bisect_flag.go66
-rw-r--r--compiler_wrapper/bisect_flag_test.go182
-rwxr-xr-xcompiler_wrapper/build.py73
-rw-r--r--compiler_wrapper/bundle.README18
-rwxr-xr-xcompiler_wrapper/bundle.py67
-rw-r--r--compiler_wrapper/ccache_flag.go64
-rw-r--r--compiler_wrapper/ccache_flag_test.go173
-rw-r--r--compiler_wrapper/clang_flags.go215
-rw-r--r--compiler_wrapper/clang_flags_test.go310
-rw-r--r--compiler_wrapper/clang_syntax_flag.go37
-rw-r--r--compiler_wrapper/clang_syntax_flag_test.go180
-rw-r--r--compiler_wrapper/clang_tidy_flag.go100
-rw-r--r--compiler_wrapper/clang_tidy_flag_test.go281
-rw-r--r--compiler_wrapper/command.go260
-rw-r--r--compiler_wrapper/command_test.go39
-rw-r--r--compiler_wrapper/compile_with_fallback.go105
-rw-r--r--compiler_wrapper/compile_with_fallback_test.go292
-rw-r--r--compiler_wrapper/compiler_wrapper.go266
-rw-r--r--compiler_wrapper/compiler_wrapper_test.go198
-rw-r--r--compiler_wrapper/config.go197
-rw-r--r--compiler_wrapper/config_test.go143
-rw-r--r--compiler_wrapper/cros_hardened_config_test.go590
-rw-r--r--compiler_wrapper/cros_host_config_test.go96
-rw-r--r--compiler_wrapper/cros_nonhardened_config_test.go26
-rw-r--r--compiler_wrapper/disable_werror_flag.go134
-rw-r--r--compiler_wrapper/disable_werror_flag_test.go374
-rw-r--r--compiler_wrapper/env.go146
-rw-r--r--compiler_wrapper/env_test.go215
-rw-r--r--compiler_wrapper/errors.go86
-rw-r--r--compiler_wrapper/errors_test.go72
-rw-r--r--compiler_wrapper/gcc_flags.go31
-rw-r--r--compiler_wrapper/gcc_flags_test.go63
-rw-r--r--compiler_wrapper/goldenutil_test.go202
-rw-r--r--compiler_wrapper/gomacc_flag.go41
-rw-r--r--compiler_wrapper/gomacc_flag_test.go94
-rw-r--r--compiler_wrapper/libc_exec.go67
-rw-r--r--compiler_wrapper/main.go49
-rw-r--r--compiler_wrapper/oldwrapper.go392
-rw-r--r--compiler_wrapper/oldwrapper_test.go431
-rw-r--r--compiler_wrapper/pie_flags.go43
-rw-r--r--compiler_wrapper/pie_flags_test.go84
-rw-r--r--compiler_wrapper/print_cmdline_flag.go19
-rw-r--r--compiler_wrapper/print_cmdline_flag_test.go85
-rw-r--r--compiler_wrapper/print_config_flag.go21
-rw-r--r--compiler_wrapper/print_config_flag_test.go36
-rw-r--r--compiler_wrapper/rusage_flag.go70
-rw-r--r--compiler_wrapper/rusage_flag_test.go174
-rw-r--r--compiler_wrapper/sanitizer_flags.go58
-rw-r--r--compiler_wrapper/sanitizer_flags_test.go152
-rw-r--r--compiler_wrapper/stackprotector_flags.go29
-rw-r--r--compiler_wrapper/stackprotector_flags_test.go57
-rw-r--r--compiler_wrapper/sysroot_flag.go32
-rw-r--r--compiler_wrapper/sysroot_flag_test.go83
-rw-r--r--compiler_wrapper/testdata/android_golden/bisect.json103
-rw-r--r--compiler_wrapper/testdata/android_golden/clang_path.json230
-rw-r--r--compiler_wrapper/testdata/android_golden/compile_with_fallback.json115
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/bisect.json130
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json281
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json32
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json272
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json419
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json266
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json235
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json268
-rw-r--r--compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json146
-rw-r--r--compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json26
-rw-r--r--compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json218
-rw-r--r--compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json155
-rw-r--r--compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json80
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/bisect.json178
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json434
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json416
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_path.json605
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json387
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json347
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json310
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json343
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json226
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json253
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json329
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json233
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json320
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json119
-rw-r--r--compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json256
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json193
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json665
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json383
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json251
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json273
-rw-r--r--compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json233
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json157
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json528
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json343
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json191
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json241
-rw-r--r--compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json197
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json154
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json366
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json348
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json509
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json330
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json291
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json267
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json279
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json186
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json209
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json297
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json209
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json288
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json107
-rw-r--r--compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json237
-rw-r--r--compiler_wrapper/testutil_test.go336
-rw-r--r--compiler_wrapper/thumb_flags.go27
-rw-r--r--compiler_wrapper/thumb_flags_test.go113
-rw-r--r--compiler_wrapper/unsupported_flags.go14
-rw-r--r--compiler_wrapper/unsupported_flags_test.go20
-rw-r--r--compiler_wrapper/x64_flags.go21
-rw-r--r--compiler_wrapper/x64_flags_test.go39
-rwxr-xr-xcrb/crb_driver.py2
-rwxr-xr-xcros_utils/buildbot_json.py6
-rw-r--r--cros_utils/buildbot_utils.py19
-rwxr-xr-xcros_utils/buildbot_utils_unittest.py34
-rw-r--r--cros_utils/command_executer.py49
-rwxr-xr-xcros_utils/command_executer_unittest.py2
-rw-r--r--cros_utils/contextlib3.py116
-rwxr-xr-xcros_utils/contextlib3_test.py195
-rwxr-xr-xcros_utils/email_sender.py16
-rw-r--r--cros_utils/locks.py25
-rw-r--r--cros_utils/misc.py13
-rw-r--r--[-rwxr-xr-x]cros_utils/no_pseudo_terminal_test.py20
-rw-r--r--cros_utils/tabulator.py435
-rw-r--r--cros_utils/tabulator_test.py73
-rw-r--r--crosperf/benchmark.py15
-rw-r--r--crosperf/benchmark_run.py38
-rwxr-xr-xcrosperf/benchmark_run_unittest.py85
-rwxr-xr-xcrosperf/benchmark_unittest.py13
-rwxr-xr-xcrosperf/crosperf.py24
-rwxr-xr-xcrosperf/crosperf_test.py44
-rwxr-xr-xcrosperf/crosperf_unittest.py37
-rw-r--r--crosperf/default-telemetry-results.json32
-rw-r--r--crosperf/default_remotes5
-rw-r--r--crosperf/download_images.py105
-rwxr-xr-xcrosperf/download_images_unittest.py50
-rw-r--r--crosperf/experiment.py95
-rw-r--r--crosperf/experiment_factory.py339
-rwxr-xr-xcrosperf/experiment_factory_unittest.py263
-rw-r--r--crosperf/experiment_file.py33
-rwxr-xr-xcrosperf/experiment_file_unittest.py127
-rw-r--r--crosperf/experiment_files/README (renamed from crosperf/experiment_files/README.md)15
-rw-r--r--crosperf/experiment_files/dut_config.exp66
-rw-r--r--crosperf/experiment_files/enable_aslr.exp37
-rw-r--r--crosperf/experiment_runner.py115
-rwxr-xr-xcrosperf/experiment_runner_unittest.py65
-rw-r--r--crosperf/label.py20
-rw-r--r--crosperf/machine_manager.py20
-rwxr-xr-xcrosperf/machine_manager_unittest.py32
-rw-r--r--crosperf/mock_instance.py14
-rw-r--r--crosperf/results_cache.py576
-rwxr-xr-xcrosperf/results_cache_unittest.py1044
-rw-r--r--crosperf/results_organizer.py30
-rwxr-xr-xcrosperf/results_organizer_unittest.py20
-rw-r--r--crosperf/results_report.py156
-rwxr-xr-xcrosperf/results_report_unittest.py46
-rwxr-xr-xcrosperf/run_tests.sh28
-rw-r--r--crosperf/schedv2.py200
-rwxr-xr-xcrosperf/schedv2_unittest.py22
-rw-r--r--crosperf/settings.py21
-rw-r--r--crosperf/settings_factory.py140
-rwxr-xr-xcrosperf/settings_factory_unittest.py26
-rwxr-xr-xcrosperf/settings_unittest.py38
-rw-r--r--crosperf/suite_runner.py575
-rwxr-xr-xcrosperf/suite_runner_unittest.py846
-rwxr-xr-xcwp/bartlett/server.py2
-rwxr-xr-xdebug_info_test/debug_info_test.py2
-rw-r--r--debug_info_test/exist_debug_info.whitelist1
-rwxr-xr-xdejagnu/gdb_dejagnu.py4
-rwxr-xr-xdejagnu/run_dejagnu.py2
-rwxr-xr-xfdo_scripts/divide_and_merge_profiles.py2
-rwxr-xr-xfdo_scripts/divide_and_merge_profiles_test.py2
-rwxr-xr-xfdo_scripts/profile_cycler.py2
-rwxr-xr-xgo/chromeos/build_go11
-rwxr-xr-xgo/chromeos/go_chell11
-rwxr-xr-xgo/chromeos/go_chell_exec12
-rwxr-xr-xgo/chromeos/go_elm11
-rwxr-xr-xgo/chromeos/go_elm3211
-rwxr-xr-xgo/chromeos/go_elm32_exec12
-rwxr-xr-xgo/chromeos/go_elm_exec12
-rwxr-xr-xgo/chromeos/push_glibc4
-rwxr-xr-xgo/chromeos/setup_chromeos_testing.py250
-rwxr-xr-xgo/chromeos/target_cp4
-rwxr-xr-xgo/chromeos/target_sh2
-rw-r--r--go/patch/go-1.10.3/go0.patch27
-rw-r--r--go/patch/go-1.10.3/go1.patch50
-rw-r--r--go/patch/go-1.10.3/go2.patch267
-rw-r--r--go/patch/go-1.10.3/go3.patch732
-rw-r--r--go/patch/go-1.10.3/go4.patch199
-rw-r--r--go/patch/go-1.10.3/go5.patch106
-rw-r--r--go/patch/go-1.10.3/go6.patch142
-rw-r--r--go/patch/go-1.11.2/go0.patch27
-rw-r--r--go/patch/go-1.11.2/go1.patch50
-rw-r--r--go/patch/go-1.11.2/go2.patch277
-rw-r--r--go/patch/go-1.11.2/go3.patch730
-rw-r--r--go/patch/go-1.11.2/go4.patch199
-rw-r--r--go/patch/go-1.11.2/go5.patch106
-rw-r--r--go/patch/go-1.11.2/go6.patch149
-rw-r--r--go/patch/go0.patch (renamed from go/patch/go-1.10.2/go0.patch)0
-rw-r--r--go/patch/go1.patch (renamed from go/patch/go-1.10.2/go1.patch)0
-rw-r--r--go/patch/go2.patch (renamed from go/patch/go-1.10.2/go2.patch)0
-rw-r--r--go/patch/go3.patch (renamed from go/patch/go-1.10.2/go3.patch)0
-rw-r--r--go/patch/go4.patch (renamed from go/patch/go-1.10.2/go4.patch)0
-rw-r--r--go/patch/go5.patch (renamed from go/patch/go-1.10.2/go5.patch)0
-rw-r--r--go/patch/go6.patch (renamed from go/patch/go-1.10.2/go6.patch)0
-rwxr-xr-xheat_map.py148
-rwxr-xr-xheatmaps/heat_map.py185
-rwxr-xr-xheatmaps/heat_map_test.py157
-rw-r--r--heatmaps/heatmap_generator.py468
-rwxr-xr-xheatmaps/heatmap_generator_test.py315
-rwxr-xr-xheatmaps/perf-to-inst-page.sh68
-rwxr-xr-ximage_chromeos.py43
-rw-r--r--llvm_tools/README.md480
-rw-r--r--llvm_tools/assert_not_in_chroot.py24
-rwxr-xr-xllvm_tools/auto_llvm_bisection.py134
-rwxr-xr-xllvm_tools/auto_llvm_bisection_unittest.py232
-rwxr-xr-xllvm_tools/copy_helpers_to_chromiumos_overlay.py66
-rwxr-xr-xllvm_tools/custom_script_example.py72
-rw-r--r--llvm_tools/failure_modes.py23
-rwxr-xr-xllvm_tools/get_llvm_hash.py437
-rwxr-xr-xllvm_tools/get_llvm_hash_unittest.py150
-rwxr-xr-xllvm_tools/llvm_bisection.py463
-rwxr-xr-xllvm_tools/llvm_bisection_unittest.py595
-rwxr-xr-xllvm_tools/llvm_patch_management.py280
-rwxr-xr-xllvm_tools/llvm_patch_management_unittest.py305
-rwxr-xr-xllvm_tools/modify_a_tryjob.py288
-rwxr-xr-xllvm_tools/modify_a_tryjob_unittest.py401
-rwxr-xr-xllvm_tools/patch_manager.py748
-rwxr-xr-xllvm_tools/patch_manager_unittest.py911
-rw-r--r--llvm_tools/subprocess_helpers.py58
-rw-r--r--llvm_tools/test_helpers.py89
-rwxr-xr-xllvm_tools/update_all_tryjobs_with_auto.py81
-rwxr-xr-xllvm_tools/update_chromeos_llvm_next_hash.py715
-rwxr-xr-xllvm_tools/update_chromeos_llvm_next_hash_unittest.py941
-rwxr-xr-xllvm_tools/update_packages_and_run_tryjobs.py314
-rwxr-xr-xllvm_tools/update_packages_and_run_tryjobs_unittest.py292
-rwxr-xr-xllvm_tools/update_tryjob_status.py323
-rwxr-xr-xllvm_tools/update_tryjob_status_unittest.py617
-rwxr-xr-xlock_machine.py618
-rw-r--r--[-rwxr-xr-x]lock_machine_test.py (renamed from file_lock_machine_test.py)36
-rw-r--r--mem_tests/README46
-rw-r--r--mem_tests/README.md52
-rwxr-xr-xmem_tests/clean_data.py2
-rwxr-xr-xmem_tests/mem_groups.py2
-rwxr-xr-xmem_tests/total_mem_actual.py2
-rwxr-xr-xmem_tests/total_mem_sampled.py2
-rwxr-xr-xorderfile/post_process_orderfile.py91
-rwxr-xr-xorderfile/post_process_orderfile_test.py94
-rwxr-xr-xperf-to-inst-page.sh85
-rwxr-xr-xpgo_tools/merge_profdata_and_upload.py271
-rwxr-xr-xrun_tests.py23
-rwxr-xr-xrun_tests_for.py256
-rwxr-xr-xtest_gcc_dejagnu.py2
-rwxr-xr-xtest_gdb_dejagnu.py2
-rwxr-xr-xtoolchain_utils_githooks/check-format122
-rwxr-xr-xtoolchain_utils_githooks/check-lint91
-rwxr-xr-xtoolchain_utils_githooks/check-presubmit64
-rwxr-xr-xtoolchain_utils_githooks/pre-push.real53
344 files changed, 3219 insertions, 47652 deletions
diff --git a/.gitignore b/.gitignore
index 45abe3a9..d0b39e30 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,3 @@
logs
*.pyc
-llvm-project-copy/
-compiler_wrapper/compiler_wrapper
+
diff --git a/.style.yapf b/.style.yapf
deleted file mode 100644
index 97f7d657..00000000
--- a/.style.yapf
+++ /dev/null
@@ -1,3 +0,0 @@
-[style]
-based_on_style = chromium
-blank_line_before_module_docstring = true
diff --git a/OWNERS b/OWNERS
index 01737d3f..73ac9270 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,10 @@
set noparent
-include OWNERS.toolchain
+cmtice@google.com
+gbiv@google.com
+laszio@google.com
+llozano@google.com
+manojgupta@google.com
+rahulchaudhry@google.com
+yunlian@google.com
+zhizhouy@google.com
+# bjanakiraman@chromium.org
diff --git a/OWNERS.toolchain b/OWNERS.toolchain
deleted file mode 100644
index f7d6e3f6..00000000
--- a/OWNERS.toolchain
+++ /dev/null
@@ -1,12 +0,0 @@
-cmtice@chromium.org
-denik@chromium.org
-gbiv@chromium.org
-jiancai@chromium.org
-llozano@chromium.org
-manojgupta@chromium.org
-tbosch@chromium.org
-tcwang@chromium.org
-zhizhouy@chromium.org
-
-# Temporary; see comment #2 on crbug.com/982498
-llozano@google.com
diff --git a/PRESUBMIT.cfg b/PRESUBMIT.cfg
deleted file mode 100644
index d69d375b..00000000
--- a/PRESUBMIT.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[Hook Scripts]
-toolchain_utils_presubmits = ./toolchain_utils_githooks/check-presubmit ${PRESUBMIT_FILES}
diff --git a/README b/README
new file mode 100644
index 00000000..3ab3d7b7
--- /dev/null
+++ b/README
@@ -0,0 +1,10 @@
+To run scripts in this directory, first run:
+
+export PYTHONPATH=$(readlink -f .):$PYTHONPATH
+
+from this directory.
+
+Then you can run any script.
+
+To get help on any script, type in python <script> --help, or refer to the
+header of the script for more information.
diff --git a/README.md b/README.md
deleted file mode 100644
index a318e38d..00000000
--- a/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# `toolchain-utils`
-
-Various utilities used by the Chrome OS toolchain team.
-
-## Note
-
-To run Python scripts in this directory, first run:
-
-```
-export PYTHONPATH=$(readlink -f .):$PYTHONPATH
-```
-
-from this directory.
-
-Then you can run any script.
-
-To get help on any script, type in `python <script> --help`, or refer to the
-header of the script for more information.
diff --git a/afdo_metadata/README.md b/afdo_metadata/README.md
deleted file mode 100644
index 2338404c..00000000
--- a/afdo_metadata/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# Overview
-This directory contains JSON files describing metadata of AFDO profiles
-used to compile packages (Chrome and kernel) in Chrome OS.
-
-# Description of each JSON Files
-kernel_afdo.json contains the name of the latest AFDO profiles for each
-kernel revision.
-
-chrome_afdo.json contains the name of the latest AFDO profiles used in
-latest Chrome revision, including both benchmark and CWP profiles.
-
-# Usage
-## Updates
-When a new profile (kernel or Chrome) is successfully uploaded to the
-production GS bucket, a bot submits to modify the corresponding JSON
-file to reflect the updates.
-
-## Roll to Chrome OS
-There will be scheduler jobs listening to the changes made to these
-JSON files. When changes detected, buildbot will roll these changes into
-corresponding Chrome OS packages.
diff --git a/afdo_metadata/chrome_afdo.json b/afdo_metadata/chrome_afdo.json
deleted file mode 100644
index e46e05f7..00000000
--- a/afdo_metadata/chrome_afdo.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "silvermont": {
- "name": "R79-3928.0-1571049528.afdo"
- },
- "benchmark": {
- "name": "chromeos-chrome-amd64-79.0.3940.0_rc-r1.afdo"
- },
- "airmont": {
- "name": "R79-3931.2-1571054549.afdo"
- },
- "broadwell": {
- "name": "R79-3904.41-1571046112.afdo"
- }
-} \ No newline at end of file
diff --git a/afdo_metadata/kernel_afdo.json b/afdo_metadata/kernel_afdo.json
deleted file mode 100644
index 1f23b170..00000000
--- a/afdo_metadata/kernel_afdo.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "chromeos-kernel-3_14": {
- "name": "R79-12499.14-1569836097"
- },
- "chromeos-kernel-4_4": {
- "name": "R79-12576.0-1571045606"
- },
- "chromeos-kernel-3_18": {
- "name": "R79-12564.0-1570440773"
- }
-} \ No newline at end of file
diff --git a/afdo_redaction/redact_profile.py b/afdo_redaction/redact_profile.py
deleted file mode 100755
index 96375fee..00000000
--- a/afdo_redaction/redact_profile.py
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to redact apparent ICF'ed symbolsfrom textual AFDO profiles.
-
-AFDO sampling and ICF have an unfortunate interaction that causes a huge
-inflation in sample counts. Essentially, if you have N functions ICF'ed to the
-same location, one AFDO sample in any of those N functions will count as one
-sample in *each* of those N functions.
-
-In practice, there are a few forms of function bodies that are very heavily
-ICF'ed (e.g. `ret`, `xor %eax, %eax; ret`, destructors for widely-used types
-like std::map...). Recording 28,000 samples across all N thousand logical
-functions that point to the same body really hurts our AFDO numbers, given that
-our actual sample count across all of Chrome is something around 10,000,000.
-(No, really, these are actual numbers. In practice, at the time of writing,
-this script eliminates >90% of our AFDO samples by count. Sometimes as high as
-98%.)
-
-It reads a textual AFDO profile from stdin, and prints a 'fixed' version of it
-to stdout. A summary of what the script actually did is printed to stderr.
-"""
-
-from __future__ import division, print_function
-
-import collections
-import re
-import sys
-
-def _count_samples(samples):
- """Count the total number of samples in a function."""
- line_re = re.compile(r'^(\s*)\d+(?:\.\d+)?: (\d+)\s*$')
-
- top_level_samples = 0
- all_samples = 0
- for line in samples:
- m = line_re.match(line)
- if not m:
- continue
-
- spaces, n = m.groups()
- n = int(n)
- all_samples += n
- if len(spaces) == 1:
- top_level_samples += n
-
- return top_level_samples, all_samples
-
-
-# A ProfileRecord is a set of samples for a top-level symbol in a textual AFDO
-# profile. function_line is the top line of said function, and `samples` is
-# a list of all of the sample lines below that.
-#
-# We rely on the format of these strings in some places in this script. For
-# reference, a full function sample will look something like:
-#
-# _ZNK5blink10PaintLayer19GetCompositingStateEv:4530:185
-# 6: 83
-# 15: 126
-# 62832: 126
-# 6: _ZNK5blink10PaintLayer14GroupedMappingEv:2349
-# 1: 206
-# 1: _ZNK5blink10PaintLayer14GroupedMappersEv:2060
-# 1: 206
-# 11: _ZNK5blink10PaintLayer25GetCompositedLayerMappingEv:800
-# 2.1: 80
-#
-#
-# In that case, function_line is
-# '_ZNK5blink10PaintLayer19GetCompositingStateEv:4530:185', and samples will be
-# every line below that.
-#
-# Function lines look like;
-# function_symbol:entry_count:dont_care
-#
-# And samples look like one of:
-# arbitrary_number: sample_count
-# arbitrary_number: inlined_function_symbol:inlined_entry_count
-ProfileRecord = collections.namedtuple('ProfileRecord',
- ['function_line', 'samples'])
-
-
-def _normalize_samples(samples):
- """Normalizes the samples in the given function body.
-
- Normalization just means that we redact inlined function names. This is
- done so that a bit of templating doesn't make two function bodies look
- distinct. Namely:
-
- template <typename T>
- __attribute__((noinline))
- int getNumber() { return 1; }
-
- template <typename T>
- __attribute__((noinline))
- int getNumberIndirectly() { return getNumber<T>(); }
-
- int main() {
- return getNumber<int>() + getNumber<float>();
- }
-
- If the profile has the mangled name for getNumber<float> in
- getNumberIndirectly<float> (and similar for <int>), we'll consider them to
- be distinct when they're not.
- """
-
- # I'm not actually sure if this ends up being an issue in practice, but it's
- # simple enough to guard against.
- inlined_re = re.compile(r'(^\s*\d+): [^:]+:(\s*\d+)\s*$')
- result = []
- for s in samples:
- m = inlined_re.match(s)
- if m:
- result.append('%s: __REDACTED__:%s' % m.groups())
- else:
- result.append(s)
- return tuple(result)
-
-
-def _read_textual_afdo_profile(stream):
- """Parses an AFDO profile from a line stream into ProfileRecords."""
- # ProfileRecords are actually nested, due to inlining. For the purpose of
- # this script, that doesn't matter.
- lines = (line.rstrip() for line in stream)
- function_line = None
- samples = []
- for line in lines:
- if not line:
- continue
-
- if line[0].isspace():
- assert function_line is not None, "sample exists outside of a function?"
- samples.append(line)
- continue
-
- if function_line is not None:
- yield ProfileRecord(function_line=function_line, samples=tuple(samples))
- function_line = line
- samples = []
-
- if function_line is not None:
- yield ProfileRecord(function_line=function_line, samples=tuple(samples))
-
-
-# The default of 100 is arbitrarily selected, but it does make the overwhelming
-# majority of obvious sample duplication disappear.
-#
-# We experimented shortly with an nm-powered version of this script (rather
-# than structural matching, we'd see which functions mapped to the same literal
-# address). Running this with a high value (100) for max_repeats produced
-# results basically indistinguishable from nm, so ...
-#
-# Non-nm based approaches are superior because they don't require any prior
-# build artifacts; just an AFDO profile.
-def dedup_records(profile_records, summary_file, max_repeats=100):
- """Removes heavily duplicated records from profile_records.
-
- profile_records is expected to be an iterable of ProfileRecord.
- max_repeats ia how many functions must share identical bodies for us to
- consider it 'heavily duplicated' and remove the results.
- """
-
- # Build a mapping of function structure -> list of functions with identical
- # structure and sample counts
- counts = collections.defaultdict(list)
- for record in profile_records:
- counts[_normalize_samples(record.samples)].append(record)
-
- # Be sure that we didn't see any duplicate functions, since that's bad...
- total_functions_recorded = sum(len(records)
- for records in counts.itervalues())
-
- unique_function_names = set(record.function_line.split(':')[0]
- for records in counts.itervalues()
- for record in records)
-
- assert len(unique_function_names) == total_functions_recorded, \
- 'duplicate function names?'
-
- num_kept = 0
- num_samples_kept = 0
- num_top_samples_kept = 0
- num_total = 0
- num_samples_total = 0
- num_top_samples_total = 0
-
- for normalized_samples, records in counts.iteritems():
- top_sample_count, all_sample_count = _count_samples(normalized_samples)
- top_sample_count *= len(records)
- all_sample_count *= len(records)
-
- num_total += len(records)
- num_samples_total += all_sample_count
- num_top_samples_total += top_sample_count
-
- if len(records) >= max_repeats:
- continue
-
- num_kept += len(records)
- num_samples_kept += all_sample_count
- num_top_samples_kept += top_sample_count
- for record in records:
- yield record
-
- print('Retained {:,}/{:,} functions'.format(num_kept, num_total),
- file=summary_file)
- print('Retained {:,}/{:,} samples, total'.format(num_samples_kept,
- num_samples_total),
- file=summary_file)
- print('Retained {:,}/{:,} top-level samples' \
- .format(num_top_samples_kept, num_top_samples_total),
- file=summary_file)
-
-
-def run(profile_input_file, summary_output_file, profile_output_file):
- profile_records = _read_textual_afdo_profile(profile_input_file)
-
- # Sort this so we get deterministic output. AFDO doesn't care what order it's
- # in.
- deduped = sorted(dedup_records(profile_records, summary_output_file),
- key=lambda r: r.function_line)
- for function_line, samples in deduped:
- print(function_line, file=profile_output_file)
- print('\n'.join(samples), file=profile_output_file)
-
-
-def _main():
- run(profile_input_file=sys.stdin, summary_output_file=sys.stderr,
- profile_output_file=sys.stdout)
-
-
-if __name__ == '__main__':
- _main()
diff --git a/afdo_redaction/redact_profile_test.py b/afdo_redaction/redact_profile_test.py
deleted file mode 100755
index 27fb534e..00000000
--- a/afdo_redaction/redact_profile_test.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for redact_profile.py."""
-
-from __future__ import division, print_function
-
-import StringIO
-import unittest
-
-import redact_profile
-
-_redact_limit = redact_profile.dedup_records.func_defaults[0]
-
-
-def _redact(input_lines, summary_to=None):
- if isinstance(input_lines, str):
- input_lines = input_lines.splitlines()
-
- if summary_to is None:
- summary_to = StringIO.StringIO()
-
- output_to = StringIO.StringIO()
- redact_profile.run(profile_input_file=input_lines,
- summary_output_file=summary_to,
- profile_output_file=output_to)
- return output_to.getvalue()
-
-
-def _redact_with_summary(input_lines):
- summary = StringIO.StringIO()
- result = _redact(input_lines, summary_to=summary)
- return result, summary.getvalue()
-
-
-def _generate_repeated_function_body(repeats, fn_name='_some_name'):
- # Arbitrary function body ripped from a textual AFDO profile.
- function_header = fn_name + ':1234:185'
- function_body = [
- ' 6: 83',
- ' 15: 126',
- ' 62832: 126',
- ' 6: _ZNK5blink10PaintLayer14GroupedMappingEv:2349',
- ' 1: 206',
- ' 1: _ZNK5blink10PaintLayer14GroupedMappersEv:2060',
- ' 1: 206',
- ' 11: _ZNK5blink10PaintLayer25GetCompositedLayerMappingEv:800',
- ' 2.1: 80',
- ]
-
- # Be sure to zfill this, so the functions are output in sorted order.
- num_width = len(str(repeats))
-
- lines = []
- for i in range(repeats):
- num = str(i).zfill(num_width)
- lines.append(num + function_header)
- lines.extend(function_body)
- return lines
-
-
-class Tests(unittest.TestCase):
- """All of our tests for redact_profile."""
- def test_no_input_works(self):
- self.assertEqual(_redact(''), '')
-
- def test_single_function_works(self):
- lines = _generate_repeated_function_body(1)
- result_file = '\n'.join(lines) + '\n'
- self.assertEqual(_redact(lines), result_file)
-
- def test_duplicate_of_single_function_works(self):
- lines = _generate_repeated_function_body(2)
- result_file = '\n'.join(lines) + '\n'
- self.assertEqual(_redact(lines), result_file)
-
- def test_not_too_many_duplicates_of_single_function_redacts_none(self):
- lines = _generate_repeated_function_body(_redact_limit - 1)
- result_file = '\n'.join(lines) + '\n'
- self.assertEqual(_redact(lines), result_file)
-
- def test_many_duplicates_of_single_function_redacts_them_all(self):
- lines = _generate_repeated_function_body(_redact_limit)
- self.assertEqual(_redact(lines), '')
-
- def test_many_duplicates_of_single_function_leaves_other_functions(self):
- kept_lines = _generate_repeated_function_body(1, fn_name='_keep_me')
- # Something to distinguish us from the rest. Just bump a random counter.
- kept_lines[1] += '1'
-
- result_file = '\n'.join(kept_lines) + '\n'
-
- lines = _generate_repeated_function_body(_redact_limit,
- fn_name='_discard_me')
- self.assertEqual(_redact(kept_lines + lines), result_file)
- self.assertEqual(_redact(lines + kept_lines), result_file)
-
- more_lines = _generate_repeated_function_body(_redact_limit,
- fn_name='_and_discard_me')
- self.assertEqual(_redact(lines + kept_lines + more_lines), result_file)
- self.assertEqual(_redact(lines + more_lines), '')
-
- def test_correct_summary_is_printed_when_nothing_is_redacted(self):
- lines = _generate_repeated_function_body(1)
- _, summary = _redact_with_summary(lines)
- self.assertIn('Retained 1/1 functions', summary)
- self.assertIn('Retained 827/827 samples, total', summary)
- # Note that top-level samples == "samples without inlining taken into
- # account," not "sum(entry_counts)"
- self.assertIn('Retained 335/335 top-level samples', summary)
-
- def test_correct_summary_is_printed_when_everything_is_redacted(self):
- lines = _generate_repeated_function_body(_redact_limit)
- _, summary = _redact_with_summary(lines)
- self.assertIn('Retained 0/100 functions', summary)
- self.assertIn('Retained 0/82,700 samples, total', summary)
- self.assertIn('Retained 0/33,500 top-level samples', summary)
-
- def test_correct_summary_is_printed_when_most_everything_is_redacted(self):
- kept_lines = _generate_repeated_function_body(1, fn_name='_keep_me')
- kept_lines[1] += '1'
-
- lines = _generate_repeated_function_body(_redact_limit)
- _, summary = _redact_with_summary(kept_lines + lines)
- self.assertIn('Retained 1/101 functions', summary)
- self.assertIn('Retained 1,575/84,275 samples, total', summary)
- self.assertIn('Retained 1,083/34,583 top-level samples', summary)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/afdo_redaction/remove_indirect_calls.py b/afdo_redaction/remove_indirect_calls.py
deleted file mode 100755
index b879b2f0..00000000
--- a/afdo_redaction/remove_indirect_calls.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to remove all indirect call targets from textual AFDO profiles.
-
-Indirect call samples can cause code to appear 'live' when it otherwise
-wouldn't be. This resurrection can happen either by the way of profile-based
-speculative devirtualization, or because of imprecision in LLVM's liveness
-calculations when performing LTO.
-
-This generally isn't a problem when an AFDO profile is applied to the binary it
-was collected on. However, because we e.g., build NaCl from the same set of
-objects as Chrome, this can become problematic, and lead to NaCl doubling in
-size (or worse). See crbug.com/1005023 and crbug.com/916130.
-"""
-
-from __future__ import division, print_function
-
-import argparse
-import re
-import sys
-
-
-def _remove_indirect_call_targets(lines):
- # Lines with indirect call targets look like:
- # 1.1: 1234 foo:111 bar:122
- #
- # Where 1.1 is the line info/discriminator, 1234 is the total number of
- # samples seen for that line/discriminator, foo:111 is "111 of the calls here
- # went to foo," and bar:122 is "122 of the calls here went to bar."
- call_target_re = re.compile(
- r"""
- ^\s+ # Top-level lines are function records.
- \d+(?:\.\d+)?: # Line info/discriminator
- \s+
- \d+ # Total sample count
- \s+
- ((?:[^\s:]+:\d+\s*)+) # Indirect call target(s)
- $
- """, re.VERBOSE)
- for line in lines:
- line = line.rstrip()
-
- match = call_target_re.match(line)
- if not match:
- yield line + '\n'
- continue
-
- group_start, group_end = match.span(1)
- assert group_end == len(line)
- yield line[:group_start].rstrip() + '\n'
-
-
-def run(input_stream, output_stream):
- for line in _remove_indirect_call_targets(input_stream):
- output_stream.write(line)
-
-
-def main():
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--input',
- default='/dev/stdin',
- help='File to read from. Defaults to stdin.')
- parser.add_argument(
- '--output',
- default='/dev/stdout',
- help='File to write to. Defaults to stdout.')
- args = parser.parse_args()
-
- with open(args.input) as stdin:
- with open(args.output, 'w') as stdout:
- run(stdin, stdout)
-
-
-if __name__ == '__main__':
- main()
diff --git a/afdo_redaction/remove_indirect_calls_test.py b/afdo_redaction/remove_indirect_calls_test.py
deleted file mode 100755
index 1499af25..00000000
--- a/afdo_redaction/remove_indirect_calls_test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for remove_indirect_calls"""
-
-from __future__ import print_function
-
-import io
-import unittest
-
-import remove_indirect_calls
-
-
-def _run_test(input_lines):
- input_buf = io.BytesIO('\n'.join(input_lines))
- output_buf = io.BytesIO()
- remove_indirect_calls.run(input_buf, output_buf)
- return output_buf.getvalue().splitlines()
-
-
-class Test(unittest.TestCase):
- """Tests"""
-
- def test_empty_profile(self):
- self.assertEqual(_run_test([]), [])
-
- def test_removal_on_real_world_code(self):
- # These are copied from an actual textual AFDO profile, but the names made
- # lints unhappy due to their length, so I had to be creative.
- profile_lines = """_ZLongSymbolName:52862:1766
- 14: 2483
- 8.1: _SomeInlinedSym:45413
- 11: _AndAnother:35481
- 2: 2483
- 2.1: _YetAnother:25549
- 3: 2483
- 3.1: 351
- 3.3: 2526 IndirectTarg1:675 Targ2:397 Targ3:77
- 13.2: Whee:9932
- 1.1: Whoo:9932
- 0: BleepBloop:9932
- 0: 2483
- """.strip().splitlines()
-
- expected_lines = """_ZLongSymbolName:52862:1766
- 14: 2483
- 8.1: _SomeInlinedSym:45413
- 11: _AndAnother:35481
- 2: 2483
- 2.1: _YetAnother:25549
- 3: 2483
- 3.1: 351
- 3.3: 2526
- 13.2: Whee:9932
- 1.1: Whoo:9932
- 0: BleepBloop:9932
- 0: 2483
- """.strip().splitlines()
-
- self.assertEqual(_run_test(profile_lines), expected_lines)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/afdo_tools/bisection/README.md b/afdo_tools/bisection/README.md
deleted file mode 100644
index 558b2ef0..00000000
--- a/afdo_tools/bisection/README.md
+++ /dev/null
@@ -1,71 +0,0 @@
-# `afdo_prof_analysis.py`
-
-`afdo_prof_analysis.py` is the main script and entrypoint for this AFDO profile
-analysis tool. This tool attempts to determine which part of a "bad" profile is
-bad. It does this using several analysis techniques which iterate over provided
-good and bad profiles to isolate the problematic portion of the bad profile.
-Goodness and badness are determined by the user, by passing a user-provided
-bash script. If the program runs successfully to completion, results will be
-output to the path specified by `analysis_output_file` as a JSON with the
-following keys:
-
-* `seed`: Float, the seed to randomness for this analysis
-* `bisect_results`: a sub-JSON with the following keys:
- + `ranges`: 2d list, where each element is a list of functions that are
- problematic in conjunction with one another.
- + `individuals`: individual functions with a bad profile
-* `good_only_functions`: Boolean: is the bad profile just missing some
- function profiles (that only the good profile has?)
-* `bad_only_functions`: Boolean: does the bad profile have extra function
- profiles (i.e. the good profile doesn't have these functions) causing
- bad-ness?
-
-## Resuming
-
-`afdo_prof_analysis.py` offers the ability to resume profile analysis in case
-it was interrupted and the user does not want to restart analysis from the
-beginning. On every iteration of the analysis, it saves state to disk (as
-specified by the `state_file` flag). By default the tool will resume from this
-state file, and this behavior can be disabled by providing the `no_resume` flag
-when running the script.
-
-## Usage
-
-### Example Invocation
- `python afdo_prof_analysis.py --good_prof good.txt --bad_prof bad.txt
- --external_decider profile_test.sh --analysis_output_file afdo_results.json`
-
-### Required flags:
-
- * `good_prof`: A "good" text-based AFDO profile as outputted by
- bin/llvm-profdata (within an LLVM build).
- * `bad_prof`: A "bad" text-based AFDO profile as outputted by
- bin/llvm-profdata (within an LLVM build).
- * `external_decider`: A user-provided bash script that, given a text-based
- AFDO profile as above, has one of the following exit codes:
- + 0: The given profile is GOOD.
- + 1: The given profile is BAD.
- + 125: The goodness of the given profile cannot be accurately determined by
- the benchmarking script.
- + 127: Something went wrong while running the benchmarking script, no
- information about the profile (and this result will cause analysis to abort).
- * `analysis_output_file`: The path of a file to which to write the output.
- analysis results.
-
-### Optional flags:
-
-Note that these are all related to the state-saving feature which is
-described above in "Resuming", so feel free to return to this later.
- * `state_file`: An explicit path for saving/restoring intermediate state.
- Defaults to `$(pwd)/afdo_analysis_state.json`.
- * `no_resume`: If enabled, the analysis will not attempt to resume from
- previous state; instead, it will start from the beginning. Defaults to
- False, i.e. by default will always try to resume from previous state if
- possible.
- * `remove_state_on_completion`: If enabled, the state file will be removed
- upon the completion of profile analysis. If disabled, the state file will
- be renamed to `<state_file_name>.completed.<date>` to prevent reusing this
- as intermediate state. Defaults to False.
- * `seed`: A float specifying the seed for randomness. Defaults to seconds
- since epoch. Note that this can only be passed when --no_resume is True,
- since otherwise there is ambiguity in which seed to use.
diff --git a/afdo_tools/bisection/afdo_prof_analysis.py b/afdo_tools/bisection/afdo_prof_analysis.py
deleted file mode 100755
index 8455d2b3..00000000
--- a/afdo_tools/bisection/afdo_prof_analysis.py
+++ /dev/null
@@ -1,438 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""AFDO Profile analysis tool.
-
-This script takes a good AFDO profile, a bad AFDO profile, and an external
-script which deems particular AFDO profiles as GOOD/BAD/SKIP, and an output
-file as arguments. Given these pieces of information, it analyzes the profiles
-to try and determine what exactly is bad about the bad profile. It does this
-with three main techniques: bisecting search, range search, and rough diff-ing.
-"""
-
-from __future__ import division, print_function
-
-import argparse
-import json
-# Pylint recommends we use "from chromite.lib import cros_logging as logging".
-# Chromite specific policy message, we want to keep using the standard logging
-# pylint: disable=cros-logging-import
-import logging
-import os
-import random
-import subprocess
-import time
-from datetime import date
-from enum import IntEnum
-from tempfile import mkstemp
-
-
-class StatusEnum(IntEnum):
- """Enum of valid statuses returned by profile decider."""
- GOOD_STATUS = 0
- BAD_STATUS = 1
- SKIP_STATUS = 125
- PROBLEM_STATUS = 127
-
-
-statuses = StatusEnum.__members__.values()
-
-_NUM_RUNS_RANGE_SEARCH = 20 # how many times range search should run its algo
-
-
-def json_to_text(json_prof):
- text_profile = []
- for func in json_prof:
- text_profile.append(func)
- text_profile.append(json_prof[func])
- return ''.join(text_profile)
-
-
-def text_to_json(f):
- """Performs basic parsing of an AFDO text-based profile.
-
- This parsing expects an input file object with contents of the form generated
- by bin/llvm-profdata (within an LLVM build).
- """
- results = {}
- curr_func = None
- curr_data = []
- for line in f:
- if not line.startswith(' '):
- if curr_func:
- results[curr_func] = ''.join(curr_data)
- curr_data = []
- curr_func, rest = line.split(':', 1)
- curr_func = curr_func.strip()
- curr_data.append(':' + rest)
- else:
- curr_data.append(line)
-
- if curr_func:
- results[curr_func] = ''.join(curr_data)
- return results
-
-
-def prof_to_tmp(prof):
- """Creates (and returns) temp filename for given JSON-based AFDO profile."""
- fd, temp_path = mkstemp()
- text_profile = json_to_text(prof)
- with open(temp_path, 'w') as f:
- f.write(text_profile)
- os.close(fd)
- return temp_path
-
-
-class DeciderState(object):
- """Class for the external decider."""
-
- def __init__(self, state_file, external_decider, seed):
- self.accumulated_results = [] # over this run of the script
- self.external_decider = external_decider
- self.saved_results = [] # imported from a previous run of this script
- self.state_file = state_file
- self.seed = seed if seed is not None else time.time()
-
- def load_state(self):
- if not os.path.exists(self.state_file):
- logging.info('State file %s is empty, starting from beginning',
- self.state_file)
- return
-
- with open(self.state_file) as f:
- try:
- data = json.load(f)
- except:
- raise ValueError('Provided state file %s to resume from does not'
- ' contain a valid JSON.' % self.state_file)
-
- if 'seed' not in data or 'accumulated_results' not in data:
- raise ValueError('Provided state file %s to resume from does not contain'
- ' the correct information' % self.state_file)
-
- self.seed = data['seed']
- self.saved_results = data['accumulated_results']
- logging.info('Restored state from %s...', self.state_file)
-
- def save_state(self):
- state = {'seed': self.seed, 'accumulated_results': self.accumulated_results}
- tmp_file = self.state_file + '.new'
- with open(tmp_file, 'w') as f:
- json.dump(state, f, indent=2)
- os.rename(tmp_file, self.state_file)
- logging.info('Logged state to %s...', self.state_file)
-
- def run(self, prof, save_run=True):
- """Run the external deciding script on the given profile."""
- if self.saved_results and save_run:
- result = self.saved_results.pop(0)
- self.accumulated_results.append(result)
- self.save_state()
- return StatusEnum(result)
-
- filename = prof_to_tmp(prof)
-
- try:
- return_code = subprocess.call([self.external_decider, filename])
- finally:
- os.remove(filename)
-
- if return_code in statuses:
- status = StatusEnum(return_code)
- if status == StatusEnum.PROBLEM_STATUS:
- prof_file = prof_to_tmp(prof)
- raise RuntimeError('Provided decider script returned PROBLEM_STATUS '
- 'when run on profile stored at %s. AFDO Profile '
- 'analysis aborting' % prof_file)
- if save_run:
- self.accumulated_results.append(status.value)
- logging.info('Run %d of external script %s returned %s',
- len(self.accumulated_results), self.external_decider,
- status.name)
- self.save_state()
- return status
- raise ValueError(
- 'Provided external script had unexpected return code %d' % return_code)
-
-
-def bisect_profiles(decider, good, bad, common_funcs, lo, hi):
- """Recursive function which bisects good and bad profiles.
-
- Args:
- decider: function which, given a JSON-based AFDO profile, returns an
- element of 'statuses' based on the status of the profile
- good: JSON-based good AFDO profile
- bad: JSON-based bad AFDO profile
- common_funcs: the list of functions which have top-level profiles in both
- 'good' and 'bad'
- lo: lower bound of range being bisected on
- hi: upper bound of range being bisected on
-
- Returns a dictionary with two keys: 'individuals' and 'ranges'.
- 'individuals': a list of individual functions found to make the profile BAD
- 'ranges': a list of lists of function names. Each list of functions is a list
- such that including all of those from the bad profile makes the good
- profile BAD. It may not be the smallest problematic combination, but
- definitely contains a problematic combination of profiles.
- """
-
- results = {'individuals': [], 'ranges': []}
- if hi - lo <= 1:
- logging.info('Found %s as a problematic function profile', common_funcs[lo])
- results['individuals'].append(common_funcs[lo])
- return results
-
- mid = (lo + hi) // 2
- lo_mid_prof = good.copy() # covers bad from lo:mid
- mid_hi_prof = good.copy() # covers bad from mid:hi
- for func in common_funcs[lo:mid]:
- lo_mid_prof[func] = bad[func]
- for func in common_funcs[mid:hi]:
- mid_hi_prof[func] = bad[func]
-
- lo_mid_verdict = decider.run(lo_mid_prof)
- mid_hi_verdict = decider.run(mid_hi_prof)
-
- if lo_mid_verdict == StatusEnum.BAD_STATUS:
- result = bisect_profiles(decider, good, bad, common_funcs, lo, mid)
- results['individuals'].extend(result['individuals'])
- results['ranges'].extend(result['ranges'])
- if mid_hi_verdict == StatusEnum.BAD_STATUS:
- result = bisect_profiles(decider, good, bad, common_funcs, mid, hi)
- results['individuals'].extend(result['individuals'])
- results['ranges'].extend(result['ranges'])
-
- # neither half is bad -> the issue is caused by several things occuring
- # in conjunction, and this combination crosses 'mid'
- if lo_mid_verdict == mid_hi_verdict == StatusEnum.GOOD_STATUS:
- problem_range = range_search(decider, good, bad, common_funcs, lo, hi)
- if problem_range:
- logging.info('Found %s as a problematic combination of profiles',
- str(problem_range))
- results['ranges'].append(problem_range)
-
- return results
-
-
-def bisect_profiles_wrapper(decider, good, bad, perform_check=True):
- """Wrapper for recursive profile bisection."""
-
- # Validate good and bad profiles are such, otherwise bisection reports noise
- # Note that while decider is a random mock, these assertions may fail.
- if perform_check:
- if decider.run(good, save_run=False) != StatusEnum.GOOD_STATUS:
- raise ValueError('Supplied good profile is not actually GOOD')
- if decider.run(bad, save_run=False) != StatusEnum.BAD_STATUS:
- raise ValueError('Supplied bad profile is not actually BAD')
-
- common_funcs = sorted(func for func in good if func in bad)
- if not common_funcs:
- return {'ranges': [], 'individuals': []}
-
- # shuffle because the results of our analysis can be quite order-dependent
- # but this list has no inherent ordering. By shuffling each time, the chances
- # of finding new, potentially interesting results are increased each time
- # the program is run
- random.shuffle(common_funcs)
- results = bisect_profiles(decider, good, bad, common_funcs, 0,
- len(common_funcs))
- results['ranges'].sort()
- results['individuals'].sort()
- return results
-
-
-def range_search(decider, good, bad, common_funcs, lo, hi):
- """Searches for problematic range crossing mid border.
-
- The main inner algorithm is the following, which looks for the smallest
- possible ranges with problematic combinations. It starts the upper bound at
- the midpoint, and increments in halves until it gets a BAD profile.
- Then, it increments the lower bound (in halves) until the resultant profile
- is GOOD, and then we have a range that causes 'BAD'ness.
-
- It does this _NUM_RUNS_RANGE_SEARCH times, and shuffles the functions being
- looked at uniquely each time to try and get the smallest possible range
- of functions in a reasonable timeframe.
- """
-
- average = lambda x, y: int(round((x + y) // 2.0))
-
- def find_upper_border(good_copy, funcs, lo, hi, last_bad_val=None):
- """Finds the upper border of problematic range."""
- mid = average(lo, hi)
- if mid == lo or mid == hi:
- return last_bad_val or hi
-
- for func in funcs[lo:mid]:
- good_copy[func] = bad[func]
- verdict = decider.run(good_copy)
-
- # reset for next iteration
- for func in funcs:
- good_copy[func] = good[func]
-
- if verdict == StatusEnum.BAD_STATUS:
- return find_upper_border(good_copy, funcs, lo, mid, mid)
- return find_upper_border(good_copy, funcs, mid, hi, last_bad_val)
-
- def find_lower_border(good_copy, funcs, lo, hi, last_bad_val=None):
- """Finds the lower border of problematic range."""
- mid = average(lo, hi)
- if mid == lo or mid == hi:
- return last_bad_val or lo
-
- for func in funcs[lo:mid]:
- good_copy[func] = good[func]
- verdict = decider.run(good_copy)
-
- # reset for next iteration
- for func in funcs:
- good_copy[func] = bad[func]
-
- if verdict == StatusEnum.BAD_STATUS:
- return find_lower_border(good_copy, funcs, mid, hi, lo)
- return find_lower_border(good_copy, funcs, lo, mid, last_bad_val)
-
- lo_mid_funcs = []
- mid_hi_funcs = []
- min_range_funcs = []
- for _ in range(_NUM_RUNS_RANGE_SEARCH):
-
- if min_range_funcs: # only examine range we've already narrowed to
- random.shuffle(lo_mid_funcs)
- random.shuffle(mid_hi_funcs)
- else: # consider lo-mid and mid-hi separately bc must cross border
- mid = (lo + hi) // 2
- lo_mid_funcs = common_funcs[lo:mid]
- mid_hi_funcs = common_funcs[mid:hi]
-
- funcs = lo_mid_funcs + mid_hi_funcs
- hi = len(funcs)
- mid = len(lo_mid_funcs)
- lo = 0
-
- # because we need the problematic pair to pop up before we can narrow it
- prof = good.copy()
- for func in lo_mid_funcs:
- prof[func] = bad[func]
-
- upper_border = find_upper_border(prof, funcs, mid, hi)
- for func in lo_mid_funcs + funcs[mid:upper_border]:
- prof[func] = bad[func]
-
- lower_border = find_lower_border(prof, funcs, lo, mid)
- curr_range_funcs = funcs[lower_border:upper_border]
-
- if not min_range_funcs or len(curr_range_funcs) < len(min_range_funcs):
- min_range_funcs = curr_range_funcs
- lo_mid_funcs = lo_mid_funcs[lo_mid_funcs.index(min_range_funcs[0]):]
- mid_hi_funcs = mid_hi_funcs[:mid_hi_funcs.index(min_range_funcs[-1]) + 1]
- if len(min_range_funcs) == 2:
- min_range_funcs.sort()
- return min_range_funcs # can't get any smaller
-
- min_range_funcs.sort()
- return min_range_funcs
-
-
-def check_good_not_bad(decider, good, bad):
- """Check if bad prof becomes GOOD by adding funcs it lacks from good prof"""
- bad_copy = bad.copy()
- for func in good:
- if func not in bad:
- bad_copy[func] = good[func]
- return decider.run(bad_copy) == StatusEnum.GOOD_STATUS
-
-
-def check_bad_not_good(decider, good, bad):
- """Check if good prof BAD after adding funcs bad prof has that good doesnt"""
- good_copy = good.copy()
- for func in bad:
- if func not in good:
- good_copy[func] = bad[func]
- return decider.run(good_copy) == StatusEnum.BAD_STATUS
-
-
-def parse_args():
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--good_prof',
- required=True,
- help='Text-based "Good" profile for analysis')
- parser.add_argument(
- '--bad_prof', required=True, help='Text-based "Bad" profile for analysis')
- parser.add_argument(
- '--external_decider',
- required=True,
- help='External script that, given an AFDO profile, returns '
- 'GOOD/BAD/SKIP')
- parser.add_argument(
- '--analysis_output_file',
- required=True,
- help='File to output JSON results to')
- parser.add_argument(
- '--state_file',
- default='%s/afdo_analysis_state.json' % os.getcwd(),
- help='File path containing state to load from initially, and will be '
- 'overwritten with new state on each iteration')
- parser.add_argument(
- '--no_resume',
- action='store_true',
- help='If enabled, no initial state will be loaded and the program will '
- 'run from the beginning')
- parser.add_argument(
- '--remove_state_on_completion',
- action='store_true',
- help='If enabled, state file will be removed once profile analysis is '
- 'completed')
- parser.add_argument(
- '--seed', type=float, help='Float specifying seed for randomness')
- return parser.parse_args()
-
-
-def main(flags):
- if not flags.no_resume and flags.seed: # conflicting seeds
- raise RuntimeError('Ambiguous seed value; do not resume from existing '
- 'state and also specify seed by command line flag')
-
- decider = DeciderState(
- flags.state_file, flags.external_decider, seed=flags.seed)
- if not flags.no_resume:
- decider.load_state()
- random.seed(decider.seed)
-
- with open(flags.good_prof) as good_f:
- good_items = text_to_json(good_f)
- with open(flags.bad_prof) as bad_f:
- bad_items = text_to_json(bad_f)
-
- bisect_results = bisect_profiles_wrapper(decider, good_items, bad_items)
- gnb_result = check_good_not_bad(decider, good_items, bad_items)
- bng_result = check_bad_not_good(decider, good_items, bad_items)
-
- results = {
- 'seed': decider.seed,
- 'bisect_results': bisect_results,
- 'good_only_functions': gnb_result,
- 'bad_only_functions': bng_result
- }
- with open(flags.analysis_output_file, 'wb') as f:
- json.dump(results, f, indent=2)
- if flags.remove_state_on_completion:
- os.remove(flags.state_file)
- logging.info('Removed state file %s following completion of script...',
- flags.state_file)
- else:
- completed_state_file = '%s.completed.%s' % (flags.state_file,
- str(date.today()))
- os.rename(flags.state_file, completed_state_file)
- logging.info('Stored completed state file as %s...', completed_state_file)
- return results
-
-
-if __name__ == '__main__':
- main(parse_args())
diff --git a/afdo_tools/bisection/afdo_prof_analysis_e2e_test.py b/afdo_tools/bisection/afdo_prof_analysis_e2e_test.py
deleted file mode 100755
index 85c1c175..00000000
--- a/afdo_tools/bisection/afdo_prof_analysis_e2e_test.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""End-to-end test for afdo_prof_analysis."""
-
-from __future__ import absolute_import, division, print_function
-
-import json
-import os
-import shutil
-import tempfile
-import unittest
-from datetime import date
-
-import afdo_prof_analysis as analysis
-
-
-class ObjectWithFields(object):
- """Turns kwargs given to the constructor into fields on an object.
-
- Example usage:
- x = ObjectWithFields(a=1, b=2)
- assert x.a == 1
- assert x.b == 2
- """
-
- def __init__(self, **kwargs):
- for key, val in kwargs.items():
- setattr(self, key, val)
-
-
-class AfdoProfAnalysisE2ETest(unittest.TestCase):
- """Class for end-to-end testing of AFDO Profile Analysis"""
-
- # nothing significant about the values, just easier to remember even vs odd
- good_prof = {
- 'func_a': ':1\n 1: 3\n 3: 5\n 5: 7\n',
- 'func_b': ':3\n 3: 5\n 5: 7\n 7: 9\n',
- 'func_c': ':5\n 5: 7\n 7: 9\n 9: 11\n',
- 'func_d': ':7\n 7: 9\n 9: 11\n 11: 13\n',
- 'good_func_a': ':11\n',
- 'good_func_b': ':13\n'
- }
-
- bad_prof = {
- 'func_a': ':2\n 2: 4\n 4: 6\n 6: 8\n',
- 'func_b': ':4\n 4: 6\n 6: 8\n 8: 10\n',
- 'func_c': ':6\n 6: 8\n 8: 10\n 10: 12\n',
- 'func_d': ':8\n 8: 10\n 10: 12\n 12: 14\n',
- 'bad_func_a': ':12\n',
- 'bad_func_b': ':14\n'
- }
-
- expected = {
- 'good_only_functions': False,
- 'bad_only_functions': True,
- 'bisect_results': {
- 'ranges': [],
- 'individuals': ['func_a']
- }
- }
-
- def test_afdo_prof_analysis(self):
- # Individual issues take precedence by nature of our algos
- # so first, that should be caught
- good = self.good_prof.copy()
- bad = self.bad_prof.copy()
- self.run_check(good, bad, self.expected)
-
- # Now remove individuals and exclusively BAD, and check that range is caught
- bad['func_a'] = good['func_a']
- bad.pop('bad_func_a')
- bad.pop('bad_func_b')
-
- expected_cp = self.expected.copy()
- expected_cp['bad_only_functions'] = False
- expected_cp['bisect_results'] = {
- 'individuals': [],
- 'ranges': [['func_b', 'func_c', 'func_d']]
- }
-
- self.run_check(good, bad, expected_cp)
-
- def test_afdo_prof_state(self):
- """Verifies that saved state is correct replication."""
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
-
- good = self.good_prof.copy()
- bad = self.bad_prof.copy()
- # add more functions to data
- for x in range(400):
- good['func_%d' % x] = ''
- bad['func_%d' % x] = ''
-
- fd_first, first_result = tempfile.mkstemp(dir=temp_dir)
- os.close(fd_first)
- fd_state, state_file = tempfile.mkstemp(dir=temp_dir)
- os.close(fd_state)
- self.run_check(
- self.good_prof,
- self.bad_prof,
- self.expected,
- state_file=state_file,
- out_file=first_result)
-
- fd_second, second_result = tempfile.mkstemp(dir=temp_dir)
- os.close(fd_second)
- completed_state_file = '%s.completed.%s' % (state_file, str(date.today()))
- self.run_check(
- self.good_prof,
- self.bad_prof,
- self.expected,
- state_file=completed_state_file,
- no_resume=False,
- out_file=second_result)
-
- with open(first_result) as f:
- initial_run = json.load(f)
- with open(second_result) as f:
- loaded_run = json.load(f)
- self.assertEqual(initial_run, loaded_run)
-
- def test_exit_on_problem_status(self):
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
-
- fd_state, state_file = tempfile.mkstemp(dir=temp_dir)
- os.close(fd_state)
- with self.assertRaises(RuntimeError):
- self.run_check(
- self.good_prof,
- self.bad_prof,
- self.expected,
- state_file=state_file,
- extern_decider='problemstatus_external.sh')
-
- def test_state_assumption(self):
-
- def compare_runs(tmp_dir, first_ctr, second_ctr):
- """Compares given prof versions between first and second run in test."""
- first_prof = '%s/.first_run_%d' % (tmp_dir, first_ctr)
- second_prof = '%s/.second_run_%d' % (tmp_dir, second_ctr)
- with open(first_prof) as f:
- first_prof_text = f.read()
- with open(second_prof) as f:
- second_prof_text = f.read()
- self.assertEqual(first_prof_text, second_prof_text)
-
- good_prof = {'func_a': ':1\n3: 3\n5: 7\n'}
- bad_prof = {'func_a': ':2\n4: 4\n6: 8\n'}
- # add some noise to the profiles; 15 is an arbitrary choice
- for x in range(15):
- func = 'func_%d' % x
- good_prof[func] = ':%d\n' % (x)
- bad_prof[func] = ':%d\n' % (x + 1)
- expected = {
- 'bisect_results': {
- 'ranges': [],
- 'individuals': ['func_a']
- },
- 'good_only_functions': False,
- 'bad_only_functions': False
- }
-
- # using a static temp dir rather than a dynamic one because these files are
- # shared between the bash scripts and this Python test, and the arguments
- # to the bash scripts are fixed by afdo_prof_analysis.py so it would be
- # difficult to communicate dynamically generated directory to bash scripts
- scripts_tmp_dir = '%s/afdo_test_tmp' % os.getcwd()
- os.mkdir(scripts_tmp_dir)
- self.addCleanup(shutil.rmtree, scripts_tmp_dir, ignore_errors=True)
-
- # files used in the bash scripts used as external deciders below
- # - count_file tracks the current number of calls to the script in total
- # - local_count_file tracks the number of calls to the script without
- # interruption
- count_file = '%s/.count' % scripts_tmp_dir
- local_count_file = '%s/.local_count' % scripts_tmp_dir
-
- # runs through whole thing at once
- initial_seed = self.run_check(
- good_prof,
- bad_prof,
- expected,
- extern_decider='state_assumption_external.sh')
- with open(count_file) as f:
- num_calls = int(f.read())
- os.remove(count_file) # reset counts for second run
- finished_state_file = 'afdo_analysis_state.json.completed.%s' % str(
- date.today())
- self.addCleanup(os.remove, finished_state_file)
-
- # runs the same analysis but interrupted each iteration
- for i in range(2 * num_calls + 1):
- no_resume_run = (i == 0)
- seed = initial_seed if no_resume_run else None
- try:
- self.run_check(
- good_prof,
- bad_prof,
- expected,
- no_resume=no_resume_run,
- extern_decider='state_assumption_interrupt.sh',
- seed=seed)
- break
- except RuntimeError:
- # script was interrupted, so we restart local count
- os.remove(local_count_file)
- else:
- raise RuntimeError('Test failed -- took too many iterations')
-
- for initial_ctr in range(3): # initial runs unaffected by interruption
- compare_runs(scripts_tmp_dir, initial_ctr, initial_ctr)
-
- start = 3
- for ctr in range(start, num_calls):
- # second run counter incremented by 4 for each one first run is because
- # +2 for performing initial checks on good and bad profs each time
- # +1 for PROBLEM_STATUS run which causes error and restart
- compare_runs(scripts_tmp_dir, ctr, 6 + (ctr - start) * 4)
-
- def run_check(self,
- good_prof,
- bad_prof,
- expected,
- state_file=None,
- no_resume=True,
- out_file=None,
- extern_decider=None,
- seed=None):
-
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
-
- good_prof_file = '%s/%s' % (temp_dir, 'good_prof.txt')
- bad_prof_file = '%s/%s' % (temp_dir, 'bad_prof.txt')
- good_prof_text = analysis.json_to_text(good_prof)
- bad_prof_text = analysis.json_to_text(bad_prof)
- with open(good_prof_file, 'w') as f:
- f.write(good_prof_text)
- with open(bad_prof_file, 'w') as f:
- f.write(bad_prof_text)
-
- dir_path = os.path.dirname(os.path.realpath(__file__)) # dir of this file
- external_script = '%s/%s' % (dir_path, extern_decider or 'e2e_external.sh')
-
- # FIXME: This test ideally shouldn't be writing to $PWD
- if state_file is None:
- state_file = '%s/afdo_analysis_state.json' % os.getcwd()
-
- def rm_state():
- try:
- os.unlink(state_file)
- except OSError:
- # Probably because the file DNE. That's fine.
- pass
-
- self.addCleanup(rm_state)
-
- actual = analysis.main(
- ObjectWithFields(
- good_prof=good_prof_file,
- bad_prof=bad_prof_file,
- external_decider=external_script,
- analysis_output_file=out_file or '/dev/null',
- state_file=state_file,
- no_resume=no_resume,
- remove_state_on_completion=False,
- seed=seed,
- ))
- actual_seed = actual.pop('seed') # nothing to check
- self.assertEqual(actual, expected)
- return actual_seed
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/afdo_tools/bisection/afdo_prof_analysis_test.py b/afdo_tools/bisection/afdo_prof_analysis_test.py
deleted file mode 100755
index 7bd3050c..00000000
--- a/afdo_tools/bisection/afdo_prof_analysis_test.py
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for afdo_prof_analysis."""
-
-from __future__ import print_function
-
-import afdo_prof_analysis as analysis
-
-import random
-import StringIO
-import unittest
-
-
-class AfdoProfAnalysisTest(unittest.TestCase):
- """Class for testing AFDO Profile Analysis"""
- bad_items = {'func_a': '1', 'func_b': '3', 'func_c': '5'}
- good_items = {'func_a': '2', 'func_b': '4', 'func_d': '5'}
- random.seed(13) # 13 is an arbitrary choice. just for consistency
- # add some extra info to make tests more reflective of real scenario
- for num in range(128):
- func_name = 'func_extra_%d' % num
- # 1/3 to both, 1/3 only to good, 1/3 only to bad
- rand_val = random.randint(1, 101)
- if rand_val < 67:
- bad_items[func_name] = 'test_data'
- if rand_val < 34 or rand_val >= 67:
- good_items[func_name] = 'test_data'
-
- analysis.random.seed(5) # 5 is an arbitrary choice. For consistent testing
-
- def test_text_to_json(self):
- test_data = StringIO.StringIO('deflate_slow:87460059:3\n'
- ' 3: 24\n'
- ' 14: 54767\n'
- ' 15: 664 fill_window:22\n'
- ' 16: 661\n'
- ' 19: 637\n'
- ' 41: 36692 longest_match:36863\n'
- ' 44: 36692\n'
- ' 44.2: 5861\n'
- ' 46: 13942\n'
- ' 46.1: 14003\n')
- expected = {
- 'deflate_slow': ':87460059:3\n'
- ' 3: 24\n'
- ' 14: 54767\n'
- ' 15: 664 fill_window:22\n'
- ' 16: 661\n'
- ' 19: 637\n'
- ' 41: 36692 longest_match:36863\n'
- ' 44: 36692\n'
- ' 44.2: 5861\n'
- ' 46: 13942\n'
- ' 46.1: 14003\n'
- }
- actual = analysis.text_to_json(test_data)
- self.assertEqual(actual, expected)
- test_data.close()
-
- def test_text_to_json_empty_afdo(self):
- expected = {}
- actual = analysis.text_to_json('')
- self.assertEqual(actual, expected)
-
- def test_json_to_text(self):
- example_prof = {'func_a': ':1\ndata\n', 'func_b': ':2\nmore data\n'}
- expected_text = 'func_a:1\ndata\nfunc_b:2\nmore data\n'
- self.assertEqual(analysis.json_to_text(example_prof), expected_text)
-
- def test_bisect_profiles(self):
-
- # mock run of external script with arbitrarily-chosen bad profile vals
- # save_run specified and unused b/c afdo_prof_analysis.py
- # will call with argument explicitly specified
- # pylint: disable=unused-argument
- class DeciderClass(object):
- """Class for this tests's decider."""
-
- def run(self, prof, save_run=False):
- if '1' in prof['func_a'] or '3' in prof['func_b']:
- return analysis.StatusEnum.BAD_STATUS
- return analysis.StatusEnum.GOOD_STATUS
-
- results = analysis.bisect_profiles_wrapper(DeciderClass(), self.good_items,
- self.bad_items)
- self.assertEqual(results['individuals'], sorted(['func_a', 'func_b']))
- self.assertEqual(results['ranges'], [])
-
- def test_range_search(self):
-
- # arbitrarily chosen functions whose values in the bad profile constitute
- # a problematic pair
- # pylint: disable=unused-argument
- class DeciderClass(object):
- """Class for this tests's decider."""
-
- def run(self, prof, save_run=False):
- if '1' in prof['func_a'] and '3' in prof['func_b']:
- return analysis.StatusEnum.BAD_STATUS
- return analysis.StatusEnum.GOOD_STATUS
-
- # put the problematic combination in separate halves of the common funcs
- # so that non-bisecting search is invoked for its actual use case
- common_funcs = [func for func in self.good_items if func in self.bad_items]
- common_funcs.remove('func_a')
- common_funcs.insert(0, 'func_a')
- common_funcs.remove('func_b')
- common_funcs.append('func_b')
-
- problem_range = analysis.range_search(DeciderClass(), self.good_items,
- self.bad_items, common_funcs, 0,
- len(common_funcs))
-
- self.assertEquals(['func_a', 'func_b'], problem_range)
-
- def test_check_good_not_bad(self):
- func_in_good = 'func_c'
-
- # pylint: disable=unused-argument
- class DeciderClass(object):
- """Class for this tests's decider."""
-
- def run(self, prof, save_run=False):
- if func_in_good in prof:
- return analysis.StatusEnum.GOOD_STATUS
- return analysis.StatusEnum.BAD_STATUS
-
- self.assertTrue(
- analysis.check_good_not_bad(DeciderClass(), self.good_items,
- self.bad_items))
-
- def test_check_bad_not_good(self):
- func_in_bad = 'func_d'
-
- # pylint: disable=unused-argument
- class DeciderClass(object):
- """Class for this tests's decider."""
-
- def run(self, prof, save_run=False):
- if func_in_bad in prof:
- return analysis.StatusEnum.BAD_STATUS
- return analysis.StatusEnum.GOOD_STATUS
-
- self.assertTrue(
- analysis.check_bad_not_good(DeciderClass(), self.good_items,
- self.bad_items))
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/afdo_tools/bisection/e2e_external.sh b/afdo_tools/bisection/e2e_external.sh
deleted file mode 100755
index 1358075f..00000000
--- a/afdo_tools/bisection/e2e_external.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash -eu
-
-GOOD_STATUS=0
-BAD_STATUS=1
-SKIP_STATUS=125
-PROBLEM_STATUS=127
-
-tmp_file=$(mktemp)
-trap "rm -f '${tmp_file}'" EXIT
-grep -v '^ ' "$1" > "${tmp_file}"
-
-if grep -q bad "${tmp_file}"; then
- exit $BAD_STATUS
-fi
-
-# func_a containing '2' in its top line is BAD
-if grep -q 'func_a.*2' "${tmp_file}"; then
- exit $BAD_STATUS
-fi
-
-# func_b, func_c, and func_d with even values are bad in conjunction
-if grep -q 'func_b.*4' "${tmp_file}" && \
- grep -q 'func_c.*6' "${tmp_file}" && \
- grep -q 'func_d.*8' "${tmp_file}"; then
- exit $BAD_STATUS
-fi
-
-# If none of the BADness conditions are met
-exit $GOOD_STATUS
diff --git a/afdo_tools/bisection/problemstatus_external.sh b/afdo_tools/bisection/problemstatus_external.sh
deleted file mode 100755
index 3b53875b..00000000
--- a/afdo_tools/bisection/problemstatus_external.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash -eu
-
-exit 127
diff --git a/afdo_tools/bisection/state_assumption_external.sh b/afdo_tools/bisection/state_assumption_external.sh
deleted file mode 100755
index 1ad78ee2..00000000
--- a/afdo_tools/bisection/state_assumption_external.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash -eu
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script returns BAD_STATUS if '2' is in the top line of 'func_a's profile
-# and good otherwise
-
-GOOD_STATUS=0
-BAD_STATUS=1
-SKIP_STATUS=125
-PROBLEM_STATUS=127
-
-tmp_dir=$(pwd)/afdo_test_tmp
-count_file=${tmp_dir}/.count
-
-# keep count for purpose of filenames
-if [ -f "${count_file}" ]; then
- num_call=$(cat "${count_file}")
-else
- num_call=0
-fi
-
-echo -n $(( ${num_call}+1 )) > "${count_file}"
-
-tmp_file=$(mktemp)
-trap "rm -f '${tmp_file}'" EXIT
-grep -v '^ ' "$1" > "${tmp_file}"
-
-# copy prof to specific file for later test
-if [[ $# -eq 2 ]]; then
- cp "$1" "${tmp_dir}/.second_run_${num_call}"
-else
- cp "$1" "${tmp_dir}/.first_run_${num_call}"
-fi
-
-if grep -q 'func_a.*2' "${tmp_file}"; then
- exit "${BAD_STATUS}"
-fi
-exit "${GOOD_STATUS}"
diff --git a/afdo_tools/bisection/state_assumption_interrupt.sh b/afdo_tools/bisection/state_assumption_interrupt.sh
deleted file mode 100755
index eba3a4b4..00000000
--- a/afdo_tools/bisection/state_assumption_interrupt.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash -eu
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script returns the result of state_assumption_external.sh on every even
-# iteration, and PROBLEM_STATUS on every odd_iteration
-
-PROBLEM_STATUS=127
-
-tmp_dir=$(pwd)/afdo_test_tmp
-
-count_file="${tmp_dir}/.count"
-if [[ -f "${count_file}" ]]; then
- num_call=$(cat "${count_file}")
-else
- num_call=0
-fi
-
-local_count_file=${tmp_dir}/.local_count
-if [[ -f "${local_count_file}" ]]; then
- local_count=$(cat "${local_count_file}")
-else
- local_count=0
-fi
-
-echo -n $(( ${local_count}+1 )) > "${local_count_file}"
-
-# Don't want to fail on performance checks hence local_count >= 2
-# but following that, want to fail every other check
-if [[ ${local_count} -ge 2 ]] && [[ $(( ${num_call}%2 )) -ne 0 ]]; then
- echo -n $(( ${num_call}+1 )) > "${count_file}"
- exit "${PROBLEM_STATUS}"
-fi
-
-# script just needs any second argument to write profs to .second_run_*
-$(pwd)/state_assumption_external.sh "$1" 'second_run'
-exit $?
diff --git a/afdo_tools/generate_afdo_from_tryjob.py b/afdo_tools/generate_afdo_from_tryjob.py
deleted file mode 100755
index b8a2d669..00000000
--- a/afdo_tools/generate_afdo_from_tryjob.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Given a tryjob and perf profile, generates an AFDO profile."""
-
-from __future__ import print_function
-
-import argparse
-import distutils.spawn
-import os
-import os.path
-import shutil
-import subprocess
-import sys
-import tempfile
-
-_CREATE_LLVM_PROF = 'create_llvm_prof'
-_GS_PREFIX = 'gs://'
-
-
-def _fetch_gs_artifact(remote_name, local_name):
- assert remote_name.startswith(_GS_PREFIX)
- subprocess.check_call(['gsutil', 'cp', remote_name, local_name])
-
-
-def _fetch_and_maybe_unpack(remote_name, local_name):
- unpackers = [
- ('.tar.bz2', ['tar', 'xaf']),
- ('.bz2', ['bunzip2']),
- ('.tar.xz', ['tar', 'xaf']),
- ('.xz', ['xz', '-d']),
- ]
-
- unpack_ext = None
- unpack_cmd = None
- for ext, unpack in unpackers:
- if remote_name.endswith(ext):
- unpack_ext, unpack_cmd = ext, unpack
- break
-
- download_to = local_name + unpack_ext if unpack_ext else local_name
- _fetch_gs_artifact(remote_name, download_to)
- if unpack_cmd is not None:
- print('Unpacking', download_to)
- subprocess.check_output(unpack_cmd + [download_to])
- assert os.path.exists(local_name)
-
-
-def _generate_afdo(perf_profile_loc, tryjob_loc, output_name):
- if perf_profile_loc.startswith(_GS_PREFIX):
- local_loc = 'perf.data'
- _fetch_and_maybe_unpack(perf_profile_loc, local_loc)
- perf_profile_loc = local_loc
-
- chrome_in_debug_loc = 'debug/opt/google/chrome/chrome.debug'
- debug_out = 'debug.tgz'
- _fetch_gs_artifact(os.path.join(tryjob_loc, 'debug.tgz'), debug_out)
-
- print('Extracting chrome.debug.')
- # This has tons of artifacts, and we only want Chrome; don't waste time
- # extracting the rest in _fetch_and_maybe_unpack.
- subprocess.check_call(['tar', 'xaf', 'debug.tgz', chrome_in_debug_loc])
-
- # Note that the AFDO tool *requires* a binary named `chrome` to be present if
- # we're generating a profile for chrome. It's OK for this to be split debug
- # information.
- os.rename(chrome_in_debug_loc, 'chrome')
-
- print('Generating AFDO profile.')
- subprocess.check_call([
- _CREATE_LLVM_PROF, '--out=' + output_name, '--binary=chrome',
- '--profile=' + perf_profile_loc
- ])
-
-
-def _abspath_or_gs_link(path):
- if path.startswith(_GS_PREFIX):
- return path
- return os.path.abspath(path)
-
-
-def _tryjob_arg(tryjob_arg):
- # Forward gs args through
- if tryjob_arg.startswith(_GS_PREFIX):
- return tryjob_arg
-
- # Clicking on the 'Artifacts' link gives us a pantheon link that's basically
- # a preamble and gs path.
- pantheon = 'https://pantheon.corp.google.com/storage/browser/'
- if tryjob_arg.startswith(pantheon):
- return _GS_PREFIX + tryjob_arg[len(pantheon):]
-
- # Otherwise, only do things with a tryjob ID (e.g. R75-11965.0.0-b3648595)
- if not tryjob_arg.startswith('R'):
- raise ValueError('Unparseable tryjob arg; give a tryjob ID, pantheon '
- 'link, or gs:// link. Please see source for more.')
-
- chell_path = 'chromeos-image-archive/chell-chrome-pfq-tryjob/'
- # ...And assume it's from chell, since that's the only thing we generate
- # profiles with today.
- return _GS_PREFIX + chell_path + tryjob_arg
-
-
-def main():
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--perf_profile',
- required=True,
- help='Path to our perf profile. Accepts either a gs:// path or local '
- 'filepath.')
- parser.add_argument(
- '--tryjob',
- required=True,
- type=_tryjob_arg,
- help='Path to our tryjob\'s artifacts. Accepts a gs:// path, pantheon '
- 'link, or tryjob ID, e.g. R75-11965.0.0-b3648595. In the last case, '
- 'the assumption is that you ran a chell-chrome-pfq-tryjob.')
- parser.add_argument(
- '-o',
- '--output',
- default='afdo.prof',
- help='Where to put the AFDO profile. Default is afdo.prof.')
- parser.add_argument(
- '-k',
- '--keep_artifacts_on_failure',
- action='store_true',
- help='Don\'t remove the tempdir on failure')
- args = parser.parse_args()
-
- if not distutils.spawn.find_executable(_CREATE_LLVM_PROF):
- sys.exit(_CREATE_LLVM_PROF + ' not found; are you in the chroot?')
-
- profile = _abspath_or_gs_link(args.perf_profile)
- afdo_output = os.path.abspath(args.output)
-
- initial_dir = os.getcwd()
- temp_dir = tempfile.mkdtemp(prefix='generate_afdo')
- success = True
- try:
- os.chdir(temp_dir)
- _generate_afdo(profile, args.tryjob, afdo_output)
-
- # The AFDO tooling is happy to generate essentially empty profiles for us.
- # Chrome's profiles are often 8+ MB; if we only see a small fraction of
- # that, something's off. 512KB was arbitrarily selected.
- if os.path.getsize(afdo_output) < 512 * 1024:
- raise ValueError('The AFDO profile is suspiciously small for Chrome. '
- 'Something might have gone wrong.')
- except:
- success = False
- raise
- finally:
- os.chdir(initial_dir)
-
- if success or not args.keep_artifacts_on_failure:
- shutil.rmtree(temp_dir, ignore_errors=True)
- else:
- print('Artifacts are available at', temp_dir)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/afdo_tools/run_afdo_tryjob.py b/afdo_tools/run_afdo_tryjob.py
deleted file mode 100755
index de45af0b..00000000
--- a/afdo_tools/run_afdo_tryjob.py
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Spawns off an AFDO tryjob.
-
-This tryjob will cause perf profiles to be collected as though we were running
-our benchmark AFDO pipeline. Depending on the set of flags that you use,
-different things will happen. Any artifacts will land in
-gs://chromeos-localmirror/distfiles/afdo/experimental/approximation
-
-This tryjob will generate *either* a full (AFDO profile, perf.data,
-chrome.debug) combo, or just a perf.data, depending on the arguments you feed
-it.
-
-The thing to be careful of is that our localmirror bucket is shared between
-everyone, so it's super easy for two AFDO profile runs to 'collide'. Hence, if
-you provide the --tag_profiles_with_current_time flag, the script will generate
-*only* a perf.data, but that perf.data will have a timestamp (with second
-resolution) on it. This makes collisions super unlikely.
-
-If you'd like to know which perf profile was yours:
- - Go to the tryjob output page
- - Look for 'HWTest [AFDO_Record]'
- - Click on its stdout
- - Find "Links to test logs:" in the stdout
- - Follow the link by telemetry_AFDOGenerate
- - Find and click the link to debug/autoserv.DEBUG
- - Look for a gs:// link ending in `.perf.data` with a compression suffix
- (currently `.bz2`; maybe `.xz` eventually). That's the gs:// path to your
- perf profile.
-
-The downside to this option is that there's no (reliable + trivial to
-implement) way for the bot that converts AFDO profiles into perf profiles to
-know the profile to choose. So, you're stuck generating a profile on your own.
-We have a tool for just that. Please see `generate_afdo_from_tryjob.py`.
-
-If you don't like that tool, generating your own profile isn't super difficult.
-Just grab the perf profile that your logs note from gs://, grab a copy of
-chrome.debug from your tryjob, and use `create_llvm_prof` to create a profile.
-
-On the other hand, if you're 100% sure that your profile won't collide, you can
-make your life easier by providing --use_afdo_generation_stage.
-
-If you provide neither --use_afdo_generation_stage nor
---tag_profiles_with_current_time, --tag_profiles_with_current_time is implied,
-since it's safer.
-"""
-
-from __future__ import print_function
-
-import argparse
-import collections
-import pipes
-import subprocess
-import sys
-import time
-
-
-def main():
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--force_no_patches',
- action='store_true',
- help='Run even if no patches are provided')
- parser.add_argument(
- '--tag_profiles_with_current_time',
- action='store_true',
- help='Perf profile names will have the current time added to them.')
- parser.add_argument(
- '--use_afdo_generation_stage',
- action='store_true',
- help='Perf profiles will be automatically converted to AFDO profiles.')
- parser.add_argument(
- '-g',
- '--patch',
- action='append',
- default=[],
- help='A patch to add to the AFDO run')
- parser.add_argument(
- '-n',
- '--dry_run',
- action='store_true',
- help='Just print the command that would be run')
- args = parser.parse_args()
-
- dry_run = args.dry_run
- force_no_patches = args.force_no_patches
- tag_profiles_with_current_time = args.tag_profiles_with_current_time
- use_afdo_generation_stage = args.use_afdo_generation_stage
- user_patches = args.patch
-
- if tag_profiles_with_current_time and use_afdo_generation_stage:
- raise ValueError('You can\'t tag profiles with the time + have '
- 'afdo-generate')
-
- if not tag_profiles_with_current_time and not use_afdo_generation_stage:
- print('Neither current_time nor afdo_generate asked for. Assuming you '
- 'prefer current time tagging.')
- print('You have 5 seconds to cancel and try again.')
- print()
- if not dry_run:
- time.sleep(5)
- tag_profiles_with_current_time = True
-
- patches = [
- # Send profiles to localmirror instead of chromeos-prebuilt. This should
- # always be done, since sending profiles into production is bad. :)
- # https://chromium-review.googlesource.com/c/chromiumos/third_party/autotest/+/1436158
- 1436158,
- # Force profile generation. Otherwise, we'll decide to not spawn off the
- # perf hwtests.
- # https://chromium-review.googlesource.com/c/chromiumos/chromite/+/1313291
- 1313291,
- ]
-
- if tag_profiles_with_current_time:
- # Tags the profiles with the current time of day. As detailed in the
- # docstring, this is desirable unless you're sure that this is the only
- # experimental profile that will be generated today.
- # https://chromium-review.googlesource.com/c/chromiumos/third_party/autotest/+/1436157
- patches.append(1436157)
-
- if use_afdo_generation_stage:
- # Make the profile generation stage look in localmirror, instead of having
- # it look in chromeos-prebuilt. Without this, we'll never upload
- # chrome.debug or try to generate an AFDO profile.
- # https://chromium-review.googlesource.com/c/chromiumos/chromite/+/1436583
- patches.append(1436583)
-
- if not user_patches and not force_no_patches:
- raise ValueError('No patches given; pass --force_no_patches to force a '
- 'tryjob')
-
- for patch in user_patches:
- # We accept two formats. Either a URL that ends with a number, or a number.
- if patch.startswith('http'):
- patch = patch.split('/')[-1]
- patches.append(int(patch))
-
- count = collections.Counter(patches)
- too_many = [k for k, v in count.items() if v > 1]
- if too_many:
- too_many.sort()
- raise ValueError(
- 'Patch(es) asked for application more than once: %s' % too_many)
-
- args = [
- 'cros',
- 'tryjob',
- ]
-
- for patch in patches:
- args += ['-g', str(patch)]
-
- args += [
- '--nochromesdk',
- '--hwtest',
- 'chell-chrome-pfq-tryjob',
- ]
-
- print(' '.join(pipes.quote(a) for a in args))
- if not dry_run:
- sys.exit(subprocess.call(args))
-
-
-if __name__ == '__main__':
- main()
diff --git a/afe_lock_machine.py b/afe_lock_machine.py
new file mode 100755
index 00000000..f83e897b
--- /dev/null
+++ b/afe_lock_machine.py
@@ -0,0 +1,661 @@
+#!/usr/bin/env python2
+#
+# Copyright 2015 Google INc. All Rights Reserved.
+"""This module controls locking and unlocking of test machines."""
+
+from __future__ import print_function
+
+import argparse
+import getpass
+import os
+import sys
+import traceback
+
+from cros_utils import logger
+from cros_utils import machines
+
+
+class AFELockException(Exception):
+ """Base class for exceptions in this module."""
+
+
+class MachineNotPingable(AFELockException):
+ """Raised when machine does not respond to ping."""
+
+
+class MissingHostInfo(AFELockException):
+ """Raised when cannot find info about machine on machine servers."""
+
+
+class UpdateNonLocalMachine(AFELockException):
+ """Raised when user requests to add/remove a ChromeOS HW Lab machine.."""
+
+
+class DuplicateAdd(AFELockException):
+ """Raised when user requests to add a machine that's already on the server."""
+
+
+class UpdateServerError(AFELockException):
+ """Raised when attempt to add/remove a machine from local server fails."""
+
+
+class LockingError(AFELockException):
+ """Raised when server fails to lock/unlock machine as requested."""
+
+
+class DontOwnLock(AFELockException):
+ """Raised when user attmepts to unlock machine locked by someone else."""
+ # This should not be raised if the user specified '--force'
+
+
+class NoAFEServer(AFELockException):
+ """Raised when cannot find/access the autotest server."""
+
+
+class AFEAccessError(AFELockException):
+ """Raised when cannot get information about lab machine from lab server."""
+
+
+class AFELockManager(object):
+ """Class for locking/unlocking machines vie Autotest Front End servers.
+
+ This class contains methods for checking the locked status of machines
+ on both the ChromeOS HW Lab AFE server and a local AFE server. It also
+ has methods for adding/removing machines from the local server, and for
+ changing the lock status of machines on either server. For the ChromeOS
+ HW Lab, it only allows access to the toolchain team lab machines, as
+ defined in toolchain-utils/crosperf/default_remotes. By default it will
+ look for a local server on chrotomation2.svl.corp.google.com, but an
+ alternative local AFE server can be supplied, if desired.
+
+ !!!IMPORTANT NOTE!!! The AFE server can only be called from the main
+ thread/process of a program. If you launch threads and try to call it
+ from a thread, you will get an error. This has to do with restrictions
+ in the Python virtual machine (and signal handling) and cannot be changed.
+ """
+
+ LOCAL_SERVER = 'chrotomation2.svl.corp.google.com'
+
+ def __init__(self,
+ remotes,
+ force_option,
+ chromeos_root,
+ local_server,
+ use_local=True,
+ log=None):
+ """Initializes an AFELockManager object.
+
+ Args:
+ remotes: A list of machine names or ip addresses to be managed. Names
+ and ip addresses should be represented as strings. If the list is
+ empty, the lock manager will get all known machines.
+ force_option: A Boolean indicating whether or not to force an unlock of
+ a machine that was locked by someone else.
+ chromeos_root: The ChromeOS chroot to use for the autotest scripts.
+ local_server: A string containing the name or ip address of the machine
+ that is running an AFE server, which is to be used for managing
+ machines that are not in the ChromeOS HW lab.
+ local: A Boolean indicating whether or not to use/allow a local AFE
+ server to be used (see local_server argument).
+ use_local: Use the local server instead of the official one.
+ log: If not None, this is the logger object to be used for writing out
+ informational output messages. It is expected to be an instance of
+ Logger class from cros_utils/logger.py.
+ """
+ self.chromeos_root = chromeos_root
+ self.user = getpass.getuser()
+ self.logger = log or logger.GetLogger()
+ autotest_path = os.path.join(chromeos_root,
+ 'src/third_party/autotest/files')
+
+ sys.path.append(chromeos_root)
+ sys.path.append(autotest_path)
+ sys.path.append(os.path.join(autotest_path, 'server', 'cros'))
+
+ # We have to wait to do these imports until the paths above have
+ # been fixed.
+ # pylint: disable=import-error
+ from client import setup_modules
+ setup_modules.setup(
+ base_path=autotest_path, root_module_name='autotest_lib')
+
+ from dynamic_suite import frontend_wrappers
+
+ self.afe = frontend_wrappers.RetryingAFE(
+ timeout_min=30, delay_sec=10, debug=False, server='cautotest')
+
+ self.local = use_local
+ self.machines = list(set(remotes)) or []
+ self.toolchain_lab_machines = self.GetAllToolchainLabMachines()
+ if self.machines and self.AllLabMachines():
+ self.local = False
+
+ if not self.local:
+ self.local_afe = None
+ else:
+ dargs = {}
+ dargs['server'] = local_server or AFELockManager.LOCAL_SERVER
+ # Make sure local server is pingable.
+ error_msg = ('Local autotest server machine %s not responding to ping.' %
+ dargs['server'])
+ self.CheckMachine(dargs['server'], error_msg)
+ self.local_afe = frontend_wrappers.RetryingAFE(
+ timeout_min=30, delay_sec=10, debug=False, **dargs)
+ if not self.machines:
+ self.machines = self.toolchain_lab_machines + self.GetAllNonlabMachines()
+ self.force = force_option
+
+ def AllLabMachines(self):
+ """Check to see if all machines being used are HW Lab machines."""
+ all_lab = True
+ for m in self.machines:
+ if m not in self.toolchain_lab_machines:
+ all_lab = False
+ break
+ return all_lab
+
+ def CheckMachine(self, machine, error_msg):
+ """Verifies that machine is responding to ping.
+
+ Args:
+ machine: String containing the name or ip address of machine to check.
+ error_msg: Message to print if ping fails.
+
+ Raises:
+ MachineNotPingable: If machine is not responding to 'ping'
+ """
+ if not machines.MachineIsPingable(machine, logging_level='none'):
+ cros_machine = machine + '.cros'
+ if not machines.MachineIsPingable(cros_machine, logging_level='none'):
+ raise MachineNotPingable(error_msg)
+
+ def MachineIsKnown(self, machine):
+ """Checks to see if either AFE server knows the given machine.
+
+ Args:
+ machine: String containing name or ip address of machine to check.
+
+ Returns:
+ Boolean indicating if the machine is in the list of known machines for
+ either AFE server.
+ """
+ if machine in self.toolchain_lab_machines:
+ return True
+ elif self.local_afe and machine in self.GetAllNonlabMachines():
+ return True
+
+ return False
+
+ def GetAllToolchainLabMachines(self):
+ """Gets a list of all the toolchain machines in the ChromeOS HW lab.
+
+ Returns:
+ A list of names of the toolchain machines in the ChromeOS HW lab.
+ """
+ machines_file = os.path.join(
+ os.path.dirname(__file__), 'crosperf', 'default_remotes')
+ machine_list = []
+ with open(machines_file, 'r') as input_file:
+ lines = input_file.readlines()
+ for line in lines:
+ _, remotes = line.split(':')
+ remotes = remotes.strip()
+ for r in remotes.split():
+ machine_list.append(r.strip())
+ return machine_list
+
+ def GetAllNonlabMachines(self):
+ """Gets a list of all known machines on the local AFE server.
+
+ Returns:
+ A list of the names of the machines on the local AFE server.
+ """
+ non_lab_machines = []
+ if self.local_afe:
+ non_lab_machines = self.local_afe.get_hostnames()
+ return non_lab_machines
+
+ def PrintStatusHeader(self, is_lab_machine):
+ """Prints the status header lines for machines.
+
+ Args:
+ is_lab_machine: Boolean indicating whether to print HW Lab header or
+ local machine header (different spacing).
+ """
+ if is_lab_machine:
+ print('\nMachine (Board)\t\t\t\t\tStatus')
+ print('---------------\t\t\t\t\t------\n')
+ else:
+ print('\nMachine (Board)\t\tStatus')
+ print('---------------\t\t------\n')
+
+ def RemoveLocalMachine(self, m):
+ """Removes a machine from the local AFE server.
+
+ Args:
+ m: The machine to remove.
+
+ Raises:
+ MissingHostInfo: Can't find machine to be removed.
+ """
+ if self.local_afe:
+ host_info = self.local_afe.get_hosts(hostname=m)
+ if host_info:
+ host_info = host_info[0]
+ host_info.delete()
+ else:
+ raise MissingHostInfo('Cannot find/delete machine %s.' % m)
+
+ def AddLocalMachine(self, m):
+ """Adds a machine to the local AFE server.
+
+ Args:
+ m: The machine to be added.
+ """
+ if self.local_afe:
+ error_msg = 'Machine %s is not responding to ping.' % m
+ self.CheckMachine(m, error_msg)
+ self.local_afe.create_host(m)
+
+ def AddMachinesToLocalServer(self):
+ """Adds one or more machines to the local AFE server.
+
+ Verify that the requested machines are legal to add to the local server,
+ i.e. that they are not ChromeOS HW lab machines, and they are not already
+ on the local server. Call AddLocalMachine for each valid machine.
+
+ Raises:
+ DuplicateAdd: Attempt to add a machine that is already on the server.
+ UpdateNonLocalMachine: Attempt to add a ChromeOS HW lab machine.
+ UpdateServerError: Something went wrong while attempting to add a
+ machine.
+ """
+ for m in self.machines:
+ for cros_name in [m, m + '.cros']:
+ if cros_name in self.toolchain_lab_machines:
+ raise UpdateNonLocalMachine(
+ 'Machine %s is already in the ChromeOS HW'
+ 'Lab. Cannot add it to local server.' % cros_name)
+ host_info = self.local_afe.get_hosts(hostname=m)
+ if host_info:
+ raise DuplicateAdd('Machine %s is already on the local server.' % m)
+ try:
+ self.AddLocalMachine(m)
+ self.logger.LogOutput('Successfully added %s to local server.' % m)
+ except Exception as e:
+ traceback.print_exc()
+ raise UpdateServerError(
+ 'Error occurred while attempting to add %s. %s' % (m, str(e)))
+
+ def RemoveMachinesFromLocalServer(self):
+ """Removes one or more machines from the local AFE server.
+
+ Verify that the requested machines are legal to remove from the local
+ server, i.e. that they are not ChromeOS HW lab machines. Call
+ RemoveLocalMachine for each valid machine.
+
+ Raises:
+ UpdateServerError: Something went wrong while attempting to remove a
+ machine.
+ """
+ for m in self.machines:
+ for cros_name in [m, m + '.cros']:
+ if cros_name in self.toolchain_lab_machines:
+ raise UpdateNonLocalMachine(
+ 'Machine %s is in the ChromeOS HW Lab. '
+ 'This script cannot remove lab machines.' % cros_name)
+ try:
+ self.RemoveLocalMachine(m)
+ self.logger.LogOutput('Successfully removed %s from local server.' % m)
+ except Exception as e:
+ traceback.print_exc()
+ raise UpdateServerError('Error occurred while attempting to remove %s '
+ '(%s).' % (m, str(e)))
+
+ def ListMachineStates(self, machine_states):
+ """Gets and prints the current status for a list of machines.
+
+ Prints out the current status for all of the machines in the current
+ AFELockManager's list of machines (set when the object is initialized).
+
+ Args:
+ machine_states: A dictionary of the current state of every machine in
+ the current AFELockManager's list of machines. Normally obtained by
+ calling AFELockManager::GetMachineStates.
+ """
+ local_machines = []
+ printed_hdr = False
+ for m in machine_states:
+ cros_name = m + '.cros'
+ if (m in self.toolchain_lab_machines or
+ cros_name in self.toolchain_lab_machines):
+ name = m if m in self.toolchain_lab_machines else cros_name
+ if not printed_hdr:
+ self.PrintStatusHeader(True)
+ printed_hdr = True
+ state = machine_states[m]
+ if state['locked']:
+ print('%s (%s)\tlocked by %s since %s' %
+ (name, state['board'], state['locked_by'], state['lock_time']))
+ else:
+ print('%s (%s)\tunlocked' % (name, state['board']))
+ else:
+ local_machines.append(m)
+
+ if local_machines:
+ self.PrintStatusHeader(False)
+ for m in local_machines:
+ state = machine_states[m]
+ if state['locked']:
+ print('%s (%s)\tlocked by %s since %s' %
+ (m, state['board'], state['locked_by'], state['lock_time']))
+ else:
+ print('%s (%s)\tunlocked' % (m, state['board']))
+
+ def UpdateLockInAFE(self, should_lock_machine, machine):
+ """Calls an AFE server to lock/unlock a machine.
+
+ Args:
+ should_lock_machine: Boolean indicating whether to lock the machine (True)
+ or unlock the machine (False).
+ machine: The machine to update.
+
+ Raises:
+ LockingError: An error occurred while attempting to update the machine
+ state.
+ """
+ action = 'lock'
+ if not should_lock_machine:
+ action = 'unlock'
+ kwargs = {'locked': should_lock_machine}
+ kwargs['lock_reason'] = 'toolchain user request (%s)' % self.user
+
+ cros_name = machine + '.cros'
+ if cros_name in self.toolchain_lab_machines:
+ machine = cros_name
+ if machine in self.toolchain_lab_machines:
+ m = machine.split('.')[0]
+ afe_server = self.afe
+ else:
+ m = machine
+ afe_server = self.local_afe
+
+ try:
+ afe_server.run(
+ 'modify_hosts',
+ host_filter_data={'hostname__in': [m]},
+ update_data=kwargs)
+ except Exception as e:
+ traceback.print_exc()
+ raise LockingError('Unable to %s machine %s. %s' % (action, m, str(e)))
+
+ def UpdateMachines(self, lock_machines):
+ """Sets the locked state of the machines to the requested value.
+
+ The machines updated are the ones in self.machines (specified when the
+ class object was intialized).
+
+ Args:
+ lock_machines: Boolean indicating whether to lock the machines (True) or
+ unlock the machines (False).
+
+ Returns:
+ A list of the machines whose state was successfully updated.
+ """
+ updated_machines = []
+ for m in self.machines:
+ self.UpdateLockInAFE(lock_machines, m)
+ # Since we returned from self.UpdateLockInAFE we assume the request
+ # succeeded.
+ if lock_machines:
+ self.logger.LogOutput('Locked machine(s) %s.' % m)
+ else:
+ self.logger.LogOutput('Unlocked machine(s) %s.' % m)
+ updated_machines.append(m)
+
+ return updated_machines
+
+ def _InternalRemoveMachine(self, machine):
+ """Remove machine from internal list of machines.
+
+ Args:
+ machine: Name of machine to be removed from internal list.
+ """
+ # Check to see if machine is lab machine and if so, make sure it has
+ # ".cros" on the end.
+ cros_machine = machine
+ if machine.find('rack') > 0 and machine.find('row') > 0:
+ if machine.find('.cros') == -1:
+ cros_machine = cros_machine + '.cros'
+
+ self.machines = [
+ m for m in self.machines if m != cros_machine and m != machine
+ ]
+
+ def CheckMachineLocks(self, machine_states, cmd):
+ """Check that every machine in requested list is in the proper state.
+
+ If the cmd is 'unlock' verify that every machine is locked by requestor.
+ If the cmd is 'lock' verify that every machine is currently unlocked.
+
+ Args:
+ machine_states: A dictionary of the current state of every machine in
+ the current AFELockManager's list of machines. Normally obtained by
+ calling AFELockManager::GetMachineStates.
+ cmd: The user-requested action for the machines: 'lock' or 'unlock'.
+
+ Raises:
+ DontOwnLock: The lock on a requested machine is owned by someone else.
+ """
+ for k, state in machine_states.iteritems():
+ if cmd == 'unlock':
+ if not state['locked']:
+ self.logger.LogWarning('Attempt to unlock already unlocked machine '
+ '(%s).' % k)
+ self._InternalRemoveMachine(k)
+
+ if state['locked'] and state['locked_by'] != self.user:
+ raise DontOwnLock('Attempt to unlock machine (%s) locked by someone '
+ 'else (%s).' % (k, state['locked_by']))
+ elif cmd == 'lock':
+ if state['locked']:
+ self.logger.LogWarning(
+ 'Attempt to lock already locked machine (%s)' % k)
+ self._InternalRemoveMachine(k)
+
+ def HasAFEServer(self, local):
+ """Verifies that the AFELockManager has appropriate AFE server.
+
+ Args:
+ local: Boolean indicating whether we are checking for the local server
+ (True) or for the global server (False).
+
+ Returns:
+ A boolean indicating if the AFELockManager has the requested AFE server.
+ """
+ if local:
+ return self.local_afe is not None
+ else:
+ return self.afe is not None
+
+ def GetMachineStates(self, cmd=''):
+ """Gets the current state of all the requested machines.
+
+ Gets the current state of all the requested machines, both from the HW lab
+ sever and from the local server. Stores the data in a dictionary keyed
+ by machine name.
+
+ Args:
+ cmd: The command for which we are getting the machine states. This is
+ important because if one of the requested machines is missing we raise
+ an exception, unless the requested command is 'add'.
+
+ Returns:
+ A dictionary of machine states for all the machines in the AFELockManager
+ object.
+
+ Raises:
+ NoAFEServer: Cannot find the HW Lab or local AFE server.
+ AFEAccessError: An error occurred when querying the server about a
+ machine.
+ """
+ if not self.HasAFEServer(False):
+ raise NoAFEServer('Error: Cannot connect to main AFE server.')
+
+ if self.local and not self.HasAFEServer(True):
+ raise NoAFEServer('Error: Cannot connect to local AFE server.')
+
+ machine_list = {}
+ for m in self.machines:
+ host_info = None
+ cros_name = m + '.cros'
+ if (m in self.toolchain_lab_machines or
+ cros_name in self.toolchain_lab_machines):
+ mod_host = m.split('.')[0]
+ host_info = self.afe.get_hosts(hostname=mod_host)
+ if not host_info:
+ raise AFEAccessError('Unable to get information about %s from main'
+ ' autotest server.' % m)
+ else:
+ host_info = self.local_afe.get_hosts(hostname=m)
+ if not host_info and cmd != 'add':
+ raise AFEAccessError('Unable to get information about %s from '
+ 'local autotest server.' % m)
+ if host_info:
+ host_info = host_info[0]
+ name = host_info.hostname
+ values = {}
+ values['board'] = host_info.platform if host_info.platform else '??'
+ values['locked'] = host_info.locked
+ if host_info.locked:
+ values['locked_by'] = host_info.locked_by
+ values['lock_time'] = host_info.lock_time
+ else:
+ values['locked_by'] = ''
+ values['lock_time'] = ''
+ machine_list[name] = values
+ else:
+ machine_list[m] = {}
+ return machine_list
+
+
+def Main(argv):
+ """Parse the options, initialize lock manager and dispatch proper method.
+
+ Args:
+ argv: The options with which this script was invoked.
+
+ Returns:
+ 0 unless an exception is raised.
+ """
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ '--list',
+ dest='cmd',
+ action='store_const',
+ const='status',
+ help='List current status of all known machines.')
+ parser.add_argument(
+ '--lock',
+ dest='cmd',
+ action='store_const',
+ const='lock',
+ help='Lock given machine(s).')
+ parser.add_argument(
+ '--unlock',
+ dest='cmd',
+ action='store_const',
+ const='unlock',
+ help='Unlock given machine(s).')
+ parser.add_argument(
+ '--status',
+ dest='cmd',
+ action='store_const',
+ const='status',
+ help='List current status of given machine(s).')
+ parser.add_argument(
+ '--add_machine',
+ dest='cmd',
+ action='store_const',
+ const='add',
+ help='Add machine to local machine server.')
+ parser.add_argument(
+ '--remove_machine',
+ dest='cmd',
+ action='store_const',
+ const='remove',
+ help='Remove machine from the local machine server.')
+ parser.add_argument(
+ '--nolocal',
+ dest='local',
+ action='store_false',
+ default=True,
+ help='Do not try to use local machine server.')
+ parser.add_argument(
+ '--remote', dest='remote', help='machines on which to operate')
+ parser.add_argument(
+ '--chromeos_root',
+ dest='chromeos_root',
+ required=True,
+ help='ChromeOS root to use for autotest scripts.')
+ parser.add_argument(
+ '--local_server',
+ dest='local_server',
+ default=None,
+ help='Alternate local autotest server to use.')
+ parser.add_argument(
+ '--force',
+ dest='force',
+ action='store_true',
+ default=False,
+ help='Force lock/unlock of machines, even if not'
+ ' current lock owner.')
+
+ options = parser.parse_args(argv)
+
+ if not options.remote and options.cmd != 'status':
+ parser.error('No machines specified for operation.')
+
+ if not os.path.isdir(options.chromeos_root):
+ parser.error('Cannot find chromeos_root: %s.' % options.chromeos_root)
+
+ if not options.cmd:
+ parser.error('No operation selected (--list, --status, --lock, --unlock,'
+ ' --add_machine, --remove_machine).')
+
+ machine_list = []
+ if options.remote:
+ machine_list = options.remote.split()
+
+ lock_manager = AFELockManager(machine_list, options.force,
+ options.chromeos_root, options.local_server,
+ options.local)
+
+ machine_states = lock_manager.GetMachineStates(cmd=options.cmd)
+ cmd = options.cmd
+
+ if cmd == 'status':
+ lock_manager.ListMachineStates(machine_states)
+
+ elif cmd == 'lock':
+ if not lock_manager.force:
+ lock_manager.CheckMachineLocks(machine_states, cmd)
+ lock_manager.UpdateMachines(True)
+
+ elif cmd == 'unlock':
+ if not lock_manager.force:
+ lock_manager.CheckMachineLocks(machine_states, cmd)
+ lock_manager.UpdateMachines(False)
+
+ elif cmd == 'add':
+ lock_manager.AddMachinesToLocalServer()
+
+ elif cmd == 'remove':
+ lock_manager.RemoveMachinesFromLocalServer()
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/android_bench_suite/README.md b/android_bench_suite/README.md
deleted file mode 100644
index b539c7cd..00000000
--- a/android_bench_suite/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# `android_bench_suite`
-
-This is a Android Toolchain benchmark suite.
-
-Where to find this suite:
-
-This suite locates at google3, please create a google3 branch first, then run:
-
-```
-$ cd experimental/users/zhizhouy/benchtoolchain
-```
-
-Copy this directory to the place you want to put it.
-
-To use this suite:
-1. Configure the basic envrionment in `env_setting` file.
-
-2. Run `./apply_patches.py`, which will:
-
- 1. Patch all the android benchmarks in the android tree.
- Benchmark Panorama does not exist in android tree, so perftests/
- gets copied into the top-level of android tree.
-
- 2. Apply patch autotest.diff to `android_root/external/autotest`, which
- includes all the test scripts for benchmarks. Copy testcases to
- related autotest directory.
-
- If you have applied the patch partially and hope to discard the
- patch, just run `discard_patches.py`
-
-3. Build and run benchmark on the device using ./run.py. You can either
- use test configuration file (-t `test_config`), or set all the variables
- manually.
-
-4. The raw results locate at `bench_result_*` in bench suite home
- directory.
-
-5. The JSON format result will be generated for crosperf report.
-
-Utility tools:
-
-1. Autotest is a test framework located in android exteranl/autotest
- Before first time running it, please run
- `utils/build_externals.py` first to ensure all the environments
- and tools needed are installed.
-
-2. Crosperf is a report generating tool in ChromeOS toolchain utilities.
- Please look for it in chromium source:
- `src/third_party/toolchain-utils/crosperf`.
diff --git a/android_bench_suite/README.txt b/android_bench_suite/README.txt
new file mode 100644
index 00000000..3d0cceb5
--- /dev/null
+++ b/android_bench_suite/README.txt
@@ -0,0 +1,41 @@
+This is a Android Toolchain benchmark suite.
+===========================================
+Where to find this suite:
+ This suite locates at google3, please create a google3 branch first,
+ then run:
+ $cd experimental/users/zhizhouy/benchtoolchain
+ Copy this directory to the place you want to put it.
+
+To use this suite:
+ 1. Configure the basic envrionment in env_setting file.
+
+ 2. Run ./apply_patches.py, which will:
+ 1) Patch all the android benchmarks in the android tree.
+ Benchmark Panorama does not exist in android tree, so perftests/
+ gets copied into the top-level of android tree.
+
+ 2) Apply patch autotest.diff to android_root/external/autotest, which
+ includes all the test scripts for benchmarks. Copy testcases to
+ related autotest directory.
+
+ If you have applied the patch partially and hope to discard the
+ patch, just run discard_patches.py
+
+ 3. Build and run benchmark on the device using ./run.py. You can either
+ use test configuration file (-t test_config), or set all the variables
+ manually.
+
+ 4. The raw results locate at bench_result_* in bench suite home
+ directory.
+
+ 5. The JSON format result will be generated for crosperf report.
+
+Utility tools:
+ 1. Autotest is a test framework located in android exteranl/autotest
+ Before first time running it, please run
+ utils/build_externals.py first to ensure all the environments
+ and tools needed are installed.
+
+ 2. Crosperf is a report generating tool in ChromeOS toolchain utilities.
+ Please look for it in chromium source:
+ src/third_party/toolchain-utils/crosperf.
diff --git a/android_bench_suite/autotest.diff b/android_bench_suite/autotest.diff
index c2fed83b..ef0029ae 100644
--- a/android_bench_suite/autotest.diff
+++ b/android_bench_suite/autotest.diff
@@ -49,7 +49,7 @@ index 000000000..b233b586a
+ 'binderThroughputTest > /data/local/tmp/bench_result'
+ % os.getenv('TEST_MODE'))
+ # Next 4 runs add to bench_result
-+ for i in range(4):
++ for i in xrange(4):
+ self.client.run('taskset %s /data/local/tmp/'
+ 'binderThroughputTest >> '
+ '/data/local/tmp/bench_result'
@@ -165,11 +165,11 @@ index 000000000..dd6af0b53
+ 'bench_result'), 'w') as f:
+
+ # There are two benchmarks, chrome and camera.
-+ for i in range(2):
++ for i in xrange(2):
+ f.write('Test %d:\n' % i)
+ total_time = 0
+ # Run benchmark for several times for accurancy
-+ for j in range(3):
++ for j in xrange(3):
+ f.write('Iteration %d: ' % j)
+ result = self.client.run('time taskset %s dex2oat'
+ ' --dex-file=data/local/tmp/dex2oat_input/test%d.apk'
@@ -739,7 +739,7 @@ index 000000000..b317bd0f3
+ ' > /data/local/tmp/bench_result'
+ % os.getenv('TEST_MODE'))
+ # Next 4 runs add to bench_result
-+ for i in range(4):
++ for i in xrange(4):
+ self.client.run('taskset %s /data/local/tmp/synthmark'
+ ' >> /data/local/tmp/bench_result'
+ % os.getenv('TEST_MODE'))
diff --git a/android_bench_suite/fix_skia_results.py b/android_bench_suite/fix_skia_results.py
index 8c919d35..6eec6ccf 100755
--- a/android_bench_suite/fix_skia_results.py
+++ b/android_bench_suite/fix_skia_results.py
@@ -111,7 +111,7 @@ def _TransformBenchmarks(raw_benchmarks):
if len(results) < len(samples):
results.extend({
'retval': 0
- } for _ in range(len(samples) - len(results)))
+ } for _ in xrange(len(samples) - len(results)))
time_mul = _GetTimeMultiplier(friendly_name)
for sample, app in itertools.izip(samples, results):
diff --git a/android_bench_suite/gen_json.py b/android_bench_suite/gen_json.py
index e1252933..ad617ff4 100755
--- a/android_bench_suite/gen_json.py
+++ b/android_bench_suite/gen_json.py
@@ -81,7 +81,7 @@ def main(argv):
iteration = arguments.iterations
result = []
- for i in range(iteration):
+ for i in xrange(iteration):
result += collect_data(infile, bench, i)
with get_outfile(outfile, bench) as fout:
diff --git a/android_bench_suite/run.py b/android_bench_suite/run.py
index 19d9b36f..55acb663 100755
--- a/android_bench_suite/run.py
+++ b/android_bench_suite/run.py
@@ -301,7 +301,7 @@ def test_bench(bench, setting_no, iterations, serials, remote, mode):
logging.info('Start running benchmark on device...')
# Run benchmark and tests on DUT
- for i in range(iterations):
+ for i in xrange(iterations):
logging.info('Iteration No.%d:', i)
test_cmd = [
os.path.join(
@@ -463,7 +463,7 @@ def main(argv):
for bench in bench_list:
logging.info('Start building and running benchmark: [%s]', bench)
# Run script for each toolchain settings
- for setting_no in range(setting_count):
+ for setting_no in xrange(setting_count):
build_bench(setting_no, bench, compiler, llvm_version, build_os, cflags,
ldflags)
diff --git a/android_merge_from_upstream.sh b/android_merge_from_upstream.sh
deleted file mode 100755
index cf07d4bf..00000000
--- a/android_merge_from_upstream.sh
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/bash -eu
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# This is a script crafted to make our Android friends' lives easier: when run
-# on their copy of toolchain-utils, this script will do all of the necessary
-# merging/branch creation/etc. to make keeping things up-to-date trivial.
-#
-# For example,
-# https://android-review.googlesource.com/c/platform/external/toolchain-utils/+/1132504/1
-
-local_branch_name="merge_with_upstream"
-local_upstream="aosp/master"
-remote="aosp"
-remote_branch="${remote}/upstream-mirror-master"
-
-my_dir="$(dirname "$(readlink -m "$0")")"
-cd "${my_dir}"
-
-ensure_head_is_upstream_master() {
- local current_rev master_rev
- current_rev="$(git rev-parse HEAD)"
- master_rev="$(git rev-parse ${local_upstream})"
- if [[ "${current_rev}" != "${master_rev}" ]]; then
- echo "Please checkout ${local_upstream} and rerun this" >&2
- exit
- fi
-}
-
-ensure_no_local_branch_present() {
- if ! git rev-parse "${local_branch_name}" >& /dev/null; then
- return 0
- fi
-
- echo -n "${local_branch_name} is a valid branch already. Delete? [y/N] " >&2
-
- local line
- read -r line
- if [[ "${line}" != y* && "${line}" != Y* ]]; then
- echo "Aborted" >&2
- exit 1
- fi
-
- # If we're *on* that branch, deleting it is difficult.
- local current_branch
- current_branch="$(git branch --show-current)"
- if [[ "${current_branch}" == "${local_branch_name}" ]]; then
- local rev
- rev="$(git rev-parse HEAD)"
- # This is fine, since we assume HEAD == upstream-mirror-master anyway
- # (e.g., the existing branch was pointless.)
- git checkout "${rev}"
- fi
- git branch -D "${local_branch_name}"
-}
-
-get_merge_commit_list() {
- local merge_base
- merge_base="$(git merge-base HEAD ${remote_branch})"
- git log --oneline "${merge_base}..${remote_branch}"
-}
-
-ensure_head_is_upstream_master
-ensure_no_local_branch_present
-
-echo "Ensuring repository is up-to-date..."
-git fetch "${remote}"
-repo start "${local_branch_name}"
-
-commit_list="$(get_merge_commit_list)"
-num_commits="$(wc -l <<< "${commit_list}")"
-commit_message="Merging ${num_commits} commit(s) from Chromium's toolchain-utils
-
-Merged commit digest:
-$(sed 's/^/ /' <<< "${commit_list}")
-"
-
-git merge "${remote_branch}" -m "${commit_message}"
-echo 'NOTE: When you try to `repo upload`, repo might show a scary warning'
-echo 'about the number of changes are being uploaded. That should be fine,'
-echo 'since repo will only create CLs for commits not known to our remote.'
diff --git a/auto_delete_nightly_test_data.py b/auto_delete_nightly_test_data.py
index 699e12a0..4f91b042 100755
--- a/auto_delete_nightly_test_data.py
+++ b/auto_delete_nightly_test_data.py
@@ -1,10 +1,4 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
"""A crontab script to delete night test data."""
from __future__ import print_function
@@ -27,8 +21,7 @@ DIR_BY_WEEKDAY = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
def CleanNumberedDir(s, dry_run=False):
"""Deleted directories under each dated_dir."""
chromeos_dirs = [
- os.path.join(s, x)
- for x in os.listdir(s)
+ os.path.join(s, x) for x in os.listdir(s)
if misc.IsChromeOsTree(os.path.join(s, x))
]
ce = command_executer.GetCommandExecuter(log_level='none')
@@ -69,8 +62,7 @@ def CleanNumberedDir(s, dry_run=False):
def CleanDatedDir(dated_dir, dry_run=False):
# List subdirs under dir
subdirs = [
- os.path.join(dated_dir, x)
- for x in os.listdir(dated_dir)
+ os.path.join(dated_dir, x) for x in os.listdir(dated_dir)
if os.path.isdir(os.path.join(dated_dir, x))
]
all_succeeded = True
@@ -112,7 +104,6 @@ def CleanChromeOsTmpFiles(chroot_tmp, days_to_preserve, dry_run):
cmd = (r'find {0} -maxdepth 1 -type d '
r'\( -name "test_that_*" -amin +{1} -o '
r' -name "cros-update*" -amin +{1} -o '
- r' -name "CrAU_temp_data*" -amin +{1} -o '
r' -regex "{0}/tmp......" -amin +{1} \) '
r'-exec bash -c "echo rm -fr {{}}" \; '
r'-exec bash -c "rm -fr {{}}" \;').format(chroot_tmp, minutes)
@@ -199,7 +190,6 @@ def Main(argv):
os.path.join(constants.CROSTC_WORKSPACE, dated_dir),
options.dry_run) else 1
-
## Finally clean temporaries, images under crostc/chromeos
rv2 = CleanChromeOsTmpAndImages(
int(options.days_to_preserve), options.dry_run)
diff --git a/automation/clients/android.py b/automation/clients/android.py
index 06e76d29..603744b1 100755
--- a/automation/clients/android.py
+++ b/automation/clients/android.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""Client for Android nightly jobs.
diff --git a/automation/clients/chromeos.py b/automation/clients/chromeos.py
index 572320fd..084f7840 100755
--- a/automation/clients/chromeos.py
+++ b/automation/clients/chromeos.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""chromeos.py: Build & Test ChromeOS using custom compilers."""
diff --git a/automation/clients/crosstool.py b/automation/clients/crosstool.py
index 9ba83807..65720343 100755
--- a/automation/clients/crosstool.py
+++ b/automation/clients/crosstool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
diff --git a/automation/clients/dejagnu_compiler.py b/automation/clients/dejagnu_compiler.py
index 7448b87e..eb923d5d 100755
--- a/automation/clients/dejagnu_compiler.py
+++ b/automation/clients/dejagnu_compiler.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
"""dejagnu_compiler.py: Run dejagnu test."""
diff --git a/automation/clients/nightly.py b/automation/clients/nightly.py
index d35c4eca..98e2b081 100755
--- a/automation/clients/nightly.py
+++ b/automation/clients/nightly.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/automation/clients/output_test.py b/automation/clients/output_test.py
index 73c26eed..3126f050 100755
--- a/automation/clients/output_test.py
+++ b/automation/clients/output_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/automation/clients/pwd_test.py b/automation/clients/pwd_test.py
index 493444d5..a4b28552 100755
--- a/automation/clients/pwd_test.py
+++ b/automation/clients/pwd_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/automation/clients/report/validate_failures.py b/automation/clients/report/validate_failures.py
index d8776ba5..5db356d1 100755
--- a/automation/clients/report/validate_failures.py
+++ b/automation/clients/report/validate_failures.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
# Script to compare testsuite failures against a list of known-to-fail
# tests.
diff --git a/automation/common/command_executer_test.py b/automation/common/command_executer_test.py
index 2caaa146..4aa245f0 100755
--- a/automation/common/command_executer_test.py
+++ b/automation/common/command_executer_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
diff --git a/automation/common/machine_test.py b/automation/common/machine_test.py
index f66299f5..c9c200a9 100755
--- a/automation/common/machine_test.py
+++ b/automation/common/machine_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
"""Machine manager unittest.
diff --git a/automation/server/machine_manager_test.py b/automation/server/machine_manager_test.py
index 2fa5bb4b..67fdcc2b 100755
--- a/automation/server/machine_manager_test.py
+++ b/automation/server/machine_manager_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/automation/server/monitor/manage.py b/automation/server/monitor/manage.py
index 59f6e216..57deb5c2 100755
--- a/automation/server/monitor/manage.py
+++ b/automation/server/monitor/manage.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
diff --git a/automation/server/server.py b/automation/server/server.py
index c8f22521..f02a1d0f 100755
--- a/automation/server/server.py
+++ b/automation/server/server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/automation/server/server_test.py b/automation/server/server_test.py
index 131ebb3b..bcf1b9f5 100755
--- a/automation/server/server_test.py
+++ b/automation/server/server_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
"""Machine manager unittest.
diff --git a/bestflags/README.md b/bestflags/README
index c9f4397d..d9fc5ba6 100644
--- a/bestflags/README.md
+++ b/bestflags/README
@@ -1,5 +1,3 @@
-# bestflags
-
There is a vast set of compiler flags that can be used to build Chrome for
ChromeOS. This option space has not been explored before. This directory
provides an infrastructure to build Chrome with certain flag combinations, test
@@ -8,10 +6,10 @@ infrastructure supports plug-in modules that implement algorithms for searching
in the N-Dimensional space of compiler flag combinations.
Currently, three different algorithms are built, namely genetic algorithm, hill
-climbing and negative flag iterative elimination. The module `testing_batch.py`
+climbing and negative flag iterative elimination. The module 'testing_batch.py'
contains the testing of these algorithms.
-To run the script, type in `python testing_batch.py`.
+To run the script, type in python testing_batch.py.
For further information about the project, please refer to the design document
at:
diff --git a/bestflags/examples/omnetpp/README.md b/bestflags/examples/omnetpp/README
index b4582d0d..eba522fe 100644
--- a/bestflags/examples/omnetpp/README.md
+++ b/bestflags/examples/omnetpp/README
@@ -1,34 +1,23 @@
-# `omnetpp`
-
This directory contains the omnetpp example in SPEC2006 benchmark.
It also contains the json configuration file which includes the meta data
information to run the experiment.
-This directory contains a build file `build_omnetpp` which is used by the build
+This directory contains a build file build_omnetpp which is used by the build
module of the framework to compile the application.
-This directory contains a test file `test_omnetpp` which is used by the test
+This directory contains a test file test_omnetpp which is used by the test
module of the framework to benchmark the optimization compilation.
This directory contains a conf file which includes the set of optimization flags
the experiment will try.
To use this direction, first gives the file the executable permission.
-
-```
-chmod a+x build_bikjmp
-chmod a+x test_bikjmp
-```
+ chmod a+x build_bikjmp
+ chmod a+x test_bikjmp
Copy the SPEC2006 benchmark into this directory.
-To run, invoke the `example_algorithm.py` in the parent directory.
-
-```
-python example_algorithms.py --file=examples/omnetpp/example.json
-```
+To run, invoke the example_algorithm.py in the parent directory.
+ python example_algorithms.py --file=examples/omnetpp/example.json
For help,
-
-```
-python example_algorithms.py --help
-```
+  python example_algorithms.py --help
\ No newline at end of file
diff --git a/bin/tc_pyformat b/bin/tc_pyformat
new file mode 100755
index 00000000..21eceebe
--- /dev/null
+++ b/bin/tc_pyformat
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Usage: tc_pyformat <list of pyformat options> file1.py file2.py ...
+#
+# Most common option is -i, which makes formatting changes in place.
+set -u
+
+PF=pyformat
+PF_OPTIONS="--yapf --force_quote_type=single"
+PF_USER_OPTIONS=""
+
+if [[ -z "$(type -t ${PF})" ]]; then
+ echo "Error: ${PF} not in your path."
+ exit 1
+fi
+
+while [[ "$1" == -* ]]; do
+ PF_USER_OPTIONS+=" $1"
+ shift
+done
+
+FILES=$*
+PF_OPTIONS+=${PF_USER_OPTIONS}
+
+for f in ${FILES}; do
+ if [[ $f != *.py ]]; then
+ echo "Error: File $f is not a python file"
+ exit 2
+ elif [[ -x $f ]]; then
+ ${PF} ${PF_OPTIONS} $f
+ elif [[ -f $f ]]; then
+ ${PF} --remove_shebang ${PF_OPTIONS} $f
+ else
+ echo "Error: File $f does not exist"
+ exit 2
+ fi
+done
diff --git a/binary_search_tool/README.bisect b/binary_search_tool/README.bisect
new file mode 100644
index 00000000..49e0c085
--- /dev/null
+++ b/binary_search_tool/README.bisect
@@ -0,0 +1,217 @@
+
+bisect.py is a wrapper around the general purpose
+binary_search_state.py. It provides a user friendly interface for
+bisecting various compilation errors. The 2 currently provided
+methods of bisecting are ChromeOS package and object bisection. Each
+method defines a default set of options to pass to
+binary_search_state.py and allow the user to override these defaults
+(see the "Overriding" section).
+
+** NOTE **
+All commands, examples, scripts, etc. are to be run from your chroot unless
+stated otherwise.
+
+Bisection Methods:
+
+1) ChromeOS Package:
+ This method will bisect across all packages in a ChromeOS repository and find
+ the offending packages (according to your test script). This method takes the
+ following arguments:
+
+ board: The board to bisect on. For example: daisy, falco, etc.
+ remote: The IP address of the physical machine you're using to test with.
+
+ By default the ChromeOS package method will do a simple interactive test that
+ pings the machine and prompts the user if the machine is good.
+
+ a) Setup:
+ The ChromeOS package method requires that you have three build trees:
+
+ /build/${board}.bad - The build tree for your "bad" build
+ /build/${board}.good - The build tree for your "good" build
+ /build/${board}.work - A full copy of /build/${board}.bad
+
+ b) Cleanup:
+ bisect.py does most cleanup for you, the only
+ thing required by the user is to cleanup all built images and the
+ three build trees made in /build/
+
+ c) Default Arguments:
+ --get_initial_items='cros_pkg/get_initial_items.sh'
+ --switch_to_good='cros_pkg/switch_to_good.sh'
+ --switch_to_bad='cros_pkg/switch_to_bad.sh'
+ --test_setup_script='cros_pkg/test_setup.sh'
+ --test_script='cros_pkg/interactive_test.sh'
+ --incremental
+ --prune
+ --file_args
+
+ d) Additional Documentation:
+ See ./cros_pkg/README.cros_pkg_triage for full documentation of ChromeOS
+ package bisection.
+
+ e) Examples:
+ i) Basic interactive test package bisection, on daisy board:
+ ./bisect.py package daisy 172.17.211.184
+
+ ii) Basic boot test package bisection, on daisy board:
+ ./bisect.py package daisy 172.17.211.184 -t cros_pkg/boot_test.sh
+
+2) ChromeOS Object:
+ This method will bisect across all objects in a ChromeOS package and find
+ the offending objects (according to your test script). This method takes the
+ following arguments:
+
+ board: The board to bisect on. For example: daisy, falco, etc.
+ remote: The IP address of the physical machine you're using to test with.
+ package: The package to bisect with. For example: chromeos-chrome
+ dir: (Optional) the directory for your good/bad build trees. Defaults to
+ $BISECT_DIR or /tmp/sysroot_bisect. This value will set $BISECT_DIR
+ for all bisecting scripts.
+
+ By default the ChromeOS object method will do a simple interactive test that
+ pings the machine and prompts the user if the machine is good.
+
+ a) Setup:
+ The ChromeOS package method requires that you populate your good and bad set
+ of objects. sysroot_wrapper will automatically detect the BISECT_STAGE
+ variable and use this to populate emerged objects. Here is an example:
+
+ # Defaults to /tmp/sysroot_bisect
+ export BISECT_DIR="/path/to/where/you/want/to/store/builds/"
+
+ export BISECT_STAGE="POPULATE_GOOD"
+ ./switch_to_good_compiler.sh
+ emerge-${board} -C ${package_to_bisect}
+ emerge-${board} ${package_to_bisect}
+
+ export BISECT_STAGE="POPULATE_BAD"
+ ./switch_to_bad_compiler.sh
+      emerge-${board} -C ${package_to_bisect}
+ emerge-${board} ${package_to_bisect}
+
+ b) Cleanup:
+ The user must clean up all built images and the populated object files.
+
+ c) Default Arguments:
+ --get_initial_items='sysroot_wrapper/get_initial_items.sh'
+ --switch_to_good='sysroot_wrapper/switch_to_good.sh'
+ --switch_to_bad='sysroot_wrapper/switch_to_bad.sh'
+ --test_setup_script='sysroot_wrapper/test_setup.sh'
+ --test_script='sysroot_wrapper/interactive_test.sh'
+ --noincremental
+ --prune
+ --file_args
+
+ d) Additional Documentation:
+ See ./sysroot_wrapper/README for full documentation of ChromeOS object file
+ bisecting.
+
+ e) Examples:
+ i) Basic interactive test object bisection, on daisy board for
+ cryptohome package:
+ ./bisect.py object daisy 172.17.211.184 cryptohome
+
+ ii) Basic boot test package bisection, on daisy board for cryptohome
+ package:
+ ./bisect.py object daisy 172.17.211.184 cryptohome \
+ --test_script=sysroot_wrapper/boot_test.sh
+
+3) Android object:
+ NOTE: Because this isn't a ChromeOS bisection tool, the concept of a
+ chroot doesn't exist. Just run this tool from a normal shell.
+
+ This method will bisect across all objects in the Android source tree and
+ find the offending objects (according to your test script). This method takes
+ the following arguments:
+
+ android_src: The location of your android source tree
+ num_jobs: (Optional) The number of jobs to pass to make. This is dependent
+ on how many cores your machine has. A good number is probably
+ somewhere around 5 to 10.
+ device_id: (Optional) The serial code for the device you are testing on.
+ This is used to determine which device should be used in case
+ multiple devices are plugged into your computer. You can get
+ serial code for your device by running "adb devices".
+ dir: (Optional) the directory for your good/bad build trees. Defaults to
+ $BISECT_DIR or ~/ANDROID_BISECT/. This value will set $BISECT_DIR
+ for all bisecting scripts.
+
+ By default the Android object method will do a simple interactive test that
+ pings the machine and prompts the user if the machine is good.
+
+ a) Setup:
+ The Android object method requires that you populate your good and bad set
+ of objects. The Android compiler wrapper will automatically detect the
+ BISECT_STAGE variable and use this to populate emerged objects. Here is an
+ example:
+
+ # Defaults to ~/ANDROID_BISECT/
+ export BISECT_DIR="/path/to/where/you/want/to/store/builds/"
+
+ export BISECT_STAGE="POPULATE_GOOD"
+ # Install the "good" compiler
+ ./switch_to_good_compiler.sh
+ make clean
+ make -j <your_preferred_number_of_jobs>
+
+ export BISECT_STAGE="POPULATE_BAD"
+ # Install the "bad" compiler
+ ./switch_to_bad_compiler.sh
+ make clean
+ make -j <your_preferred_number_of_jobs>
+
+ b) Cleanup:
+ The user must clean up all built images and the populated object files.
+
+ c) Default Arguments:
+ --get_initial_items='android/get_initial_items.sh'
+ --switch_to_good='android/switch_to_good.sh'
+ --switch_to_bad='android/switch_to_bad.sh'
+ --test_setup_script='android/test_setup.sh'
+ --test_script='android/interactive_test.sh'
+ --incremental
+ --prune
+ --file_args
+
+ d) Additional Documentation:
+ See ./android/README.android for full documentation of Android object file
+ bisecting.
+
+ e) Examples:
+ i) Basic interactive test android bisection, where the android source is
+ at ~/android_src:
+ ./bisect.py android ~/android_src
+
+ ii) Basic boot test android bisection, where the android source is at
+ ~/android_src, and 10 jobs will be used to build android:
+ ./bisect.py android ~/android_src --num_jobs=10 \
+ --test_script=sysroot_wrapper/boot_test.sh
+
+Resuming:
+ bisect.py and binary_search_state.py offer the
+ ability to resume a bisection in case it was interrupted by a
+ SIGINT, power failure, etc. Every time the tool completes a
+ bisection iteration its state is saved to disk (usually to the file
+  "./bisect_driver.py.state"). If passed the --resume option, the tool
+  will automatically detect the state file and resume from the last
+ completed iteration.
+
+Overriding:
+ You can run ./bisect.py --help or ./binary_search_state.py
+  --help for a full list of arguments that can be overridden. Here are
+ a couple of examples:
+
+ Example 1 (do boot test instead of interactive test):
+ ./bisect.py package daisy 172.17.211.182 --test_script=cros_pkg/boot_test.sh
+
+ Example 2 (do package bisector system test instead of interactive test, this
+ is used to test the bisecting tool itself -- see comments in
+ hash_test.sh for more details):
+ ./bisect.py package daisy 172.17.211.182 \
+ --test_script=common/hash_test.sh --test_setup_script=""
+
+ Example 3 (enable verbose mode, disable pruning, and disable verification):
+ ./bisect.py package daisy 172.17.211.182 \
+ --verbose --prune=False --verify=False
+
diff --git a/binary_search_tool/README.bisect.md b/binary_search_tool/README.bisect.md
deleted file mode 100644
index 74715ca0..00000000
--- a/binary_search_tool/README.bisect.md
+++ /dev/null
@@ -1,248 +0,0 @@
-# `bisect.py`
-
-`bisect.py` is a wrapper around the general purpose
-`binary_search_state.py`. It provides a user friendly interface for
-bisecting various compilation errors. The 2 currently provided
-methods of bisecting are ChromeOS package and object bisection. Each
-method defines a default set of options to pass to
-`binary_search_state.py` and allow the user to override these defaults
-(see the "Overriding" section).
-
-Please note that all commands, examples, scripts, etc. are to be run from your
-chroot unless stated otherwise.
-
-## Bisection Methods
-
-### ChromeOS Package
-
-This method will bisect across all packages in a ChromeOS repository and find
-the offending packages (according to your test script). This method takes the
-following arguments:
-
-* board: The board to bisect on. For example: daisy, falco, etc.
-* remote: The IP address of the physical machine you're using to test with.
-
-By default the ChromeOS package method will do a simple interactive test that
-pings the machine and prompts the user if the machine is good.
-
-1. Setup: The ChromeOS package method requires that you have three build trees:
-
- ```
- /build/${board}.bad - The build tree for your "bad" build
- /build/${board}.good - The build tree for your "good" build
- /build/${board}.work - A full copy of /build/${board}.bad
- ```
-
-1. Cleanup: bisect.py does most cleanup for you, the only thing required by the
- user is to cleanup all built images and the three build trees made in
- `/build/`
-
-1. Default Arguments:
-
- ```
- --get_initial_items='cros_pkg/get_initial_items.sh'
- --switch_to_good='cros_pkg/switch_to_good.sh'
- --switch_to_bad='cros_pkg/switch_to_bad.sh'
- --test_setup_script='cros_pkg/test_setup.sh'
- --test_script='cros_pkg/interactive_test.sh'
- --incremental
- --prune
- --file_args
- ```
-
-1. Additional Documentation: See `./cros_pkg/README.cros_pkg_triage` for full
- documentation of ChromeOS package bisection.
-
-1. Examples:
-
- 1. Basic interactive test package bisection, on daisy board:
-
- ```
- ./bisect.py package daisy 172.17.211.184
- ```
-
- 2. Basic boot test package bisection, on daisy board:
-
- ```
- ./bisect.py package daisy 172.17.211.184 -t cros_pkg/boot_test.sh
- ```
-
-### ChromeOS Object
-
-This method will bisect across all objects in a ChromeOS package and find
-the offending objects (according to your test script). This method takes the
-following arguments:
-
-* board: The board to bisect on. For example: daisy, falco, etc.
-* remote: The IP address of the physical machine you're using to test with.
-* package: The package to bisect with. For example: chromeos-chrome
-* dir: (Optional) the directory for your good/bad build trees. Defaults to
- $BISECT_DIR or /tmp/sysroot_bisect. This value will set $BISECT_DIR
- for all bisecting scripts.
-
-By default the ChromeOS object method will do a simple interactive test that
-pings the machine and prompts the user if the machine is good.
-
-1. Setup: The ChromeOS package method requires that you populate your good and
- bad set of objects. `sysroot_wrapper` will automatically detect the
- `BISECT_STAGE` variable and use this to populate emerged objects. Here is an
- example:
-
- ```
- # Defaults to /tmp/sysroot_bisect
- export BISECT_DIR="/path/to/where/you/want/to/store/builds/"
-
- export BISECT_STAGE="POPULATE_GOOD"
- ./switch_to_good_compiler.sh
- emerge-${board} -C ${package_to_bisect}
- emerge-${board} ${package_to_bisect}
-
- export BISECT_STAGE="POPULATE_BAD"
- ./switch_to_bad_compiler.sh
- emerge-${board} -C {package_to_bisect}
- emerge-${board} ${package_to_bisect}
- ```
-
-1. Cleanup: The user must clean up all built images and the populated object
- files.
-
-1. Default Arguments:
-
- ```
- --get_initial_items='sysroot_wrapper/get_initial_items.sh'
- --switch_to_good='sysroot_wrapper/switch_to_good.sh'
- --switch_to_bad='sysroot_wrapper/switch_to_bad.sh'
- --test_setup_script='sysroot_wrapper/test_setup.sh'
- --test_script='sysroot_wrapper/interactive_test.sh'
- --noincremental
- --prune
- --file_args
- ```
-
-1. Additional Documentation: See `./sysroot_wrapper/README` for full
- documentation of ChromeOS object file bisecting.
-
-1. Examples:
-
- 1. Basic interactive test object bisection, on daisy board for cryptohome
- package: `./bisect.py object daisy 172.17.211.184 cryptohome`
-
- 2. Basic boot test package bisection, on daisy board for cryptohome
- package: `./bisect.py object daisy 172.17.211.184 cryptohome
- --test_script=sysroot_wrapper/boot_test.sh`
-
-### Android object
-
-NOTE: Because this isn't a ChromeOS bisection tool, the concept of a
- chroot doesn't exist. Just run this tool from a normal shell.
-
-This method will bisect across all objects in the Android source tree and
-find the offending objects (according to your test script). This method takes
-the following arguments:
-
-* `android_src`: The location of your android source tree
-
-* `num_jobs`: (Optional) The number of jobs to pass to make. This is dependent
- on how many cores your machine has. A good number is probably somewhere
- around 5 to 10.
-
-* `device_id`: (Optional) The serial code for the device you are testing on.
- This is used to determine which device should be used in case multiple
- devices are plugged into your computer. You can get serial code for your
- device by running "adb devices".
-
-* `dir`: (Optional) the directory for your good/bad build trees. Defaults to
- `$BISECT_DIR` or `~/ANDROID_BISECT/`. This value will set `$BISECT_DIR` for
- all bisecting scripts.
-
- By default the Android object method will do a simple interactive test that
- pings the machine and prompts the user if the machine is good.
-
-1. Setup: The Android object method requires that you populate your good and
- bad set of objects. The Android compiler wrapper will automatically detect
- the `BISECT_STAGE` variable and use this to populate emerged objects. Here
- is an example:
-
- ```
- # Defaults to ~/ANDROID_BISECT/
- export BISECT_DIR="/path/to/where/you/want/to/store/builds/"
-
- export BISECT_STAGE="POPULATE_GOOD"
- # Install the "good" compiler
- ./switch_to_good_compiler.sh
- make clean
- make -j <your_preferred_number_of_jobs>
-
- export BISECT_STAGE="POPULATE_BAD"
- # Install the "bad" compiler
- ./switch_to_bad_compiler.sh
- make clean
- make -j <your_preferred_number_of_jobs>
- ```
-
-1. Cleanup: The user must clean up all built images and the populated object
- files.
-
-1. Default Arguments:
-
- ```
- --get_initial_items='android/get_initial_items.sh'
- --switch_to_good='android/switch_to_good.sh'
- --switch_to_bad='android/switch_to_bad.sh'
- --test_setup_script='android/test_setup.sh'
- --test_script='android/interactive_test.sh'
- --incremental
- --prune
- --file_args
- ```
-
-1. Additional Documentation: See `./android/README.android` for full
- documentation of Android object file bisecting.
-
-1. Examples:
-
- 1. Basic interactive test android bisection, where the android source is at
- ~/android_src: `./bisect.py android ~/android_src`
-
- 2. Basic boot test android bisection, where the android source is at
- `~/android_src`, and 10 jobs will be used to build android: `./bisect.py
- android ~/android_src --num_jobs=10
- --test_script=sysroot_wrapper/boot_test.sh`
-
-### Resuming
-
-`bisect.py` and `binary_search_state.py` offer the
-ability to resume a bisection in case it was interrupted by a
-SIGINT, power failure, etc. Every time the tool completes a
-bisection iteration its state is saved to disk (usually to the file
-`./bisect_driver.py.state`). If passed the --resume option, the tool
-it will automatically detect the state file and resume from the last
-completed iteration.
-
-### Overriding
-
-You can run `./bisect.py --help` or `./binary_search_state.py
---help` for a full list of arguments that can be overriden. Here are
-a couple of examples:
-
-Example 1 (do boot test instead of interactive test):
-
-```
-./bisect.py package daisy 172.17.211.182 --test_script=cros_pkg/boot_test.sh
-```
-
-Example 2 (do package bisector system test instead of interactive test, this
- is used to test the bisecting tool itself -- see comments in
- hash_test.sh for more details):
-
-```
-./bisect.py package daisy 172.17.211.182 \
- --test_script=common/hash_test.sh --test_setup_script=""
-```
-
-Example 3 (enable verbose mode, disable pruning, and disable verification):
-
-```
-./bisect.py package daisy 172.17.211.182
- --verbose --prune=False --verify=False
-```
diff --git a/binary_search_tool/README.pass_bisect.md b/binary_search_tool/README.pass_bisect.md
deleted file mode 100644
index d03a563b..00000000
--- a/binary_search_tool/README.pass_bisect.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# Pass bisection
-
-This document describes a feature for the bisection tool, which provides
-pass and transformation level bisection for a bad object file.
-
-Before reading this document, please refer to README.bisect for general usage
-of the bisection tool.
-
-The benefit of using pass level bisection is:
-When building a bad object file, it can tell you which pass and transformation
-in the compiler caused the error.
-
-*Notice:* This tool will only work for LLVM/clang, since it is using options
-`-opt-bisect-limit` and `print-debug-counter` that only exist in LLVM.
-
-## Arguments
-
-All the required arguments in object-file-level bisection tool are still
-to be provided. In addition, you will need to add the following arguments:
-
-1. `--pass_bisect`: enables pass level bisection
-2. `--ir_diff`: enables output of IR differences
-
-Please refer to `--help` or the examples below for details about how to use
-them.
-
-## HOW TO USE: ChromeOS
-
-*TODO* - Future work: Currently this only works for Android.
-
-## HOW TO USE: Android
-
-1. Prerequisites: A general setup is still needed for Android, which means that
- you need to populate good and bad set of objects with two versions of
- compilers.
-
- See the documentation in `README.bisect.md` for more detailed instructions.
-
-1. Pass/Transformation Bisection: If you do not wish to override the other
- arguments, this command should be sufficient to do pass/transformation level
- bisection:
-
- ```
- ./bisect.py android PATH_TO_ANDROID_HOME_DIR
- --pass_bisect=’android/generate_cmd.sh’
- --prune=False
- --ir_diff
- --verbose
- ```
-
- Where:
-
- ```
- --pass_bisect:
- Enables pass/transformation level bisection and with default
- script to generate the command as ‘android/generate_cmd.sh’.
- --prune:
- For now, prune must be set to False to return only the first
- bad item.
- --ir_diff:
- Optional argument to print out IR differences.
- --verbose:
- To show IR diff, verbose needs to be on.
- ```
-
- Other default arguments:
-
- ```
- --get_initial_items='android/get_initial_items.sh'
- --switch_to_good='android/switch_to_good.sh'
- --switch_to_bad='android/switch_to_bad.sh'
- --test_setup_script='android/test_setup.sh'
- --test_script='android/interactive_test.sh'
- --incremental
- --prune
- --file_args
- ```
-
- You can always override them if needed. See README.bisect for more
- details.
-
-1. Other features: Features such as resuming, number of jobs, and device id
- remain the same as before. See README.bisect for more details.
diff --git a/binary_search_tool/README.testing.md b/binary_search_tool/README.testing
index 139dc087..6c81ab96 100644
--- a/binary_search_tool/README.testing.md
+++ b/binary_search_tool/README.testing
@@ -1,5 +1,3 @@
-# Testing the binary search tool
-
This file explains how to set up and run the various kinds of bisection tests.
The bisection tool comes with several sets of tests which you should
@@ -7,34 +5,33 @@ run after updating any of the bisection tool scripts OR after updating
the Android compiler wrapper (to make sure the wrapper will still work
correctly with bisection).
-## Before you start.
+Before you start.
+----------------
Before you can run the tests, your PYTHONPATH environment variable
must be correct. This means that it must include both the
-toolchain-utils directory and the `binary_search_tool` directory. The
+toolchain-utils directory and the binary_search_tool directory. The
easiest way to set it is:
-```
$ cd toolchain-utils
$ export PYTHONPATH=`pwd`:${PYTHONPATH}
$ cd binary_search_tool
$ export PYTHONPATH=`pwd`:${PYTHONPATH}
-```
-## Running the unittests.
+Running the unittests.
+----------------------
To run the basic unit tests:
-```
$ cd toolchain-utils/binary_search_tool/test
$ ./binary_search_tool_tester.py
-```
-# Running the bisection tests, testing the compiler wrapper.
+Running the bisection tests, testing the compiler wrapper.
+----------------------------------------------------------
If you want to run the bisection tests, and test the compiler wrapper
-(to make sure the `POPULATE_GOOD` and `POPULATE_BAD` stages are still
+(to make sure the POPULATE_GOOD and POPULATE_BAD stages are still
working properly) you can do the following.
If you are testing with the ANDROID COMPILER WRAPPER, you need to to some
@@ -42,53 +39,42 @@ preliminary setup:
Set up the compiler wrapper to replace GCC:
-```
-$ cd <android-root/prebuilts/clang/host/linux-x86/clang-368880/bin
-$ cp clang gcc
-$ whereis gcc
-gcc: /usr/bin/gcc /usr/lib/gcc /usr/bin/X11/gcc /usr/share/man/man1/gcc.1.gz
-$ cd /usr/bin
-$ ls -l gcc
-lrwxrwxrwx 1 root root 7 Feb 3 17:00 gcc -> gcc-4.8*
-$ sudo mv gcc gcc.real
-$ sudo ln -s \
- <android-root>/prebuilts/clang/host/linux-x86/clang-3688880/bin/gcc gcc
-```
+ $ cd <android-root/prebuilts/clang/host/linux-x86/clang-368880/bin
+ $ cp clang gcc
+ $ whereis gcc
+ gcc: /usr/bin/gcc /usr/lib/gcc /usr/bin/X11/gcc /usr/share/man/man1/gcc.1.gz
+ $ cd /usr/bin
+ $ ls -l gcc
+ lrwxrwxrwx 1 root root 7 Feb 3 17:00 gcc -> gcc-4.8*
+ $ sudo mv gcc gcc.real
+ $ sudo ln -s <android-root>/prebuilts/clang/host/linux-x86/clang-3688880/bin/gcc gcc
Move to the correct directory, then run the test script:
-```
-$ cd toolchain-utils/binary_search_tool
-$ ./run_bisect_test.py
-```
+ $ cd toolchain-utils/binary_search_tool
+ $ ./run_bisect_test.py
+
If you are testing with the CHROMEOS COMPILER WRAPPER, you MUST run the
tests from INSIDE your CHROOT (but you don't need to do any special setup):
-```
-$ cd <path-to-chromeos-root>
-$ cros_sdk
-$ cd ~/trunk/src/third_party/toolchain-utils
-```
+ $ cd <path-to-chromeos-root>
+ $ cros_sdk
+ $ cd ~/trunk/src/third_party/toolchain-utils
-Set up your `PYTHONPATH`:
+ Set up your PYTHONPATH:
-```
-$ export PYTHONPATH=`pwd`:${PYTHONPATH}
-$ cd binary_search_tool
-$ export PYTHONPATH=`pwd`:${PYTHONPATH}
-```
+ $ export PYTHONPATH=`pwd`:${PYTHONPATH}
+ $ cd binary_search_tool
+ $ export PYTHONPATH=`pwd`:${PYTHONPATH}
-Run the test script:
+ Run the test script:
-```
-$ ./run_bisect_test.py
-```
+ $ ./run_bisect_test.py
-## Running the bisection tests, without testing the compiler wrapper.
+Running the bisection tests, without testing the compiler wrapper.
+------------------------------------------------------------------
-```
$ cd toolchain-utils/binary_search_tool
$ ./full_bisect_test/run-test-nowrapper.sh
-```
diff --git a/binary_search_tool/android/README.android b/binary_search_tool/android/README.android
new file mode 100644
index 00000000..9e518f60
--- /dev/null
+++ b/binary_search_tool/android/README.android
@@ -0,0 +1,186 @@
+
+binary_search_state.py is a general binary search triage tool that
+performs a binary search on a set of things to try to identify which
+thing or thing(s) in the set is 'bad'. binary_search_state.py assumes
+that the user has two sets, one where everything is known to be good,
+and one which contains at least one bad item. binary_search_state.py
+then copies items from the good and bad sets into a working set and
+tests the result (good or bad). binary_search_state.py requires that
+a set of scripts be supplied to it for any particular job. For more
+information on binary_search_state.py, see
+
+https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/binary-searcher-tool-for-triage
+
+This particular set of scripts is designed to work with
+binary_search_state.py in order to find the bad object or set of
+bad objects in an Android build.
+
+
+QUICKSTART:
+
+After setting up your 2 build trees (see Prerequisites section), do the
+following:
+
+ - Decide which test script to use (boot_test.sh or
+ interactive_test.sh)
+ - Get the serial number for the Android device you will use for testing.
+ - Run the following:
+
+ $ cd <android_src>
+ $ source build/envsetup.sh
+ $ lunch <android_device_lunch_combo>
+ $ cd <path_to_toolchain_utils>/binary_search_tool/
+ $ NUM_JOBS=10 ANDROID_SERIAL=<device_serial> \
+ ./android/setup.sh <android_src>
+
+ If you chose the boot test, then:
+ TEST_SCRIPT=android/boot_test.sh
+
+ If you chose the interactive test, then:
+ TEST_SCRIPT=android/interactive_test.sh
+
+ Finally, run the binary search tool:
+
+ $ python ./binary_search_state.py \
+ --get_initial_items=android/get_initial_items.sh \
+ --switch_to_good=android/switch_to_good.sh \
+ --switch_to_bad=android/switch_to_bad.sh \
+ --test_setup_script=android/test_setup.sh \
+ --test_script=$TEST_SCRIPT \
+ --file_args \
+ --prune
+
+ Once you have completely finished doing the binary search/triage,
+ run the cleanup script:
+
+ $ android/cleanup.sh
+
+
+
+FILES AND SCRIPTS:
+
+ Check the header comments for each script for more in depth documentation.
+
+ boot_test.sh - One of two possible test scripts used to determine
+ if the Android image built from the objects is good
+ or bad. This script tests to see if the image
+ booted, and requires no user intervention.
+
+ cleanup.sh - This is called after the binary search tool completes. This
+ script will clean up the common.sh file generated by setup.sh
+
+ get_initial_items.sh - This script is used to determine all Android objects
+ that will be bisected.
+
+ test_setup.sh - This script will build and flash your image to the
+ Android device. If the flash fails, this script will
+ help the user troubleshoot by trying to flash again or
+ by asking the user to manually flash it.
+
+ interactive_test.sh - One of two possible scripts used to determine
+ if the Android image built from the objects
+ is good or bad. This script requires user
+ interaction to determine if the image is
+ good or bad.
+
+ setup.sh - This is the first script the user should call, after
+ taking care of the prerequisites. It sets up the
+ environment appropriately for running the Android
+ object binary search triage, and it generates the
+ necessary common script (see below).
+
+ switch_to_bad.sh - This script is used to link objects from the
+ 'bad' build tree into the work area.
+
+ switch_to_good.sh - This script is used to link objects from the
+ 'good' build tree into the work area.
+
+
+GENERATED SCRIPTS:
+
+ common.sh - contains basic environment variable definitions for
+ this binary search triage session.
+
+ASSUMPTIONS:
+
+- There are two different Android builds, for the same board/lunch combo with
+ the same set of generated object files. One build creates a good working
+ Android image and the other does not.
+
+- The toolchain bug you are tracking down is not related to the linker. If the
+ linker is broken or generates bad code, this tool is unlikely to help you.
+
+
+PREREQUISITES FOR USING THESE SCRIPTS:
+
+ Step 1: Decide where to store each build tree
+ By default, each build tree is stored in "~/ANDROID_BISECT". However you
+ can override this by exporting BISECT_DIR set to whatever directory you
+ please. Keep in mind these build trees take dozens of gigabytes each.
+
+ Step 2: Setup your android build environment
+ 1. `cd <android_src>`
+ 2. `source build/envsetup.sh`
+ 3. `lunch <android_device_lunch_combo>`
+
+ Step 3: Populate the good build tree
+ 1. `make clean`
+ 2. `export BISECT_STAGE=POPULATE_GOOD`
+ 3. Install your "good" toolchain in Android, this will most likely be
+ the toolchain that comes preinstalled with the Android source.
+ 4. Build all of Android: `make -j10`. The "-j" parameter depends on how
+ many cores your machine has. See Android documentation for more details.
+
+ Step 4: Populate the bad build tree
+ 1. `make clean`
+ 2. `export BISECT_STAGE=POPULATE_BAD`
+ 3. Install your "bad" toolchain in Android.
+ 4. Build all of Android again.
+
+ Step 5: Run the android setup script
+ 1. `cd <path_to_toolchain_utils>/binary_search_tool/`
+ 2. `NUM_JOBS=<jobs> ANDROID_SERIAL=<android_serial_num> \
+ android/setup.sh <android_src>`
+
+ WARNING: It's important that you leave the full "out/" directory in your
+ Android source alone after Step 4. The binary search tool will
+ use this directory as a skeleton to build each test image while
+ triaging.
+
+USING THESE SCRIPTS FOR BINARY TRIAGE OF OBJECTS:
+
+To use these scripts, you must first run setup.sh, passing it the path to your
+Android source directory. setup.sh will do the following:
+
+ - Verify that your build trees are set up correctly (with good, bad).
+ - Verify that each build tree has the same contents.
+ - Verify that the android build environment (lunch, etc.) are setup in your
+ current shell.
+ - Create the common.sh file that the other scripts passed to the
+ binary triage tool will need.
+
+
+This set of scripts comes with two alternate test scripts. One test
+script, boot_test.sh, just checks to make sure that the image
+booted (wait for device to boot to home screen) and assumes that is enough.
+The other test script, interactive_test.sh, is interactive and asks YOU
+to tell it whether the image on the android device is ok or not (it
+prompts you and waits for a response).
+
+
+Once you have run setup.sh (and decided which test script you
+want to use) run the binary triage tool using these scripts to
+isolate/identify the bad object:
+
+./binary_search_state.py \
+ --get_initial_items=android/get_initial_items.sh \
+ --switch_to_good=android/switch_to_good.sh \
+ --switch_to_bad=android/switch_to_bad.sh \
+ --test_setup_script=android/test_setup.sh \
+ --test_script=android/boot_test.sh \ # could use interactive_test.sh instead
+ --prune
+
+
+After you have finished running the tool and have identified the bad
+object(s), you will want to run the cleanup script (android/cleanup.sh).
+
diff --git a/binary_search_tool/android/README.android.md b/binary_search_tool/android/README.android.md
deleted file mode 100644
index 9445dcbb..00000000
--- a/binary_search_tool/android/README.android.md
+++ /dev/null
@@ -1,209 +0,0 @@
-# Android's binary search tool
-
-`binary_search_state.py` is a general binary search triage tool that
-performs a binary search on a set of things to try to identify which
-thing or thing(s) in the set is 'bad'. `binary_search_state.py` assumes
-that the user has two sets, one where everything is known to be good,
-and one which contains at least one bad item. `binary_search_state.py`
-then copies items from the good and bad sets into a working set and
-tests the result (good or bad). `binary_search_state.py` requires that
-a set of scripts be supplied to it for any particular job. For more
-information on `binary_search_state.py`, see
-
-https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/binary-searcher-tool-for-triage
-
-This particular set of scripts is designed to work with
-`binary_search_state.py` in order to find the bad object or set of
-bad objects in an Android build. Furthermore, it can also help find
-the bad compiler pass and transformation when building that bad object.
-
-
-## QUICKSTART
-
-After setting up your 2 build trees (see Prerequisites section), do the
-following:
-
-- Decide which test script to use (`boot_test.sh` or
- `interactive_test.sh`)
-- Get the serial number for the Android device you will use for testing.
-- Run the following:
-
- ```
- $ cd <android_src>
- $ source build/envsetup.sh
- $ lunch <android_device_lunch_combo>
- $ cd <path_to_toolchain_utils>/binary_search_tool/
- $ NUM_JOBS=10 ANDROID_SERIAL=<device_serial> \
- ./android/setup.sh <android_src>
- ```
-
- If you chose the boot test, then:
-
- ```
- TEST_SCRIPT=android/boot_test.sh
- ```
-
- If you chose the interactive test, then:
-
- ```
- TEST_SCRIPT=android/interactive_test.sh
- ```
-
- Finally, run the binary search tool:
-
- ```
- $ python ./binary_search_state.py \
- --get_initial_items=android/get_initial_items.sh \
- --switch_to_good=android/switch_to_good.sh \
- --switch_to_bad=android/switch_to_bad.sh \
- --test_setup_script=android/test_setup.sh \
- --test_script=$TEST_SCRIPT \
- --file_args \
- --prune
- ```
-
- Once you have completely finished doing the binary search/triage,
- run the cleanup script:
-
- ```
- $ android/cleanup.sh
- ```
-
-
-## FILES AND SCRIPTS
-
-Check the header comments for each script for more in depth documentation.
-
-`boot_test.sh` - One of two possible test scripts used to determine
- if the Android image built from the objects is good
- or bad. This script tests to see if the image
- booted, and requires no user intervention.
-
-`cleanup.sh` - This is called after the binary search tool completes. This
- script will clean up the common.sh file generated by setup.sh
-
-`get_initial_items.sh` - This script is used to determine all Android objects
- that will be bisected.
-
-`test_setup.sh` - This script will build and flash your image to the
- Android device. If the flash fails, this script will
- help the user troubleshoot by trying to flash again or
- by asking the user to manually flash it.
-
-`interactive_test.sh` - One of two possible scripts used to determine
- if the Android image built from the objects
- is good or bad. This script requires user
- interaction to determine if the image is
- good or bad.
-
-`setup.sh` - This is the first script the user should call, after
- taking care of the prerequisites. It sets up the
- environment appropriately for running the Android
- object binary search triage, and it generates the
- necessary common script (see below).
-
-`switch_to_bad.sh` - This script is used to link objects from the
- 'bad' build tree into the work area.
-
-`switch_to_good.sh` - This script is used to link objects from the
- 'good' build tree into the work area.
-
-`generate_cmd.sh` - This script will generate another temporary script, which
- contains the command line options to build the bad object
- file again with pass/transformation level limit.
-
-
-## GENERATED SCRIPTS
-
-`common.sh` - contains basic environment variable definitions for
- this binary search triage session.
-
-## ASSUMPTIONS
-
-- There are two different Android builds, for the same board/lunch combo with
- the same set of generated object files. One build creates a good working
- Android image and the other does not.
-
-- The toolchain bug you are tracking down is not related to the linker. If the
- linker is broken or generates bad code, this tool is unlikely to help you.
-
-
-PREREQUISITES FOR USING THESE SCRIPTS:
-
-1. Decide where to store each build tree
- By default, each build tree is stored in `~/ANDROID_BISECT`. However you
- can override this by exporting `BISECT_DIR` set to whatever directory you
- please. Keep in mind these build trees take dozens of gigabytes each.
-
-2. Setup your android build environment
-
- ```
- cd <android_src>
- source build/envsetup.sh
- lunch <android_device_lunch_combo>
- ```
-
-3. Populate the good build tree
-
- 1. `make clean`
- 2. `export BISECT_STAGE=POPULATE_GOOD`
- 3. Install your "good" toolchain in Android, this will most likely be
- the toolchain that comes preinstalled with the Android source.
- 4. Build all of Android: `make -j10`. The "-j" parameter depends on how
- many cores your machine has. See Android documentation for more details.
-
-4. Populate the bad build tree
-
- 1. `make clean`
- 2. `export BISECT_STAGE=POPULATE_BAD`
- 3. Install your "bad" toolchain in Android.
- 4. Build all of Android again.
-
-5. Run the android setup script
-
- 1. `cd <path_to_toolchain_utils>/binary_search_tool/`
- 2. `NUM_JOBS=<jobs> ANDROID_SERIAL=<android_serial_num>
- android/setup.sh <android_src>`
-
-WARNING: It's important that you leave the full `out/` directory in your
- Android source alone after Step 4. The binary search tool will
- use this directory as a skeleton to build each test image while
- triaging.
-
-## USING THESE SCRIPTS FOR BINARY TRIAGE OF OBJECTS
-
-To use these scripts, you must first run setup.sh, passing it the path to your
-Android source directory. setup.sh will do the following:
-
-- Verify that your build trees are set up correctly (with good, bad).
-- Verify that each build tree has the same contents.
-- Verify that the android build environment (lunch, etc.) are setup in your
- current shell.
-- Create the common.sh file that the other scripts passed to the
- binary triage tool will need.
-
-
-This set of scripts comes with two alternate test scripts. One test
-script, `boot_test.sh`, just checks to make sure that the image
-booted (wait for device to boot to home screen) and assumes that is enough.
-The other test script, `interactive_test.sh`, is interactive and asks YOU
-to tell it whether the image on the android device is ok or not (it
-prompts you and waits for a response).
-
-
-Once you have run `setup.sh` (and decided which test script you
-want to use) run the binary triage tool using these scripts to
-isolate/identify the bad object:
-
-```
-./binary_search_state.py \
- --get_initial_items=android/get_initial_items.sh \
- --switch_to_good=android/switch_to_good.sh \
- --switch_to_bad=android/switch_to_bad.sh \
- --test_setup_script=android/test_setup.sh \
- --test_script=android/boot_test.sh \ # could use interactive_test.sh instead
- --prune
-```
-
-After you have finished running the tool and have identified the bad
-object(s), you will want to run the cleanup script (android/cleanup.sh).
diff --git a/binary_search_tool/android/switch_to_bad.sh b/binary_search_tool/android/switch_to_bad.sh
index d44f9f13..f746b628 100755
--- a/binary_search_tool/android/switch_to_bad.sh
+++ b/binary_search_tool/android/switch_to_bad.sh
@@ -18,7 +18,7 @@ source android/common.sh
OBJ_LIST_FILE=$1
# Symlink from BAD obj to working tree.
-SWITCH_CMD="ln -sf ${BISECT_BAD_BUILD}/{} {}; touch {};"
+SWITCH_CMD="ln -f ${BISECT_BAD_BUILD}/{} {}; touch {};"
overall_status=0
diff --git a/binary_search_tool/android/switch_to_good.sh b/binary_search_tool/android/switch_to_good.sh
index 557553c3..1c046c3f 100755
--- a/binary_search_tool/android/switch_to_good.sh
+++ b/binary_search_tool/android/switch_to_good.sh
@@ -17,7 +17,7 @@ source android/common.sh
OBJ_LIST_FILE=$1
# Symlink from GOOD obj to working tree.
-SWITCH_CMD="ln -sf ${BISECT_GOOD_BUILD}/{} {}; touch {};"
+SWITCH_CMD="ln -f ${BISECT_GOOD_BUILD}/{} {}; touch {};"
overall_status=0
diff --git a/binary_search_tool/binary_search_state.py b/binary_search_tool/binary_search_state.py
index f6c8ac7c..0d5810c3 100755
--- a/binary_search_tool/binary_search_state.py
+++ b/binary_search_tool/binary_search_state.py
@@ -14,7 +14,6 @@ import math
import os
import pickle
import re
-import shutil
import sys
import tempfile
import time
@@ -71,8 +70,7 @@ class BinarySearchState(object):
def __init__(self, get_initial_items, switch_to_good, switch_to_bad,
test_setup_script, test_script, incremental, prune, pass_bisect,
- ir_diff, iterations, prune_iterations, verify, file_args,
- verbose):
+ iterations, prune_iterations, verify, file_args, verbose):
"""BinarySearchState constructor, see Run for full args documentation."""
self.get_initial_items = get_initial_items
self.switch_to_good = switch_to_good
@@ -82,7 +80,6 @@ class BinarySearchState(object):
self.incremental = incremental
self.prune = prune
self.pass_bisect = pass_bisect
- self.ir_diff = ir_diff
self.iterations = iterations
self.prune_iterations = prune_iterations
self.verify = verify
@@ -348,7 +345,7 @@ class BinarySearchState(object):
# If pass not found, return None
return None
- def BuildWithPassLimit(self, limit, generate_ir=False):
+ def BuildWithPassLimit(self, limit):
""" Rebuild bad item with pass level bisect limit
Run command line script generated by GenerateBadCommandScript(), with
@@ -360,8 +357,6 @@ class BinarySearchState(object):
pass_name: The debugcounter name of current limit pass.
"""
os.environ['LIMIT_FLAGS'] = '-mllvm -opt-bisect-limit=' + str(limit)
- if generate_ir:
- os.environ['LIMIT_FLAGS'] += ' -S -emit-llvm'
self.l.LogOutput(
'Limit flags: %s' % os.environ['LIMIT_FLAGS'],
print_to_console=self.verbose)
@@ -393,11 +388,7 @@ class BinarySearchState(object):
raise ValueError('[Error] While building, limit number does not match.')
return pass_num, self.CollectPassName(last_pass)
- def BuildWithTransformLimit(self,
- limit,
- pass_name=None,
- pass_limit=-1,
- generate_ir=False):
+ def BuildWithTransformLimit(self, limit, pass_name=None, pass_limit=-1):
""" Rebuild bad item with transformation level bisect limit
Run command line script generated by GenerateBadCommandScript(), with
@@ -416,8 +407,6 @@ class BinarySearchState(object):
' -mllvm -debug-counter=' + counter_name + \
'-count=' + str(limit) + \
' -mllvm -print-debug-counter'
- if generate_ir:
- os.environ['LIMIT_FLAGS'] += ' -S -emit-llvm'
self.l.LogOutput(
'Limit flags: %s' % os.environ['LIMIT_FLAGS'],
print_to_console=self.verbose)
@@ -425,10 +414,6 @@ class BinarySearchState(object):
_, _, msg = self.ce.RunCommandWOutput(command, print_to_console=False)
if 'Counters and values:' not in msg:
- # Print pass level IR diff only if transformation level bisection does
- # not work.
- if self.ir_diff:
- self.PrintIRDiff(pass_limit)
raise RuntimeError('No bisect info printed, DebugCounter may not be '
'supported by the compiler.')
@@ -454,35 +439,6 @@ class BinarySearchState(object):
# transformation count.
return 0
- def PrintIRDiff(self, pass_index, pass_name=None, trans_index=-1):
- bad_item = list(self.found_items)[0]
- self.l.LogOutput(
- 'IR difference before and after bad pass/transformation:',
- print_to_console=self.verbose)
-
- if trans_index == -1:
- # Pass level IR diff
- self.BuildWithPassLimit(pass_index, self.ir_diff)
- good_ir = os.path.join(tempfile.tempdir, 'good.s')
- shutil.copyfile(bad_item, good_ir)
- pass_index += 1
- self.BuildWithPassLimit(pass_index, self.ir_diff)
- else:
- # Transformation level IR diff
- self.BuildWithTransformLimit(trans_index, pass_name, pass_index,
- self.ir_diff)
- good_ir = os.path.join(tempfile.tempdir, 'good.s')
- shutil.copyfile(bad_item, good_ir)
- trans_index += 1
- self.BuildWithTransformLimit(trans_index, pass_name, pass_index,
- self.ir_diff)
-
- bad_ir = os.path.join(tempfile.tempdir, 'bad.s')
- shutil.copyfile(bad_item, bad_ir)
-
- command = 'diff %s %s' % (good_ir, bad_ir)
- _, _, _ = self.ce.RunCommandWOutput(command, print_to_console=self.verbose)
-
def DoSearchBadPass(self):
"""Perform full search for bad pass of bad item."""
logger.GetLogger().LogOutput('Starting to bisect bad pass for bad item.')
@@ -515,10 +471,6 @@ class BinarySearchState(object):
trans_index, _ = self.DoBinarySearchBadPass(pass_index, pass_name)
if (trans_index == 0):
raise ValueError('Bisecting %s cannot reproduce good result.' % pass_name)
-
- if self.ir_diff:
- self.PrintIRDiff(pass_index, pass_name, trans_index)
-
logger.GetLogger().LogOutput(
'Bisection result for bad item %s:\n'
'Bad pass: %s at number %d\n'
@@ -747,7 +699,6 @@ class MockBinarySearchState(BinarySearchState):
'incremental': True,
'prune': False,
'pass_bisect': None,
- 'ir_diff': False,
'iterations': 50,
'prune_iterations': 100,
'verify': True,
@@ -780,7 +731,6 @@ def Run(get_initial_items,
iterations=50,
prune=False,
pass_bisect=None,
- ir_diff=False,
noincremental=False,
file_args=False,
verify=True,
@@ -810,9 +760,6 @@ def Run(get_initial_items,
pass/ transformation level bisection for the bad item. Requires that
'prune' be set to False, and needs support of `-opt-bisect-limit`(pass)
and `-print-debug-counter`(transformation) from LLVM.
- ir_diff: Whether to print IR differences before and after bad
- pass/transformation to verbose output. Defaults to False, only works when
- pass_bisect is enabled.
noincremental: Whether to send "diffs" of good/bad items to switch scripts.
file_args: If True then arguments to switch scripts will be a file name
containing a newline separated list of the items to switch.
@@ -849,10 +796,6 @@ def Run(get_initial_items,
logger.GetLogger().LogOutput('"--pass_bisect" only works when '
'"--prune" is set to be False.')
return 1
- if not pass_bisect and ir_diff:
- logger.GetLogger().LogOutput('"--ir_diff" only works when '
- '"--pass_bisect" is enabled.')
-
switch_to_good = _CanonicalizeScript(switch_to_good)
switch_to_bad = _CanonicalizeScript(switch_to_bad)
if test_setup_script:
@@ -867,8 +810,8 @@ def Run(get_initial_items,
bss = BinarySearchState(get_initial_items, switch_to_good, switch_to_bad,
test_setup_script, test_script, incremental, prune,
- pass_bisect, ir_diff, iterations, prune_iterations,
- verify, file_args, verbose)
+ pass_bisect, iterations, prune_iterations, verify,
+ file_args, verbose)
bss.DoVerify()
try:
diff --git a/binary_search_tool/bisect_driver.py b/binary_search_tool/bisect_driver.py
index 6a69fbf4..21dd11fa 100644
--- a/binary_search_tool/bisect_driver.py
+++ b/binary_search_tool/bisect_driver.py
@@ -2,9 +2,6 @@
#
# This script is used to help the compiler wrapper in the ChromeOS and
# Android build systems bisect for bad object files.
-#
-# pylint: disable=not-callable
-# pylint: disable=indentation
"""Utilities for bisection of ChromeOS and Android object files.
This module contains a set of utilities to allow bisection between
@@ -142,8 +139,7 @@ def get_obj_path(execargs):
# Ignore args that do not create a file.
if obj_path in (
'-',
- '/dev/null',
- ):
+ '/dev/null',):
return ''
# Ignore files ending in .tmp.
if obj_path.endswith(('.tmp',)):
@@ -241,7 +237,6 @@ def cache_file(execargs, bisect_dir, cache, abs_file_path):
bisect_dir: The directory where bisection caches live.
cache: Which cache the file will be cached to (GOOD/BAD).
abs_file_path: Absolute path to file being cached.
-
Returns:
True if caching was successful, False otherwise.
"""
diff --git a/binary_search_tool/common.py b/binary_search_tool/common.py
index 40660b52..2850801c 100644
--- a/binary_search_tool/common.py
+++ b/binary_search_tool/common.py
@@ -203,22 +203,6 @@ def _BuildArgsDict(args):
'For now it only supports one single bad item, so to use it, '
'prune must be set to False.')
# No input (evals to False),
- # --ir_diff (evals to True),
- # --ir_diff=False,
- # --ir_diff=True
- args.AddArgument(
- '-d',
- '--ir_diff',
- dest='ir_diff',
- nargs='?',
- const=True,
- default=False,
- type=StrToBool,
- metavar='bool',
- help='Whether to print IR differences before and after bad '
- 'pass/transformation to verbose output. Defaults to False, '
- 'only works when pass_bisect is enabled.')
- # No input (evals to False),
# --noincremental (evals to True),
# --noincremental=False,
# --noincremental=True
diff --git a/binary_search_tool/cros_pkg/README.cros_pkg_triage b/binary_search_tool/cros_pkg/README.cros_pkg_triage
new file mode 100644
index 00000000..5e285008
--- /dev/null
+++ b/binary_search_tool/cros_pkg/README.cros_pkg_triage
@@ -0,0 +1,185 @@
+
+binary_search_state.py is a general binary search triage tool that
+performs a binary search on a set of things to try to identify which
+thing or thing(s) in the set is 'bad'. binary_search_state.py assumes
+that the user has two sets, one where everything is known to be good,
+and one which contains at least one bad item. binary_search_state.py
+then copies items from the good and bad sets into a working set and
+tests the result (good or bad). binary_search_state.py requires that
+a set of scripts be supplied to it for any particular job. For more
+information on binary_search_state.py, see
+
+https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/binary-searcher-tool-for-triage
+
+This particular set of scripts is designed to work with
+binary_search_state.py in order to find the bad package or set of
+packages in a ChromeOS build.
+
+
+QUICKSTART:
+
+After setting up your 3 build trees (see Prerequisites section), do the
+following:
+
+ - Decide which test script to use (boot_test.sh or
+ interactive_test.sh)
+ - Get the IP name or address of the chromebook you will use for testing.
+ - Do the following inside your chroot:
+
+ $ cd ~/trunk/src/third_party/toolchain_utils/binary_search_tool
+ $ ./cros_pkg/setup.sh <board-to-test> <IP-name-or-address-of-chromebook>
+
+ If you chose the boot test, then:
+
+ $ python ./binary_search_state.py \
+ --get_initial_items=cros_pkg/get_initial_items.sh \
+ --switch_to_good=cros_pkg/switch_to_good.sh \
+ --switch_to_bad=cros_pkg/switch_to_bad.sh \
+ --test_setup_script=cros_pkg/test_setup.sh \
+ --test_script=cros_pkg/boot_test.sh \
+ --file_args \
+ --prune
+
+ Otherwise, if you chose the interactive test, then:
+
+ $ python ./binary_search_state.py \
+ --get_initial_items=cros_pkg/get_initial_items.sh \
+ --switch_to_good=cros_pkg/switch_to_good.sh \
+ --switch_to_bad=cros_pkg/switch_to_bad.sh \
+ --test_setup_script=cros_pkg/test_setup.sh \
+ --test_script=cros_pkg/interactive_test.sh \
+ --file_args \
+ --prune
+
+ Once you have completely finished doing the binary search/triage,
+ run the generated cleanup script, to restore your chroot to the state
+ it was in before you ran the setup.sh script:
+
+ $ cros_pkg/${BOARD}_cleanup.sh
+
+
+
+FILES AND SCRIPTS:
+
+ boot_test.sh - One of two possible test scripts used to determine
+ if the ChromeOS image built from the packages is good
+ or bad. This script tests to see if the image
+ booted, and requires no user intervention.
+
+ create_cleanup_script.py - This is called by setup.sh, to
+ generate ${BOARD}_cleanup.sh,
+ which is supposed to be run by the user
+ after the binary search triage process is
+ finished, to undo the changes made by
+ setup.sh and return everything
+ to its original state.
+
+ get_initial_items.sh - This script is used to determine the current
+ set of ChromeOS packages.
+
+ test_setup.sh - This script will build and flash your image to the
+ remote machine. If the flash fails, this script will
+ help the user troubleshoot by flashing through usb or
+ by retrying the flash over ethernet.
+
+ interactive_test.sh - One of two possible scripts used to determine
+ if the ChromeOS image built from the packages
+ is good or bad. This script requires user
+ interaction to determine if the image is
+ good or bad.
+
+ setup.sh - This is the first script the user should call, after
+ taking care of the prerequisites. It sets up the
+ environment appropriately for running the ChromeOS
+ package binary search triage, and it generates two
+ necessary scripts (see below).
+
+ switch_to_bad.sh - This script is used to copy packages from the
+ 'bad' build tree into the work area.
+
+ switch_to_good.sh - This script is used to copy packages from the
+ 'good' build tree into the work area.
+
+
+GENERATED SCRIPTS:
+
+ common.sh - contains basic environment variable definitions for
+ this binary search triage session.
+
+ ${BOARD}_cleanup.sh - script to undo all the changes made by
+ running setup.sh, and returning
+ everything to its original state. The user
+ should manually run this script once the
+ binary search triage process is over.
+
+ASSUMPTIONS:
+
+- There are two different ChromeOS builds, for the same board, with the
+ same set of ChromeOS packages. One build creates a good working ChromeOS
+ image and the other does not.
+
+- You have saved the complete build trees for both the good and bad builds.
+
+
+PREREQUISITES FOR USING THESE SCRIPTS (inside the chroot):
+
+- The "good" build tree, for the board, is in /build/${board}.good
+ (e.g. /build/lumpy.good or /build/daisy.good).
+
+- The "bad" build tree is in /build/${board}.bad
+ (e.g. /build/lumpy.bad or /build/daisy.bad).
+
+- You made a complete copy of the "bad" build tree , and put it in
+ /build/${board}.work (e.g. /build/lumpy.work or /build/daisy.work).
+ The easiest way to do this is to use something similar to the
+ following set of commands (this example assumes the board is
+ 'lumpy'):
+
+ $ cd /build
+ $ sudo tar -cvf lumpy.bad.tar lumpy.bad
+ $ sudo mv lumpy.bad lumpy.work
+ $ sudo tar -xvf lumpy.bad.tar
+
+
+USING THESE SCRIPTS FOR BINARY TRIAGE OF PACKAGES:
+
+To use these scripts, you must first run setup.sh, passing it two
+arguments (in order): the board for which you are building the image;
+and the name or ip address of the chromebook you want to use for
+testing your chromeos images. setup.sh will do the following:
+
+ - Verify that your build trees are set up correctly (with good, bad
+ and work).
+ - Create a soft link for /build/${board} pointing to the work build
+ tree.
+ - Create the common.sh file that the other scripts passed to the
+ binary triage tool will need.
+ - Create a cleanup script, ${board}_cleanup.sh, for you to
+ run after you are done with the binary triages, to undo all of these
+ various changes that setup.sh did.
+
+
+This set of scripts comes with two alternate test scripts. One test
+script, boot_test.sh, just checks to make sure that the image
+booted (i.e. responds to ping) and assumes that is enough. The other
+test script, interactive_test.sh, is interactive and asks YOU
+to tell it whether the image on the chromebook is ok or not (it
+prompts you and waits for a response).
+
+
+Once you have run setup.sh (and decided which test script you
+want to use) run the binary triage tool using these scripts to
+isolate/identify the bad package:
+
+~/trunk/src/third_party/toolchain_utils/binary_search_tool/binary_search_state.py \
+ --get_initial_items=cros_pkg/get_initial_items.sh \
+ --switch_to_good=cros_pkg/switch_to_good.sh \
+ --switch_to_bad=cros_pkg/switch_to_bad.sh \
+ --test_setup_script=cros_pkg/test_setup.sh \
+ --test_script=cros_pkg/boot_test.sh \ # could use interactive_test.sh instead
+ --prune
+
+
+After you have finished running the tool and have identified the bad
+package(s), you will want to run the cleanup script that setup.sh
+generated (cros_pkg/${BOARD}_cleanup.sh).
diff --git a/binary_search_tool/cros_pkg/README.cros_pkg_triage.md b/binary_search_tool/cros_pkg/README.cros_pkg_triage.md
deleted file mode 100644
index 17121dd7..00000000
--- a/binary_search_tool/cros_pkg/README.cros_pkg_triage.md
+++ /dev/null
@@ -1,193 +0,0 @@
-# CrOS's binary search tool
-
-`binary_search_state.py` is a general binary search triage tool that
-performs a binary search on a set of things to try to identify which
-thing or thing(s) in the set is 'bad'. `binary_search_state.py` assumes
-that the user has two sets, one where everything is known to be good,
-ane one which contains at least one bad item. `binary_search_state.py`
-then copies items from the good and bad sets into a working set and
-tests the result (good or bad). `binary_search_state.py` requires that
-a set of scripts be supplied to it for any particular job. For more
-information on `binary_search_state.py`, see
-
-https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/binary-searcher-tool-for-triage
-
-This particular set of scripts is designed to work wtih
-`binary_search_state.py` in order to find the bad package or set of
-packages in a ChromeOS build.
-
-
-## QUICKSTART
-
-After setting up your 3 build trees (see Prerequisites section), do the
-following:
-
-- Decide which test script to use (`boot_test.sh` or
- `interactive_test.sh`)
-- Get the IP name or address of the chromebook you will use for testing.
-- Do the following inside your chroot:
-
- ```
- $ cd ~/trunk/src/third_party/toolchain_utils/binary_search_tool
- $ ./cros_pkg/setup.sh <board-to-test> <IP-name-or-address-of-chromebook>
- ```
-
- If you chose the boot test, then:
-
- ```
- $ python ./binary_search_state.py \
- --get_initial_items=cros_pkg/get_initial_items.sh \
- --switch_to_good=cros_pkg/switch_to_good.sh \
- --switch_to_bad=cros_pkg/switch_to_bad.sh \
- --test_setup_script=cros_pkg/test_setup.sh \
- --test_script=cros_pkg/boot_test.sh \
- --file_args \
- --prune
- ```
-
- Otherwise, if you chose the interactive test, then:
-
- ```
- $ python ./binary_search_state.py \
- --get_initial_items=cros_pkg/get_initial_items.sh \
- --switch_to_good=cros_pkg/switch_to_good.sh \
- --switch_to_bad=cros_pkg/switch_to_bad.sh \
- --test_setup_script=cros_pkg/test_setup.sh \
- --test_script=cros_pkg/interactive_test.sh \
- --file_args \
- --prune
- ```
-
- Once you have completely finished doing the binary search/triage,
- run the genereated cleanup script, to restore your chroot to the state
- it was in before you ran the `setup.sh` script:
-
- ```
- $ cros_pkg/${BOARD}_cleanup.sh
- ```
-
-
-## FILES AND SCRIPTS
-
-`boot_test.sh` - One of two possible test scripts used to determine
- if the ChromeOS image built from the packages is good
- or bad. This script tests to see if the image
- booted, and requires no user intervention.
-
-`create_cleanup_script.py` - This is called by setup.sh, to
- generate ${BOARD}_cleanup.sh,
- which is supposed to be run by the user
- after the binary search triage process is
- finished, to undo the changes made by
- setup.sh and return everything
- to its original state.
-
-`get_initial_items.sh` - This script is used to determine the current
- set of ChromeOS packages.
-
-`test_setup.sh` - This script will build and flash your image to the
- remote machine. If the flash fails, this script will
- help the user troubleshoot by flashing through usb or
- by retrying the flash over ethernet.
-
-`interactive_test.sh` - One of two possible scripts used to determine
- if the ChromeOS image built from the packages
- is good or bad. This script requires user
- interaction to determine if the image is
- good or bad.
-
-`setup.sh` - This is the first script the user should call, after
- taking care of the prerequisites. It sets up the
- environment appropriately for running the ChromeOS
- package binary search triage, and it generates two
- necessary scripts (see below).
-
-`switch_to_bad.sh` - This script is used to copy packages from the
- 'bad' build tree into the work area.
-
-`switch_to_good.sh` - This script is used to copy packages from the
- 'good' build tree into the work area.
-
-
-## GENERATED SCRIPTS
-
-`common.sh` - contains basic environment variable definitions for
- this binary search triage session.
-
-`${BOARD}_cleanup.sh` - script to undo all the changes made by
- running setup.sh, and returning
- everything to its original state. The user
- should manually run this script once the
- binary search triage process is over.
-
-## ASSUMPTIONS
-
-- There are two different ChromeOS builds, for the same board, with the
- same set of ChromeOS packages. One build creates a good working ChromeOS
- image and the other does not.
-
-- You have saved the complete build trees for both the good and bad builds.
-
-
-## PREREQUISITES FOR USING THESE SCRIPTS (inside the chroot)
-
-- The "good" build tree, for the board, is in /build/${board}.good
- (e.g. /build/lumpy.good or /build/daisy.good).
-
-- The "bad" build tree is in /build/${board}.bad
- (e.g. /build/lumpy.bad or /build/daisy.bad).
-
-- You made a complete copy of the "bad" build tree , and put it in
- /build/${board}.work (e.g. /build/lumpy.work or /build/daisy.work.
- The easiest way to do this is to use something similar to the
- following set of commands (this example assumes the board is
- 'lumpy'):
-
- ```
- $ cd /build
- $ sudo tar -cvf lumpy.bad.tar lumpy.bad
- $ sudo mv lumpy.bad lumpy.work
- $ sudo tar -xvf lumpy.bad.tar
- ```
-
-
-## USING THESE SCRIPTS FOR BINARY TRIAGE OF PACKAGES
-
-To use these scripts, you must first run setup.sh, passing it two
-arguments (in order): the board for which you are building the image;
-and the name or ip address of the chromebook you want to use for
-testing your chromeos images. setup.sh will do the following:
-
-- Verify that your build trees are set up correctly (with good, bad and work).
-- Create a soft link for /build/${board} pointing to the work build tree.
-- Create the common.sh file that the other scripts passed to the binary triage
- tool will need.
-- Create a cleanup script, ${board}_cleanup.sh, for you to run after you are
- done with the binary triages, to undo all of these various changes that
- setup.sh did.
-
-This set of scripts comes with two alternate test scripts. One test
-script, `boot_test.sh`, just checks to make sure that the image
-booted (i.e. responds to ping) and assumes that is enough. The other
-test script, `interactive_test.sh`, is interactive and asks YOU
-to tell it whether the image on the chromebook is ok or not (it
-prompts you and waits for a response).
-
-
-Once you have run `setup.sh` (and decided which test script you
-want to use) run the binary triage tool using these scripts to
-isolate/identify the bad package:
-
-```
-~/trunk/src/third_party/toolchain_utils/binary_search_tool/binary_search_state.py \
- --get_initial_items=cros_pkg/get_initial_items.sh \
- --switch_to_good=cros_pkg/switch_to_good.sh \
- --switch_to_bad=cros_pkg/switch_to_bad.sh \
- --test_setup_script=cros_pkg/test_setup.sh \
- --test_script=cros_pkg/boots_test.sh \ # could use interactive_test.sh instead
- --prune
-```
-
-After you have finished running the tool and have identified the bad
-package(s), you will want to run the cleanup script that `setup.sh`
-generated (`cros_pkg/${BOARD}_cleanup.sh`).
diff --git a/binary_search_tool/ndk/README b/binary_search_tool/ndk/README
new file mode 100644
index 00000000..324d1391
--- /dev/null
+++ b/binary_search_tool/ndk/README
@@ -0,0 +1,84 @@
+
+This is an example bisection for an NDK build system. This example specifically
+bisects the sample NDK Teapot app. All steps (setup and otherwise) for bisection
+can be found in DO_BISECTION.sh. This shell script is meant to show the process
+required to bisect a compiler problem in an arbitrary NDK app build system.
+
+There are three necessary setup steps to run this example:
+
+ 1. Install the NDK (known to work with r12b)
+ a. See here for NDK: https://developer.android.com/ndk/index.html
+ b. Go here for older NDK downloads: https://github.com/android-ndk/ndk/wiki
+
+ 2. Install the compiler wrapper provided with this repo. See
+ compiler_wrapper.py for more details.
+ a. Essentially you must go into the NDK source (or where you build system
+ stores its toolchain), rename your compilers to <compiler>.real, and
+ create a symlink pointing to compiler_wrapper.py named <compiler>
+ (where your compiler used to be).
+ b. If you're using the toolchains that come with the NDK they live at:
+ <ndk_path>/toolchains/<arch>/prebuilt/<host>/bin
+ example:
+ <ndk_path>/toolchains/llvm/prebuilt/linux-x86_64/bin/clang
+
+ 3. Plug in an Arm7 compatible Android device with usb debugging enabled.
+ a. This bisection example was tested with a Nexus 5X
+ b. It should be relatively simple to change the example to work with other
+ types of devices. Just change the scripts, and change PATCH1 to use a
+ different build flavor (like x86). See below for more details.
+
+This example contains two patches:
+
+  PATCH1 - These are the necessary changes to the build system to make the
+ bisection easier. More specifically, it adds an arm7 build flavor to gradle.
+ By default, this project will build objects for all possible architectures and
+ package them into one big apk. These object files meant for another
+ architecture just sit there and don't actually execute. By adding a build
+ flavor for arm7, our compiler wrapper won't try to bisect object files meant
+ for another device.
+
+ PATCH2 - This patch is what inserts the "compiler error". This is a simple
+ nullptr error in one of the source files, but it is meant to mimic bad code
+ generation. The result of the error is the app simply crashes immediately
+ after starting.
+
+Using another device architecture:
+
+  If we want to bisect for an x86-64 device we first need to provide an arch
+ specific build flavor in our app/build.gradle file:
+
+ create("x86-64") {
+ ndk.abiFilters.add("x86_64")
+ }
+
+ We want to add this under the same "productFlavors" section that our arm7
+ build flavor is in (see PATCH1). Now we should have the "installx86-64Debug"
+ task in our build system. We can use this to build and install an x86-64
+ version of our app.
+
+ Now we want to change our test_setup.sh script to run our new gradle task:
+ ./gradlew installx86-64Debug
+
+ Keep in mind, these specific build flavors are not required. If your build
+ system makes these device specific builds difficult to implement, the
+ bisection tool will function perfectly fine without them. However, the
+  downside of not targeting a single architecture is the bisection will
+ take longer (as it will need to search across more object files).
+
+Additional Documentation:
+ These are internal Google documents, if you are a developer external to
+ Google please ask whoever gave you this sample for access or copies to the
+ documentation. If you cannot gain access, the various READMEs paired with the
+ bisector should help you.
+
+ * Ahmad's original presentation:
+ https://goto.google.com/zxdfyi
+
+ * Bisection tool update design doc:
+ https://goto.google.com/zcwei
+
+ * Bisection tool webpage:
+ https://goto.google.com/ruwpyi
+
+ * Compiler wrapper webpage:
+ https://goto.google.com/xossn
diff --git a/binary_search_tool/ndk/README.md b/binary_search_tool/ndk/README.md
deleted file mode 100644
index e41311a1..00000000
--- a/binary_search_tool/ndk/README.md
+++ /dev/null
@@ -1,89 +0,0 @@
-# NDK Bisection tool
-
-This is an example bisection for an NDK build system. This example specifically
-bisects the sample NDK Teapot app. All steps (setup and otherwise) for bisection
-can be found in `DO_BISECTION.sh`. This shell script is meant to show the
-process required to bisect a compiler problem in an arbitrary NDK app build
-system.
-
-There are three necessary setup steps to run this example:
-
-1. Install the NDK (known to work with r12b)
-
- 1. See here for NDK: https://developer.android.com/ndk/index.html
- 2. Go here for older NDK downloads: https://github.com/android-ndk/ndk/wiki
-
-1. Install the compiler wrapper provided with this repo. See
- compiler_wrapper.py for more details.
-
- 1. Essentially you must go into the NDK source (or where you build system
- stores its toolchain), rename your compilers to <compiler>.real, and
- create a symlink pointing to compiler_wrapper.py named <compiler>
- (where your compiler used to be).
-
- 2. If you're using the toolchains that come with the NDK they live at:
- `<ndk_path>/toolchains/<arch>/prebuilt/<host>/bin`
- example:
- `<ndk_path>/toolchains/llvm/prebuilt/linux-x86_64/bin/clang`
-
-1. Plug in an Arm7 compatible Android device with usb debugging enabled.
-
- 1. This bisection example was tested with a Nexus 5X
-
- 2. It should be relatively simple to change the example to work with other
- types of devices. Just change the scripts, and change PATCH1 to use a
- different build flavor (like x86). See below for more details.
-
-This example contains two patches:
-
-`PATCH1` - This is the necessary changes to the build system to make the
-bisection easier. More specifically, it adds an arm7 build flavor to gradle.
-By default, this project will build objects for all possible architectures and
-package them into one big apk. These object files meant for another
-architecture just sit there and don't actually execute. By adding a build
-flavor for arm7, our compiler wrapper won't try to bisect object files meant
-for another device.
-
-`PATCH2` - This patch is what inserts the "compiler error". This is a simple
-nullptr error in one of the source files, but it is meant to mimic bad code
-generation. The result of the error is the app simply crashes immediately
-after starting.
-
-## Using another device architecture
-
-If we want to bisect for an x86-64 device we first need to provide a arch
-specific build flavor in our app/build.gradle file:
-
-```
-create("x86-64") {
- ndk.abiFilters.add("x86_64")
-}
-```
-
-We want to add this under the same "productFlavors" section that our arm7
-build flavor is in (see PATCH1). Now we should have the "installx86-64Debug"
-task in our build system. We can use this to build and install an x86-64
-version of our app.
-
-Now we want to change our `test_setup.sh` script to run our new gradle task:
-```
-./gradlew installx86-64Debug
-```
-
-Keep in mind, these specific build flavors are not required. If your build
-system makes these device specific builds difficult to implement, the
-bisection tool will function perfectly fine without them. However, the
-downside of not having targetting a single architecture is the bisection will
-take longer (as it will need to search across more object files).
-
-## Additional Documentation
-
-These are internal Google documents, if you are a developer external to
-Google please ask whoever gave you this sample for access or copies to the
-documentation. If you cannot gain access, the various READMEs paired with the
-bisector should help you.
-
-* Ahmad's original presentation: https://goto.google.com/zxdfyi
-* Bisection tool update design doc: https://goto.google.com/zcwei
-* Bisection tool webpage: https://goto.google.com/ruwpyi
-* Compiler wrapper webpage: https://goto.google.com/xossn
diff --git a/binary_search_tool/sysroot_wrapper/README b/binary_search_tool/sysroot_wrapper/README
new file mode 100644
index 00000000..599d700d
--- /dev/null
+++ b/binary_search_tool/sysroot_wrapper/README
@@ -0,0 +1,28 @@
+This is a set of scripts to use when triaging compiler problem by using
+the bisecting functionality included in the sysroot_wrapper.hardened.
+The only script that you need to create for your triaging problem is the
+test_script.sh (The ones in this directory are here only as an example).
+
+Before running the binary searcher tool you will need to run the setup script:
+
+./sysroot_wrapper/setup.sh ${board} ${remote_ip} ${package}
+
+This setup script will ensure your $BISECT_DIR is properly populated and
+generate a common variable script for the convenience of the scripts in
+./sysroot_wrapper
+
+To run the binary searcher tool with these scripts, execute it like this:
+
+./binary_search_state.py --get_initial_items=./sysroot_wrapper/get_initial_items.sh --switch_to_good=./sysroot_wrapper/switch_to_good.sh --switch_to_bad=./sysroot_wrapper/switch_to_bad.sh --test_script=./sysroot_wrapper/test_script.sh --noincremental --file_args 2>&1 | tee /tmp/binary_search.log
+
+Finally once done you will want to run the cleanup script:
+
+./sysroot_wrapper/cleanup.sh
+
+For more information on how to use the sysroot_wrapper to do object file
+triaging see:
+
+https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/bisecting-compiler-problems
+
+
+
diff --git a/binary_search_tool/sysroot_wrapper/README.md b/binary_search_tool/sysroot_wrapper/README.md
deleted file mode 100644
index 89904a0b..00000000
--- a/binary_search_tool/sysroot_wrapper/README.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# Sysroot wrapper
-
-This is a set of scripts to use when triaging compiler problem by using
-the bisecting functionality included in the `sysroot_wrapper.hardened`.
-The only script that you need to create for your triaging problem is the
-`test_script.sh` (The ones in this directory are here only as an example).
-
-Before running the binary searcher tool you will need to run the setup script:
-
-```
-./sysroot_wrapper/setup.sh ${board} ${remote_ip} ${package}
-```
-
-This setup script will ensure your `$BISECT_DIR` is properly populated and
-generate a common variable script for the convenience of the scripts in
-`./sysroot_wrapper`
-
-To run the binary searcher tool with these scripts, execute it like this:
-
-```
-./binary_search_state.py \
- --get_initial_items=./sysroot_wrapper/get_initial_items.sh \
- --switch_to_good=./sysroot_wrapper/switch_to_good.sh \
- --switch_to_bad=./sysroot_wrapper/switch_to_bad.sh \
- --test_script=./sysroot_wrapper/test_script.sh \
- --noincremental \
- --file_args \
- 2>&1 | tee /tmp/binary_search.log
-```
-
-Finally once done you will want to run the cleanup script:
-`./sysroot_wrapper/cleanup.sh`
-
-For more information on how to use the `sysroot_wrapper` to do object file
-triaging see: https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/bisecting-compiler-problems
diff --git a/binary_search_tool/test/binary_search_tool_tester.py b/binary_search_tool/test/binary_search_tool_tester.py
index aff45a86..923ea112 100755
--- a/binary_search_tool/test/binary_search_tool_tester.py
+++ b/binary_search_tool/test/binary_search_tool_tester.py
@@ -132,7 +132,7 @@ class BisectingUtilsTest(unittest.TestCase):
cleanup_list = [
'./is_setup', binary_search_state.STATE_FILE, 'noinc_prune_bad',
- 'noinc_prune_good', './cmd_script.sh'
+ 'noinc_prune_good'
]
for f in cleanup_list:
if os.path.exists(f):
@@ -304,6 +304,24 @@ class BisectingUtilsTest(unittest.TestCase):
found_obj = int(bss.found_items.pop())
self.assertEquals(bad_objs[found_obj], 1)
+ def test_pass_bisect(self):
+ bss = binary_search_state.MockBinarySearchState(
+ get_initial_items='./gen_init_list.py',
+ switch_to_good='./switch_to_good.py',
+ switch_to_bad='./switch_to_bad.py',
+ pass_bisect='./generate_cmd.py',
+ test_script='./is_good.py',
+ test_setup_script='./test_setup.py',
+ prune=False,
+ file_args=True)
+ # TODO: Need to design unit tests for pass level bisection
+ bss.DoSearchBadItems()
+ self.assertEquals(len(bss.found_items), 1)
+
+ bad_objs = common.ReadObjectsFile()
+ found_obj = int(bss.found_items.pop())
+ self.assertEquals(bad_objs[found_obj], 1)
+
def test_set_file(self):
binary_search_state.Run(
get_initial_items='./gen_init_list.py',
@@ -349,131 +367,6 @@ class BisectingUtilsTest(unittest.TestCase):
self.assertEqual(actual_result, expected_result)
-class BisectingUtilsPassTest(BisectingUtilsTest):
- """Tests for bisecting tool at pass/transformation level."""
-
- def check_pass_output(self, pass_name, pass_num, trans_num):
- _, out, _ = command_executer.GetCommandExecuter().RunCommandWOutput(
- ('grep "Bad pass: " logs/binary_search_tool_tester.py.out | '
- 'tail -n1'))
- ls = out.splitlines()
- self.assertEqual(len(ls), 1)
- line = ls[0]
- _, _, bad_info = line.partition('Bad pass: ')
- actual_info = pass_name + ' at number ' + str(pass_num)
- self.assertEqual(actual_info, bad_info)
-
- _, out, _ = command_executer.GetCommandExecuter().RunCommandWOutput(
- ('grep "Bad transformation number: '
- '" logs/binary_search_tool_tester.py.out | '
- 'tail -n1'))
- ls = out.splitlines()
- self.assertEqual(len(ls), 1)
- line = ls[0]
- _, _, bad_info = line.partition('Bad transformation number: ')
- actual_info = str(trans_num)
- self.assertEqual(actual_info, bad_info)
-
- def test_with_prune(self):
- ret = binary_search_state.Run(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=True,
- file_args=True)
- self.assertEquals(ret, 1)
-
- def test_gen_cmd_script(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- bss.DoSearchBadItems()
- cmd_script_path = bss.cmd_script
- self.assertTrue(os.path.exists(cmd_script_path))
-
- def test_no_pass_support(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- bss.cmd_script = './cmd_script_no_support.py'
- # No support for -opt-bisect-limit
- with self.assertRaises(RuntimeError):
- bss.BuildWithPassLimit(-1)
-
- def test_no_transform_support(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- bss.cmd_script = './cmd_script_no_support.py'
- # No support for -print-debug-counter
- with self.assertRaises(RuntimeError):
- bss.BuildWithTransformLimit(-1, 'counter_name')
-
- def test_pass_transform_bisect(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- pass_num = 4
- trans_num = 19
- bss.cmd_script = './cmd_script.py %d %d' % (pass_num, trans_num)
- bss.DoSearchBadPass()
- self.check_pass_output('instcombine-visit', pass_num, trans_num)
-
- def test_result_not_reproduced_pass(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- # Fails reproducing at pass level.
- pass_num = 0
- trans_num = 19
- bss.cmd_script = './cmd_script.py %d %d' % (pass_num, trans_num)
- with self.assertRaises(ValueError):
- bss.DoSearchBadPass()
-
- def test_result_not_reproduced_transform(self):
- bss = binary_search_state.MockBinarySearchState(
- get_initial_items='./gen_init_list.py',
- switch_to_good='./switch_to_good.py',
- switch_to_bad='./switch_to_bad.py',
- test_script='./is_good.py',
- pass_bisect='./generate_cmd.py',
- prune=False,
- file_args=True)
- # Fails reproducing at transformation level.
- pass_num = 4
- trans_num = 0
- bss.cmd_script = './cmd_script.py %d %d' % (pass_num, trans_num)
- with self.assertRaises(ValueError):
- bss.DoSearchBadPass()
-
-
class BisectStressTest(unittest.TestCase):
"""Stress tests for bisecting tool."""
@@ -549,13 +442,6 @@ def Main(argv):
suite.addTest(BisectingUtilsTest('test_no_prune'))
suite.addTest(BisectingUtilsTest('test_set_file'))
suite.addTest(BisectingUtilsTest('test_noincremental_prune'))
- suite.addTest(BisectingUtilsPassTest('test_with_prune'))
- suite.addTest(BisectingUtilsPassTest('test_gen_cmd_script'))
- suite.addTest(BisectingUtilsPassTest('test_no_pass_support'))
- suite.addTest(BisectingUtilsPassTest('test_no_transform_support'))
- suite.addTest(BisectingUtilsPassTest('test_pass_transform_bisect'))
- suite.addTest(BisectingUtilsPassTest('test_result_not_reproduced_pass'))
- suite.addTest(BisectingUtilsPassTest('test_result_not_reproduced_transform'))
suite.addTest(BisectTest('test_full_bisector'))
suite.addTest(BisectStressTest('test_every_obj_bad'))
suite.addTest(BisectStressTest('test_every_index_is_bad'))
diff --git a/binary_search_tool/test/cmd_script.py b/binary_search_tool/test/cmd_script.py
deleted file mode 100755
index eb91fe9b..00000000
--- a/binary_search_tool/test/cmd_script.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python2
-"""Command script without compiler support for pass level bisection.
-
-This script generates a pseudo log which a workable compiler should print out.
-It assumes that -opt-bisect-limit and -print-debug-counter are supported by the
-compiler.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-
-import common
-
-
-def Main(argv):
- if not os.path.exists('./is_setup'):
- return 1
-
- if len(argv) != 3:
- return 1
-
- limit_flags = os.environ['LIMIT_FLAGS']
- opt_bisect_exist = False
- debug_counter_exist = False
-
- for option in limit_flags.split():
- if '-opt-bisect-limit' in option:
- opt_bisect_limit = int(option.split('=')[-1])
- opt_bisect_exist = True
- if '-debug-counter=' in option:
- debug_counter = int(option.split('=')[-1])
- debug_counter_exist = True
-
- if not opt_bisect_exist:
- return 1
-
- # Manually set total number and bad number
- total_pass = 10
- total_transform = 20
- bad_pass = int(argv[1])
- bad_transform = int(argv[2])
-
- if opt_bisect_limit == -1:
- opt_bisect_limit = total_pass
-
- for i in range(1, total_pass + 1):
- bisect_str = 'BISECT: %srunning pass (%d) Combine redundant ' \
- 'instructions on function (f1)' \
- % ('NOT ' if i > opt_bisect_limit else '', i)
- print(bisect_str, file=sys.stderr)
-
- if debug_counter_exist:
- print('Counters and values:', file=sys.stderr)
- print(
- 'instcombine-visit : {%d, 0, %d}' % (total_transform, debug_counter),
- file=sys.stderr)
-
- if opt_bisect_limit > bad_pass or \
- (debug_counter_exist and debug_counter > bad_transform):
- common.WriteWorkingSet([1])
- else:
- common.WriteWorkingSet([0])
-
- return 0
-
-
-if __name__ == '__main__':
- retval = Main(sys.argv)
- sys.exit(retval)
diff --git a/binary_search_tool/test/cmd_script_no_support.py b/binary_search_tool/test/cmd_script_no_support.py
deleted file mode 100644
index a817f300..00000000
--- a/binary_search_tool/test/cmd_script_no_support.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""Command script without compiler support for pass level bisection.
-
-This script generates a pseudo log when certain bisecting flags are not
-supported by compiler.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-
-
-def Main():
- if not os.path.exists('./is_setup'):
- return 1
- print('No support for -opt-bisect-limit or -print-debug-counter.',
- file=sys.stderr)
- return 0
-
-
-if __name__ == '__main__':
- retval = Main()
- sys.exit(retval)
diff --git a/binary_search_tool/test/gen_obj.py b/binary_search_tool/test/gen_obj.py
index a2bc7d93..d17e93f5 100755
--- a/binary_search_tool/test/gen_obj.py
+++ b/binary_search_tool/test/gen_obj.py
@@ -85,7 +85,7 @@ def Main(argv):
f.close()
obj_num = len(obj_list)
- bad_obj_num = obj_list.count(1)
+ bad_obj_num = obj_list.count('1')
print('Generated {0} object files, with {1} bad ones.'.format(
obj_num, bad_obj_num))
diff --git a/binary_search_tool/test/generate_cmd.py b/binary_search_tool/test/generate_cmd.py
deleted file mode 100755
index f6876eda..00000000
--- a/binary_search_tool/test/generate_cmd.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python2
-"""Generate a virtual cmd script for pass level bisection.
-
-This is a required argument for pass level bisecting. For unit test, we use
-this script to verify if cmd_script.sh is generated correctly.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-
-
-def Main():
- if not os.path.exists('./is_setup'):
- return 1
- file_name = 'cmd_script.sh'
- with open(file_name, 'w') as f:
- f.write('Generated by generate_cmd.py')
- return 0
-
-
-if __name__ == '__main__':
- retval = Main()
- sys.exit(retval)
diff --git a/build_tc.py b/build_tc.py
index c14b6905..4f022d29 100755
--- a/build_tc.py
+++ b/build_tc.py
@@ -59,7 +59,8 @@ class ToolchainPart(object):
cross_symlink = os.path.join(self._chromeos_root, 'chroot',
'usr/local/bin/emerge-%s' % self._board)
if not os.path.exists(cross_symlink):
- command = 'setup_board --board=%s' % self._board
+ command = ('%s/setup_board --board=%s' % (misc.CHROMEOS_SCRIPTS_DIR,
+ self._board))
self._ce.ChrootRunCommand(self._chromeos_root, command)
def Build(self):
diff --git a/build_tool.py b/build_tool.py
index 0c2fff65..3bd357c0 100755
--- a/build_tool.py
+++ b/build_tool.py
@@ -564,8 +564,9 @@ class Bootstrapper(object):
True if operation succeeds.
"""
- cmd = 'cd {0} && cros_sdk -- -- ./build_sdk_board'.format(
- self._chromeos_root)
+ cmd = ('cd {0} && cros_sdk -- -- ./setup_board --board=amd64-host '
+ '--accept_licenses=@CHROMEOS --skip_chroot_upgrade --nousepkg '
+ '--reuse_pkgs_from_local_boards').format(self._chromeos_root)
rv = self._ce.RunCommand(cmd, print_to_console=True)
if rv:
self._logger.LogError('Build amd64-host failed.')
diff --git a/buildbot_test_llvm.py b/buildbot_test_llvm.py
index 57ed3067..111068c6 100755
--- a/buildbot_test_llvm.py
+++ b/buildbot_test_llvm.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Script for running llvm validation tests on ChromeOS.
This script launches a buildbot to build ChromeOS with the llvm on
@@ -39,49 +37,55 @@ TEST_PER_DAY = 4
DATA_DIR = '/google/data/rw/users/mo/mobiletc-prebuild/waterfall-report-data/'
# Information about Rotating Boards
-# Board Arch Reference Platform Kernel
-# Board Version
-# ------------ ------- ------------ ------------- -------
-# atlas x86_64 poppy kabylake-y 4.4.*
-# cave x86_64 glados skylake-y 3.18
-# coral x86_64 reef apollo lake 4.4.*
-# cyan x86_64 strago braswell 3.18
-# elm aarch64 oak mediatek-8173 3.18 arm32 userspace
-# eve x86_64 poppy kabylake-u/r 4.4.*
-# gale armv7 3.18
-# grunt x86_64 grunt stoney ridge 4.14.*
-# fizz-moblab x86_64 4.4
-# kevin aarch64 gru rockchip-3399 4.4.* arm32 userspace
-# kevin64 aarch64 gru rockchip-3399 4.4.* arm64 userspace
-# lakitu x86_64 4.4.*
-# nyan_kitty armv7 nyan tegra 3.10.18
-# octopus x86_64 octopus GLK 4.14.*
-# sentry x86_64 kunimitsu skylake-u 3.18
-# tidus x86_64 auron broadwell 3.14
-# veyron_mighty armv7 veyron-pinky rockchip-3288 3.14
-# whirlwind armv7 3.14
-# winky x86_64 rambi baytrail 4.4.*
+# Board Arch Reference Platform Kernel
+# Board Version
+# ------------ ------- ------------ ------------- -------
+# cave x86_64 glados skylake-y 3.18
+# daisy armv7 daisy exynos-5250 3.8.11
+# elm aarch64 oak mediatek-8173 3.18
+# fizz x86_64 fizz kabylake-u/r 4.4.*
+# gale armv7 3.18
+# grunt x86_64 grunt stoney ridge 4.14.*
+# guado_moblab x86_64 3.14
+# kevin aarch64 gru rockchip-3399 4.4.*
+# lakitu x86_64 4.4.*
+# lars x86_64 kunimitsu skylake-u 3.18
+# link x86_64 ivybridge ivybridge 3.8.11
+# nautilus x86_64 poppy kabylake-y 4.4.*
+# nyan_big armv7 nyan tegra 3.10.18
+# peach_pit armv7 peach exynos-5420 3.8.11
+# peppy x86_64 slippy haswell 3.8.11
+# samus x86_64 auron broadwell 3.14
+# snappy x86_64 reef apollo lake 4.4.*
+# swanky x86_64 rambi baytrail 4.4.*
+# terra x86_64 strago braswell 3.18
+# veyron_jaq armv7 veyron-pinky rockchip-3288 3.14
+# whirlwind armv7 3.14
+# zoombini x86_64 zoombini cannonlake-y 4.14.*
TEST_BOARD = [
- 'atlas',
'cave',
- 'coral',
- 'cyan',
- 'elm',
- # 'eve', tested by amd64-llvm-next-toolchain builder.
+ 'daisy',
+ # 'elm', tested by arm64-llvm-next-toolchain builder.
+ 'fizz',
'gale',
'grunt',
- 'fizz-moblab',
- # 'kevin', tested by arm64-llvm-next-toolchain builder.
- 'kevin64',
+ 'guado_moblab',
+ 'kevin',
'lakitu',
- 'nyan_kitty',
- 'octopus',
- 'sentry',
- 'tidus',
- # 'veyron_mighty', tested by arm-llvm-next-toolchain builder.
+ 'lars',
+ 'link',
+ 'nautilus',
+ 'nyan_big',
+ 'peach_pit',
+ 'peppy',
+ # 'samus', tested by amd64-llvm-next-toolchain builder.
+ 'snappy',
+ 'swanky',
+ 'terra',
+ # 'veyron_jaq', tested by arm-llvm-next-toolchain builder.
'whirlwind',
- 'winky',
+ 'zoombini',
]
diff --git a/buildbot_test_toolchains.py b/buildbot_test_toolchains.py
index e289b1a5..bf3a8dea 100755
--- a/buildbot_test_toolchains.py
+++ b/buildbot_test_toolchains.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Script for running nightly compiler tests on ChromeOS.
This script launches a buildbot to build ChromeOS with the latest compiler on
@@ -94,7 +92,7 @@ class ToolchainComparator(object):
Args:
trybot_image: artifact name such as
- 'daisy-release-tryjob/R40-6394.0.0-b1389'
+ 'daisy-release-tryjob/R40-6394.0.0-b1389'
Returns:
Latest official image name, e.g. 'daisy-release/R57-9089.0.0'.
@@ -119,7 +117,7 @@ class ToolchainComparator(object):
Args:
trybot_image: artifact name such as
- 'daisy-release-tryjob/R40-6394.0.0-b1389'
+ 'daisy-release-tryjob/R40-6394.0.0-b1389'
Returns:
Corresponding chrome PFQ image name, e.g.
@@ -130,7 +128,7 @@ class ToolchainComparator(object):
assert mo
image_dict = mo.groupdict()
image_dict['image_type'] = 'chrome-pfq'
- for _ in range(2):
+ for _ in xrange(2):
image_dict['tip'] = str(int(image_dict['tip']) - 1)
nonafdo_image = PFQ_IMAGE_FS.replace('\\', '').format(**image_dict)
if buildbot_utils.DoesImageExist(self._chromeos_root, nonafdo_image):
@@ -143,7 +141,7 @@ class ToolchainComparator(object):
Given the names of the trybot, vanilla and non-AFDO images, create the
appropriate crosperf experiment file and launch crosperf on it.
"""
- experiment_file_dir = os.path.join(CROSTC_ROOT, self._weekday)
+ experiment_file_dir = os.path.join(self._chromeos_root, '..', self._weekday)
experiment_file_name = '%s_toolchain_experiment.txt' % self._board
compiler_string = 'llvm'
@@ -156,39 +154,20 @@ class ToolchainComparator(object):
board: %s
remote: %s
retries: 1
- cooldown_temp: 40
- cooldown_time: 10
- cpu_freq_pct: 95
- top_interval: 1
""" % (self._board, self._remotes)
experiment_tests = """
benchmark: all_toolchain_perf {
suite: telemetry_Crosperf
- iterations: 5
+ iterations: 0
run_local: False
}
- benchmark: loading.desktop {
+ benchmark: page_cycler_v2.typical_25 {
suite: telemetry_Crosperf
- test_args: --story-tag-filter=typical
- iterations: 3
+ iterations: 0
run_local: False
retries: 0
}
-
- benchmark: rendering.desktop {
- run_local: False
- suite: telemetry_Crosperf
- test_args: --story-filter=aquarium$
- iterations: 5
- }
-
- benchmark: rendering.desktop {
- run_local: False
- suite: telemetry_Crosperf
- test_args: --story-filter=aquarium_20k$
- iterations: 3
- }
"""
with open(experiment_file, 'w') as f:
@@ -233,7 +212,7 @@ class ToolchainComparator(object):
crosperf = os.path.join(TOOLCHAIN_DIR, 'crosperf', 'crosperf')
noschedv2_opts = '--noschedv2' if self._noschedv2 else ''
- command = ('{crosperf} --no_email=True --results_dir={r_dir} --no_hwp '
+ command = ('{crosperf} --no_email=True --results_dir={r_dir} '
'--json_report=True {noschedv2_opts} {exp_file}').format(
crosperf=crosperf,
r_dir=self._reports_dir,
diff --git a/chromiumos_image_diff.py b/chromiumos_image_diff.py
index 74906d32..82e4e17d 100755
--- a/chromiumos_image_diff.py
+++ b/chromiumos_image_diff.py
@@ -1,10 +1,4 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
"""Diff 2 chromiumos images by comparing each elf file.
The script diffs every *ELF* files by dissembling every *executable*
@@ -72,8 +66,8 @@ class CrosImage(object):
self.image, self.rootfs, self.stateful))
## First of all creating an unmount image
self.CreateUnmountScript()
- command = image_chromeos.GetImageMountCommand(self.image, self.rootfs,
- self.stateful)
+ command = image_chromeos.GetImageMountCommand(
+ self.chromeos_root, self.image, self.rootfs, self.stateful)
rv = self._ce.RunCommand(command, print_to_console=True)
self.mounted = (rv == 0)
if not self.mounted:
@@ -91,8 +85,8 @@ class CrosImage(object):
f.close()
self._ce.RunCommand(
'chmod +x {}'.format(self.unmount_script), print_to_console=False)
- self.logger.LogOutput('Created an unmount script - "{0}"'.format(
- self.unmount_script))
+ self.logger.LogOutput(
+ 'Created an unmount script - "{0}"'.format(self.unmount_script))
def UnmountImage(self):
"""Unmount the image and delete mount point."""
@@ -121,8 +115,8 @@ class CrosImage(object):
Always true
"""
- self.logger.LogOutput('Finding all elf files in "{0}" ...'.format(
- self.rootfs))
+ self.logger.LogOutput(
+ 'Finding all elf files in "{0}" ...'.format(self.rootfs))
# Note '\;' must be prefixed by 'r'.
command = ('find "{0}" -type f -exec '
'bash -c \'file -b "{{}}" | grep -q "ELF"\''
@@ -131,8 +125,8 @@ class CrosImage(object):
self.logger.LogCmd(command)
_, out, _ = self._ce.RunCommandWOutput(command, print_to_console=False)
self.elf_files = out.splitlines()
- self.logger.LogOutput('Total {0} elf files found.'.format(
- len(self.elf_files)))
+ self.logger.LogOutput(
+ 'Total {0} elf files found.'.format(len(self.elf_files)))
return True
@@ -148,10 +142,10 @@ class ImageComparator(object):
def Cleanup(self):
if self.tempf1 and self.tempf2:
- command_executer.GetCommandExecuter().RunCommand('rm {0} {1}'.format(
- self.tempf1, self.tempf2))
- logger.GetLogger('Removed "{0}" and "{1}".'.format(
- self.tempf1, self.tempf2))
+ command_executer.GetCommandExecuter().RunCommand(
+ 'rm {0} {1}'.format(self.tempf1, self.tempf2))
+ logger.GetLogger(
+ 'Removed "{0}" and "{1}".'.format(self.tempf1, self.tempf2))
def CheckElfFileSetEquality(self):
"""Checking whether images have exactly number of elf files."""
@@ -191,8 +185,8 @@ class ImageComparator(object):
match_count = 0
i1 = self.images[0]
i2 = self.images[1]
- self.logger.LogOutput('Start comparing {0} elf file by file ...'.format(
- len(i1.elf_files)))
+ self.logger.LogOutput(
+ 'Start comparing {0} elf file by file ...'.format(len(i1.elf_files)))
## Note - i1.elf_files and i2.elf_files have exactly the same entries here.
## Create 2 temp files to be used for all disassembed files.
@@ -228,8 +222,8 @@ class ImageComparator(object):
tempf2=self.tempf2)
ret = cmde.RunCommand(command, print_to_console=False)
if ret != 0:
- self.logger.LogOutput('*** Not match - "{0}" "{1}"'.format(
- full_path1, full_path2))
+ self.logger.LogOutput(
+ '*** Not match - "{0}" "{1}"'.format(full_path1, full_path2))
mismatch_list.append(f1)
if self.diff_file:
command = ('echo "Diffs of disassemble of \"{f1}\" and \"{f2}\"" '
diff --git a/compiler_wrapper/README.md b/compiler_wrapper/README.md
deleted file mode 100644
index 794e1635..00000000
--- a/compiler_wrapper/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# Compiler wrapper
-
-See the comments on the top of main.go.
-Build is split into 2 steps via separate commands:
-- bundle: copies the sources and the `build.py` file into
- a folder.
-- build: builds the actual go binary, assuming it is executed
- from the folder created by `bundle.py`.
-
-This allows to copy the sources to a Chrome OS / Android
-package, including the build script, and then
-build from there without a dependency on toolchain-utils
-itself.
-
-## Update Chrome OS
-
-Copy over sources and `build.py` to Chrome OS:
-```
-(chroot) /mnt/host/source/src/third_party/chromiumos-overlay/sys-devel/llvm/files/update_compiler_wrapper.sh
-```
-
-`build.py` is called by these ebuilds:
-
-- third_party/chromiumos-overlay/sys-devel/llvm/llvm-9.0_pre361749_p20190714.ebuild
-- third_party/chromiumos-overlay/sys-devel/gcc/gcc-*.ebuild
-
-Generated wrappers are stored here:
-
-- Sysroot wrapper with ccache:
- `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/4.9.x/sysroot_wrapper.hardened.ccache`
-- Sysroot wrapper without ccache:
- `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/4.9.x/sysroot_wrapper.hardened.noccache`
-- Clang host wrapper:
- `/usr/bin/clang_host_wrapper`
-- Gcc host wrapper:
- `/usr/x86_64-pc-linux-gnu/gcc-bin/4.9.x/host_wrapper`
diff --git a/compiler_wrapper/android_config_test.go b/compiler_wrapper/android_config_test.go
deleted file mode 100644
index 104be6df..00000000
--- a/compiler_wrapper/android_config_test.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path"
- "path/filepath"
- "testing"
-)
-
-const oldAndroidPathForTest = "$ANDROID_PREBUILTS/clang/host/linux-x86/clang-r353983c/bin/clang"
-const androidGoldenDir = "testdata/android_golden"
-
-func TestAndroidConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := false
- cfg, err := getConfig("android", useCCache, useLlvmNext, oldAndroidPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- runGoldenRecords(ctx, androidGoldenDir, []goldenFile{
- createAndroidClangPathGoldenInputs(ctx),
- createBisectGoldenInputs(filepath.Join(ctx.tempDir, "clang")),
- createAndroidCompileWithFallbackGoldenInputs(ctx),
- })
- })
-}
-
-func createAndroidClangPathGoldenInputs(ctx *testContext) goldenFile {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- defaultPath := filepath.Join(ctx.tempDir, "clang")
- clangTidyPath := filepath.Join(ctx.tempDir, "clang-tidy")
-
- deepPath := "a/b/c/d/e/f/g/clang"
- linkedDeepPath := "symlinked/clang_other"
- ctx.writeFile(filepath.Join(ctx.tempDir, "/pathenv/clang"), "")
- ctx.symlink(deepPath, linkedDeepPath)
- return goldenFile{
- Name: "clang_path.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Cmds: errorResults,
- },
- {
- Env: []string{"WITH_TIDY=1"},
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(filepath.Join(ctx.tempDir, "clang++"), mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangTidyPath, mainCc),
- Cmds: okResults,
- },
- {
- Env: []string{"WITH_TIDY=1"},
- WrapperCmd: newGoldenCmd(clangTidyPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(deepPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(linkedDeepPath, mainCc),
- Cmds: okResults,
- },
- {
- Env: []string{"PATH=" + filepath.Join(ctx.tempDir, "/pathenv")},
- WrapperCmd: newGoldenCmd("clang", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc, "--gomacc-path", gomaPath),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createAndroidCompileWithFallbackGoldenInputs(ctx *testContext) goldenFile {
- env := []string{
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH=fallback_compiler",
- "ANDROID_LLVM_STDERR_REDIRECT=" + filepath.Join(ctx.tempDir, "fallback_stderr"),
- "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b",
- }
- defaultPath := filepath.Join(ctx.tempDir, "clang")
- return goldenFile{
- Name: "compile_with_fallback.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Env: env,
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Env: env,
- Cmds: []commandResult{
- {
- ExitCode: 1,
- },
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(defaultPath, mainCc),
- Env: env,
- Cmds: []commandResult{
- {
- ExitCode: 1,
- },
- {
- ExitCode: 1,
- },
- },
- },
- },
- }
-}
diff --git a/compiler_wrapper/bisect_flag.go b/compiler_wrapper/bisect_flag.go
deleted file mode 100644
index 6271e23f..00000000
--- a/compiler_wrapper/bisect_flag.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "path/filepath"
-)
-
-// Note: We keep this code in python as golang has no builtin
-// shlex function.
-const bisectPythonCommand = `
-import bisect_driver
-import shlex
-import sys
-
-def ExpandArgs(args, target):
- for arg in args:
- if arg[0] == '@':
- with open(arg[1:], 'rb') as f:
- ExpandArgs(shlex.split(f.read()), target)
- else:
- target.append(arg)
- return target
-
-stage = sys.argv[1]
-dir = sys.argv[2]
-execargs = ExpandArgs(sys.argv[3:], [])
-
-sys.exit(bisect_driver.bisect_driver(stage, dir, execargs))
-`
-
-func getBisectStage(env env) string {
- value, _ := env.getenv("BISECT_STAGE")
- return value
-}
-
-func calcBisectCommand(env env, cfg *config, bisectStage string, compilerCmd *command) (*command, error) {
- bisectDir, _ := env.getenv("BISECT_DIR")
- if bisectDir == "" {
- if cfg.isAndroidWrapper {
- homeDir, ok := env.getenv("HOME")
- if !ok {
- return nil, errors.New("$HOME is not set")
- }
- bisectDir = filepath.Join(homeDir, "ANDROID_BISECT")
- } else {
- bisectDir = "/tmp/sysroot_bisect"
- }
- }
- absCompilerPath := getAbsCmdPath(env, compilerCmd)
- return &command{
- Path: "/usr/bin/env",
- Args: append([]string{
- "python",
- "-c",
- bisectPythonCommand,
- bisectStage,
- bisectDir,
- absCompilerPath,
- }, compilerCmd.Args...),
- EnvUpdates: compilerCmd.EnvUpdates,
- }, nil
-}
diff --git a/compiler_wrapper/bisect_flag_test.go b/compiler_wrapper/bisect_flag_test.go
deleted file mode 100644
index 0bb6a820..00000000
--- a/compiler_wrapper/bisect_flag_test.go
+++ /dev/null
@@ -1,182 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "path/filepath"
- "strings"
- "testing"
-)
-
-func TestCallBisectDriver(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- }
- cmd := mustCallBisectDriver(ctx, callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, "bisect_driver"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd,
- "someBisectStage", "someBisectDir", filepath.Join(ctx.tempDir, gccX86_64+".real"), "--sysroot=.*", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestCallBisectDriverWithParamsFile(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- }
- paramsFile1 := filepath.Join(ctx.tempDir, "params1")
- ctx.writeFile(paramsFile1, "a\n#comment\n@params2")
- paramsFile2 := filepath.Join(ctx.tempDir, "params2")
- ctx.writeFile(paramsFile2, "b\n"+mainCc)
-
- cmd := mustCallBisectDriver(ctx, callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "@"+paramsFile1)))
- if err := verifyArgOrder(cmd,
- "a", "b", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestCallBisectDriverWithCCache(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
- cmd := ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, "/usr/bin/env"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "python", "/usr/bin/ccache"); err != nil {
- t.Error(err)
- }
- if err := verifyEnvUpdate(cmd, "CCACHE_DIR=.*"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestDefaultBisectDirCros(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=someBisectStage",
- }
- cmd := mustCallBisectDriver(ctx, callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd,
- "someBisectStage", "/tmp/sysroot_bisect"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestDefaultBisectDirAndroid(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=someBisectStage",
- "HOME=/somehome",
- }
- ctx.cfg.isAndroidWrapper = true
- cmd := mustCallBisectDriver(ctx, callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if err := verifyArgOrder(cmd,
- "someBisectStage", filepath.Join("/somehome", "ANDROID_BISECT")); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestForwardStdOutAndStdErrAndExitCodeFromBisect(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- fmt.Fprint(stdout, "somemessage")
- fmt.Fprint(stderr, "someerror")
- return newExitCodeError(23)
- }
- exitCode := callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc))
- if exitCode != 23 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- if ctx.stdoutString() != "somemessage" {
- t.Errorf("stdout was not forwarded. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "someerror" {
- t.Errorf("stderr was not forwarded. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestForwardGeneralErrorFromBisect(t *testing.T) {
- withBisectTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- })
-}
-
-func withBisectTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- // Disable comparing to the old wrapper as that calls the bisect_driver
- // directly from python, and the new wrapper calls it via a separate
- // sub command.
- ctx.cfg.oldWrapperPath = ""
- ctx.env = []string{"BISECT_STAGE=xyz"}
- // We execute the python script but replace the call to the bisect_driver with
- // a mock that logs the data in the same way as the oldwrapper. This way
- // we can reuse the parseOldWrapperCommands to get the values.
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if err := verifyPath(cmd, "/usr/bin/env"); err != nil {
- return err
- }
- if cmd.Args[0] != "python" {
- return fmt.Errorf("expected a call to python. Got: %s", cmd.Args[0])
- }
- if cmd.Args[1] != "-c" {
- return fmt.Errorf("expected an inline python script. Got: %s", cmd.Args)
- }
- script := cmd.Args[2]
- mock := `
-class BisectDriver:
- def __init__(self):
- self.VALID_MODES = ['POPULATE_GOOD', 'POPULATE_BAD', 'TRIAGE']
- def bisect_driver(self, bisect_stage, bisect_dir, execargs):
- print('command bisect_driver')
- print('arg %s' % bisect_stage)
- print('arg %s' % bisect_dir)
- for arg in execargs:
- print('arg %s' % arg)
-
-bisect_driver = BisectDriver()
-`
- script = mock + script
- script = strings.Replace(script, "import bisect_driver", "", -1)
- cmdCopy := *cmd
- cmdCopy.Args = append(append(cmd.Args[:2], script), cmd.Args[3:]...)
- // Evaluate the python script, but replace the call to the bisect_driver
- // with a log statement so that we can assert it.
- return runCmd(ctx, &cmdCopy, nil, stdout, stderr)
- }
- work(ctx)
- })
-}
-
-func mustCallBisectDriver(ctx *testContext, exitCode int) *command {
- ctx.must(exitCode)
- cmds, _ := parseOldWrapperCommands(ctx.stdoutString())
- return cmds[0]
-}
diff --git a/compiler_wrapper/build.py b/compiler_wrapper/build.py
deleted file mode 100755
index 763b3e64..00000000
--- a/compiler_wrapper/build.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Build script that builds a binary from a bundle."""
-
-from __future__ import print_function
-
-import argparse
-import os.path
-import re
-import subprocess
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--config',
- required=True,
- choices=['cros.hardened', 'cros.nonhardened', 'cros.host', 'android'])
- parser.add_argument('--use_ccache', required=True, choices=['true', 'false'])
- parser.add_argument(
- '--use_llvm_next', required=True, choices=['true', 'false'])
- parser.add_argument('--output_file', required=True, type=str)
- return parser.parse_args()
-
-
-def calc_go_args(args, version):
- ldFlags = [
- '-X',
- 'main.ConfigName=' + args.config,
- '-X',
- 'main.UseCCache=' + args.use_ccache,
- '-X',
- 'main.UseLlvmNext=' + args.use_llvm_next,
- '-X',
- 'main.Version=' + version,
- ]
- return [
- 'go', 'build', '-o',
- os.path.abspath(args.output_file), '-ldflags', ' '.join(ldFlags)
- ]
-
-
-def read_version(build_dir):
- version_path = os.path.join(build_dir, 'VERSION')
- if os.path.exists(version_path):
- with open(version_path, 'r') as r:
- return r.read()
-
- last_commit_msg = subprocess.check_output(
- ['git', '-C', build_dir, 'log', '-1', '--pretty=%B'])
- # Use last found change id to support reverts as well.
- change_ids = re.findall(r'Change-Id: (\w+)', last_commit_msg)
- if not change_ids:
- sys.exit("Couldn't find Change-Id in last commit message.")
- return change_ids[-1]
-
-
-def main():
- args = parse_args()
- build_dir = os.path.dirname(__file__)
- version = read_version(build_dir)
- # Note: Go does not support using absolute package names.
- # So we run go inside the directory of the the build file.
- sys.exit(subprocess.call(calc_go_args(args, version), cwd=build_dir))
-
-
-if __name__ == '__main__':
- main()
diff --git a/compiler_wrapper/bundle.README b/compiler_wrapper/bundle.README
deleted file mode 100644
index 10a28ee0..00000000
--- a/compiler_wrapper/bundle.README
+++ /dev/null
@@ -1,18 +0,0 @@
-Copyright 2019 The Chromium OS Authors. All rights reserved.
-Use of this source code is governed by a BSD-style license that can be
-found in the LICENSE file.
-
-Toolchain utils compiler wrapper sources.
-
-Build the wrapper:
-./build --config=<config name> --use_ccache=<bool> \
- --use_llvm_next=<bool> --output_file=<file>
-
-ATTENTION:
-The files in this folder are generated. Do not modify manually!
-
-To update:
-- modify third_party/toolchain_utils/compiler_wrapper
-- run third_party/toolchain_utils/compiler_wrapper/bundle.py --output_dir=...
-
-Source: https://chromium-review.googlesource.com/q/{change_id}
diff --git a/compiler_wrapper/bundle.py b/compiler_wrapper/bundle.py
deleted file mode 100755
index c1fa53e0..00000000
--- a/compiler_wrapper/bundle.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Build script that copies the go sources to a build destination."""
-
-from __future__ import print_function
-
-import argparse
-import os.path
-import re
-import shutil
-import subprocess
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('output_dir')
- return parser.parse_args()
-
-
-def copy_files(input_dir, output_dir):
- for filename in os.listdir(input_dir):
- if ((filename.endswith('.go') and not filename.endswith('_test.go')) or
- filename == 'build.py'):
- shutil.copy(
- os.path.join(input_dir, filename), os.path.join(output_dir, filename))
-
-
-def read_change_id(input_dir):
- last_commit_msg = subprocess.check_output(
- ['git', '-C', input_dir, 'log', '-1', '--pretty=%B'])
- # Use last found change id to support reverts as well.
- change_ids = re.findall(r'Change-Id: (\w+)', last_commit_msg)
- if not change_ids:
- sys.exit("Couldn't find Change-Id in last commit message.")
- return change_ids[-1]
-
-
-def write_readme(input_dir, output_dir, change_id):
- with open(os.path.join(input_dir, 'bundle.README'), 'r') as r, \
- open(os.path.join(output_dir, 'README'), 'w') as w:
- content = r.read()
- w.write(content.format(change_id=change_id))
-
-
-def write_version(output_dir, change_id):
- with open(os.path.join(output_dir, 'VERSION'), 'w') as w:
- w.write(change_id)
-
-
-def main():
- args = parse_args()
- input_dir = os.path.dirname(__file__)
- change_id = read_change_id(input_dir)
- shutil.rmtree(args.output_dir, ignore_errors=True)
- os.makedirs(args.output_dir)
- copy_files(input_dir, args.output_dir)
- write_readme(input_dir, args.output_dir, change_id)
- write_version(args.output_dir, change_id)
-
-
-if __name__ == '__main__':
- main()
diff --git a/compiler_wrapper/ccache_flag.go b/compiler_wrapper/ccache_flag.go
deleted file mode 100644
index 312d0f02..00000000
--- a/compiler_wrapper/ccache_flag.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func processCCacheFlag(sysroot string, builder *commandBuilder) {
- // We should be able to share the objects across compilers as
- // the pre-processed output will differ. This allows boards
- // that share compiler flags (like x86 boards) to share caches.
- const ccacheDir = "/var/cache/distfiles/ccache"
-
- useCCache := true
- builder.transformArgs(func(arg builderArg) string {
- if arg.value == "-noccache" {
- useCCache = false
- return ""
- }
- return arg.value
- })
-
- if builder.cfg.useCCache && useCCache {
- // We need to get ccache to make relative paths from within the
- // sysroot. This lets us share cached files across boards (if
- // all other things are equal of course like CFLAGS) as well as
- // across versions. A quick test is something like:
- // $ export CFLAGS='-O2 -g -pipe' CXXFLAGS='-O2 -g -pipe'
- // $ BOARD=x86-alex
- // $ cros_workon-$BOARD stop cros-disks
- // $ emerge-$BOARD cros-disks
- // $ cros_workon-$BOARD start cros-disks
- // $ emerge-$BOARD cros-disks
- // $ BOARD=amd64-generic
- // $ cros_workon-$BOARD stop cros-disks
- // $ emerge-$BOARD cros-disks
- // $ cros_workon-$BOARD start cros-disks
- // $ emerge-$BOARD cros-disks
- // All of those will get cache hits (ignoring the first one
- // which will seed the cache) due to this setting.
- builder.updateEnv("CCACHE_BASEDIR=" + sysroot)
- if _, present := builder.env.getenv("CCACHE_DISABLE"); present {
- // Portage likes to set this for us when it has FEATURES=-ccache.
- // The other vars we need to setup manually because of tools like
- // scons that scrubs the env before we get executed.
- builder.updateEnv("CCACHE_DISABLE=")
- }
- // If RESTRICT=sandbox is enabled, then sandbox won't be setup,
- // and the env vars won't be available for appending.
- if sandboxRewrite, present := builder.env.getenv("SANDBOX_WRITE"); present {
- builder.updateEnv("SANDBOX_WRITE=" + sandboxRewrite + ":" + ccacheDir)
- }
-
- // Make sure we keep the cached files group writable.
- builder.updateEnv("CCACHE_DIR="+ccacheDir, "CCACHE_UMASK=002")
-
- // ccache may generate false positive warnings.
- // Workaround bug https://crbug.com/649740
- if builder.target.compilerType == clangType {
- builder.updateEnv("CCACHE_CPP2=yes")
- }
-
- builder.wrapPath("/usr/bin/ccache")
- }
-}
diff --git a/compiler_wrapper/ccache_flag_test.go b/compiler_wrapper/ccache_flag_test.go
deleted file mode 100644
index 61abef06..00000000
--- a/compiler_wrapper/ccache_flag_test.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestCallCCacheGivenConfig(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, "/usr/bin/ccache"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestNotCallCCacheGivenConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestNotCallCCacheGivenConfigAndNoCCacheArg(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-noccache", mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-noccache"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestSetCacheDir(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "CCACHE_DIR=/var/cache/distfiles/ccache"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestSetCacheBaseDirToSysroot(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd,
- "CCACHE_BASEDIR="+ctx.tempDir+"/usr/x86_64-cros-linux-gnu"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestSetCacheUmask(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "CCACHE_UMASK=002"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestUpdateSandboxRewriteWithValue(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNoEnvUpdate(cmd, "SANDBOX_WRITE"); err != nil {
- t.Error(err)
- }
-
- ctx.env = []string{"SANDBOX_WRITE=xyz"}
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd,
- "SANDBOX_WRITE=xyz:/var/cache/distfiles/ccache"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestUpdateSandboxRewriteWithoutValue(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNoEnvUpdate(cmd, "SANDBOX_WRITE"); err != nil {
- t.Error(err)
- }
-
- ctx.env = []string{"SANDBOX_WRITE="}
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd,
- "SANDBOX_WRITE=:/var/cache/distfiles/ccache"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClearCCacheDisableWithValue(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNoEnvUpdate(cmd, "CCACHE_DISABLE"); err != nil {
- t.Error(err)
- }
-
- ctx.env = []string{"CCACHE_DISABLE=true"}
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "CCACHE_DISABLE="); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClearCCacheDisableWithoutValue(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNoEnvUpdate(cmd, "CCACHE_DISABLE"); err != nil {
- t.Error(err)
- }
-
- ctx.env = []string{"CCACHE_DISABLE="}
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "CCACHE_DISABLE="); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddCCacheCpp2FlagForClang(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "CCACHE_CPP2=yes"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitCCacheCpp2FlagForGcc(t *testing.T) {
- withCCacheEnabledTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNoEnvUpdate(cmd, "CCACHE_CPP2"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func withCCacheEnabledTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
- work(ctx)
- })
-}
diff --git a/compiler_wrapper/clang_flags.go b/compiler_wrapper/clang_flags.go
deleted file mode 100644
index 8b76e965..00000000
--- a/compiler_wrapper/clang_flags.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "os"
- "path/filepath"
- "strings"
-)
-
-func processClangFlags(builder *commandBuilder) error {
- env := builder.env
- clangDir, _ := env.getenv("CLANG")
-
- if clangDir == "" {
- if builder.cfg.isHostWrapper {
- clangDir = filepath.Dir(builder.absWrapperPath)
- } else {
- clangDir = filepath.Join(builder.rootPath, "usr/bin/")
- if !filepath.IsAbs(builder.path) {
- // If sysroot_wrapper is invoked by relative path, call actual compiler in
- // relative form. This is neccesary to remove absolute path from compile
- // outputs.
- var err error
- clangDir, err = filepath.Rel(env.getwd(), clangDir)
- if err != nil {
- return wrapErrorwithSourceLocf(err, "failed to make clangDir %s relative to %s.", clangDir, env.getwd())
- }
- }
- }
- } else {
- clangDir = filepath.Dir(clangDir)
- }
-
- clangBasename := "clang"
- if strings.HasSuffix(builder.target.compiler, "++") {
- clangBasename = "clang++"
- }
-
- // GCC flags to remove from the clang command line.
- // TODO: Once clang supports GCC compatibility mode, remove
- // these checks.
- //
- // Use of -Qunused-arguments allows this set to be small, just those
- // that clang still warns about.
- unsupported := map[string]bool{
- "-mno-movbe": true,
- "-pass-exit-codes": true,
- "-Wclobbered": true,
- "-Wno-psabi": true,
- "-Wlogical-op": true,
- "-Wmissing-parameter-type": true,
- "-Wold-style-declaration": true,
- "-Woverride-init": true,
- "-Wunsafe-loop-optimizations": true,
- }
-
- unsupportedPrefixes := []string{"-Wstrict-aliasing=", "-finline-limit="}
-
- // clang with '-ftrapv' generates 'call __mulodi4', which is only implemented
- // in compiler-rt library. However compiler-rt library only has i386/x86_64
- // backends (see '/usr/lib/clang/3.7.0/lib/linux/libclang_rt.*'). GCC, on the
- // other hand, generate 'call __mulvdi3', which is implemented in libgcc. See
- // bug chromium:503229.
- armUnsupported := map[string]bool{"-ftrapv": true}
- if builder.cfg.isHostWrapper {
- unsupported["-ftrapv"] = true
- }
-
- // Clang may use different options for the same or similar functionality.
- gccToClang := map[string]string{
- "-Wno-error=cpp": "-Wno-#warnings",
- "-Wno-error=maybe-uninitialized": "-Wno-error=uninitialized",
- "-Wno-error=unused-but-set-variable": "-Wno-error=unused-variable",
- "-Wno-unused-but-set-variable": "-Wno-unused-variable",
- "-Wunused-but-set-variable": "-Wunused-variable",
- }
-
- // Note: not using builder.transformArgs as we need to add multiple arguments
- // based on a single input argument, and also be able to return errors.
- newArgs := []builderArg{}
-
- for _, arg := range builder.args {
- // Adds an argument with the given value, preserving the
- // fromUser value of the original argument.
- addNewArg := func(value string) {
- newArgs = append(newArgs, builderArg{
- fromUser: arg.fromUser,
- value: value,
- })
- }
-
- if mapped, ok := gccToClang[arg.value]; ok {
- addNewArg(mapped)
- continue
- }
-
- if unsupported[arg.value] {
- continue
- }
-
- if hasAtLeastOnePrefix(arg.value, unsupportedPrefixes) {
- continue
- }
-
- if builder.target.arch == "armv7a" && builder.target.sys == "linux" {
- if armUnsupported[arg.value] {
- continue
- }
- }
-
- if clangOnly := "-Xclang-only="; strings.HasPrefix(arg.value, clangOnly) {
- addNewArg(arg.value[len(clangOnly):])
- continue
- }
-
- if clangPath := "-Xclang-path="; strings.HasPrefix(arg.value, clangPath) {
- clangPathValue := arg.value[len(clangPath):]
- resourceDir, err := getClangResourceDir(env, filepath.Join(clangDir, clangBasename))
- if err != nil {
- return err
- }
- clangDir = clangPathValue
-
- addNewArg("-resource-dir=" + resourceDir)
- addNewArg("--gcc-toolchain=/usr")
- continue
- }
-
- addNewArg(arg.value)
- }
- builder.args = newArgs
-
- builder.path = filepath.Join(clangDir, clangBasename)
-
- // Specify the target for clang.
- if !builder.cfg.isHostWrapper {
- linkerPath := getLinkerPath(env, builder.target.target+"-ld", builder.rootPath)
- relLinkerPath, err := filepath.Rel(env.getwd(), linkerPath)
- if err != nil {
- return wrapErrorwithSourceLocf(err, "failed to make linker path %s relative to %s",
- linkerPath, env.getwd())
- }
- builder.addPostUserArgs("-B" + relLinkerPath)
- if startswithI86(builder.target.arch) {
- // TODO: -target i686-pc-linux-gnu causes clang to search for
- // libclang_rt.asan-i686.a which doesn't exist because it's packaged
- // as libclang_rt.asan-i386.a. We can't use -target i386-pc-linux-gnu
- // because then it would try to run i386-pc-linux-gnu-ld which doesn't
- // exist. Consider renaming the runtime library to use i686 in its name.
- builder.addPostUserArgs("-m32")
- // clang does not support -mno-movbe. This is the alternate way to do it.
- builder.addPostUserArgs("-Xclang", "-target-feature", "-Xclang", "-movbe")
- } else {
- builder.addPostUserArgs("-target", builder.target.target)
- }
- }
- return nil
-}
-
-func getClangResourceDir(env env, clangPath string) (string, error) {
- readResourceCmd := &command{
- Path: clangPath,
- Args: []string{"--print-resource-dir"},
- }
- stdoutBuffer := bytes.Buffer{}
- if err := env.run(readResourceCmd, nil, &stdoutBuffer, env.stderr()); err != nil {
- return "", wrapErrorwithSourceLocf(err,
- "failed to call clang to read the resouce-dir: %#v",
- readResourceCmd)
- }
- return stdoutBuffer.String(), nil
-}
-
-// Return the a directory which contains an 'ld' that gcc is using.
-func getLinkerPath(env env, linkerCmd string, rootPath string) string {
- // We did not pass the tuple i686-pc-linux-gnu to x86-32 clang. Instead,
- // we passed '-m32' to clang. As a result, clang does not want to use the
- // i686-pc-linux-gnu-ld, so we need to add this to help clang find the right
- // linker.
- if linkerPath, err := resolveAgainstPathEnv(env, linkerCmd); err == nil {
- // FIXME: We are not using filepath.EvalSymlinks to only unpack
- // one layer of symlinks to match the old wrapper. Investigate
- // why this is important or simplify to filepath.EvalSymlinks.
- if fi, err := os.Lstat(linkerPath); err == nil {
- if fi.Mode()&os.ModeSymlink != 0 {
- if linkPath, err := os.Readlink(linkerPath); err == nil {
- linkerPath = linkPath
- }
- }
- return filepath.Dir(linkerPath)
- }
- }
-
- // When using the sdk outside chroot, we need to provide the cross linker path
- // to the compiler via -B ${linker_path}. This is because for gcc, it can
- // find the right linker via searching its internal paths. Clang does not have
- // such feature, and it falls back to $PATH search only. However, the path of
- // ${SDK_LOCATION}/bin is not necessarily in the ${PATH}. To fix this, we
- // provide the directory that contains the cross linker wrapper to clang.
- // Outside chroot, it is the top bin directory form the sdk tarball.
- return filepath.Join(rootPath, "bin")
-}
-
-func hasAtLeastOnePrefix(s string, prefixes []string) bool {
- for _, prefix := range prefixes {
- if strings.HasPrefix(s, prefix) {
- return true
- }
- }
- return false
-}
diff --git a/compiler_wrapper/clang_flags_test.go b/compiler_wrapper/clang_flags_test.go
deleted file mode 100644
index 0c0f1fdd..00000000
--- a/compiler_wrapper/clang_flags_test.go
+++ /dev/null
@@ -1,310 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "path/filepath"
- "strings"
- "testing"
-)
-
-func TestClangBasename(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- var tests = []struct {
- in string
- out string
- }{
- {"./x86_64-cros-linux-gnu-clang", ".*/clang"},
- {"./x86_64-cros-linux-gnu-clang++", ".*/clang\\+\\+"},
- }
-
- for _, tt := range tests {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(tt.in, mainCc)))
- if err := verifyPath(cmd, tt.out); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestClangPathGivenClangEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"CLANG=/a/b/clang"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyPath(cmd, "/a/b/clang"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAbsoluteClangPathBasedOnRootPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(filepath.Join(ctx.tempDir, clangX86_64), mainCc)))
- if err := verifyPath(cmd, filepath.Join(ctx.tempDir, "somepath/usr/bin/clang")); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestRelativeClangPathBasedOnRootPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyPath(cmd, "somepath/usr/bin/clang"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestRelativeClangPathWithDirBasedOnRootPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("test/x86_64-cros-linux-gnu-clang", mainCc)))
- if err := verifyPath(cmd, "test/somepath/usr/bin/clang"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPathEnvClangPathBasedOnRootPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- ctx.env = []string{"PATH=" + filepath.Join(ctx.tempDir, "/pathenv")}
- ctx.writeFile(filepath.Join(ctx.tempDir, "/pathenv/x86_64-cros-linux-gnu-clang"), "")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("x86_64-cros-linux-gnu-clang", mainCc)))
- if err := verifyPath(cmd, filepath.Join(ctx.tempDir, "pathenv/somepath/usr/bin/clang")); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangPathForClangHostWrapper(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.isHostWrapper = true
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyPath(cmd, filepath.Join(ctx.tempDir, "clang")); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangPathForAndroidWrapper(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.isAndroidWrapper = true
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("somedir/clang", mainCc)))
- if err := verifyPath(cmd, "somedir/clang.real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangPathForAndroidWrapperWithSymlinks(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.isAndroidWrapper = true
- ctx.writeFile("base/come_clang", "")
- ctx.symlink("base/some_clang", "linked/clang")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("linked/clang", mainCc)))
- if err := verifyPath(cmd, "linked/some_clang.real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestUseXclangPathAndCalcResourceDirByNestedClangCall(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount > 1 {
- return nil
- }
- if err := verifyPath(cmd, "somepath/usr/bin/clang"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "--print-resource-dir"); err != nil {
- t.Error(err)
- }
- fmt.Fprint(stdout, "someResourcePath")
- return nil
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-Xclang-path=somedir", mainCc)))
- if err := verifyPath(cmd, "somedir/clang"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "-resource-dir=someResourcePath",
- "--gcc-toolchain=/usr", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestXclangPathFailIfNestedClangCallFails(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- fmt.Fprint(stderr, "someclangerror")
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-Xclang-path=somedir", mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "clang") {
- t.Errorf("could not find compiler path on stderr. Got: %s", stderr)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("could not find original error on stderr. Got: %s", stderr)
- }
- if !strings.Contains(stderr, "someclangerror") {
- t.Errorf("stderr was not forwarded. Got: %s", stderr)
- }
- })
-}
-
-func TestConvertGccToClangFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- var tests = []struct {
- in string
- out string
- }{
- {"-Wno-error=unused-but-set-variable", "-Wno-error=unused-variable"},
- {"-Wno-error=maybe-uninitialized", "-Wno-error=uninitialized"},
- {"-Wno-unused-but-set-variable", "-Wno-unused-variable"},
- {"-Wunused-but-set-variable", "-Wunused-variable"},
- {"-Wno-error=cpp", "-Wno-#warnings"},
- {"-Xclang-only=-abc=xyz", "-abc=xyz"},
- }
-
- for _, tt := range tests {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, tt.in, mainCc)))
- if err := verifyArgCount(cmd, 0, tt.in); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, tt.out, mainCc); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestFilterUnsupportedClangFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- var tests = []struct {
- compiler string
- flag string
- expectedCount int
- }{
- {clangX86_64, "-pass-exit-codes", 0},
- {clangX86_64, "-Wclobbered", 0},
- {clangX86_64, "-Wunsafe-loop-optimizations", 0},
- {clangX86_64, "-Wlogical-op", 0},
- {clangX86_64, "-Wmissing-parameter-type", 0},
- {clangX86_64, "-Woverride-init", 0},
- {clangX86_64, "-Wold-style-declaration", 0},
- {clangX86_64, "-Wno-psabi", 0},
- {clangX86_64, "-mno-movbe", 0},
- {clangX86_64, "-Wstrict-aliasing=xyz", 0},
- {clangX86_64, "-finline-limit=xyz", 0},
- {"./armv7a-cros-linux-gnu-clang", "-ftrapv", 0},
- {"./armv7a-cros-win-gnu-clang", "-ftrapv", 1},
- {"./armv8a-cros-win-gnu-clang", "-ftrapv", 1},
- {clangX86_64, "-ftrapv", 1},
- }
-
- for _, tt := range tests {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(tt.compiler, tt.flag, mainCc)))
- if err := verifyArgCount(cmd, tt.expectedCount, tt.flag); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestClangArchFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- var tests = []struct {
- compiler string
- flags []string
- }{
- {"./i686_64-cros-linux-gnu-clang", []string{mainCc, "-m32", "-Xclang", "-target-feature", "-Xclang", "-movbe"}},
- {"./x86_64-cros-linux-gnu-clang", []string{mainCc, "-target", "x86_64-cros-linux-gnu"}},
- }
- for _, tt := range tests {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(tt.compiler, mainCc)))
- if err := verifyArgOrder(cmd, tt.flags...); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestClangLinkerPathProbesBinariesOnPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- linkerPath := filepath.Join(ctx.tempDir, "a/b/c")
- ctx.writeFile(filepath.Join(linkerPath, "x86_64-cros-linux-gnu-ld"), "")
- ctx.env = []string{"PATH=nonExistantPath:" + linkerPath}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("./x86_64-cros-linux-gnu-clang", mainCc)))
- if err := verifyArgOrder(cmd, "-Ba/b/c"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangLinkerPathEvaluatesSymlinksForBinariesOnPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- realLinkerPath := filepath.Join(ctx.tempDir, "a/original/path/somelinker")
- ctx.writeFile(realLinkerPath, "")
- firstLinkLinkerPath := filepath.Join(ctx.tempDir, "a/first/somelinker")
- ctx.symlink(realLinkerPath, firstLinkLinkerPath)
- secondLinkLinkerPath := filepath.Join(ctx.tempDir, "a/second/x86_64-cros-linux-gnu-ld")
- ctx.symlink(firstLinkLinkerPath, secondLinkLinkerPath)
-
- ctx.env = []string{"PATH=nonExistantPath:" + filepath.Dir(secondLinkLinkerPath)}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("./x86_64-cros-linux-gnu-clang", mainCc)))
- if err := verifyArgOrder(cmd, "-Ba/first"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangFallbackLinkerPathRelativeToRootDir(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-Bbin"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClangLinkerPathRelativeToRootDir(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-Bsomepath/bin"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/clang_syntax_flag.go b/compiler_wrapper/clang_syntax_flag.go
deleted file mode 100644
index 90d2327a..00000000
--- a/compiler_wrapper/clang_syntax_flag.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
-)
-
-func processClangSyntaxFlag(builder *commandBuilder) (clangSyntax bool) {
- builder.transformArgs(func(arg builderArg) string {
- if arg.value == "-clang-syntax" {
- clangSyntax = true
- return ""
- }
- return arg.value
- })
- return clangSyntax
-}
-
-func checkClangSyntax(env env, clangCmd *command, gccCmd *command) (exitCode int, err error) {
- clangSyntaxCmd := &command{
- Path: clangCmd.Path,
- Args: append(clangCmd.Args, "-fsyntax-only", "-stdlib=libstdc++"),
- EnvUpdates: clangCmd.EnvUpdates,
- }
-
- stdinBuffer := &bytes.Buffer{}
- exitCode, err = wrapSubprocessErrorWithSourceLoc(clangSyntaxCmd,
- env.run(clangSyntaxCmd, teeStdinIfNeeded(env, clangCmd, stdinBuffer), env.stdout(), env.stderr()))
- if err != nil || exitCode != 0 {
- return exitCode, err
- }
- return wrapSubprocessErrorWithSourceLoc(gccCmd,
- env.run(gccCmd, bytes.NewReader(stdinBuffer.Bytes()), env.stdout(), env.stderr()))
-}
diff --git a/compiler_wrapper/clang_syntax_flag_test.go b/compiler_wrapper/clang_syntax_flag_test.go
deleted file mode 100644
index 8ee9c223..00000000
--- a/compiler_wrapper/clang_syntax_flag_test.go
+++ /dev/null
@@ -1,180 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "path"
- "strings"
- "testing"
-)
-
-func TestCheckClangSyntaxByNestedCall(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- return err
- }
- if err := verifyArgOrder(cmd, mainCc, "-fsyntax-only", `-stdlib=libstdc\+\+`); err != nil {
- return err
- }
- }
- return nil
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-clang-syntax"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestForwardStdOutAndStderrFromClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- fmt.Fprint(stdout, "somemessage")
- fmt.Fprint(stderr, "someerror")
- }
- return nil
- }
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if ctx.stdoutString() != "somemessage" {
- t.Errorf("stdout was not forwarded. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "someerror" {
- t.Errorf("stderr was not forwarded. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestForwardStdinToClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- // Note: This is called for the clang syntax call as well as for
- // the gcc call, and we assert that stdin is cloned and forwarded
- // to both.
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
- return nil
- }
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", "-", mainCc)))
- })
-}
-
-func TestForwardExitCodeFromClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- return newExitCodeError(23)
- }
- return nil
- }
- exitCode := callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc))
- if exitCode != 23 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- })
-}
-
-func TestReportGeneralErrorsFromClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- return errors.New("someerror")
- }
- return nil
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- })
-}
-
-func TestIgnoreClangSyntaxCheckWhenCallingClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount > 1 {
- return fmt.Errorf("Unexpected call %#v", cmd)
- }
- return nil
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-clang-syntax", mainCc)))
- if err := verifyArgCount(cmd, 0, "-clang-syntax"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestUseGomaForClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- ctx.env = []string{"GOMACC_PATH=" + gomaPath}
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- if err := verifyPath(cmd, gomaPath); err != nil {
- return err
- }
- if err := verifyArgOrder(cmd, "usr/bin/clang", mainCc); err != nil {
- return err
- }
- }
- return nil
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPartiallyOmitCCacheForClangSyntaxCheck(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 1 {
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- return err
- }
- }
- return nil
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(cmd, "/usr/bin/ccache"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/clang_tidy_flag.go b/compiler_wrapper/clang_tidy_flag.go
deleted file mode 100644
index 40a5bdbe..00000000
--- a/compiler_wrapper/clang_tidy_flag.go
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "path/filepath"
- "strings"
-)
-
-func processClangTidyFlags(builder *commandBuilder) (cSrcFile string, useClangTidy bool) {
- withTidy, _ := builder.env.getenv("WITH_TIDY")
- if withTidy == "" {
- return "", false
- }
- srcFileSuffixes := []string{
- ".c",
- ".cc",
- ".cpp",
- ".C",
- ".cxx",
- ".c++",
- }
- cSrcFile = ""
- lastArg := ""
- for _, arg := range builder.args {
- if hasAtLeastOneSuffix(arg.value, srcFileSuffixes) && lastArg != "-o" {
- cSrcFile = arg.value
- }
- lastArg = arg.value
- }
- useClangTidy = cSrcFile != ""
- return cSrcFile, useClangTidy
-}
-
-func runClangTidy(env env, clangCmd *command, cSrcFile string) error {
- defaultTidyChecks := strings.Join([]string{
- "*",
- "google*",
- "-bugprone-narrowing-conversions",
- "-cppcoreguidelines-*",
- "-fuchsia-*",
- "-google-build-using-namespace",
- "-google-default-arguments",
- "-google-explicit-constructor",
- "-google-readability*",
- "-google-runtime-int",
- "-google-runtime-references",
- "-hicpp-avoid-c-arrays",
- "-hicpp-braces-around-statements",
- "-hicpp-no-array-decay",
- "-hicpp-signed-bitwise",
- "-hicpp-uppercase-literal-suffix",
- "-hicpp-use-auto",
- "-llvm-namespace-comment",
- "-misc-non-private-member-variables-in-classes",
- "-misc-unused-parameters",
- "-modernize-*",
- "-readability-*",
- }, ",")
-
- resourceDir, err := getClangResourceDir(env, clangCmd.Path)
- if err != nil {
- return err
- }
-
- clangTidyPath := filepath.Join(filepath.Dir(clangCmd.Path), "clang-tidy")
- clangTidyCmd := &command{
- Path: clangTidyPath,
- Args: append([]string{
- "-checks=" + defaultTidyChecks,
- cSrcFile,
- "--",
- "-resource-dir=" + resourceDir,
- }, clangCmd.Args...),
- EnvUpdates: clangCmd.EnvUpdates,
- }
-
- // Note: We pass nil as stdin as we checked before that the compiler
- // was invoked with a source file argument.
- exitCode, err := wrapSubprocessErrorWithSourceLoc(clangTidyCmd,
- env.run(clangTidyCmd, nil, env.stdout(), env.stderr()))
- if err == nil && exitCode != 0 {
- // Note: We continue on purpose when clang-tidy fails
- // to maintain compatibility with the previous wrapper.
- fmt.Fprint(env.stderr(), "clang-tidy failed")
- }
- return err
-}
-
-func hasAtLeastOneSuffix(s string, suffixes []string) bool {
- for _, suffix := range suffixes {
- if strings.HasSuffix(s, suffix) {
- return true
- }
- }
- return false
-}
diff --git a/compiler_wrapper/clang_tidy_flag_test.go b/compiler_wrapper/clang_tidy_flag_test.go
deleted file mode 100644
index baf5219e..00000000
--- a/compiler_wrapper/clang_tidy_flag_test.go
+++ /dev/null
@@ -1,281 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "path"
- "strings"
- "testing"
-)
-
-func TestClangTidyBasename(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- testData := []struct {
- in string
- out string
- }{
- {"./x86_64-cros-linux-gnu-clang", ".*/clang-tidy"},
- {"./x86_64-cros-linux-gnu-clang++", ".*/clang-tidy"},
- }
-
- var clangTidyCmd *command
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 2 {
- clangTidyCmd = cmd
- }
- return nil
- }
-
- for _, tt := range testData {
- ctx.cmdCount = 0
- clangTidyCmd = nil
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(tt.in, mainCc)))
- if ctx.cmdCount != 3 {
- t.Errorf("expected 3 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(clangTidyCmd, tt.out); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestClangTidyClangResourceDir(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "--print-resource-dir"); err != nil {
- t.Error(err)
- }
- fmt.Fprint(stdout, "someResourcePath")
- return nil
- case 2:
- if err := verifyPath(cmd, "usr/bin/clang-tidy"); err != nil {
- return err
- }
- if err := verifyArgOrder(cmd, "-resource-dir=someResourcePath", mainCc); err != nil {
- return err
- }
- return nil
- case 3:
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- return nil
- default:
- t.Fatalf("unexpected command %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 3 {
- t.Errorf("expected 3 calls. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestClangTidyArgOrder(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 2 {
- if err := verifyArgOrder(cmd, "-checks=.*", mainCc, "--", "-resource-dir=.*", mainCc, "--some_arg"); err != nil {
- return err
- }
- }
- return nil
- }
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc, "--some_arg")))
- if ctx.cmdCount != 3 {
- t.Errorf("expected 3 calls. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestForwardStdOutAndStderrFromClangTidyCall(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 2 {
- fmt.Fprint(stdout, "somemessage")
- fmt.Fprint(stderr, "someerror")
- }
- return nil
- }
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.stdoutString() != "somemessage" {
- t.Errorf("stdout was not forwarded. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "someerror" {
- t.Errorf("stderr was not forwarded. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestIgnoreNonZeroExitCodeFromClangTidy(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 2 {
- return newExitCodeError(23)
- }
- return nil
- }
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- stderr := ctx.stderrString()
- if err := verifyNonInternalError(stderr, "clang-tidy failed"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestReportGeneralErrorsFromClangTidy(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if ctx.cmdCount == 2 {
- return errors.New("someerror")
- }
- return nil
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- })
-}
-
-func TestOmitClangTidyForGcc(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if ctx.cmdCount > 1 {
- t.Errorf("expected 1 command. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestOmitClangTidyForGccWithClangSyntax(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-clang-syntax", mainCc)))
- if ctx.cmdCount > 2 {
- t.Errorf("expected 2 commands. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestUseClangTidyBasedOnFileExtension(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- testData := []struct {
- args []string
- clangTidy bool
- }{
- {[]string{"main.cc"}, true},
- {[]string{"main.cc"}, true},
- {[]string{"main.C"}, true},
- {[]string{"main.cxx"}, true},
- {[]string{"main.c++"}, true},
- {[]string{"main.xy"}, false},
- {[]string{"-o", "main.cc"}, false},
- {[]string{}, false},
- }
- for _, tt := range testData {
- ctx.cmdCount = 0
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, tt.args...)))
- if ctx.cmdCount > 1 && !tt.clangTidy {
- t.Errorf("expected a call to clang tidy but got none for args %s", tt.args)
- }
- if ctx.cmdCount == 1 && tt.clangTidy {
- t.Errorf("expected no call to clang tidy but got one for args %s", tt.args)
- }
- }
- })
-}
-
-func TestOmitCCacheWithClangTidy(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- return nil
- case 2:
- if err := verifyPath(cmd, "usr/bin/clang-tidy"); err != nil {
- return err
- }
- return nil
- default:
- return nil
- }
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 3 {
- t.Errorf("expected 3 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPartiallyOmitGomaWithClangTidy(t *testing.T) {
- withClangTidyTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- ctx.env = append(ctx.env, "GOMACC_PATH="+gomaPath)
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if err := verifyPath(cmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- return nil
- case 2:
- if err := verifyPath(cmd, "usr/bin/clang-tidy"); err != nil {
- return err
- }
- return nil
- default:
- return nil
- }
- }
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 3 {
- t.Errorf("expected 3 calls. Got: %d", ctx.cmdCount)
- }
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- })
-}
-
-func withClangTidyTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"WITH_TIDY=1"}
- work(ctx)
- })
-}
diff --git a/compiler_wrapper/command.go b/compiler_wrapper/command.go
deleted file mode 100644
index 69578597..00000000
--- a/compiler_wrapper/command.go
+++ /dev/null
@@ -1,260 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "io"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
-)
-
-type command struct {
- Path string `json:"path"`
- Args []string `json:"args"`
- // Updates and additions have the form:
- // `NAME=VALUE`
- // Removals have the form:
- // `NAME=`.
- EnvUpdates []string `json:"env_updates,omitempty"`
-}
-
-func newProcessCommand() *command {
- // This is a workaround for the fact that ld.so does not support
- // passing in the executable name when ld.so is invoked as
- // an executable (crbug/1003841).
- path := os.Getenv("LD_ARGV0")
- if path == "" {
- path = os.Args[0]
- }
- return &command{
- Path: path,
- Args: os.Args[1:],
- }
-}
-
-func mergeEnvValues(values []string, updates []string) []string {
- envMap := map[string]string{}
- for _, entry := range values {
- equalPos := strings.IndexRune(entry, '=')
- envMap[entry[:equalPos]] = entry[equalPos+1:]
- }
- for _, update := range updates {
- equalPos := strings.IndexRune(update, '=')
- key := update[:equalPos]
- value := update[equalPos+1:]
- if value == "" {
- delete(envMap, key)
- } else {
- envMap[key] = value
- }
- }
- env := []string{}
- for key, value := range envMap {
- env = append(env, key+"="+value)
- }
- return env
-}
-
-func runCmd(env env, cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- execCmd := exec.Command(cmd.Path, cmd.Args...)
- execCmd.Env = mergeEnvValues(env.environ(), cmd.EnvUpdates)
- execCmd.Dir = env.getwd()
- execCmd.Stdin = stdin
- execCmd.Stdout = stdout
- execCmd.Stderr = stderr
- return execCmd.Run()
-}
-
-func resolveAgainstPathEnv(env env, cmd string) (string, error) {
- path, _ := env.getenv("PATH")
- for _, path := range strings.Split(path, ":") {
- resolvedPath := filepath.Join(path, cmd)
- if _, err := os.Lstat(resolvedPath); err == nil {
- return resolvedPath, nil
- }
- }
- return "", fmt.Errorf("Couldn't find cmd %q in path", cmd)
-}
-
-func getAbsCmdPath(env env, cmd *command) string {
- path := cmd.Path
- if !filepath.IsAbs(path) {
- path = filepath.Join(env.getwd(), path)
- }
- return path
-}
-
-func newCommandBuilder(env env, cfg *config, cmd *command) (*commandBuilder, error) {
- basename := filepath.Base(cmd.Path)
- var nameParts []string
- if basename == "clang-tidy" {
- nameParts = []string{basename}
- } else {
- nameParts = strings.Split(basename, "-")
- }
- target := builderTarget{}
- switch len(nameParts) {
- case 1:
- // E.g. gcc
- target = builderTarget{
- compiler: nameParts[0],
- }
- case 4:
- // E.g. armv7m-cros-eabi-gcc
- target = builderTarget{
- arch: nameParts[0],
- vendor: nameParts[1],
- abi: nameParts[2],
- compiler: nameParts[3],
- target: basename[:strings.LastIndex(basename, "-")],
- }
- case 5:
- // E.g. x86_64-cros-linux-gnu-gcc
- target = builderTarget{
- arch: nameParts[0],
- vendor: nameParts[1],
- sys: nameParts[2],
- abi: nameParts[3],
- compiler: nameParts[4],
- target: basename[:strings.LastIndex(basename, "-")],
- }
- default:
- return nil, newErrorwithSourceLocf("unexpected compiler name pattern. Actual: %s", basename)
- }
-
- var compilerType compilerType
- switch {
- case strings.HasPrefix(target.compiler, "clang-tidy"):
- compilerType = clangTidyType
- case strings.HasPrefix(target.compiler, "clang"):
- compilerType = clangType
- default:
- compilerType = gccType
- }
- target.compilerType = compilerType
- absWrapperPath, err := getAbsWrapperPath(env, cmd)
- if err != nil {
- return nil, err
- }
- rootPath := filepath.Join(filepath.Dir(absWrapperPath), cfg.rootRelPath)
- return &commandBuilder{
- path: cmd.Path,
- args: createBuilderArgs( /*fromUser=*/ true, cmd.Args),
- env: env,
- cfg: cfg,
- rootPath: rootPath,
- absWrapperPath: absWrapperPath,
- target: target,
- }, nil
-}
-
-type commandBuilder struct {
- path string
- target builderTarget
- args []builderArg
- envUpdates []string
- env env
- cfg *config
- rootPath string
- absWrapperPath string
-}
-
-type builderArg struct {
- value string
- fromUser bool
-}
-
-type compilerType int32
-
-const (
- gccType compilerType = iota
- clangType
- clangTidyType
-)
-
-type builderTarget struct {
- target string
- arch string
- vendor string
- sys string
- abi string
- compiler string
- compilerType compilerType
-}
-
-func createBuilderArgs(fromUser bool, args []string) []builderArg {
- builderArgs := make([]builderArg, len(args))
- for i, arg := range args {
- builderArgs[i] = builderArg{value: arg, fromUser: fromUser}
- }
- return builderArgs
-}
-
-func (builder *commandBuilder) clone() *commandBuilder {
- return &commandBuilder{
- path: builder.path,
- args: append([]builderArg{}, builder.args...),
- env: builder.env,
- cfg: builder.cfg,
- rootPath: builder.rootPath,
- target: builder.target,
- absWrapperPath: builder.absWrapperPath,
- }
-}
-
-func (builder *commandBuilder) wrapPath(path string) {
- builder.args = append([]builderArg{{value: builder.path, fromUser: false}}, builder.args...)
- builder.path = path
-}
-
-func (builder *commandBuilder) addPreUserArgs(args ...string) {
- index := 0
- for _, arg := range builder.args {
- if arg.fromUser {
- break
- }
- index++
- }
- builder.args = append(builder.args[:index], append(createBuilderArgs( /*fromUser=*/ false, args), builder.args[index:]...)...)
-}
-
-func (builder *commandBuilder) addPostUserArgs(args ...string) {
- builder.args = append(builder.args, createBuilderArgs( /*fromUser=*/ false, args)...)
-}
-
-// Allows to map and filter arguments. Filters when the callback returns an empty string.
-func (builder *commandBuilder) transformArgs(transform func(arg builderArg) string) {
- // See https://github.com/golang/go/wiki/SliceTricks
- newArgs := builder.args[:0]
- for _, arg := range builder.args {
- newArg := transform(arg)
- if newArg != "" {
- newArgs = append(newArgs, builderArg{
- value: newArg,
- fromUser: arg.fromUser,
- })
- }
- }
- builder.args = newArgs
-}
-
-func (builder *commandBuilder) updateEnv(updates ...string) {
- builder.envUpdates = append(builder.envUpdates, updates...)
-}
-
-func (builder *commandBuilder) build() *command {
- cmdArgs := make([]string, len(builder.args))
- for i, builderArg := range builder.args {
- cmdArgs[i] = builderArg.value
- }
- return &command{
- Path: builder.path,
- Args: cmdArgs,
- EnvUpdates: builder.envUpdates,
- }
-}
diff --git a/compiler_wrapper/command_test.go b/compiler_wrapper/command_test.go
deleted file mode 100644
index 18d05a9c..00000000
--- a/compiler_wrapper/command_test.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "reflect"
- "sort"
- "testing"
-)
-
-func TestMergeEnvValues(t *testing.T) {
- testData := []struct {
- values []string
- updates []string
- result []string
- }{
- {[]string{}, []string{}, []string{}},
- {[]string{"A=1"}, []string{}, []string{"A=1"}},
- {[]string{"A=1=2=3"}, []string{}, []string{"A=1=2=3"}},
- {[]string{}, []string{"A=1"}, []string{"A=1"}},
- {[]string{}, []string{"A=1=2=3"}, []string{"A=1=2=3"}},
- {[]string{"A=1"}, []string{"A=2"}, []string{"A=2"}},
- {[]string{"A="}, []string{}, []string{"A="}},
- {[]string{"A="}, []string{"A=2"}, []string{"A=2"}},
- {[]string{"A=1"}, []string{"A="}, []string{}},
- {[]string{}, []string{"A=1", "A="}, []string{}},
- {[]string{}, []string{"A=1", "A=", "A=2"}, []string{"A=2"}},
- {[]string{"A=1", "B=2"}, []string{"C=3", "D=4"}, []string{"A=1", "B=2", "C=3", "D=4"}},
- }
- for _, tt := range testData {
- result := mergeEnvValues(tt.values, tt.updates)
- sort.Strings(result)
- if !reflect.DeepEqual(tt.result, result) {
- t.Errorf("unexpected result: %s", result)
- }
- }
-}
diff --git a/compiler_wrapper/compile_with_fallback.go b/compiler_wrapper/compile_with_fallback.go
deleted file mode 100644
index a3b00bf1..00000000
--- a/compiler_wrapper/compile_with_fallback.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bufio"
- "bytes"
- "fmt"
- "io"
- "os"
- "path/filepath"
- "strings"
- "syscall"
- "time"
-)
-
-const prebuiltCompilerPathKey = "ANDROID_LLVM_PREBUILT_COMPILER_PATH"
-
-func shouldCompileWithFallback(env env) bool {
- value, _ := env.getenv(prebuiltCompilerPathKey)
- return value != ""
-}
-
-// FIXME: Deduplicate this logic with the logic for FORCE_DISABLE_WERROR
-// (the logic here is from Android, the logic for FORCE_DISABLE_WERROR is from ChromeOS)
-func compileWithFallback(env env, cfg *config, originalCmd *command, absWrapperPath string) (exitCode int, err error) {
- firstCmd := &command{
- Path: originalCmd.Path,
- Args: originalCmd.Args,
- EnvUpdates: originalCmd.EnvUpdates,
- }
- // We only want to pass extra flags to clang and clang++.
- if base := filepath.Base(originalCmd.Path); base == "clang.real" || base == "clang++.real" {
- // We may introduce some new warnings after rebasing and we need to
- // disable them before we fix those warnings.
- extraArgs, _ := env.getenv("ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS")
- firstCmd.Args = append(
- append(firstCmd.Args, "-fno-color-diagnostics"),
- strings.Split(extraArgs, " ")...,
- )
- }
-
- firstCmdStdinBuffer := &bytes.Buffer{}
- firstCmdStderrBuffer := &bytes.Buffer{}
- firstCmdExitCode, err := wrapSubprocessErrorWithSourceLoc(firstCmd,
- env.run(firstCmd, teeStdinIfNeeded(env, firstCmd, firstCmdStdinBuffer), env.stdout(), io.MultiWriter(env.stderr(), firstCmdStderrBuffer)))
- if err != nil {
- return 0, err
- }
-
- if firstCmdExitCode == 0 {
- return 0, nil
- }
- stderrRedirectPath, _ := env.getenv("ANDROID_LLVM_STDERR_REDIRECT")
- f, err := os.OpenFile(stderrRedirectPath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error opening stderr file %s", stderrRedirectPath)
- }
- lockSuccess := false
- for i := 0; i < 30; i++ {
- err := syscall.Flock(int(f.Fd()), syscall.LOCK_EX|syscall.LOCK_NB)
- if err == nil {
- lockSuccess = true
- break
- }
- if errno, ok := err.(syscall.Errno); ok {
- if errno == syscall.EAGAIN || errno == syscall.EACCES {
- time.Sleep(500 * time.Millisecond)
- err = nil
- }
- }
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error waiting to lock file %s", stderrRedirectPath)
- }
- }
- if !lockSuccess {
- return 0, wrapErrorwithSourceLocf(err, "timeout waiting to lock file %s", stderrRedirectPath)
- }
- w := bufio.NewWriter(f)
- w.WriteString("==================COMMAND:====================\n")
- fmt.Fprintf(w, "%s %s\n\n", firstCmd.Path, strings.Join(firstCmd.Args, " "))
- firstCmdStderrBuffer.WriteTo(w)
- w.WriteString("==============================================\n\n")
- if err := w.Flush(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "unable to write to file %s", stderrRedirectPath)
- }
- if err := f.Close(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error closing file %s", stderrRedirectPath)
- }
-
- prebuiltCompilerPath, _ := env.getenv(prebuiltCompilerPathKey)
- fallbackCmd := &command{
- Path: filepath.Join(prebuiltCompilerPath, filepath.Base(absWrapperPath)),
- // Don't use extra args added (from ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS) for clang and
- // clang++ above. They may not be recognized by the fallback clang.
- Args: originalCmd.Args,
- // Delete prebuiltCompilerPathKey so the fallback doesn't keep
- // calling itself in case of an error.
- EnvUpdates: append(originalCmd.EnvUpdates, prebuiltCompilerPathKey+"="),
- }
- return wrapSubprocessErrorWithSourceLoc(fallbackCmd,
- env.run(fallbackCmd, bytes.NewReader(firstCmdStdinBuffer.Bytes()), env.stdout(), env.stderr()))
-}
diff --git a/compiler_wrapper/compile_with_fallback_test.go b/compiler_wrapper/compile_with_fallback_test.go
deleted file mode 100644
index 4ea847f6..00000000
--- a/compiler_wrapper/compile_with_fallback_test.go
+++ /dev/null
@@ -1,292 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
- "testing"
-)
-
-func TestOmitFallbackCompileForSuccessfulCall(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if ctx.cmdCount != 1 {
- t.Errorf("expected 1 call. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestOmitFallbackCompileForGeneralError(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- if ctx.cmdCount != 1 {
- t.Errorf("expected 1 call. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestCompileWithFallbackForNonZeroExitCode(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- return newExitCodeError(1)
- case 2:
- if err := verifyPath(cmd, "fallback_compiler/clang"); err != nil {
- return err
- }
- if err := verifyEnvUpdate(cmd, "ANDROID_LLVM_PREBUILT_COMPILER_PATH="); err != nil {
- return err
- }
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestCompileWithFallbackForwardStdoutAndStderr(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stdout, "originalmessage")
- fmt.Fprint(stderr, "originalerror")
- return newExitCodeError(1)
- case 2:
- fmt.Fprint(stdout, "fallbackmessage")
- fmt.Fprint(stderr, "fallbackerror")
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if err := verifyNonInternalError(ctx.stderrString(), "originalerrorfallbackerror"); err != nil {
- t.Error(err)
- }
- if !strings.Contains(ctx.stdoutString(), "originalmessagefallbackmessage") {
- t.Errorf("unexpected stdout. Got: %s", ctx.stdoutString())
- }
- })
-}
-
-func TestForwardGeneralErrorWhenFallbackCompileFails(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- return newExitCodeError(1)
- case 2:
- return errors.New("someerror")
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Error(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected stderr. Got: %s", stderr)
- }
- })
-}
-
-func TestForwardExitCodeWhenFallbackCompileFails(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- return newExitCodeError(1)
- case 2:
- return newExitCodeError(2)
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- exitCode := callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc))
- if exitCode != 2 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- })
-}
-
-func TestForwardStdinToFallbackCompile(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
-
- switch ctx.cmdCount {
- case 1:
- return newExitCodeError(1)
- case 2:
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, "-", mainCc)))
- })
-}
-
-func TestCompileWithFallbackExtraArgs(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- testData := []struct {
- compiler string
- expectExtraArgs bool
- }{
- {"./clang", true},
- {"./clang++", true},
- {"./clang-tidy", false},
- }
- ctx.env = append(ctx.env, "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b")
- extraArgs := []string{"-fno-color-diagnostics", "-a", "-b"}
- for _, tt := range testData {
- ctx.cmdCount = 0
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if tt.expectExtraArgs {
- if err := verifyArgOrder(cmd, extraArgs...); err != nil {
- return err
- }
- } else {
- for _, arg := range extraArgs {
- if err := verifyArgCount(cmd, 0, arg); err != nil {
- return err
- }
- }
- }
- return newExitCodeError(1)
- case 2:
- for _, arg := range extraArgs {
- if err := verifyArgCount(cmd, 0, arg); err != nil {
- return err
- }
- }
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(tt.compiler, mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- }
- })
-}
-
-func TestCompileWithFallbackLogCommandAndErrors(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.env = append(ctx.env, "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b")
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stderr, "someerror\n")
- return newExitCodeError(1)
- case 2:
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
-
- log := readCompileWithFallbackErrorLog(ctx)
- if log != `==================COMMAND:====================
-clang.real main.cc -fno-color-diagnostics -a -b
-
-someerror
-==============================================
-
-` {
- t.Errorf("unexpected log. Got: %s", log)
- }
-
- entry, _ := os.Lstat(filepath.Join(ctx.tempDir, "fallback_stderr"))
- if entry.Mode()&0777 != 0644 {
- t.Errorf("unexpected mode for logfile. Got: %#o", entry.Mode())
- }
- })
-}
-
-func TestCompileWithFallbackAppendToLog(t *testing.T) {
- withCompileWithFallbackTestContext(t, func(ctx *testContext) {
- ctx.writeFile(filepath.Join(ctx.tempDir, "fallback_stderr"), "oldContent\n")
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- return newExitCodeError(1)
- case 2:
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangAndroid, mainCc)))
-
- log := readCompileWithFallbackErrorLog(ctx)
- if !strings.Contains(log, "oldContent") {
- t.Errorf("old content not present: %s", log)
- }
- if !strings.Contains(log, "clang.real") {
- t.Errorf("new content not present: %s", log)
- }
- })
-}
-
-func withCompileWithFallbackTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.isAndroidWrapper = true
- ctx.env = []string{
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH=fallback_compiler",
- "ANDROID_LLVM_STDERR_REDIRECT=" + filepath.Join(ctx.tempDir, "fallback_stderr"),
- }
- work(ctx)
- })
-}
-
-func readCompileWithFallbackErrorLog(ctx *testContext) string {
- logFile := filepath.Join(ctx.tempDir, "fallback_stderr")
- data, err := ioutil.ReadFile(logFile)
- if err != nil {
- ctx.t.Fatalf("error reading log file %s: %s", logFile, err)
- }
- return string(data)
-}
diff --git a/compiler_wrapper/compiler_wrapper.go b/compiler_wrapper/compiler_wrapper.go
deleted file mode 100644
index 17c62db7..00000000
--- a/compiler_wrapper/compiler_wrapper.go
+++ /dev/null
@@ -1,266 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "fmt"
- "io"
- "path/filepath"
- "strings"
-)
-
-func callCompiler(env env, cfg *config, inputCmd *command) int {
- var compilerErr error
-
- if !filepath.IsAbs(inputCmd.Path) && !strings.HasPrefix(inputCmd.Path, ".") &&
- !strings.ContainsRune(inputCmd.Path, filepath.Separator) {
- if resolvedPath, err := resolveAgainstPathEnv(env, inputCmd.Path); err == nil {
- inputCmd = &command{
- Path: resolvedPath,
- Args: inputCmd.Args,
- EnvUpdates: inputCmd.EnvUpdates,
- }
- } else {
- compilerErr = err
- }
- }
- exitCode := 0
- if compilerErr == nil {
- if cfg.oldWrapperPath != "" {
- exitCode, compilerErr = callCompilerWithRunAndCompareToOldWrapper(env, cfg, inputCmd)
- } else {
- exitCode, compilerErr = callCompilerInternal(env, cfg, inputCmd)
- }
- }
- if compilerErr != nil {
- printCompilerError(env.stderr(), compilerErr)
- exitCode = 1
- }
- return exitCode
-}
-
-func callCompilerWithRunAndCompareToOldWrapper(env env, cfg *config, inputCmd *command) (exitCode int, err error) {
- stdinBuffer := &bytes.Buffer{}
- recordingEnv := &commandRecordingEnv{
- env: env,
- stdinReader: teeStdinIfNeeded(env, inputCmd, stdinBuffer),
- }
- // Note: this won't do a real exec as recordingEnv redirects exec to run.
- if exitCode, err = callCompilerInternal(recordingEnv, cfg, inputCmd); err != nil {
- return 0, err
- }
- if err = compareToOldWrapper(env, cfg, inputCmd, stdinBuffer.Bytes(), recordingEnv.cmdResults, exitCode); err != nil {
- return exitCode, err
- }
- return exitCode, nil
-}
-
-func callCompilerInternal(env env, cfg *config, inputCmd *command) (exitCode int, err error) {
- if err := checkUnsupportedFlags(inputCmd); err != nil {
- return 0, err
- }
- mainBuilder, err := newCommandBuilder(env, cfg, inputCmd)
- if err != nil {
- return 0, err
- }
- processPrintConfigFlag(mainBuilder)
- processPrintCmdlineFlag(mainBuilder)
- env = mainBuilder.env
- var compilerCmd *command
- clangSyntax := processClangSyntaxFlag(mainBuilder)
- if cfg.isAndroidWrapper {
- // FIXME: This combination of using the directory of the symlink but the
- // basename of the link target is strange but is the logic that old android
- // wrapper uses. Change this to use directory and basename either from the
- // absWrapperPath or from the builder.path, but don't mix anymore.
- mainBuilder.path = filepath.Join(filepath.Dir(mainBuilder.path), filepath.Base(mainBuilder.absWrapperPath)+".real")
-
- switch mainBuilder.target.compilerType {
- case clangType:
- mainBuilder.addPreUserArgs(mainBuilder.cfg.clangFlags...)
- mainBuilder.addPreUserArgs(mainBuilder.cfg.commonFlags...)
- if _, err := processGomaCccFlags(mainBuilder); err != nil {
- return 0, err
- }
- compilerCmd = mainBuilder.build()
- case clangTidyType:
- compilerCmd = mainBuilder.build()
- default:
- return 0, newErrorwithSourceLocf("unsupported compiler: %s", mainBuilder.target.compiler)
- }
- } else if mainBuilder.target.compilerType == clangType {
- cSrcFile, useClangTidy := processClangTidyFlags(mainBuilder)
- sysroot, err := prepareClangCommand(mainBuilder)
- if err != nil {
- return 0, err
- }
- allowCCache := true
- if useClangTidy {
- allowCCache = false
- clangCmdWithoutGomaAndCCache := mainBuilder.build()
- if err := runClangTidy(env, clangCmdWithoutGomaAndCCache, cSrcFile); err != nil {
- return 0, err
- }
- }
- if err := processGomaCCacheFlags(sysroot, allowCCache, mainBuilder); err != nil {
- return 0, err
- }
- compilerCmd = mainBuilder.build()
- } else {
- if clangSyntax {
- allowCCache := false
- clangCmd, err := calcClangCommand(allowCCache, mainBuilder.clone())
- if err != nil {
- return 0, err
- }
- gccCmd, err := calcGccCommand(mainBuilder)
- if err != nil {
- return 0, err
- }
- return checkClangSyntax(env, clangCmd, gccCmd)
- }
- compilerCmd, err = calcGccCommand(mainBuilder)
- if err != nil {
- return 0, err
- }
- }
- rusageLogfileName := getRusageLogFilename(env)
- bisectStage := getBisectStage(env)
- if shouldForceDisableWError(env) {
- if rusageLogfileName != "" {
- return 0, newUserErrorf("GETRUSAGE is meaningless with FORCE_DISABLE_WERROR")
- }
- if bisectStage != "" {
- return 0, newUserErrorf("BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR")
- }
- return doubleBuildWithWNoError(env, cfg, compilerCmd)
- }
- if shouldCompileWithFallback(env) {
- if rusageLogfileName != "" {
- return 0, newUserErrorf("GETRUSAGE is meaningless with FORCE_DISABLE_WERROR")
- }
- if bisectStage != "" {
- return 0, newUserErrorf("BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR")
- }
- return compileWithFallback(env, cfg, compilerCmd, mainBuilder.absWrapperPath)
- }
- if rusageLogfileName != "" {
- if bisectStage != "" {
- return 0, newUserErrorf("BISECT_STAGE is meaningless with GETRUSAGE")
- }
- return logRusage(env, rusageLogfileName, compilerCmd)
- }
- if bisectStage != "" {
- compilerCmd, err = calcBisectCommand(env, cfg, bisectStage, compilerCmd)
- if err != nil {
- return 0, err
- }
- }
- // Note: We return an exit code only if the underlying env is not
- // really doing an exec, e.g. commandRecordingEnv.
- return wrapSubprocessErrorWithSourceLoc(compilerCmd, env.exec(compilerCmd))
-}
-
-func prepareClangCommand(builder *commandBuilder) (sysroot string, err error) {
- sysroot = ""
- if !builder.cfg.isHostWrapper {
- sysroot = processSysrootFlag(builder)
- }
- builder.addPreUserArgs(builder.cfg.clangFlags...)
- calcCommonPreUserArgs(builder)
- if err := processClangFlags(builder); err != nil {
- return "", err
- }
- return sysroot, nil
-}
-
-func calcClangCommand(allowCCache bool, builder *commandBuilder) (*command, error) {
- sysroot, err := prepareClangCommand(builder)
- if err != nil {
- return nil, err
- }
- if err := processGomaCCacheFlags(sysroot, allowCCache, builder); err != nil {
- return nil, err
- }
- return builder.build(), nil
-}
-
-func calcGccCommand(builder *commandBuilder) (*command, error) {
- sysroot := ""
- if !builder.cfg.isHostWrapper {
- sysroot = processSysrootFlag(builder)
- }
- builder.addPreUserArgs(builder.cfg.gccFlags...)
- if !builder.cfg.isHostWrapper {
- calcCommonPreUserArgs(builder)
- }
- processGccFlags(builder)
- if !builder.cfg.isHostWrapper {
- allowCCache := true
- if err := processGomaCCacheFlags(sysroot, allowCCache, builder); err != nil {
- return nil, err
- }
- }
- return builder.build(), nil
-}
-
-func calcCommonPreUserArgs(builder *commandBuilder) {
- builder.addPreUserArgs(builder.cfg.commonFlags...)
- if !builder.cfg.isHostWrapper {
- processPieFlags(builder)
- processThumbCodeFlags(builder)
- processStackProtectorFlags(builder)
- processX86Flags(builder)
- }
- processSanitizerFlags(builder)
-}
-
-func processGomaCCacheFlags(sysroot string, allowCCache bool, builder *commandBuilder) (err error) {
- gomaccUsed := false
- if !builder.cfg.isHostWrapper {
- gomaccUsed, err = processGomaCccFlags(builder)
- if err != nil {
- return err
- }
- }
- if !gomaccUsed && allowCCache {
- processCCacheFlag(sysroot, builder)
- }
- return nil
-}
-
-func getAbsWrapperPath(env env, wrapperCmd *command) (string, error) {
- wrapperPath := getAbsCmdPath(env, wrapperCmd)
- evaledCmdPath, err := filepath.EvalSymlinks(wrapperPath)
- if err != nil {
- return "", wrapErrorwithSourceLocf(err, "failed to evaluate symlinks for %s", wrapperPath)
- }
- return evaledCmdPath, nil
-}
-
-func printCompilerError(writer io.Writer, compilerErr error) {
- if _, ok := compilerErr.(userError); ok {
- fmt.Fprintf(writer, "%s\n", compilerErr)
- } else {
- fmt.Fprintf(writer,
- "Internal error. Please report to chromeos-toolchain@google.com.\n%s\n",
- compilerErr)
- }
-}
-
-func teeStdinIfNeeded(env env, inputCmd *command, dest io.Writer) io.Reader {
- // We can't use io.TeeReader unconditionally, as that would block
- // calls to exec.Cmd.Run(), even if the underlying process has already
- // terminated. See https://github.com/golang/go/issues/7990 for more details.
- lastArg := ""
- for _, arg := range inputCmd.Args {
- if arg == "-" && lastArg != "-o" {
- return io.TeeReader(env.stdin(), dest)
- }
- lastArg = arg
- }
- return env.stdin()
-}
diff --git a/compiler_wrapper/compiler_wrapper_test.go b/compiler_wrapper/compiler_wrapper_test.go
deleted file mode 100644
index 71cd36df..00000000
--- a/compiler_wrapper/compiler_wrapper_test.go
+++ /dev/null
@@ -1,198 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "errors"
- "fmt"
- "io"
- "path/filepath"
- "strings"
- "syscall"
- "testing"
-)
-
-func TestAddCommonFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.commonFlags = []string{"-someflag"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-someflag", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddGccConfigFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.gccFlags = []string{"-someflag"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-someflag", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddClangConfigFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.clangFlags = []string{"-someflag"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-someflag", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestLogGeneralExecError(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- testOldWrapperPaths := []string{
- "",
- filepath.Join(ctx.tempDir, "fakewrapper"),
- }
- for _, testOldWrapperPath := range testOldWrapperPaths {
- ctx.cfg.oldWrapperPath = testOldWrapperPath
- // Note: No need to write the old wrapper as we don't execute
- // it due to the general error from the new error.
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, gccX86_64) {
- t.Errorf("could not find compiler path on stderr. Got: %s", stderr)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("could not find original error on stderr. Got: %s", stderr)
- }
- }
- })
-}
-
-func TestForwardStdin(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
- return nil
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-", mainCc)))
- })
-}
-
-func TestLogMissingCCacheExecError(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
-
- testOldWrapperPaths := []string{
- "",
- filepath.Join(ctx.tempDir, "fakewrapper"),
- }
- for _, testOldWrapperPath := range testOldWrapperPaths {
- ctx.cfg.oldWrapperPath = testOldWrapperPath
- // Note: No need to write the old wrapper as we don't execute
- // it due to the general error from the new error.
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return syscall.ENOENT
- }
- ctx.stderrBuffer.Reset()
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "ccache not found under .*. Please install it"); err != nil {
- t.Fatal(err)
- }
- }
- })
-}
-
-func TestLogExitCodeErrorWhenComparingToOldWrapper(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writePythonMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- ExitCode: 2,
- },
- },
- })
- fmt.Fprint(stderr, "someerror")
- return newExitCodeError(2)
- }
-
- exitCode := callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc))
- if exitCode != 2 {
- t.Fatalf("Expected exit code 2. Got: %d", exitCode)
- }
- if err := verifyNonInternalError(ctx.stderrString(), "someerror"); err != nil {
- t.Fatal(err)
- }
- })
-}
-
-func TestErrorOnLogRusageAndForceDisableWError(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "FORCE_DISABLE_WERROR=1",
- "GETRUSAGE=rusage.log",
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "GETRUSAGE is meaningless with FORCE_DISABLE_WERROR"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestErrorOnLogRusageAndBisect(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=xyz",
- "GETRUSAGE=rusage.log",
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "BISECT_STAGE is meaningless with GETRUSAGE"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestErrorOnBisectAndForceDisableWError(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{
- "BISECT_STAGE=xyz",
- "FORCE_DISABLE_WERROR=1",
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPrintUserCompilerError(t *testing.T) {
- buffer := bytes.Buffer{}
- printCompilerError(&buffer, newUserErrorf("abcd"))
- if buffer.String() != "abcd\n" {
- t.Errorf("Unexpected string. Got: %s", buffer.String())
- }
-}
-
-func TestPrintOtherCompilerError(t *testing.T) {
- buffer := bytes.Buffer{}
- printCompilerError(&buffer, errors.New("abcd"))
- if buffer.String() != "Internal error. Please report to chromeos-toolchain@google.com.\nabcd\n" {
- t.Errorf("Unexpected string. Got: %s", buffer.String())
- }
-}
diff --git a/compiler_wrapper/config.go b/compiler_wrapper/config.go
deleted file mode 100644
index 3720018e..00000000
--- a/compiler_wrapper/config.go
+++ /dev/null
@@ -1,197 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "strconv"
-)
-
-type config struct {
- // TODO: Refactor this flag into more generic configuration properties.
- isHostWrapper bool
- isAndroidWrapper bool
- // Whether to use ccache.
- useCCache bool
- // Flags to add to gcc and clang.
- commonFlags []string
- // Flags to add to gcc only.
- gccFlags []string
- // Flags to add to clang only.
- clangFlags []string
- // Toolchain root path relative to the wrapper binary.
- rootRelPath string
- // Path of the old wrapper using the toolchain root.
- oldWrapperPath string
- // Whether to mock out the calls that the old wrapper does.
- mockOldWrapperCmds bool
- // Directory to store errors that were prevented with -Wno-error.
- newWarningsDir string
- // Version. Only used for printing via -print-cmd.
- version string
-}
-
-// Version can be set via a linker flag.
-// Values fills config.version.
-var Version = ""
-
-// UseCCache can be set via a linker flag.
-// Value will be passed to strconv.ParseBool.
-// E.g. go build -ldflags '-X config.UseCCache=true'.
-var UseCCache = "unknown"
-
-// UseLlvmNext can be set via a linker flag.
-// Value will be passed to strconv.ParseBool.
-// E.g. go build -ldflags '-X config.UseLlvmNext=true'.
-var UseLlvmNext = "unknown"
-
-// ConfigName can be set via a linker flag.
-// Value has to be one of:
-// - "cros.hardened"
-// - "cros.nonhardened"
-var ConfigName = "unknown"
-
-// Returns the configuration matching the UseCCache and ConfigName.
-func getRealConfig() (*config, error) {
- useCCache, err := strconv.ParseBool(UseCCache)
- if err != nil {
- return nil, wrapErrorwithSourceLocf(err, "invalid format for UseCCache")
- }
- useLlvmNext, err := strconv.ParseBool(UseLlvmNext)
- if err != nil {
- return nil, wrapErrorwithSourceLocf(err, "invalid format for UseLLvmNext")
- }
- // FIXME: Remove comparison to old wrapper once the new wrapper has landed.
- oldWrapperPath := ""
- config, err := getConfig(ConfigName, useCCache, useLlvmNext, oldWrapperPath, Version)
- if err != nil {
- return nil, err
- }
- return config, nil
-}
-
-func getConfig(configName string, useCCache bool, useLlvmNext bool, oldWrapperPath string, version string) (*config, error) {
- cfg := config{}
- switch configName {
- case "cros.hardened":
- cfg = *crosHardenedConfig
- case "cros.nonhardened":
- cfg = *crosNonHardenedConfig
- case "cros.host":
- cfg = *crosHostConfig
- case "android":
- cfg = *androidConfig
- default:
- return nil, newErrorwithSourceLocf("unknown config name: %s", configName)
- }
- cfg.useCCache = useCCache
- if useLlvmNext {
- cfg.clangFlags = append(cfg.clangFlags, llvmNextFlags...)
- }
- cfg.oldWrapperPath = oldWrapperPath
- cfg.version = version
- return &cfg, nil
-}
-
-var llvmNextFlags = []string{
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
-}
-
-// Full hardening.
-// Temporarily disable function splitting because of chromium:434751.
-var crosHardenedConfig = &config{
- rootRelPath: "../../../../..",
- commonFlags: []string{
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- },
- gccFlags: []string{
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- },
- // Temporarily disable tautological-*-compare chromium:778316.
- // Temporarily add no-unknown-warning-option to deal with old clang versions.
- // Temporarily disable Wsection since kernel gets a bunch of these. chromium:778867
- // Disable "-faddrsig" since it produces object files that strip doesn't understand, chromium:915742.
- clangFlags: []string{
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- },
- newWarningsDir: "/tmp/fatal_clang_warnings",
-}
-
-// Flags to be added to non-hardened toolchain.
-var crosNonHardenedConfig = &config{
- rootRelPath: "../../../../..",
- commonFlags: []string{},
- gccFlags: []string{
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- },
- // Temporarily disable tautological-*-compare chromium:778316.
- // Temporarily add no-unknown-warning-option to deal with old clang versions.
- // Temporarily disable Wsection since kernel gets a bunch of these. chromium:778867
- clangFlags: []string{
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- },
- newWarningsDir: "/tmp/fatal_clang_warnings",
-}
-
-// Flags to be added to host toolchain.
-var crosHostConfig = &config{
- isHostWrapper: true,
- rootRelPath: "../..",
- commonFlags: []string{},
- gccFlags: []string{
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- },
- // Temporarily disable tautological-*-compare chromium:778316.
- // Temporarily add no-unknown-warning-option to deal with old clang versions.
- clangFlags: []string{
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- },
- newWarningsDir: "/tmp/fatal_clang_warnings",
-}
-
-var androidConfig = &config{
- isHostWrapper: false,
- isAndroidWrapper: true,
- rootRelPath: "./",
- commonFlags: []string{},
- gccFlags: []string{},
- clangFlags: []string{},
- newWarningsDir: "/tmp/fatal_clang_warnings",
-}
diff --git a/compiler_wrapper/config_test.go b/compiler_wrapper/config_test.go
deleted file mode 100644
index a11c2857..00000000
--- a/compiler_wrapper/config_test.go
+++ /dev/null
@@ -1,143 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestRealConfigWithUseCCacheFlag(t *testing.T) {
- resetGlobals()
- defer resetGlobals()
- ConfigName = "cros.hardened"
- UseLlvmNext = "false"
-
- UseCCache = "false"
- cfg, err := getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if cfg.useCCache {
- t.Fatal("UseCCache: Expected false got true")
- }
-
- UseCCache = "true"
- cfg, err = getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if !cfg.useCCache {
- t.Fatal("UseCCache: Expected true got false")
- }
-
- UseCCache = "invalid"
- if _, err := getRealConfig(); err == nil {
- t.Fatalf("UseCCache: Expected an error, got none")
- }
-}
-
-func TestRealConfigWithUseLLvmFlag(t *testing.T) {
- resetGlobals()
- defer resetGlobals()
- ConfigName = "cros.hardened"
- UseCCache = "false"
-
- UseLlvmNext = "false"
- cfg, err := getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if isUsingLLvmNext(cfg) {
- t.Fatal("UseLLvmNext: Expected not to be used")
- }
-
- UseLlvmNext = "true"
- cfg, err = getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
-
- if !isUsingLLvmNext(cfg) {
- t.Fatal("UseLLvmNext: Expected to be used")
- }
-
- UseLlvmNext = "invalid"
- if _, err := getRealConfig(); err == nil {
- t.Fatalf("UseLlvmNext: Expected an error, got none")
- }
-}
-
-func TestRealConfigWithConfigNameFlag(t *testing.T) {
- resetGlobals()
- defer resetGlobals()
- UseCCache = "false"
- UseLlvmNext = "false"
-
- ConfigName = "cros.hardened"
- cfg, err := getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if !isSysrootHardened(cfg) || cfg.isHostWrapper {
- t.Fatalf("ConfigName: Expected sysroot hardened config. Got: %#v", cfg)
- }
-
- ConfigName = "cros.nonhardened"
- cfg, err = getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if isSysrootHardened(cfg) || cfg.isHostWrapper {
- t.Fatalf("ConfigName: Expected sysroot non hardened config. Got: %#v", cfg)
- }
-
- ConfigName = "cros.host"
- cfg, err = getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if !cfg.isHostWrapper {
- t.Fatalf("ConfigName: Expected clang host config. Got: %#v", cfg)
- }
-
- ConfigName = "android"
- cfg, err = getRealConfig()
- if err != nil {
- t.Fatal(err)
- }
- if !cfg.isAndroidWrapper {
- t.Fatalf("ConfigName: Expected clang host config. Got: %#v", cfg)
- }
-
- ConfigName = "invalid"
- if _, err := getRealConfig(); err == nil {
- t.Fatalf("ConfigName: Expected an error, got none")
- }
-}
-
-func isSysrootHardened(cfg *config) bool {
- for _, arg := range cfg.commonFlags {
- if arg == "-pie" {
- return true
- }
- }
- return false
-}
-
-func isUsingLLvmNext(cfg *config) bool {
- for _, arg := range cfg.clangFlags {
- if arg == "-Wno-reorder-init-list" {
- return true
- }
- }
- return false
-}
-
-func resetGlobals() {
- // Set all global variables to a defined state.
- UseLlvmNext = "unknown"
- ConfigName = "unknown"
- UseCCache = "unknown"
-}
diff --git a/compiler_wrapper/cros_hardened_config_test.go b/compiler_wrapper/cros_hardened_config_test.go
deleted file mode 100644
index 10d8bf6f..00000000
--- a/compiler_wrapper/cros_hardened_config_test.go
+++ /dev/null
@@ -1,590 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "io/ioutil"
- "path"
- "path/filepath"
- "regexp"
- "strings"
- "testing"
-)
-
-const oldHardenedWrapperPathForTest = "$CHROOT/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/gcc-bin/4.9.x/sysroot_wrapper.hardened"
-const crosHardenedGoldenDir = "testdata/cros_hardened_golden"
-const crosHardenedNoCCacheGoldenDir = "testdata/cros_hardened_noccache_golden"
-const crosHardenedLlvmNextGoldenDir = "testdata/cros_hardened_llvmnext_golden"
-
-func TestCrosHardenedConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := true
- cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, oldHardenedWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- runGoldenRecords(ctx, crosHardenedGoldenDir, createSyswrapperGoldenInputs(ctx))
- })
-}
-
-func TestCrosHardenedConfigWithoutCCache(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := false
- cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, oldHardenedWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- // Create a copy of the old wrapper where the CCACHE_DEFAULT is false.
- if ctx.cfg.oldWrapperPath != "" {
- oldWrapperContent, err := ioutil.ReadFile(ctx.cfg.oldWrapperPath)
- if err != nil {
- t.Fatal(err)
- }
- oldWrapperContent = regexp.MustCompile(`True\s+#\s+@CCACHE_DEFAULT@`).ReplaceAll(oldWrapperContent, []byte("False #"))
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "oldwrapper_noccache")
- if err := ioutil.WriteFile(ctx.cfg.oldWrapperPath, oldWrapperContent, 0666); err != nil {
- t.Fatal(err)
- }
- }
-
- // Only run the subset of the sysroot wrapper tests that execute commands.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- gomaEnv := "GOMACC_PATH=" + gomaPath
- runGoldenRecords(ctx, crosHardenedNoCCacheGoldenDir, []goldenFile{
- createGccPathGoldenInputs(ctx, gomaEnv),
- createClangPathGoldenInputs(ctx, gomaEnv),
- createClangSyntaxGoldenInputs(gomaEnv),
- createBisectGoldenInputs(clangX86_64),
- createForceDisableWErrorGoldenInputs(),
- createClangTidyGoldenInputs(gomaEnv),
- })
- })
-}
-
-func TestCrosHardenedConfigWithLlvmNext(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := true
- useCCache := true
- cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, oldHardenedWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- // Create a copy of the old wrapper where we add the llvm next flags
- if ctx.cfg.oldWrapperPath != "" {
- oldWrapperContent, err := ioutil.ReadFile(ctx.cfg.oldWrapperPath)
- if err != nil {
- t.Fatal(err)
- }
- oldWrapperLlvmNextFlags := `
-LLVM_NEXT_FLAGS_TO_ADD = set(['-Wno-reorder-init-list',
-'-Wno-final-dtor-non-final-class',
-'-Wno-implicit-int-float-conversion',
-'-Wno-return-stack-address'
-])`
- oldWrapperContent = regexp.MustCompile(`LLVM_NEXT_FLAGS_TO_ADD = set\(\[\]\)`).ReplaceAll(oldWrapperContent, []byte(oldWrapperLlvmNextFlags))
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "oldwrapper_llvmnext")
- if err := ioutil.WriteFile(ctx.cfg.oldWrapperPath, oldWrapperContent, 0666); err != nil {
- t.Fatal(err)
- }
- }
-
- // Only run the subset of the sysroot wrapper tests that execute commands.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- gomaEnv := "GOMACC_PATH=" + gomaPath
- runGoldenRecords(ctx, crosHardenedLlvmNextGoldenDir, []goldenFile{
- createGccPathGoldenInputs(ctx, gomaEnv),
- createClangPathGoldenInputs(ctx, gomaEnv),
- createClangSyntaxGoldenInputs(gomaEnv),
- createBisectGoldenInputs(clangX86_64),
- createForceDisableWErrorGoldenInputs(),
- createClangTidyGoldenInputs(gomaEnv),
- })
- })
-}
-
-func createSyswrapperGoldenInputs(ctx *testContext) []goldenFile {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- gomaEnv := "GOMACC_PATH=" + gomaPath
-
- return []goldenFile{
- createGccPathGoldenInputs(ctx, gomaEnv),
- createGoldenInputsForAllTargets("gcc", mainCc),
- createSysrootWrapperCommonGoldenInputs("gcc", gomaEnv),
- createSanitizerGoldenInputs("gcc"),
- createGccArgsGoldenInputs(),
- createClangSyntaxGoldenInputs(gomaEnv),
- createClangPathGoldenInputs(ctx, gomaEnv),
- createGoldenInputsForAllTargets("clang", mainCc),
- createGoldenInputsForAllTargets("clang", "-ftrapv", mainCc),
- createSysrootWrapperCommonGoldenInputs("clang", gomaEnv),
- createSanitizerGoldenInputs("clang"),
- createClangArgsGoldenInputs(),
- createBisectGoldenInputs(clangX86_64),
- createForceDisableWErrorGoldenInputs(),
- createClangTidyGoldenInputs(gomaEnv),
- }
-}
-
-func createGoldenInputsForAllTargets(compiler string, args ...string) goldenFile {
- argsReplacer := strings.NewReplacer(".", "", "-", "")
- return goldenFile{
- Name: fmt.Sprintf("%s_%s_target_specific.json", compiler, argsReplacer.Replace(strings.Join(args, "_"))),
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-eabi-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-win-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv7m-cros-linux-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv7m-cros-eabi-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv7m-cros-win-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv8m-cros-linux-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv8m-cros-eabi-"+compiler, args...),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv8m-cros-win-gnu-"+compiler, args...),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createBisectGoldenInputs(compiler string) goldenFile {
- return goldenFile{
- Name: "bisect.json",
- // Disable comparing to the old wrapper as that calls the bisect_driver
- // directly from python, and the new wrapper calls it via a separate
- // sub command.
- ignoreOldWrapper: true,
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(compiler, mainCc),
- Env: []string{
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home",
- },
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(compiler, mainCc),
- Env: []string{
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home",
- },
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(compiler, mainCc),
- Env: []string{
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home",
- },
- Cmds: errorResults,
- },
- },
- }
-}
-
-func createForceDisableWErrorGoldenInputs() goldenFile {
- return goldenFile{
- Name: "force_disable_werror.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{"FORCE_DISABLE_WERROR=1"},
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{"FORCE_DISABLE_WERROR=1"},
- Cmds: []commandResult{
- {
- Stderr: "-Werror originalerror",
- ExitCode: 1,
- },
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{"FORCE_DISABLE_WERROR=1"},
- Cmds: []commandResult{
- {
- Stderr: "-Werror originalerror",
- ExitCode: 1,
- },
- errorResult,
- },
- },
- },
- }
-}
-
-func createGccPathGoldenInputs(ctx *testContext, gomaEnv string) goldenFile {
- deepPath := "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc"
- linkedDeepPath := "./symlinked/x86_64-cros-linux-gnu-gcc"
- ctx.writeFile(filepath.Join(ctx.tempDir, "/pathenv/x86_64-cros-linux-gnu-gcc"), "")
- ctx.symlink(deepPath, linkedDeepPath)
- return goldenFile{
- Name: "gcc_path.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-gcc", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-gcc", mainCc),
- Cmds: errorResults,
- },
- {
- WrapperCmd: newGoldenCmd(filepath.Join(ctx.tempDir, "x86_64-cros-linux-gnu-gcc"), mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(deepPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(linkedDeepPath, mainCc),
- Cmds: okResults,
- },
- {
- Env: []string{"PATH=" + filepath.Join(ctx.tempDir, "/pathenv")},
- WrapperCmd: newGoldenCmd("x86_64-cros-linux-gnu-gcc", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createClangPathGoldenInputs(ctx *testContext, gomaEnv string) goldenFile {
- deepPath := "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang"
- linkedDeepPath := "./symlinked/x86_64-cros-linux-gnu-clang"
- ctx.writeFile(filepath.Join(ctx.tempDir, "/pathenv/x86_64-cros-linux-gnu-clang"), "")
- ctx.symlink(deepPath, linkedDeepPath)
- return goldenFile{
- Name: "clang_path.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-clang", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-clang", mainCc),
- Cmds: errorResults,
- },
- {
- WrapperCmd: newGoldenCmd("./x86_64-cros-linux-gnu-clang++", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{"CLANG=somepath/clang"},
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Xclang-path=/somedir", mainCc),
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Xclang-path=/somedir", mainCc),
- Env: []string{gomaEnv},
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Xclang-path=/somedir", mainCc),
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- errorResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(filepath.Join(ctx.tempDir, "x86_64-cros-linux-gnu-clang"), mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(deepPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(linkedDeepPath, mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("somedir/x86_64-cros-linux-gnu-clang", mainCc),
- Cmds: okResults,
- },
- {
- Env: []string{"PATH=" + filepath.Join(ctx.tempDir, "/pathenv")},
- WrapperCmd: newGoldenCmd("x86_64-cros-linux-gnu-clang", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createClangTidyGoldenInputs(gomaEnv string) goldenFile {
- tidyEnv := "WITH_TIDY=1"
- return goldenFile{
- Name: "clangtidy.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{tidyEnv},
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- okResult,
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{tidyEnv, gomaEnv},
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- okResult,
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{tidyEnv, gomaEnv},
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- errorResult,
- // TODO: we don't fail the compilation due to clang-tidy, as that
- // is the behavior in the old wrapper. Consider changing this in
- // the future.
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, mainCc),
- Env: []string{tidyEnv, gomaEnv},
- Cmds: []commandResult{
- {Stdout: "someResourceDir"},
- okResult,
- errorResult,
- },
- },
- },
- }
-}
-
-func createClangSyntaxGoldenInputs(gomaEnv string) goldenFile {
- return goldenFile{
- Name: "gcc_clang_syntax.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-clang-syntax", mainCc),
- Cmds: []commandResult{
- okResult,
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-clang-syntax", mainCc),
- Env: []string{gomaEnv},
- Cmds: []commandResult{
- okResult,
- okResult,
- },
- },
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-clang-syntax", mainCc),
- Cmds: errorResults,
- },
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-clang-syntax", mainCc),
- Cmds: []commandResult{
- okResult,
- errorResult,
- },
- },
- },
- }
-}
-
-func createSysrootWrapperCommonGoldenInputs(compiler string, gomaEnv string) goldenFile {
- // We are using a fixed target as all of the following args are target independent.
- wrapperPath := "./x86_64-cros-linux-gnu-" + compiler
- return goldenFile{
- Name: compiler + "_sysroot_wrapper_common.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-noccache", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, mainCc),
- Env: []string{"GOMACC_PATH=someNonExistingPath"},
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, mainCc),
- Env: []string{gomaEnv},
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-nopie", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-D__KERNEL__", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd("./armv7a-cros-linux-gnueabihf-"+compiler,
- "-D__KERNEL__", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "--sysroot=xyz", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createSanitizerGoldenInputs(compiler string) goldenFile {
- // We are using a fixed target as all of the following args are target independent.
- wrapperPath := "./x86_64-cros-linux-gnu-" + compiler
- return goldenFile{
- Name: compiler + "_sanitizer_args.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=kernel-address", "-Wl,--no-undefined", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=kernel-address", "-Wl,-z,defs", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=kernel-address", "-D_FORTIFY_SOURCE=1", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=kernel-address", "-D_FORTIFY_SOURCE=2", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=fuzzer", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=address", "-fprofile-instr-generate", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fsanitize=address", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(wrapperPath, "-fprofile-instr-generate", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createGccArgsGoldenInputs() goldenFile {
- return goldenFile{
- Name: "gcc_specific_args.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-march=goldmont", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-march=goldmont-plus", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(gccX86_64, "-march=skylake", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createClangArgsGoldenInputs() goldenFile {
- return goldenFile{
- Name: "clang_specific_args.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-mno-movbe", "-pass-exit-codes", "-Wclobbered", "-Wno-psabi", "-Wlogical-op",
- "-Wmissing-parameter-type", "-Wold-style-declaration", "-Woverride-init", "-Wunsafe-loop-optimizations",
- "-Wstrict-aliasing=abc", "-finline-limit=abc", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-error=cpp", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-error=maybe-uninitialized", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-error=unused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-unused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wunused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Xclang-only=-someflag", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
diff --git a/compiler_wrapper/cros_host_config_test.go b/compiler_wrapper/cros_host_config_test.go
deleted file mode 100644
index d2be9ab6..00000000
--- a/compiler_wrapper/cros_host_config_test.go
+++ /dev/null
@@ -1,96 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path"
- "testing"
-)
-
-const oldClangHostWrapperPathForTest = "$CHROOT/usr/bin/clang_host_wrapper"
-const oldGccHostWrapperPathForTest = "$CHROOT/../src/third_party/chromiumos-overlay/sys-devel/gcc/files/host_wrapper"
-const crosClangHostGoldenDir = "testdata/cros_clang_host_golden"
-const crosGccHostGoldenDir = "testdata/cros_gcc_host_golden"
-
-func TestCrosClangHostConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := false
- cfg, err := getConfig("cros.host", useCCache, useLlvmNext, oldClangHostWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- gomaEnv := "GOMACC_PATH=" + gomaPath
-
- goldenFiles := []goldenFile{
- createClangPathGoldenInputs(ctx, gomaEnv),
- createGoldenInputsForAllTargets("clang", mainCc),
- createGoldenInputsForAllTargets("clang", "-ftrapv", mainCc),
- createSanitizerGoldenInputs("clang"),
- createClangArgsGoldenInputs(),
- createBisectGoldenInputs(clangX86_64),
- createForceDisableWErrorGoldenInputs(),
- createClangTidyGoldenInputs(gomaEnv),
- createClangHostWrapperInputs(),
- }
-
- runGoldenRecords(ctx, crosClangHostGoldenDir, goldenFiles)
- })
-}
-
-func TestCrosGccHostConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := false
- cfg, err := getConfig("cros.host", useCCache, useLlvmNext, oldGccHostWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.writeFile(gomaPath, "")
- gomaEnv := "GOMACC_PATH=" + gomaPath
-
- // Note: The old gcc host wrapper is very limited and only adds flags.
- // So we only test very few things here.
- goldenFiles := []goldenFile{
- createGccPathGoldenInputs(ctx, gomaEnv),
- createGoldenInputsForAllTargets("gcc", mainCc),
- createGccArgsGoldenInputs(),
- createGccHostWrapperInputs(),
- }
-
- runGoldenRecords(ctx, crosGccHostGoldenDir, goldenFiles)
- })
-}
-
-func createClangHostWrapperInputs() goldenFile {
- return goldenFile{
- Name: "clang_host_wrapper.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd("./clang", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
-
-func createGccHostWrapperInputs() goldenFile {
- return goldenFile{
- Name: "gcc_host_wrapper.json",
- Records: []goldenRecord{
- {
- WrapperCmd: newGoldenCmd("./gcc", mainCc),
- Cmds: okResults,
- },
- },
- }
-}
diff --git a/compiler_wrapper/cros_nonhardened_config_test.go b/compiler_wrapper/cros_nonhardened_config_test.go
deleted file mode 100644
index bf8af4d2..00000000
--- a/compiler_wrapper/cros_nonhardened_config_test.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-const oldNonHardenedWrapperPathForTest = "$CHROOT/usr/x86_64-pc-linux-gnu/arm-none-eabi/gcc-bin/4.9.x/sysroot_wrapper"
-const crosNonHardenedGoldenDir = "testdata/cros_nonhardened_golden"
-
-func TestCrosNonHardenedConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- useLlvmNext := false
- useCCache := true
- cfg, err := getConfig("cros.nonhardened", useCCache, useLlvmNext, oldNonHardenedWrapperPathForTest, "123")
- if err != nil {
- t.Fatal(err)
- }
- ctx.updateConfig(cfg)
-
- runGoldenRecords(ctx, crosNonHardenedGoldenDir, createSyswrapperGoldenInputs(ctx))
- })
-}
diff --git a/compiler_wrapper/disable_werror_flag.go b/compiler_wrapper/disable_werror_flag.go
deleted file mode 100644
index 864397dd..00000000
--- a/compiler_wrapper/disable_werror_flag.go
+++ /dev/null
@@ -1,134 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "encoding/json"
- "io/ioutil"
- "os"
- "strings"
- "syscall"
-)
-
-func shouldForceDisableWError(env env) bool {
- value, _ := env.getenv("FORCE_DISABLE_WERROR")
- return value != ""
-}
-
-func doubleBuildWithWNoError(env env, cfg *config, originalCmd *command) (exitCode int, err error) {
- originalStdoutBuffer := &bytes.Buffer{}
- originalStderrBuffer := &bytes.Buffer{}
- // TODO: This is a bug in the old wrapper that it drops the ccache path
- // during double build. Fix this once we don't compare to the old wrapper anymore.
- if originalCmd.Path == "/usr/bin/ccache" {
- originalCmd.Path = "ccache"
- }
- originalStdinBuffer := &bytes.Buffer{}
- originalExitCode, err := wrapSubprocessErrorWithSourceLoc(originalCmd,
- env.run(originalCmd, teeStdinIfNeeded(env, originalCmd, originalStdinBuffer), originalStdoutBuffer, originalStderrBuffer))
- if err != nil {
- return 0, err
- }
- // The only way we can do anything useful is if it looks like the failure
- // was -Werror-related.
- if originalExitCode == 0 || !strings.Contains(originalStderrBuffer.String(), "-Werror") {
- originalStdoutBuffer.WriteTo(env.stdout())
- originalStderrBuffer.WriteTo(env.stderr())
- return originalExitCode, nil
- }
-
- retryStdoutBuffer := &bytes.Buffer{}
- retryStderrBuffer := &bytes.Buffer{}
- retryCommand := &command{
- Path: originalCmd.Path,
- Args: append(originalCmd.Args, "-Wno-error"),
- EnvUpdates: originalCmd.EnvUpdates,
- }
- retryExitCode, err := wrapSubprocessErrorWithSourceLoc(retryCommand,
- env.run(retryCommand, bytes.NewReader(originalStdinBuffer.Bytes()), retryStdoutBuffer, retryStderrBuffer))
- if err != nil {
- return 0, err
- }
- // If -Wno-error fixed us, pretend that we never ran without -Wno-error.
- // Otherwise, pretend that we never ran the second invocation. Since -Werror
- // is an issue, log in either case.
- if retryExitCode == 0 {
- retryStdoutBuffer.WriteTo(env.stdout())
- retryStderrBuffer.WriteTo(env.stderr())
- } else {
- originalStdoutBuffer.WriteTo(env.stdout())
- originalStderrBuffer.WriteTo(env.stderr())
- }
-
- // All of the below is basically logging. If we fail at any point, it's
- // reasonable for that to fail the build. This is all meant for FYI-like
- // builders in the first place.
-
- // Buildbots use a nonzero umask, which isn't quite what we want: these directories should
- // be world-readable and world-writable.
- oldMask := syscall.Umask(0)
- defer syscall.Umask(oldMask)
-
- // Allow root and regular users to write to this without issue.
- if err := os.MkdirAll(cfg.newWarningsDir, 0777); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error creating warnings directory %s", cfg.newWarningsDir)
- }
-
- // Have some tag to show that files aren't fully written. It would be sad if
- // an interrupted build (or out of disk space, or similar) caused tools to
- // have to be overly-defensive.
- incompleteSuffix := ".incomplete"
-
- // Coming up with a consistent name for this is difficult (compiler command's
- // SHA can clash in the case of identically named files in different
- // directories, or similar); let's use a random one.
- tmpFile, err := ioutil.TempFile(cfg.newWarningsDir, "warnings_report*.json"+incompleteSuffix)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error creating warnings file")
- }
-
- if err := tmpFile.Chmod(0666); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error chmoding the file to be world-readable/writeable")
- }
-
- lines := []string{}
- if originalStderrBuffer.Len() > 0 {
- lines = append(lines, originalStderrBuffer.String())
- }
- if originalStdoutBuffer.Len() > 0 {
- lines = append(lines, originalStdoutBuffer.String())
- }
- outputToLog := strings.Join(lines, "\n")
-
- jsonData := warningsJSONData{
- Cwd: env.getwd(),
- Command: append([]string{originalCmd.Path}, originalCmd.Args...),
- Stdout: outputToLog,
- }
- enc := json.NewEncoder(tmpFile)
- if err := enc.Encode(jsonData); err != nil {
- _ = tmpFile.Close()
- return 0, wrapErrorwithSourceLocf(err, "error writing warnings data")
- }
-
- if err := tmpFile.Close(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error closing warnings file")
- }
-
- if err := os.Rename(tmpFile.Name(), tmpFile.Name()[:len(tmpFile.Name())-len(incompleteSuffix)]); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error removing incomplete suffix from warnings file")
- }
-
- return retryExitCode, nil
-}
-
-// Struct used to write JSON. Fileds have to be uppercase for the json
-// encoder to read them.
-type warningsJSONData struct {
- Cwd string `json:"cwd"`
- Command []string `json:"command"`
- Stdout string `json:"stdout"`
-}
diff --git a/compiler_wrapper/disable_werror_flag_test.go b/compiler_wrapper/disable_werror_flag_test.go
deleted file mode 100644
index fb25d193..00000000
--- a/compiler_wrapper/disable_werror_flag_test.go
+++ /dev/null
@@ -1,374 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
- "testing"
-)
-
-func TestOmitDoubleBuildForSuccessfulCall(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 1 {
- t.Errorf("expected 1 call. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestOmitDoubleBuildForGeneralError(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- if ctx.cmdCount != 1 {
- t.Errorf("expected 1 call. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestDoubleBuildWithWNoErrorFlag(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if err := verifyArgCount(cmd, 0, "-Wno-error"); err != nil {
- return err
- }
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- if err := verifyArgCount(cmd, 1, "-Wno-error"); err != nil {
- return err
- }
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestDoubleBuildWithWNoErrorAndCCache(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cfg.useCCache = true
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- // TODO: This is a bug in the old wrapper that it drops the ccache path
- // during double build. Fix this once we don't compare to the old wrapper anymore.
- if err := verifyPath(cmd, "ccache"); err != nil {
- return err
- }
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- if err := verifyPath(cmd, "ccache"); err != nil {
- return err
- }
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 2 {
- t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
- })
-}
-
-func TestForwardStdoutAndStderrWhenDoubleBuildSucceeds(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stdout, "originalmessage")
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- fmt.Fprint(stdout, "retrymessage")
- fmt.Fprint(stderr, "retryerror")
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyNonInternalError(ctx.stderrString(), "retryerror"); err != nil {
- t.Error(err)
- }
- if !strings.Contains(ctx.stdoutString(), "retrymessage") {
- t.Errorf("unexpected stdout. Got: %s", ctx.stdoutString())
- }
- })
-}
-
-func TestForwardStdoutAndStderrWhenDoubleBuildFails(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stdout, "originalmessage")
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(3)
- case 2:
- fmt.Fprint(stdout, "retrymessage")
- fmt.Fprint(stderr, "retryerror")
- return newExitCodeError(5)
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- exitCode := callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc))
- if exitCode != 5 {
- t.Errorf("unexpected exitcode. Got: %d", exitCode)
- }
- if err := verifyNonInternalError(ctx.stderrString(), "-Werror originalerror"); err != nil {
- t.Error(err)
- }
- if !strings.Contains(ctx.stdoutString(), "originalmessage") {
- t.Errorf("unexpected stdout. Got: %s", ctx.stdoutString())
- }
- })
-}
-
-func TestForwardStdinFromDoubleBuild(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- // Note: This is called for the clang syntax call as well as for
- // the gcc call, and we assert that stdin is cloned and forwarded
- // to both.
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
-
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, "-", mainCc)))
- })
-}
-
-func TestForwardGeneralErrorWhenDoubleBuildFails(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(3)
- case 2:
- return errors.New("someerror")
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Error(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected stderr. Got: %s", stderr)
- }
- })
-}
-
-func TestOmitLogWarningsIfNoDoubleBuild(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 1 {
- t.Errorf("expected 1 call. Got: %d", ctx.cmdCount)
- }
- if loggedWarnings := readLoggedWarnings(ctx); loggedWarnings != nil {
- t.Errorf("expected no logged warnings. Got: %#v", loggedWarnings)
- }
- })
-}
-
-func TestLogWarningsWhenDoubleBuildSucceeds(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stdout, "originalmessage")
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- fmt.Fprint(stdout, "retrymessage")
- fmt.Fprint(stderr, "retryerror")
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- loggedWarnings := readLoggedWarnings(ctx)
- if loggedWarnings == nil {
- t.Fatal("expected logged warnings")
- }
- if loggedWarnings.Cwd != ctx.getwd() {
- t.Fatalf("unexpected cwd. Got: %s", loggedWarnings.Cwd)
- }
- loggedCmd := &command{
- Path: loggedWarnings.Command[0],
- Args: loggedWarnings.Command[1:],
- }
- if err := verifyPath(loggedCmd, "usr/bin/clang"); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(loggedCmd, "--sysroot=.*", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestLogWarningsWhenDoubleBuildFails(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- fmt.Fprint(stdout, "originalmessage")
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- fmt.Fprint(stdout, "retrymessage")
- fmt.Fprint(stderr, "retryerror")
- return newExitCodeError(1)
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- loggedWarnings := readLoggedWarnings(ctx)
- if loggedWarnings == nil {
- t.Fatal("expected logged warnings")
- }
- })
-}
-
-func withForceDisableWErrorTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"FORCE_DISABLE_WERROR=1"}
- work(ctx)
- })
-}
-
-func readLoggedWarnings(ctx *testContext) *warningsJSONData {
- files, err := ioutil.ReadDir(ctx.cfg.newWarningsDir)
- if err != nil {
- if _, ok := err.(*os.PathError); ok {
- return nil
- }
- ctx.t.Fatal(err)
- }
- if len(files) != 1 {
- ctx.t.Fatalf("expected 1 warning log file. Got: %s", files)
- }
- data, err := ioutil.ReadFile(filepath.Join(ctx.cfg.newWarningsDir, files[0].Name()))
- if err != nil {
- ctx.t.Fatal(err)
- }
- jsonData := warningsJSONData{}
- if err := json.Unmarshal(data, &jsonData); err != nil {
- ctx.t.Fatal(err)
- }
- return &jsonData
-}
-
-func TestDoubleBuildWerrorChmodsThingsAppropriately(t *testing.T) {
- withForceDisableWErrorTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- switch ctx.cmdCount {
- case 1:
- if err := verifyArgCount(cmd, 0, "-Wno-error"); err != nil {
- return err
- }
- fmt.Fprint(stderr, "-Werror originalerror")
- return newExitCodeError(1)
- case 2:
- if err := verifyArgCount(cmd, 1, "-Wno-error"); err != nil {
- return err
- }
- return nil
- default:
- t.Fatalf("unexpected command: %#v", cmd)
- return nil
- }
- }
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
- if ctx.cmdCount != 2 {
- // Later errors are likely senseless if we didn't get called twice.
- t.Fatalf("expected 2 calls. Got: %d", ctx.cmdCount)
- }
-
- t.Logf("Warnings dir is at %q", ctx.cfg.newWarningsDir)
- warningsDir, err := os.Open(ctx.cfg.newWarningsDir)
- if err != nil {
- t.Fatalf("failed to open the new warnings dir: %v", err)
- }
- defer warningsDir.Close()
-
- fi, err := warningsDir.Stat()
- if err != nil {
- t.Fatalf("failed stat'ing the warnings dir: %v", err)
- }
-
- permBits := func(mode os.FileMode) int { return int(mode & 0777) }
-
- if perms := permBits(fi.Mode()); perms != 0777 {
- t.Errorf("mode for tempdir are %#o; expected 0777", perms)
- }
-
- entries, err := warningsDir.Readdir(0)
- if err != nil {
- t.Fatalf("failed reading entries of the tempdir: %v", err)
- }
-
- if len(entries) != 1 {
- t.Errorf("found %d tempfiles in the tempdir; expected 1", len(entries))
- }
-
- for _, e := range entries {
- if perms := permBits(e.Mode()); perms != 0666 {
- t.Errorf("mode for %q is %#o; expected 0666", e.Name(), perms)
- }
- }
- })
-}
diff --git a/compiler_wrapper/env.go b/compiler_wrapper/env.go
deleted file mode 100644
index 4d83b17a..00000000
--- a/compiler_wrapper/env.go
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "fmt"
- "io"
- "os"
- "strings"
-)
-
-type env interface {
- getenv(key string) (string, bool)
- environ() []string
- getwd() string
- stdin() io.Reader
- stdout() io.Writer
- stderr() io.Writer
- run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error
- exec(cmd *command) error
-}
-
-type processEnv struct {
- wd string
-}
-
-func newProcessEnv() (env, error) {
- // Note: We are not using os.getwd() as this sometimes uses the value of the PWD
- // env variable. This has the following problems:
- // - if PWD=/proc/self/cwd, os.getwd() will return "/proc/self/cwd",
- // and we need to read the link to get the actual wd. However, we can't always
- // do this as we are calculating
- // the path to clang, and following a symlinked cwd first would make
- // this calculation invalid.
- // - the old python wrapper doesn't respect the PWD env variable either, so if we
- // did we would fail the comparison to the old wrapper.
- wd, err := os.Readlink("/proc/self/cwd")
- if err != nil {
- return nil, wrapErrorwithSourceLocf(err, "failed to read working directory")
- }
- return &processEnv{wd: wd}, nil
-}
-
-var _ env = (*processEnv)(nil)
-
-func (env *processEnv) getenv(key string) (string, bool) {
- return os.LookupEnv(key)
-}
-
-func (env *processEnv) environ() []string {
- return os.Environ()
-}
-
-func (env *processEnv) getwd() string {
- return env.wd
-}
-
-func (env *processEnv) stdin() io.Reader {
- return os.Stdin
-}
-
-func (env *processEnv) stdout() io.Writer {
- return os.Stdout
-}
-
-func (env *processEnv) stderr() io.Writer {
- return os.Stderr
-}
-
-func (env *processEnv) exec(cmd *command) error {
- return libcExec(env, cmd)
-}
-
-func (env *processEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return runCmd(env, cmd, stdin, stdout, stderr)
-}
-
-type commandRecordingEnv struct {
- env
- stdinReader io.Reader
- cmdResults []*commandResult
-}
-type commandResult struct {
- Cmd *command `json:"cmd"`
- Stdout string `json:"stdout,omitempty"`
- Stderr string `json:"stderr,omitempty"`
- ExitCode int `json:"exitcode,omitempty"`
-}
-
-var _ env = (*commandRecordingEnv)(nil)
-
-func (env *commandRecordingEnv) stdin() io.Reader {
- return env.stdinReader
-}
-
-func (env *commandRecordingEnv) exec(cmd *command) error {
- // Note: We treat exec the same as run so that we can do work
- // after the call.
- return env.run(cmd, env.stdin(), env.stdout(), env.stderr())
-}
-
-func (env *commandRecordingEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- stdoutBuffer := &bytes.Buffer{}
- stderrBuffer := &bytes.Buffer{}
- err := env.env.run(cmd, stdin, io.MultiWriter(stdout, stdoutBuffer), io.MultiWriter(stderr, stderrBuffer))
- if exitCode, ok := getExitCode(err); ok {
- env.cmdResults = append(env.cmdResults, &commandResult{
- Cmd: cmd,
- Stdout: stdoutBuffer.String(),
- Stderr: stderrBuffer.String(),
- ExitCode: exitCode,
- })
- }
- return err
-}
-
-type printingEnv struct {
- env
-}
-
-var _env = (*printingEnv)(nil)
-
-func (env *printingEnv) exec(cmd *command) error {
- printCmd(env, cmd)
- return env.env.exec(cmd)
-}
-
-func (env *printingEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- printCmd(env, cmd)
- return env.env.run(cmd, stdin, stdout, stderr)
-}
-
-func printCmd(env env, cmd *command) {
- fmt.Fprintf(env.stderr(), "cd '%s' &&", env.getwd())
- if len(cmd.EnvUpdates) > 0 {
- fmt.Fprintf(env.stderr(), " env '%s'", strings.Join(cmd.EnvUpdates, "' '"))
- }
- fmt.Fprintf(env.stderr(), " '%s'", getAbsCmdPath(env, cmd))
- if len(cmd.Args) > 0 {
- fmt.Fprintf(env.stderr(), " '%s'", strings.Join(cmd.Args, "' '"))
- }
- io.WriteString(env.stderr(), "\n")
-}
diff --git a/compiler_wrapper/env_test.go b/compiler_wrapper/env_test.go
deleted file mode 100644
index 8580d4aa..00000000
--- a/compiler_wrapper/env_test.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "flag"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
- "testing"
-)
-
-// Attention: The tests in this file execute the test binary again with the `-run` flag.
-// This is needed as they want to test an `exec`, which terminates the test process.
-var internalexececho = flag.Bool("internalexececho", false, "internal flag used for tests that exec")
-
-func TestProcessEnvExecPathAndArgs(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- if *internalexececho {
- execEcho(ctx, &command{
- Path: "some_binary",
- Args: []string{"arg1", "arg2"},
- })
- return
- }
- logLines := forkAndReadEcho(ctx)
- if !strings.HasSuffix(logLines[0], "/some_binary arg1 arg2") {
- t.Errorf("incorrect path or args: %s", logLines[0])
- }
- })
-}
-
-func TestProcessEnvExecAddEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- if *internalexececho {
- execEcho(ctx, &command{
- Path: "some_binary",
- EnvUpdates: []string{"ABC=xyz"},
- })
- return
- }
-
- logLines := forkAndReadEcho(ctx)
- for _, ll := range logLines {
- if ll == "ABC=xyz" {
- return
- }
- }
- t.Errorf("could not find new env variable: %s", logLines)
- })
-}
-
-func TestProcessEnvExecUpdateEnv(t *testing.T) {
- if os.Getenv("PATH") == "" {
- t.Fatal("no PATH environment variable found!")
- }
- withTestContext(t, func(ctx *testContext) {
- if *internalexececho {
- execEcho(ctx, &command{
- Path: "some_binary",
- EnvUpdates: []string{"PATH=xyz"},
- })
- return
- }
- logLines := forkAndReadEcho(ctx)
- for _, ll := range logLines {
- if ll == "PATH=xyz" {
- return
- }
- }
- t.Errorf("could not find updated env variable: %s", logLines)
- })
-}
-
-func TestProcessEnvExecDeleteEnv(t *testing.T) {
- if os.Getenv("PATH") == "" {
- t.Fatal("no PATH environment variable found!")
- }
- withTestContext(t, func(ctx *testContext) {
- if *internalexececho {
- execEcho(ctx, &command{
- Path: "some_binary",
- EnvUpdates: []string{"PATH="},
- })
- return
- }
- logLines := forkAndReadEcho(ctx)
- for _, ll := range logLines {
- if strings.HasPrefix(ll, "PATH=") {
- t.Errorf("path env was not removed: %s", ll)
- }
- }
- })
-}
-
-func TestProcessEnvRunCmdPathAndArgs(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := &command{
- Path: "some_binary",
- Args: []string{"arg1", "arg2"},
- }
- logLines := runAndEcho(ctx, cmd)
- if !strings.HasSuffix(logLines[0], "/some_binary arg1 arg2") {
- t.Errorf("incorrect path or args: %s", logLines[0])
- }
- })
-}
-
-func TestProcessEnvRunCmdAddEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := &command{
- Path: "some_binary",
- EnvUpdates: []string{"ABC=xyz"},
- }
- logLines := runAndEcho(ctx, cmd)
- for _, ll := range logLines {
- if ll == "ABC=xyz" {
- return
- }
- }
- t.Errorf("could not find new env variable: %s", logLines)
- })
-}
-
-func TestProcessEnvRunCmdUpdateEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- if os.Getenv("PATH") == "" {
- t.Fatal("no PATH environment variable found!")
- }
- cmd := &command{
- Path: "some_binary",
- EnvUpdates: []string{"PATH=xyz"},
- }
- logLines := runAndEcho(ctx, cmd)
- for _, ll := range logLines {
- if ll == "PATH=xyz" {
- return
- }
- }
- t.Errorf("could not find updated env variable: %s", logLines)
- })
-}
-
-func TestProcessEnvRunCmdDeleteEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- if os.Getenv("PATH") == "" {
- t.Fatal("no PATH environment variable found!")
- }
- cmd := &command{
- Path: "some_binary",
- EnvUpdates: []string{"PATH="},
- }
- logLines := runAndEcho(ctx, cmd)
- for _, ll := range logLines {
- if strings.HasPrefix(ll, "PATH=") {
- t.Errorf("path env was not removed: %s", ll)
- }
- }
- })
-}
-
-func execEcho(ctx *testContext, cmd *command) {
- env := &processEnv{}
- err := env.exec(createEcho(ctx, cmd))
- if err != nil {
- os.Stderr.WriteString(err.Error())
- }
- os.Exit(1)
-}
-
-func forkAndReadEcho(ctx *testContext) []string {
- testBin, err := os.Executable()
- if err != nil {
- ctx.t.Fatalf("unable to read the executable: %s", err)
- }
-
- subCmd := exec.Command(testBin, "-internalexececho", "-test.run="+ctx.t.Name())
- output, err := subCmd.CombinedOutput()
- if err != nil {
- ctx.t.Fatalf("error calling test binary again for exec: %s", err)
- }
- return strings.Split(string(output), "\n")
-}
-
-func runAndEcho(ctx *testContext, cmd *command) []string {
- env, err := newProcessEnv()
- if err != nil {
- ctx.t.Fatalf("creation of process env failed: %s", err)
- }
- buffer := bytes.Buffer{}
- if err := env.run(createEcho(ctx, cmd), nil, &buffer, &buffer); err != nil {
- ctx.t.Fatalf("run failed: %s", err)
- }
- return strings.Split(buffer.String(), "\n")
-}
-
-func createEcho(ctx *testContext, cmd *command) *command {
- content := `
-/bin/echo "$0" "$@"
-/usr/bin/env
-`
- fullPath := filepath.Join(ctx.tempDir, cmd.Path)
- ctx.writeFile(fullPath, content)
- // Note: Using a self executable wrapper does not work due to a race condition
- // on unix systems. See https://github.com/golang/go/issues/22315
- return &command{
- Path: "bash",
- Args: append([]string{fullPath}, cmd.Args...),
- EnvUpdates: cmd.EnvUpdates,
- }
-}
diff --git a/compiler_wrapper/errors.go b/compiler_wrapper/errors.go
deleted file mode 100644
index 18e0facf..00000000
--- a/compiler_wrapper/errors.go
+++ /dev/null
@@ -1,86 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "os/exec"
- "runtime"
- "strings"
- "syscall"
-)
-
-type userError struct {
- err string
-}
-
-var _ error = userError{}
-
-func (err userError) Error() string {
- return err.err
-}
-
-func newUserErrorf(format string, v ...interface{}) userError {
- return userError{err: fmt.Sprintf(format, v...)}
-}
-
-func newErrorwithSourceLocf(format string, v ...interface{}) error {
- return newErrorwithSourceLocfInternal(2, format, v...)
-}
-
-func wrapErrorwithSourceLocf(err error, format string, v ...interface{}) error {
- return newErrorwithSourceLocfInternal(2, "%s: %s", fmt.Sprintf(format, v...), err.Error())
-}
-
-func wrapSubprocessErrorWithSourceLoc(cmd *command, subprocessErr error) (exitCode int, err error) {
- if subprocessErr == nil {
- return 0, nil
- }
- if userErr, ok := getCCacheError(cmd, subprocessErr); ok {
- return 0, userErr
- }
- if exitCode, ok := getExitCode(subprocessErr); ok {
- return exitCode, nil
- }
- err = newErrorwithSourceLocfInternal(2, "failed to execute %#v: %s", cmd, subprocessErr)
- return 0, err
-}
-
-// Based on the implementation of log.Output
-func newErrorwithSourceLocfInternal(skip int, format string, v ...interface{}) error {
- _, file, line, ok := runtime.Caller(skip)
- if !ok {
- file = "???"
- line = 0
- }
- if lastSlash := strings.LastIndex(file, "/"); lastSlash >= 0 {
- file = file[lastSlash+1:]
- }
-
- return fmt.Errorf("%s:%d: %s", file, line, fmt.Sprintf(format, v...))
-}
-
-func getExitCode(err error) (exitCode int, ok bool) {
- if err == nil {
- return 0, true
- }
- if exiterr, ok := err.(*exec.ExitError); ok {
- if status, ok := exiterr.Sys().(syscall.WaitStatus); ok {
- return status.ExitStatus(), true
- }
- }
- return 0, false
-}
-
-func getCCacheError(compilerCmd *command, compilerCmdErr error) (ccacheErr userError, ok bool) {
- if en, ok := compilerCmdErr.(syscall.Errno); ok && en == syscall.ENOENT &&
- strings.Contains(compilerCmd.Path, "ccache") {
- ccacheErr =
- newUserErrorf("ccache not found under %s. Please install it",
- compilerCmd.Path)
- return ccacheErr, true
- }
- return ccacheErr, false
-}
diff --git a/compiler_wrapper/errors_test.go b/compiler_wrapper/errors_test.go
deleted file mode 100644
index 957fae3a..00000000
--- a/compiler_wrapper/errors_test.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "syscall"
- "testing"
-)
-
-func TestNewErrorwithSourceLocfMessage(t *testing.T) {
- err := newErrorwithSourceLocf("a%sc", "b")
- if err.Error() != "errors_test.go:15: abc" {
- t.Errorf("Error message incorrect. Got: %s", err.Error())
- }
-}
-
-func TestWrapErrorwithSourceLocfMessage(t *testing.T) {
- cause := errors.New("someCause")
- err := wrapErrorwithSourceLocf(cause, "a%sc", "b")
- if err.Error() != "errors_test.go:23: abc: someCause" {
- t.Errorf("Error message incorrect. Got: %s", err.Error())
- }
-}
-
-func TestNewUserErrorf(t *testing.T) {
- err := newUserErrorf("a%sc", "b")
- if err.Error() != "abc" {
- t.Errorf("Error message incorrect. Got: %s", err.Error())
- }
-}
-
-func TestSubprocessOk(t *testing.T) {
- exitCode, err := wrapSubprocessErrorWithSourceLoc(nil, nil)
- if exitCode != 0 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- if err != nil {
- t.Errorf("unexpected error. Got: %s", err)
- }
-}
-
-func TestSubprocessExitCodeError(t *testing.T) {
- exitCode, err := wrapSubprocessErrorWithSourceLoc(nil, newExitCodeError(23))
- if exitCode != 23 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- if err != nil {
- t.Errorf("unexpected error. Got: %s", err)
- }
-}
-
-func TestSubprocessCCacheError(t *testing.T) {
- _, err := wrapSubprocessErrorWithSourceLoc(&command{Path: "/usr/bin/ccache"}, syscall.ENOENT)
- if _, ok := err.(userError); !ok {
- t.Errorf("unexpected error type. Got: %T", err)
- }
- if err.Error() != "ccache not found under /usr/bin/ccache. Please install it" {
- t.Errorf("unexpected error message. Got: %s", err)
- }
-}
-
-func TestSubprocessGeneralError(t *testing.T) {
- cmd := &command{Path: "somepath"}
- _, err := wrapSubprocessErrorWithSourceLoc(cmd, errors.New("someerror"))
- if err.Error() != fmt.Sprintf("errors_test.go:68: failed to execute %#v: someerror", cmd) {
- t.Errorf("Error message incorrect. Got: %s", err.Error())
- }
-}
diff --git a/compiler_wrapper/gcc_flags.go b/compiler_wrapper/gcc_flags.go
deleted file mode 100644
index 8775b23b..00000000
--- a/compiler_wrapper/gcc_flags.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func processGccFlags(builder *commandBuilder) {
- if !builder.cfg.isHostWrapper {
- // Flags not supported by GCC.
- unsupported := map[string]bool{"-Xcompiler": true}
-
- // Conversion for flags supported by clang but not gcc.
- clangToGcc := map[string]string{
- "-march=goldmont": "-march=silvermont",
- "-march=goldmont-plus": "-march=silvermont",
- "-march=skylake": "-march=corei7",
- }
-
- builder.transformArgs(func(arg builderArg) string {
- if unsupported[arg.value] {
- return ""
- }
- if mapped, ok := clangToGcc[arg.value]; ok {
- return mapped
- }
- return arg.value
- })
- }
-
- builder.path += ".real"
-}
diff --git a/compiler_wrapper/gcc_flags_test.go b/compiler_wrapper/gcc_flags_test.go
deleted file mode 100644
index 436f9521..00000000
--- a/compiler_wrapper/gcc_flags_test.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestCallRealGcc(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestCallRealGccForOtherNames(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("./x86_64-cros-linux-gnu-somename", mainCc)))
- if err := verifyPath(cmd, "\\./x86_64-cros-linux-gnu-somename.real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestConvertClangToGccFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- var tests = []struct {
- in string
- out string
- }{
- {"-march=goldmont", "-march=silvermont"},
- {"-march=goldmont-plus", "-march=silvermont"},
- {"-march=skylake", "-march=corei7"},
- }
-
- for _, tt := range tests {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, tt.in, mainCc)))
- if err := verifyArgCount(cmd, 0, tt.in); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, tt.out, mainCc); err != nil {
- t.Error(err)
- }
- }
- })
-}
-
-func TestFilterUnsupportedGccFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-Xcompiler", mainCc)))
- if err := verifyArgCount(cmd, 0, "-Xcompiler"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/goldenutil_test.go b/compiler_wrapper/goldenutil_test.go
deleted file mode 100644
index 4eff8738..00000000
--- a/compiler_wrapper/goldenutil_test.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "encoding/json"
- "flag"
- "io"
- "io/ioutil"
- "log"
- "os"
- "path/filepath"
- "regexp"
- "strings"
-)
-
-var updateGoldenFiles = flag.Bool("updategolden", false, "update golden files")
-var filterGoldenTests = flag.String("rungolden", "", "regex filter for golden tests to run")
-
-type goldenFile struct {
- Name string `json:"name"`
- ignoreOldWrapper bool
- Records []goldenRecord `json:"records"`
-}
-
-type goldenRecord struct {
- Wd string `json:"wd"`
- Env []string `json:"env,omitempty"`
- // runGoldenRecords will read cmd and fill
- // stdout, stderr, exitCode.
- WrapperCmd commandResult `json:"wrapper"`
- // runGoldenRecords will read stdout, stderr, err
- // and fill cmd
- Cmds []commandResult `json:"cmds"`
-}
-
-func newGoldenCmd(path string, args ...string) commandResult {
- return commandResult{
- Cmd: &command{
- Path: path,
- Args: args,
- },
- }
-}
-
-var okResult = commandResult{}
-var okResults = []commandResult{okResult}
-var errorResult = commandResult{
- ExitCode: 1,
- Stderr: "someerror",
- Stdout: "somemessage",
-}
-var errorResults = []commandResult{errorResult}
-
-func runGoldenRecords(ctx *testContext, goldenDir string, files []goldenFile) {
- if filterPattern := *filterGoldenTests; filterPattern != "" {
- files = filterGoldenRecords(filterPattern, files)
- }
- if len(files) == 0 {
- ctx.t.Errorf("No goldenrecords given.")
- return
- }
- files = fillGoldenResults(ctx, files)
- if *updateGoldenFiles {
- log.Printf("updating golden files under %s", goldenDir)
- if err := os.MkdirAll(goldenDir, 0777); err != nil {
- ctx.t.Fatal(err)
- }
- for _, file := range files {
- fileHandle, err := os.Create(filepath.Join(goldenDir, file.Name))
- if err != nil {
- ctx.t.Fatal(err)
- }
- defer fileHandle.Close()
-
- writeGoldenRecords(ctx, fileHandle, file.Records)
- }
- } else {
- for _, file := range files {
- compareBuffer := &bytes.Buffer{}
- writeGoldenRecords(ctx, compareBuffer, file.Records)
- filePath := filepath.Join(goldenDir, file.Name)
- goldenFileData, err := ioutil.ReadFile(filePath)
- if err != nil {
- ctx.t.Error(err)
- continue
- }
- if !bytes.Equal(compareBuffer.Bytes(), goldenFileData) {
- ctx.t.Errorf("Commands don't match the golden file under %s. Please regenerate via -updategolden to check the differences.",
- filePath)
- }
- }
- }
-}
-
-func filterGoldenRecords(pattern string, files []goldenFile) []goldenFile {
- matcher := regexp.MustCompile(pattern)
- newFiles := []goldenFile{}
- for _, file := range files {
- newRecords := []goldenRecord{}
- for _, record := range file.Records {
- cmd := record.WrapperCmd.Cmd
- str := strings.Join(append(append(record.Env, cmd.Path), cmd.Args...), " ")
- if matcher.MatchString(str) {
- newRecords = append(newRecords, record)
- }
- }
- file.Records = newRecords
- newFiles = append(newFiles, file)
- }
- return newFiles
-}
-
-func fillGoldenResults(ctx *testContext, files []goldenFile) []goldenFile {
- oldWrapperPath := ctx.cfg.oldWrapperPath
- newFiles := []goldenFile{}
- for _, file := range files {
- ctx.cfg.oldWrapperPath = oldWrapperPath
- if file.ignoreOldWrapper {
- ctx.cfg.oldWrapperPath = ""
- }
-
- newRecords := []goldenRecord{}
- for _, record := range file.Records {
- newCmds := []commandResult{}
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- if len(newCmds) >= len(record.Cmds) {
- ctx.t.Errorf("Not enough commands specified for wrapperCmd %#v and env %#v. Expected: %#v",
- record.WrapperCmd.Cmd, record.Env, record.Cmds)
- return nil
- }
- cmdResult := record.Cmds[len(newCmds)]
- cmdResult.Cmd = cmd
- newCmds = append(newCmds, cmdResult)
- io.WriteString(stdout, cmdResult.Stdout)
- io.WriteString(stderr, cmdResult.Stderr)
- if cmdResult.ExitCode != 0 {
- return newExitCodeError(cmdResult.ExitCode)
- }
- return nil
- }
- ctx.stdoutBuffer.Reset()
- ctx.stderrBuffer.Reset()
- ctx.env = record.Env
- if record.Wd == "" {
- record.Wd = ctx.tempDir
- }
- ctx.wd = record.Wd
- // Create an empty wrapper at the given path.
- // Needed as we are resolving symlinks which stats the wrapper file.
- ctx.writeFile(record.WrapperCmd.Cmd.Path, "")
- record.WrapperCmd.ExitCode = callCompiler(ctx, ctx.cfg, record.WrapperCmd.Cmd)
- if hasInternalError(ctx.stderrString()) {
- ctx.t.Errorf("found an internal error for wrapperCmd %#v and env #%v. Got: %s",
- record.WrapperCmd.Cmd, record.Env, ctx.stderrString())
- }
- if len(newCmds) < len(record.Cmds) {
- ctx.t.Errorf("Too many commands specified for wrapperCmd %#v and env %#v. Expected: %#v",
- record.WrapperCmd.Cmd, record.Env, record.Cmds)
- }
- record.Cmds = newCmds
- record.WrapperCmd.Stdout = ctx.stdoutString()
- record.WrapperCmd.Stderr = ctx.stderrString()
- newRecords = append(newRecords, record)
- }
- file.Records = newRecords
- newFiles = append(newFiles, file)
- }
- return newFiles
-}
-
-func writeGoldenRecords(ctx *testContext, writer io.Writer, records []goldenRecord) {
- // Replace the temp dir with a stable path so that the goldens stay stable.
- stableTempDir := filepath.Join(filepath.Dir(ctx.tempDir), "stable")
- writer = &replacingWriter{
- Writer: writer,
- old: [][]byte{[]byte(ctx.tempDir)},
- new: [][]byte{[]byte(stableTempDir)},
- }
- enc := json.NewEncoder(writer)
- enc.SetIndent("", " ")
- if err := enc.Encode(records); err != nil {
- ctx.t.Fatal(err)
- }
-}
-
-type replacingWriter struct {
- io.Writer
- old [][]byte
- new [][]byte
-}
-
-func (writer *replacingWriter) Write(p []byte) (n int, err error) {
- // TODO: Use bytes.ReplaceAll once cros sdk uses golang >= 1.12
- for i, old := range writer.old {
- p = bytes.Replace(p, old, writer.new[i], -1)
- }
- return writer.Writer.Write(p)
-}
diff --git a/compiler_wrapper/gomacc_flag.go b/compiler_wrapper/gomacc_flag.go
deleted file mode 100644
index ac298b12..00000000
--- a/compiler_wrapper/gomacc_flag.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "os"
-)
-
-func processGomaCccFlags(builder *commandBuilder) (gomaUsed bool, err error) {
- gomaPath := ""
- nextArgIsGomaPath := false
- builder.transformArgs(func(arg builderArg) string {
- if arg.fromUser {
- if arg.value == "--gomacc-path" {
- nextArgIsGomaPath = true
- return ""
- }
- if nextArgIsGomaPath {
- gomaPath = arg.value
- nextArgIsGomaPath = false
- return ""
- }
- }
- return arg.value
- })
- if nextArgIsGomaPath {
- return false, newUserErrorf("--gomacc-path given without value")
- }
- if gomaPath == "" {
- gomaPath, _ = builder.env.getenv("GOMACC_PATH")
- }
- if gomaPath != "" {
- if _, err := os.Lstat(gomaPath); err == nil {
- builder.wrapPath(gomaPath)
- return true, nil
- }
- }
- return false, nil
-}
diff --git a/compiler_wrapper/gomacc_flag_test.go b/compiler_wrapper/gomacc_flag_test.go
deleted file mode 100644
index d7b2b0b7..00000000
--- a/compiler_wrapper/gomacc_flag_test.go
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path"
- "testing"
-)
-
-func TestCallGomaccIfEnvIsGivenAndValid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- ctx.env = []string{"GOMACC_PATH=" + gomaPath}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccIfEnvIsGivenButInvalid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- // Note: This path does not point to a valid file.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.env = []string{"GOMACC_PATH=" + gomaPath}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestCallGomaccIfArgIsGivenAndValid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "--gomacc-path"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccIfArgIsGivenButInvalid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- // Note: This path does not point to a valid file.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestErrorOnGomaccArgWithoutValue(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path")))
- if err := verifyNonInternalError(stderr, "--gomacc-path given without value"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccByDefault(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/libc_exec.go b/compiler_wrapper/libc_exec.go
deleted file mode 100644
index 268221dc..00000000
--- a/compiler_wrapper/libc_exec.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-// #include <errno.h>
-// #include <stdlib.h>
-// #include <string.h>
-// #include <unistd.h>
-//
-// int libc_exec(const char *pathname, char *const argv[], char *const envp[]) {
-// if (execve(pathname, argv, envp) != 0) {
-// return errno;
-// }
-// return 0;
-//}
-import "C"
-import (
- "os/exec"
- "unsafe"
-)
-
-// Replacement for syscall.Execve that uses the libc.
-// This allows tools that rely on intercepting syscalls via
-// LD_PRELOAD to work properly (e.g. gentoo sandbox).
-// Note that this changes the go binary to be a dynamically linked one.
-// See crbug.com/1000863 for details.
-func libcExec(env env, cmd *command) error {
- freeList := []unsafe.Pointer{}
- defer func() {
- for _, ptr := range freeList {
- C.free(ptr)
- }
- }()
-
- goStrToC := func(goStr string) *C.char {
- cstr := C.CString(goStr)
- freeList = append(freeList, unsafe.Pointer(cstr))
- return cstr
- }
-
- goSliceToC := func(goSlice []string) **C.char {
- // len(goSlice)+1 as the c array needs to be null terminated.
- cArray := C.malloc(C.size_t(len(goSlice)+1) * C.size_t(unsafe.Sizeof(uintptr(0))))
- freeList = append(freeList, cArray)
-
- // Convert the C array to a Go Array so we can index it.
- // Note: Storing pointers to the c heap in go pointer types is ok
- // (see https://golang.org/cmd/cgo/).
- cArrayForIndex := (*[1<<30 - 1]*C.char)(cArray)
- for i, str := range goSlice {
- cArrayForIndex[i] = goStrToC(str)
- }
- cArrayForIndex[len(goSlice)] = nil
-
- return (**C.char)(cArray)
- }
-
- execCmd := exec.Command(cmd.Path, cmd.Args...)
- mergedEnv := mergeEnvValues(env.environ(), cmd.EnvUpdates)
- if errno := C.libc_exec(goStrToC(execCmd.Path), goSliceToC(execCmd.Args), goSliceToC(mergedEnv)); errno != 0 {
- return newErrorwithSourceLocf("exec error: %d", errno)
- }
-
- return nil
-}
diff --git a/compiler_wrapper/main.go b/compiler_wrapper/main.go
deleted file mode 100644
index 3fcf3a66..00000000
--- a/compiler_wrapper/main.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This binary requires the following linker variables:
-// - main.UseCCache: Whether to use ccache.
-// - main.ConfigName: Name of the configuration to use.
-// See config.go for the supported values.
-// - main.OldWrapperPath: Path to the old wrapper to compare commands
-// against. Comparison is deactivated if empty.
-//
-// The script ./build simplifies the call to `go build`.
-// E.g. ./build --use_ccache=true --config=cros.hardened will build a
-// binary that uses the ccache for ChromeOS with hardened flags.
-//
-// Test arguments:
-// - crosroot: Specifies the ChromeOS toolchain root directory (aka chroot).
-// If this is given, golden tests will compare the produced commands against the
-// old compiler wrapper.
-// - updategolden: To update the golden results for the wrapper. Without it,
-// the tests will verify that the wrapper output matches the goldens.
-// - rungolden: To filter the golden tests by a regex for the wrapper env, path and args.
-//
-// Examples:
-// - run all tests and compare golden output against old compiler wrapper:
-// go test third_party/toolchain-utils/compiler_wrapper/ -v --crosroot=$HOME/chromiumos/chroot/
-// - run all tests in isolation:
-// go test third_party/toolchain-utils/compiler_wrapper/ -v
-package main
-
-import (
- "log"
- "os"
-)
-
-func main() {
- env, err := newProcessEnv()
- if err != nil {
- log.Fatal(err)
- }
- cfg, err := getRealConfig()
- if err != nil {
- log.Fatal(err)
- }
- // Note: callCompiler will exec the command. Only in case of
- // an error or when we are comparing against the old wrapper
- // will this os.Exit be called.
- os.Exit(callCompiler(env, cfg, newProcessCommand()))
-}
diff --git a/compiler_wrapper/oldwrapper.go b/compiler_wrapper/oldwrapper.go
deleted file mode 100644
index 72178da0..00000000
--- a/compiler_wrapper/oldwrapper.go
+++ /dev/null
@@ -1,392 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "reflect"
- "regexp"
- "sort"
- "strings"
- "text/template"
-)
-
-const compareToOldWrapperFilePattern = "old_wrapper_compare"
-
-// Note: We can't rely on os.TempDir as that just returns the value of $TMPDIR,
-// which some packages set incorrectly.
-// E.g. dev-python/pygobject sets it to "`pwd`/pygobject-2.18.0".
-const tempDir = "/tmp"
-
-func compareToOldWrapper(env env, cfg *config, inputCmd *command, stdinBuffer []byte, newCmdResults []*commandResult, newExitCode int) error {
- pythonStringEscaper := strings.NewReplacer("\n", "\\n", "'", "\\'", "\\", "\\\\")
-
- oldWrapperCfg, err := newOldWrapperConfig(env, cfg, inputCmd)
- if err != nil {
- return err
- }
- oldWrapperCfg.MockCmds = cfg.mockOldWrapperCmds
- newCmds := []*command{}
- for _, cmdResult := range newCmdResults {
- oldWrapperCfg.CmdResults = append(oldWrapperCfg.CmdResults, oldWrapperCmdResult{
- Stdout: pythonStringEscaper.Replace(cmdResult.Stdout),
- Stderr: pythonStringEscaper.Replace(cmdResult.Stderr),
- Exitcode: cmdResult.ExitCode,
- })
- newCmds = append(newCmds, cmdResult.Cmd)
- }
-
- stderrBuffer := bytes.Buffer{}
- oldExitCode := 0
- if strings.HasPrefix(oldWrapperCfg.OldWrapperContent, "#!/bin/sh") {
- oldExitCode, err = callOldShellWrapper(env, oldWrapperCfg, inputCmd, stdinBuffer, compareToOldWrapperFilePattern, &bytes.Buffer{}, &stderrBuffer)
- } else {
- oldExitCode, err = callOldPythonWrapper(env, oldWrapperCfg, inputCmd, stdinBuffer, compareToOldWrapperFilePattern, &bytes.Buffer{}, &stderrBuffer)
- }
- if err != nil {
- return err
- }
- differences := []string{}
- if oldExitCode != newExitCode {
- differences = append(differences, fmt.Sprintf("exit codes differ: old %d, new %d", oldExitCode, newExitCode))
- }
- oldCmds, stderr := parseOldWrapperCommands(stderrBuffer.String())
- if cmdDifferences := diffCommands(oldCmds, newCmds); cmdDifferences != "" {
- differences = append(differences, cmdDifferences)
- }
- if len(differences) > 0 {
- printCmd(env, inputCmd)
- return newErrorwithSourceLocf("wrappers differ:\n%s\nOld stderr:%s",
- strings.Join(differences, "\n"),
- stderr,
- )
- }
- return nil
-}
-
-func parseOldWrapperCommands(stderr string) (cmds []*command, remainingStderr string) {
- allStderrLines := strings.Split(stderr, "\n")
- remainingStderrLines := []string{}
- commandPrefix := "command "
- argPrefix := "arg "
- envUpdatePrefix := "envupdate "
- currentCmd := (*command)(nil)
- for _, line := range allStderrLines {
- if strings.HasPrefix(line, commandPrefix) {
- currentCmd = &command{
- Path: line[len(commandPrefix):],
- }
- cmds = append(cmds, currentCmd)
- } else if strings.HasPrefix(line, argPrefix) {
- currentCmd.Args = append(currentCmd.Args, line[len(argPrefix):])
- } else if strings.HasPrefix(line, envUpdatePrefix) {
- currentCmd.EnvUpdates = append(currentCmd.EnvUpdates, line[len(envUpdatePrefix):])
- } else {
- remainingStderrLines = append(remainingStderrLines, line)
- }
- }
- remainingStderr = strings.TrimSpace(strings.Join(remainingStderrLines, "\n"))
- return cmds, remainingStderr
-}
-
-func diffCommands(oldCmds []*command, newCmds []*command) string {
- maxLen := len(newCmds)
- if maxLen < len(oldCmds) {
- maxLen = len(oldCmds)
- }
- hasDifferences := false
- var cmdDifferences []string
- for i := 0; i < maxLen; i++ {
- var differences []string
- if i >= len(newCmds) {
- differences = append(differences, "missing command")
- } else if i >= len(oldCmds) {
- differences = append(differences, "extra command")
- } else {
- newCmd := newCmds[i]
- oldCmd := oldCmds[i]
-
- if newCmd.Path != oldCmd.Path {
- differences = append(differences, "path")
- }
-
- if !reflect.DeepEqual(newCmd.Args, oldCmd.Args) {
- differences = append(differences, "args")
- }
-
- // Sort the environment as we don't care in which order
- // it was modified.
- copyAndSort := func(data []string) []string {
- result := make([]string, len(data))
- copy(result, data)
- sort.Strings(result)
- return result
- }
-
- newEnvUpdates := copyAndSort(newCmd.EnvUpdates)
- oldEnvUpdates := copyAndSort(oldCmd.EnvUpdates)
-
- if !reflect.DeepEqual(newEnvUpdates, oldEnvUpdates) {
- differences = append(differences, "env updates")
- }
- }
- if len(differences) > 0 {
- hasDifferences = true
- } else {
- differences = []string{"none"}
- }
- cmdDifferences = append(cmdDifferences,
- fmt.Sprintf("Index %d: %s", i, strings.Join(differences, ",")))
- }
- if hasDifferences {
- return fmt.Sprintf("commands differ:\n%s\nOld:%#v\nNew:%#v",
- strings.Join(cmdDifferences, "\n"),
- dumpCommands(oldCmds),
- dumpCommands(newCmds))
- }
- return ""
-}
-
-func dumpCommands(cmds []*command) string {
- lines := []string{}
- for _, cmd := range cmds {
- lines = append(lines, fmt.Sprintf("%#v", cmd))
- }
- return strings.Join(lines, "\n")
-}
-
-// Note: field names are upper case so they can be used in
-// a template via reflection.
-type oldWrapperConfig struct {
- WrapperPath string
- CmdPath string
- OldWrapperContent string
- MockCmds bool
- CmdResults []oldWrapperCmdResult
-}
-
-type oldWrapperCmdResult struct {
- Stdout string
- Stderr string
- Exitcode int
-}
-
-func newOldWrapperConfig(env env, cfg *config, inputCmd *command) (*oldWrapperConfig, error) {
- absWrapperPath, err := getAbsWrapperPath(env, inputCmd)
- if err != nil {
- return nil, err
- }
- absOldWrapperPath := cfg.oldWrapperPath
- if !filepath.IsAbs(absOldWrapperPath) {
- absOldWrapperPath = filepath.Join(filepath.Dir(absWrapperPath), cfg.oldWrapperPath)
- }
- oldWrapperContentBytes, err := ioutil.ReadFile(absOldWrapperPath)
- if err != nil {
- return nil, wrapErrorwithSourceLocf(err, "failed to read old wrapper")
- }
- oldWrapperContent := string(oldWrapperContentBytes)
- return &oldWrapperConfig{
- WrapperPath: absWrapperPath,
- CmdPath: inputCmd.Path,
- OldWrapperContent: oldWrapperContent,
- }, nil
-}
-
-func callOldShellWrapper(env env, cfg *oldWrapperConfig, inputCmd *command, stdinBuffer []byte, filepattern string, stdout io.Writer, stderr io.Writer) (exitCode int, err error) {
- oldWrapperContent := cfg.OldWrapperContent
- oldWrapperContent = regexp.MustCompile(`(?m)^exec\b`).ReplaceAllString(oldWrapperContent, "exec_mock")
- oldWrapperContent = regexp.MustCompile(`\$EXEC`).ReplaceAllString(oldWrapperContent, "exec_mock")
- // TODO: Use strings.ReplaceAll once cros sdk uses golang >= 1.12
- oldWrapperContent = strings.Replace(oldWrapperContent, "$0", cfg.CmdPath, -1)
- cfg.OldWrapperContent = oldWrapperContent
- mockFile, err := ioutil.TempFile(tempDir, filepattern)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to create tempfile")
- }
- defer os.Remove(mockFile.Name())
-
- const mockTemplate = `
-EXEC=exec
-
-function exec_mock {
- echo command "$1" 1>&2
- for arg in "${@:2}"; do
- echo arg "$arg" 1>&2
- done
- {{if .MockCmds}}
- echo '{{(index .CmdResults 0).Stdout}}'
- echo '{{(index .CmdResults 0).Stderr}}' 1>&2
- exit {{(index .CmdResults 0).Exitcode}}
- {{else}}
- $EXEC "$@"
- {{end}}
-}
-
-{{.OldWrapperContent}}
-`
- tmpl, err := template.New("mock").Parse(mockTemplate)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to parse old wrapper template")
- }
- if err := tmpl.Execute(mockFile, cfg); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed execute old wrapper template")
- }
- if err := mockFile.Close(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to close temp file")
- }
-
- // Note: Using a self executable wrapper does not work due to a race condition
- // on unix systems. See https://github.com/golang/go/issues/22315
- oldWrapperCmd := &command{
- Path: "/bin/sh",
- Args: append([]string{mockFile.Name()}, inputCmd.Args...),
- EnvUpdates: inputCmd.EnvUpdates,
- }
- return wrapSubprocessErrorWithSourceLoc(oldWrapperCmd, env.run(oldWrapperCmd, bytes.NewReader(stdinBuffer), stdout, stderr))
-}
-
-func callOldPythonWrapper(env env, cfg *oldWrapperConfig, inputCmd *command, stdinBuffer []byte, filepattern string, stdout io.Writer, stderr io.Writer) (exitCode int, err error) {
- oldWrapperContent := cfg.OldWrapperContent
- // TODO: Use strings.ReplaceAll once cros sdk uses golang >= 1.12
- oldWrapperContent = strings.Replace(oldWrapperContent, "from __future__ import print_function", "", -1)
- // Replace sets with lists to make our comparisons deterministic
- oldWrapperContent = strings.Replace(oldWrapperContent, "set(", "ListSet(", -1)
- oldWrapperContent = strings.Replace(oldWrapperContent, "if __name__ == '__main__':", "def runMain():", -1)
- oldWrapperContent = strings.Replace(oldWrapperContent, "__file__", "'"+cfg.WrapperPath+"'", -1)
- cfg.OldWrapperContent = oldWrapperContent
-
- mockFile, err := ioutil.TempFile(tempDir, filepattern)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to create tempfile")
- }
- defer os.Remove(mockFile.Name())
-
- const mockTemplate = `
-# -*- coding: utf-8 -*-
-from __future__ import print_function
-
-class ListSet:
- def __init__(self, values):
- self.values = list(values)
- def __contains__(self, key):
- return self.values.__contains__(key)
- def __iter__(self):
- return self.values.__iter__()
- def __nonzero__(self):
- return len(self.values) > 0
- def add(self, value):
- if value not in self.values:
- self.values.append(value)
- def discard(self, value):
- if value in self.values:
- self.values.remove(value)
- def intersection(self, values):
- return ListSet([value for value in self.values if value in values])
-
-{{.OldWrapperContent}}
-import subprocess
-
-init_env = os.environ.copy()
-
-{{if .MockCmds}}
-mockResults = [{{range .CmdResults}} {
- 'stdout': '{{.Stdout}}',
- 'stderr': '{{.Stderr}}',
- 'exitcode': {{.Exitcode}},
-},{{end}}]
-{{end}}
-
-def serialize_cmd(args):
- current_env = os.environ
- envupdates = [k + "=" + current_env.get(k, '') for k in set(list(current_env.keys()) + list(init_env.keys())) if current_env.get(k, '') != init_env.get(k, '')]
- print('command %s' % args[0], file=sys.stderr)
- for arg in args[1:]:
- print('arg %s' % arg, file=sys.stderr)
- for update in envupdates:
- print('envupdate %s' % update, file=sys.stderr)
-
-def check_output_mock(args):
- serialize_cmd(args)
- {{if .MockCmds}}
- result = mockResults.pop(0)
- print(result['stderr'], file=sys.stderr)
- if result['exitcode']:
- raise subprocess.CalledProcessError(result['exitcode'])
- return result['stdout']
- {{else}}
- return old_check_output(args)
- {{end}}
-
-old_check_output = subprocess.check_output
-subprocess.check_output = check_output_mock
-
-def popen_mock(args, stdout=None, stderr=None):
- serialize_cmd(args)
- {{if .MockCmds}}
- result = mockResults.pop(0)
- if stdout is None:
- print(result['stdout'], file=sys.stdout)
- if stderr is None:
- print(result['stderr'], file=sys.stderr)
-
- class MockResult:
- def __init__(self, returncode):
- self.returncode = returncode
- def wait(self):
- return self.returncode
- def communicate(self):
- return (result['stdout'], result['stderr'])
-
- return MockResult(result['exitcode'])
- {{else}}
- return old_popen(args)
- {{end}}
-
-old_popen = subprocess.Popen
-subprocess.Popen = popen_mock
-
-def execv_mock(binary, args):
- serialize_cmd([binary] + args[1:])
- {{if .MockCmds}}
- result = mockResults.pop(0)
- print(result['stdout'], file=sys.stdout)
- print(result['stderr'], file=sys.stderr)
- sys.exit(result['exitcode'])
- {{else}}
- old_execv(binary, args)
- {{end}}
-
-old_execv = os.execv
-os.execv = execv_mock
-
-sys.argv[0] = '{{.CmdPath}}'
-
-runMain()
-`
- tmpl, err := template.New("mock").Parse(mockTemplate)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to parse old wrapper template")
- }
- if err := tmpl.Execute(mockFile, cfg); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed execute old wrapper template")
- }
- if err := mockFile.Close(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "failed to close temp file")
- }
-
- // Note: Using a self executable wrapper does not work due to a race condition
- // on unix systems. See https://github.com/golang/go/issues/22315
- oldWrapperCmd := &command{
- Path: "/usr/bin/python2",
- Args: append([]string{"-S", mockFile.Name()}, inputCmd.Args...),
- EnvUpdates: inputCmd.EnvUpdates,
- }
- return wrapSubprocessErrorWithSourceLoc(oldWrapperCmd, env.run(oldWrapperCmd, bytes.NewReader(stdinBuffer), stdout, stderr))
-}
diff --git a/compiler_wrapper/oldwrapper_test.go b/compiler_wrapper/oldwrapper_test.go
deleted file mode 100644
index 4f5b3dc0..00000000
--- a/compiler_wrapper/oldwrapper_test.go
+++ /dev/null
@@ -1,431 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "fmt"
- "io"
- "path/filepath"
- "strings"
- "testing"
- "text/template"
-)
-
-func TestCompareToOldPythonWrapperCompilerCommand(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- pathSuffix := ""
- extraArgs := []string{}
- exitCode := 0
- newWrapperExitCode := 0
-
- reset := func() {
- ctx.stderrBuffer.Reset()
- pathSuffix = ""
- extraArgs = []string{}
- exitCode = 0
- newWrapperExitCode = 0
- }
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writePythonMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path + pathSuffix,
- Args: append(cmd.Args, extraArgs...),
- ExitCode: exitCode,
- },
- },
- })
- if newWrapperExitCode != 0 {
- return newExitCodeError(newWrapperExitCode)
- }
- return nil
- }
-
- // Note: This will cause only the compiler command.
- inputCmd := ctx.newCommand(gccX86_64)
-
- reset()
- pathSuffix = "xyz"
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: path") {
- t.Errorf("expected path difference error. Got: %s", stderr)
- }
-
- reset()
- extraArgs = []string{"xyz"}
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: args") {
- t.Errorf("expected args difference error. Got: %s", stderr)
- }
-
- reset()
- exitCode = 1
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "exit codes differ: old 1, new 0") {
- t.Errorf("expected exit code difference error. Got: %s", stderr)
- }
-
- reset()
- newWrapperExitCode = 1
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "exit codes differ: old 0, new 1") {
- t.Errorf("expected exit code difference error. Got: %s", stderr)
- }
-
- reset()
- ctx.must(callCompiler(ctx, ctx.cfg, inputCmd))
- })
-}
-
-func TestCompareToOldPythonWrapperNestedCommand(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- pathSuffix := ""
- extraArgs := []string{}
- wrapperCfg := &mockWrapperConfig{}
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- isNestedCmd := len(wrapperCfg.Cmds) == 0
- var wrapperCmd *mockWrapperCmd
- if isNestedCmd {
- wrapperCmd = &mockWrapperCmd{
- Path: cmd.Path + pathSuffix,
- Args: append(cmd.Args, extraArgs...),
- }
- } else {
- wrapperCmd = &mockWrapperCmd{
- Path: cmd.Path,
- Args: cmd.Args,
- }
- }
- wrapperCfg.Cmds = append(wrapperCfg.Cmds, wrapperCmd)
- if !isNestedCmd {
- writePythonMockWrapper(ctx, wrapperCfg)
- }
- return nil
- }
-
- // Note: This will cause a nested command call.
- inputCmd := ctx.newCommand(clangX86_64, "-Xclang-path=somedir", mainCc)
-
- ctx.stderrBuffer.Reset()
- wrapperCfg = &mockWrapperConfig{}
- pathSuffix = "xyz"
- extraArgs = nil
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: path") {
- t.Errorf("expected path difference error. Got: %s", stderr)
- }
- if !strings.Contains(stderr, "Index 1: none") {
- t.Errorf("expected no difference for cmd index 1. Got: %s", stderr)
- }
-
- ctx.stderrBuffer.Reset()
- wrapperCfg = &mockWrapperConfig{}
- pathSuffix = ""
- extraArgs = []string{"xyz"}
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: args") {
- t.Errorf("expected args difference error. Got: %s", stderr)
- }
- if !strings.Contains(stderr, "Index 1: none") {
- t.Errorf("expected no difference for cmd index 1. Got: %s", stderr)
- }
-
- wrapperCfg = &mockWrapperConfig{}
- pathSuffix = ""
- extraArgs = nil
- ctx.must(callCompiler(ctx, ctx.cfg, inputCmd))
- })
-}
-
-func TestCompareToOldShellWrapperCompilerCommand(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- pathSuffix := ""
- extraArgs := []string{}
- exitCode := 0
- newWrapperExitCode := 0
-
- reset := func() {
- ctx.stderrBuffer.Reset()
- pathSuffix = ""
- extraArgs = []string{}
- exitCode = 0
- newWrapperExitCode = 0
- }
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writeShellMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path + pathSuffix,
- Args: append(cmd.Args, extraArgs...),
- ExitCode: exitCode,
- },
- },
- })
- if newWrapperExitCode != 0 {
- return newExitCodeError(newWrapperExitCode)
- }
- return nil
- }
-
- // Note: This will cause only the compiler command.
- inputCmd := ctx.newCommand(gccX86_64)
-
- reset()
- pathSuffix = "xyz"
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: path") {
- t.Errorf("expected path difference error. Got: %s", stderr)
- }
-
- reset()
- extraArgs = []string{"xyz"}
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "Index 0: args") {
- t.Errorf("expected args difference error. Got: %s", stderr)
- }
-
- reset()
- exitCode = 1
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "exit codes differ: old 1, new 0") {
- t.Errorf("expected exit code difference error. Got: %s", stderr)
- }
-
- reset()
- newWrapperExitCode = 1
- stderr = ctx.mustFail(callCompiler(ctx, ctx.cfg, inputCmd))
- if !strings.Contains(stderr, "exit codes differ: old 0, new 1") {
- t.Errorf("expected exit code difference error. Got: %s", stderr)
- }
-
- reset()
- ctx.must(callCompiler(ctx, ctx.cfg, inputCmd))
-
- reset()
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, " spaces ")))
- })
-}
-
-func TestCompareToOldWrapperEscapeStdoutAndStderr(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- io.WriteString(stdout, "a\n'b'\\")
- io.WriteString(stderr, "c\n'd'\\")
- writePythonMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- },
- },
- })
- return nil
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.stdoutString() != "a\n'b'\\" {
- t.Errorf("unexpected stdout. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "c\n'd'\\" {
- t.Errorf("unexpected stderr. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestCompareToOldWrapperSupportUtf8InStdoutAndStderr(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- io.WriteString(stdout, "©")
- io.WriteString(stderr, "®")
- writePythonMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- },
- },
- })
- return nil
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, mainCc)))
- if ctx.stdoutString() != "©" {
- t.Errorf("unexpected stdout. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "®" {
- t.Errorf("unexpected stderr. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestCompareToOldPythonWrapperArgumentsWithSpaces(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writePythonMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- },
- },
- })
- return nil
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "a b", "c", mainCc)))
- })
-}
-
-func TestCompareToOldShellWrapperArgumentsWithSpaces(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writeShellMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- },
- },
- })
- return nil
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "a b", "c", mainCc)))
- })
-}
-
-func TestForwardStdinWhenUsingOldWrapper(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.cfg.mockOldWrapperCmds = false
- ctx.cfg.oldWrapperPath = filepath.Join(ctx.tempDir, "fakewrapper")
-
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- writeShellMockWrapper(ctx, &mockWrapperConfig{
- Cmds: []*mockWrapperCmd{
- {
- Path: cmd.Path,
- Args: cmd.Args,
- },
- },
- })
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
- return nil
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-", mainCc)))
- })
-}
-
-func writePythonMockWrapper(ctx *testContext, cfg *mockWrapperConfig) {
- const mockTemplate = `
-from __future__ import print_function
-import os
-import sys
-import subprocess
-
-mockCmds = [{{range .Cmds}} {
- 'path': '{{.Path}}',
- 'args': [{{range .Args}}'{{.}}',{{end}}],
- 'exitcode': {{.ExitCode}},
-},{{end}}]
-
-def execv_impl(binary, args):
- cmd = mockCmds.pop(0)
- sys.exit(cmd['exitcode'])
-os.execv = execv_impl
-
-def check_output_impl(args):
- cmd = mockCmds.pop(0)
- if cmd['exitcode']:
- raise subprocess.CalledProcessError(cmd['exitcode'])
- return ""
-subprocess.check_output = check_output_impl
-
-def main():
- while len(mockCmds) > 1:
- subprocess.check_output([mockCmds[0]['path']] + mockCmds[0]['args'])
-
- os.execv(mockCmds[0]['path'], [mockCmds[0]['path']] + mockCmds[0]['args'])
-
-if __name__ == '__main__':
- sys.exit(main())
-`
- tmpl, err := template.New("mock").Parse(mockTemplate)
- if err != nil {
- ctx.t.Fatalf("failed to parse old wrapper template. Error: %s", err)
- }
- buf := bytes.Buffer{}
- if err := tmpl.Execute(&buf, cfg); err != nil {
- ctx.t.Fatalf("failed to execute the template. Error: %s", err)
- }
- ctx.writeFile(ctx.cfg.oldWrapperPath, buf.String())
-}
-
-func writeShellMockWrapper(ctx *testContext, cfg *mockWrapperConfig) {
- const mockTemplate = `#!/bin/sh
-EXEC=fake_exec
-
-function fake_exec {
- exit {{(index .Cmds 0).ExitCode}}
-}
-
-$EXEC "{{(index .Cmds 0).Path}}"{{range (index .Cmds 0).Args}} "{{.}}"{{end}}
-`
- tmpl, err := template.New("mock").Parse(mockTemplate)
- if err != nil {
- ctx.t.Fatalf("failed to parse old wrapper template. Error: %s", err)
- }
- buf := bytes.Buffer{}
- if err := tmpl.Execute(&buf, cfg); err != nil {
- ctx.t.Fatalf("failed to execute the template. Error: %s", err)
- }
- ctx.writeFile(ctx.cfg.oldWrapperPath, buf.String())
-}
-
-// Note: Fields have to be uppercase so that they can be used with template.
-type mockWrapperConfig struct {
- Cmds []*mockWrapperCmd
-}
-
-// Note: Fields have to be uppercase so that they can be used with template.
-type mockWrapperCmd struct {
- Path string
- Args []string
- ExitCode int
-}
diff --git a/compiler_wrapper/pie_flags.go b/compiler_wrapper/pie_flags.go
deleted file mode 100644
index 9675f6ee..00000000
--- a/compiler_wrapper/pie_flags.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func processPieFlags(builder *commandBuilder) {
- fpieMap := map[string]bool{"-D__KERNEL__": true, "-fPIC": true, "-fPIE": true, "-fno-PIC": true, "-fno-PIE": true,
- "-fno-pic": true, "-fno-pie": true, "-fpic": true, "-fpie": true, "-nopie": true,
- "-nostartfiles": true, "-nostdlib": true, "-pie": true, "-static": true}
-
- pieMap := map[string]bool{"-D__KERNEL__": true, "-A": true, "-fno-PIC": true, "-fno-PIE": true, "-fno-pic": true, "-fno-pie": true,
- "-nopie": true, "-nostartfiles": true, "-nostdlib": true, "-pie": true, "-r": true, "--shared": true,
- "-shared": true, "-static": true}
-
- pie := false
- fpie := false
- if builder.target.abi != "eabi" {
- for _, arg := range builder.args {
- if arg.fromUser {
- if fpieMap[arg.value] {
- fpie = true
- }
- if pieMap[arg.value] {
- pie = true
- }
- }
- }
- }
- builder.transformArgs(func(arg builderArg) string {
- // Remove -nopie as it is a non-standard flag.
- if arg.value == "-nopie" {
- return ""
- }
- if fpie && !arg.fromUser && arg.value == "-fPIE" {
- return ""
- }
- if pie && !arg.fromUser && arg.value == "-pie" {
- return ""
- }
- return arg.value
- })
-}
diff --git a/compiler_wrapper/pie_flags_test.go b/compiler_wrapper/pie_flags_test.go
deleted file mode 100644
index 77a0fc8f..00000000
--- a/compiler_wrapper/pie_flags_test.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestAddPieFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initPieConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-pie", mainCc); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "-fPIE", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitPieFlagsWhenNoPieArgGiven(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initPieConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-nopie", mainCc)))
- if err := verifyArgCount(cmd, 0, "-nopie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-pie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-fPIE"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fno-pie", mainCc)))
- if err := verifyArgCount(cmd, 0, "-pie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-fPIE"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitPieFlagsWhenKernelDefined(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initPieConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-D__KERNEL__", mainCc)))
- if err := verifyArgCount(cmd, 0, "-pie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "-fPIE"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddPieFlagsForEabiEvenIfNoPieGiven(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initPieConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64Eabi, "-nopie", mainCc)))
- if err := verifyArgCount(cmd, 0, "-nopie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 1, "-pie"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 1, "-fPIE"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func initPieConfig(cfg *config) {
- cfg.commonFlags = []string{"-fPIE", "-pie"}
-}
diff --git a/compiler_wrapper/print_cmdline_flag.go b/compiler_wrapper/print_cmdline_flag.go
deleted file mode 100644
index e2092edd..00000000
--- a/compiler_wrapper/print_cmdline_flag.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func processPrintCmdlineFlag(builder *commandBuilder) {
- printCmd := false
- builder.transformArgs(func(arg builderArg) string {
- if arg.value == "-print-cmdline" {
- printCmd = true
- return ""
- }
- return arg.value
- })
- if printCmd {
- builder.env = &printingEnv{builder.env}
- }
-}
diff --git a/compiler_wrapper/print_cmdline_flag_test.go b/compiler_wrapper/print_cmdline_flag_test.go
deleted file mode 100644
index 8f6fc226..00000000
--- a/compiler_wrapper/print_cmdline_flag_test.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "regexp"
- "testing"
-)
-
-func TestRemovePrintCmdlineArg(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-print-cmdline", mainCc)))
- if err := verifyArgCount(cmd, 0, "-print-cmdline"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPrintCompilerCommand(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-print-cmdline", mainCc)))
- if matched, _ := regexp.MatchString(`cd '.*' && '.*/x86_64-cros-linux-gnu-gcc.real'.*'main.cc'`, ctx.stderrString()); !matched {
- t.Errorf("sub command not printed to stderr. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestPrintNestedCommand(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- // Note: -clang-syntax calls clang to check the syntax
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-print-cmdline", "-clang-syntax", mainCc)))
- if matched, _ := regexp.MatchString(`cd '.*' && '.*usr/bin/clang'.*'main.cc'.*'-fsyntax-only'`, ctx.stderrString()); !matched {
- t.Errorf("sub command not printed to stderr. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestPrintCmdWd(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- printCmd(ctx, &command{
- Path: "/somepath",
- })
- if ctx.stderrString() != fmt.Sprintf("cd '%s' && '/somepath'\n", ctx.tempDir) {
- t.Errorf("unexpected result. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestPrintCmdAbsolutePath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- printCmd(ctx, &command{
- Path: "somepath",
- })
- if ctx.stderrString() != fmt.Sprintf("cd '%s' && '%s/somepath'\n", ctx.tempDir, ctx.tempDir) {
- t.Errorf("unexpected result. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestPrintCmdEnvUpdates(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- printCmd(ctx, &command{
- Path: "/somepath",
- EnvUpdates: []string{"a=b"},
- })
- if ctx.stderrString() != fmt.Sprintf("cd '%s' && env 'a=b' '/somepath'\n", ctx.tempDir) {
- t.Errorf("unexpected result. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestPrintCmdArgs(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- printCmd(ctx, &command{
- Path: "/somepath",
- Args: []string{"-a"},
- })
- if ctx.stderrString() != fmt.Sprintf("cd '%s' && '/somepath' '-a'\n", ctx.tempDir) {
- t.Errorf("unexpected result. Got: %s", ctx.stderrString())
- }
- })
-}
diff --git a/compiler_wrapper/print_config_flag.go b/compiler_wrapper/print_config_flag.go
deleted file mode 100644
index 9ab9f6bc..00000000
--- a/compiler_wrapper/print_config_flag.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import "fmt"
-
-func processPrintConfigFlag(builder *commandBuilder) {
- printConfig := false
- builder.transformArgs(func(arg builderArg) string {
- if arg.value == "-print-config" {
- printConfig = true
- return ""
- }
- return arg.value
- })
- if printConfig {
- fmt.Fprintf(builder.env.stderr(), "wrapper config: %#v\n", *builder.cfg)
- }
-}
diff --git a/compiler_wrapper/print_config_flag_test.go b/compiler_wrapper/print_config_flag_test.go
deleted file mode 100644
index 1b1528e8..00000000
--- a/compiler_wrapper/print_config_flag_test.go
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "strings"
- "testing"
-)
-
-func TestRemovePrintConfigArg(t *testing.T) {
- withPrintConfigTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-print-config", mainCc)))
- if err := verifyArgCount(cmd, 0, "-print-config"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestPrintConfig(t *testing.T) {
- withPrintConfigTestContext(t, func(ctx *testContext) {
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, "-print-config", mainCc)))
- if !strings.Contains(ctx.stderrString(), "wrapper config: main.config{") {
- t.Errorf("config not printed to stderr. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func withPrintConfigTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- // Not comparing to old wrapper as the old wrapper doesn't have a print-config command.
- ctx.cfg.oldWrapperPath = ""
- work(ctx)
- })
-}
diff --git a/compiler_wrapper/rusage_flag.go b/compiler_wrapper/rusage_flag.go
deleted file mode 100644
index 2a3768c1..00000000
--- a/compiler_wrapper/rusage_flag.go
+++ /dev/null
@@ -1,70 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "fmt"
- "os"
- "path/filepath"
- "strings"
- "syscall"
- "time"
-)
-
-func getRusageLogFilename(env env) string {
- value, _ := env.getenv("GETRUSAGE")
- return value
-}
-
-func logRusage(env env, logFileName string, compilerCmd *command) (exitCode int, err error) {
- rusageBefore := syscall.Rusage{}
- if err := syscall.Getrusage(syscall.RUSAGE_CHILDREN, &rusageBefore); err != nil {
- return 0, err
- }
- compilerCmdWithoutRusage := &command{
- Path: compilerCmd.Path,
- Args: compilerCmd.Args,
- EnvUpdates: append(compilerCmd.EnvUpdates, "GETRUSAGE="),
- }
- startTime := time.Now()
- exitCode, err = wrapSubprocessErrorWithSourceLoc(compilerCmdWithoutRusage,
- env.run(compilerCmdWithoutRusage, env.stdin(), env.stdout(), env.stderr()))
- if err != nil {
- return 0, err
- }
- elapsedRealTime := time.Since(startTime)
- rusageAfter := syscall.Rusage{}
- if err := syscall.Getrusage(syscall.RUSAGE_CHILDREN, &rusageAfter); err != nil {
- return 0, err
- }
- elapsedSysTime := time.Duration(rusageAfter.Stime.Nano()-rusageBefore.Stime.Nano()) * time.Nanosecond
- elapsedUserTime := time.Duration(rusageAfter.Utime.Nano()-rusageBefore.Utime.Nano()) * time.Nanosecond
- // Note: We assume that the compiler takes more heap than any other
- // subcommands that we might have executed before.
- maxMemUsed := rusageAfter.Maxrss
- absCompilerPath := getAbsCmdPath(env, compilerCmd)
-
- if err := os.MkdirAll(filepath.Dir(logFileName), 0777); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error creating rusage log directory %s", logFileName)
- }
- // Note: using file mode 0666 so that a root-created log is writable by others.
- logFile, err := os.OpenFile(logFileName, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0666)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error creating rusage logfile %s", logFileName)
- }
- timeUnit := float64(time.Second)
- if _, err := fmt.Fprintf(logFile, "%.5f : %.5f : %.5f : %d : %s : %s\n",
- float64(elapsedRealTime)/timeUnit, float64(elapsedUserTime)/timeUnit, float64(elapsedSysTime)/timeUnit,
- maxMemUsed, absCompilerPath,
- strings.Join(append([]string{filepath.Base(absCompilerPath)}, compilerCmd.Args...), " ")); err != nil {
- _ = logFile.Close()
- return 0, wrapErrorwithSourceLocf(err, "error writing rusage logfile %s", logFileName)
- }
- if err := logFile.Close(); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error closing rusage logfile %s", logFileName)
- }
-
- return exitCode, nil
-}
diff --git a/compiler_wrapper/rusage_flag_test.go b/compiler_wrapper/rusage_flag_test.go
deleted file mode 100644
index 7acba0c8..00000000
--- a/compiler_wrapper/rusage_flag_test.go
+++ /dev/null
@@ -1,174 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "regexp"
- "strconv"
- "strings"
- "testing"
-)
-
-func TestForwardStdOutAndStdErrAndExitCodeFromLogRusage(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- fmt.Fprint(stdout, "somemessage")
- fmt.Fprint(stderr, "someerror")
- return newExitCodeError(23)
- }
- exitCode := callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc))
- if exitCode != 23 {
- t.Errorf("unexpected exit code. Got: %d", exitCode)
- }
- if ctx.stdoutString() != "somemessage" {
- t.Errorf("stdout was not forwarded. Got: %s", ctx.stdoutString())
- }
- if ctx.stderrString() != "someerror" {
- t.Errorf("stderr was not forwarded. Got: %s", ctx.stderrString())
- }
- })
-}
-
-func TestForwardStdinFromLogRusage(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- // Note: This is called for the clang syntax call as well as for
- // the gcc call, and we assert that stdin is cloned and forwarded
- // to both.
- stdinStr := ctx.readAllString(stdin)
- if stdinStr != "someinput" {
- return fmt.Errorf("unexpected stdin. Got: %s", stdinStr)
- }
- return nil
- }
- io.WriteString(&ctx.stdinBuffer, "someinput")
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, "-", mainCc)))
- })
-}
-
-func TestReportGeneralErrorsFromLogRusage(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- return errors.New("someerror")
- }
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyInternalError(stderr); err != nil {
- t.Fatal(err)
- }
- if !strings.Contains(stderr, "someerror") {
- t.Errorf("unexpected error. Got: %s", stderr)
- }
- })
-}
-
-func TestCreateDirAndFileForLogRusage(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- logFileName := filepath.Join(ctx.tempDir, "somedir", "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
-
- if _, err := os.Stat(logFileName); err != nil {
- t.Errorf("rusage log file does not exist: %s", err)
- }
- })
-}
-
-func TestLogRusageFileContent(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- logFileName := filepath.Join(ctx.tempDir, "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
-
- data, err := ioutil.ReadFile(logFileName)
- if err != nil {
- t.Errorf("could not read the rusage log file. Error: %s", err)
- }
- // Example output:
- // 0.100318 : 0.103412 : 0.096386 : 6508 : /tmp/compiler_wrapper036306868/x86_64-cros-linux-gnu-gcc.real : x86_64-cros-linux-gnu-gcc.real --sysroot=/tmp/compiler_wrapper036306868/usr/x86_64-cros-linux-gnu main.cc -mno-movbe
- logParts := strings.Split(string(data), " : ")
- if len(logParts) != 6 {
- t.Errorf("unexpected number of rusage log parts. Got: %s", logParts)
- }
-
- // First 3 numbers are times in seconds.
- for i := 0; i < 3; i++ {
- if _, err := strconv.ParseFloat(logParts[i], 64); err != nil {
- t.Errorf("unexpected value for index %d. Got: %s", i, logParts[i])
- }
- }
- // Then an int for the memory usage
- if _, err := strconv.ParseInt(logParts[3], 10, 64); err != nil {
- t.Errorf("unexpected mem usage. Got: %s", logParts[3])
- }
- // Then the full path of the compiler
- if logParts[4] != filepath.Join(ctx.tempDir, gccX86_64+".real") {
- t.Errorf("unexpected compiler path. Got: %s", logParts[4])
- }
- // Then the arguments, prefixes with the compiler basename
- if matched, _ := regexp.MatchString("x86_64-cros-linux-gnu-gcc.real --sysroot=.* main.cc", logParts[5]); !matched {
- t.Errorf("unexpected compiler args. Got: %s", logParts[5])
- }
- })
-}
-
-func TestLogRusageAppendsToFile(t *testing.T) {
- withLogRusageTestContext(t, func(ctx *testContext) {
- logFileName := filepath.Join(ctx.tempDir, "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
-
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- data, err := ioutil.ReadFile(logFileName)
- if err != nil {
- t.Errorf("could not read the rusage log file. Error: %s", err)
- }
- firstCallLines := strings.Split(string(data), "\n")
- if len(firstCallLines) != 2 {
- t.Errorf("unexpected number of lines. Got: %s", firstCallLines)
- }
- if firstCallLines[0] == "" {
- t.Error("first line was empty")
- }
- if firstCallLines[1] != "" {
- t.Errorf("second line was not empty. Got: %s", firstCallLines[1])
- }
-
- ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- data, err = ioutil.ReadFile(logFileName)
- if err != nil {
- t.Errorf("could not read the rusage log file. Error: %s", err)
- }
- secondCallLines := strings.Split(string(data), "\n")
- if len(secondCallLines) != 3 {
- t.Errorf("unexpected number of lines. Got: %s", secondCallLines)
- }
- if secondCallLines[0] != firstCallLines[0] {
- t.Errorf("first line was changed. Got: %s", secondCallLines[0])
- }
- if secondCallLines[1] == "" {
- t.Error("second line was empty")
- }
- if secondCallLines[2] != "" {
- t.Errorf("third line was not empty. Got: %s", secondCallLines[2])
- }
- })
-}
-
-func withLogRusageTestContext(t *testing.T, work func(ctx *testContext)) {
- withTestContext(t, func(ctx *testContext) {
- // Disable comparing to the old wrapper as that uses fork + wait3
- // to calculate resource usage, and the new wrapper uses the getrusage
- // syscall.
- ctx.cfg.oldWrapperPath = ""
- ctx.env = []string{"GETRUSAGE=" + filepath.Join(ctx.tempDir, "rusage.log")}
- work(ctx)
- })
-}
diff --git a/compiler_wrapper/sanitizer_flags.go b/compiler_wrapper/sanitizer_flags.go
deleted file mode 100644
index fe8d1503..00000000
--- a/compiler_wrapper/sanitizer_flags.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "strings"
-)
-
-func processSanitizerFlags(builder *commandBuilder) {
- hasCoverageFlags := false
- hasSanitizeFlags := false
- hasSanitizeFuzzerFlags := false
- for _, arg := range builder.args {
- // TODO: This should probably be -fsanitize= to not match on
- // e.g. -fsanitize-blacklist
- if arg.fromUser {
- if strings.HasPrefix(arg.value, "-fsanitize") {
- hasSanitizeFlags = true
- if strings.Contains(arg.value, "fuzzer") {
- hasSanitizeFuzzerFlags = true
- }
- } else if arg.value == "-fprofile-instr-generate" {
- hasCoverageFlags = true
- }
- }
- }
- if hasSanitizeFlags {
- // Flags not supported by sanitizers (ASan etc.)
- unsupportedSanitizerFlags := map[string]bool{
- "-D_FORTIFY_SOURCE=1": true,
- "-D_FORTIFY_SOURCE=2": true,
- "-Wl,--no-undefined": true,
- "-Wl,-z,defs": true,
- }
-
- builder.transformArgs(func(arg builderArg) string {
- // TODO: This is a bug in the old wrapper to not filter
- // non user args for gcc. Fix this once we don't compare to the old wrapper anymore.
- if (builder.target.compilerType != gccType || arg.fromUser) &&
- unsupportedSanitizerFlags[arg.value] {
- return ""
- }
- return arg.value
- })
- if builder.target.compilerType == clangType {
- // hasSanitizeFlags && hasCoverageFlags is to work around crbug.com/1013622
- if hasSanitizeFuzzerFlags || (hasSanitizeFlags && hasCoverageFlags) {
- fuzzerFlagsToAdd := []string{
- // TODO: This flag should be removed once fuzzer works with new pass manager
- "-fno-experimental-new-pass-manager",
- }
- builder.addPreUserArgs(fuzzerFlagsToAdd...)
- }
- }
- }
-}
diff --git a/compiler_wrapper/sanitizer_flags_test.go b/compiler_wrapper/sanitizer_flags_test.go
deleted file mode 100644
index 8f50a900..00000000
--- a/compiler_wrapper/sanitizer_flags_test.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestFilterUnsupportedSanitizerFlagsIfSanitizeGiven(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=kernel-address", "-Wl,--no-undefined", mainCc)))
- if err := verifyArgCount(cmd, 0, "-Wl,--no-undefined"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=kernel-address", "-Wl,-z,defs", mainCc)))
- if err := verifyArgCount(cmd, 0, "-Wl,-z,defs"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=kernel-address", "-D_FORTIFY_SOURCE=1", mainCc)))
- if err := verifyArgCount(cmd, 0, "-D_FORTIFY_SOURCE=1"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=kernel-address", "-D_FORTIFY_SOURCE=2", mainCc)))
- if err := verifyArgCount(cmd, 0, "-D_FORTIFY_SOURCE=2"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestFilterUnsupportedDefaultSanitizerFlagsIfSanitizeGivenForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.commonFlags = []string{"-D_FORTIFY_SOURCE=1"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=kernel-address", mainCc)))
- if err := verifyArgCount(cmd, 0, "-D_FORTIFY_SOURCE=1"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepUnsupportedDefaultSanitizerFlagsIfSanitizeGivenForGcc(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.commonFlags = []string{"-D_FORTIFY_SOURCE=1"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=kernel-address", mainCc)))
- if err := verifyArgCount(cmd, 1, "-D_FORTIFY_SOURCE=1"); err != nil {
- t.Error(err)
- }
- })
-}
-
-// TODO: This is a bug in the old wrapper to not filter
-// non user args for gcc. Fix this once we don't compare to the old wrapper anymore.
-func TestKeepSanitizerFlagsIfNoSanitizeGiven(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-Wl,--no-undefined", mainCc)))
- if err := verifyArgCount(cmd, 1, "-Wl,--no-undefined"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-Wl,-z,defs", mainCc)))
- if err := verifyArgCount(cmd, 1, "-Wl,-z,defs"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-D_FORTIFY_SOURCE=1", mainCc)))
- if err := verifyArgCount(cmd, 1, "-D_FORTIFY_SOURCE=1"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-D_FORTIFY_SOURCE=2", mainCc)))
- if err := verifyArgCount(cmd, 1, "-D_FORTIFY_SOURCE=2"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepSanitizerFlagsIfSanitizeGivenInCommonFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.commonFlags = []string{"-fsanitize=kernel-address"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-Wl,--no-undefined", mainCc)))
- if err := verifyArgCount(cmd, 1, "-Wl,--no-undefined"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddFuzzerFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=fuzzer", mainCc)))
- if err := verifyArgOrder(cmd, "-fno-experimental-new-pass-manager",
- "-fsanitize=fuzzer", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitFuzzerFlagsForGcc(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=fuzzer", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=address", "-fprofile-instr-generate", mainCc)))
- if err := verifyArgOrder(cmd, "-fno-experimental-new-pass-manager",
- "-fsanitize=address", "-fprofile-instr-generate", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=address", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fprofile-instr-generate", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/stackprotector_flags.go b/compiler_wrapper/stackprotector_flags.go
deleted file mode 100644
index 24605720..00000000
--- a/compiler_wrapper/stackprotector_flags.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func processStackProtectorFlags(builder *commandBuilder) {
- fstackMap := map[string]bool{"-D__KERNEL__": true, "-fno-stack-protector": true, "-nodefaultlibs": true,
- "-nostdlib": true}
-
- fstack := false
- if builder.target.abi != "eabi" {
- for _, arg := range builder.args {
- if arg.fromUser && fstackMap[arg.value] {
- fstack = true
- break
- }
- }
- }
- if fstack {
- builder.addPreUserArgs("-fno-stack-protector")
- builder.transformArgs(func(arg builderArg) string {
- if !arg.fromUser && arg.value == "-fstack-protector-strong" {
- return ""
- }
- return arg.value
- })
- }
-}
diff --git a/compiler_wrapper/stackprotector_flags_test.go b/compiler_wrapper/stackprotector_flags_test.go
deleted file mode 100644
index a8757579..00000000
--- a/compiler_wrapper/stackprotector_flags_test.go
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestAddStrongStackProtectorFlag(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initStackProtectorStrongConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd, "-fstack-protector-strong", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddNoStackProtectorFlagWhenKernelDefined(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initStackProtectorStrongConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-D__KERNEL__", mainCc)))
- if err := verifyArgOrder(cmd, "-fno-stack-protector", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitNoStackProtectorFlagWhenAlreadyInCommonFlags(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.commonFlags = []string{"-fno-stack-protector"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgCount(cmd, 1, "-fno-stack-protector"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddStrongStackProtectorFlagForEabiEvenIfNoStackProtectorGiven(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initStackProtectorStrongConfig(ctx.cfg)
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64Eabi, "-fno-stack-protector", mainCc)))
- if err := verifyArgCount(cmd, 1, "-fstack-protector-strong"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func initStackProtectorStrongConfig(cfg *config) {
- cfg.commonFlags = []string{"-fstack-protector-strong"}
-}
diff --git a/compiler_wrapper/sysroot_flag.go b/compiler_wrapper/sysroot_flag.go
deleted file mode 100644
index 67625b3b..00000000
--- a/compiler_wrapper/sysroot_flag.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path/filepath"
- "strings"
-)
-
-func processSysrootFlag(builder *commandBuilder) string {
- fromUser := false
- for _, arg := range builder.args {
- if arg.fromUser && strings.HasPrefix(arg.value, "--sysroot=") {
- fromUser = true
- break
- }
- }
- sysroot, syrootPresent := builder.env.getenv("SYSROOT")
- if syrootPresent {
- builder.updateEnv("SYSROOT=")
- }
- if sysroot == "" {
- // Use the bundled sysroot by default.
- sysroot = filepath.Join(builder.rootPath, "usr", builder.target.target)
- }
- if !fromUser {
- builder.addPreUserArgs("--sysroot=" + sysroot)
- }
- return sysroot
-}
diff --git a/compiler_wrapper/sysroot_flag_test.go b/compiler_wrapper/sysroot_flag_test.go
deleted file mode 100644
index 308d5e96..00000000
--- a/compiler_wrapper/sysroot_flag_test.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path"
- "testing"
-)
-
-func TestOmitSysrootGivenUserDefinedSysroot(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- runWithCompiler := func(compiler string) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(compiler, "--sysroot=/somepath", mainCc)))
- if err := verifyArgOrder(cmd, "--sysroot=/somepath", mainCc); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 1, "--sysroot.*"); err != nil {
- t.Error(err)
- }
- }
-
- runWithCompiler(gccX86_64)
- runWithCompiler(clangX86_64)
- })
-}
-
-func TestSetSysrootFlagFromEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"SYSROOT=/envpath"}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "SYSROOT="); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "--sysroot=/envpath", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestClearEmptySysrootFlagInEnv(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"SYSROOT="}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyEnvUpdate(cmd, "SYSROOT="); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, "--sysroot=.*/x86_64-cros-linux-gnu", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestSetSysrootRelativeToWrapperPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd,
- "--sysroot="+ctx.tempDir+"/somepath/usr/x86_64-cros-linux-gnu", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestSetSysrootRelativeToSymlinkedWrapperPath(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
- linkedWrapperPath := path.Join(ctx.tempDir, "a/linked/path/x86_64-cros-linux-gnu-gcc")
- ctx.symlink(path.Join(ctx.tempDir, gccX86_64), linkedWrapperPath)
-
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(linkedWrapperPath, mainCc)))
- if err := verifyArgOrder(cmd,
- "--sysroot="+ctx.tempDir+"/somepath/usr/x86_64-cros-linux-gnu", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/testdata/android_golden/bisect.json b/compiler_wrapper/testdata/android_golden/bisect.json
deleted file mode 100644
index a24222ab..00000000
--- a/compiler_wrapper/testdata/android_golden/bisect.json
+++ /dev/null
@@ -1,103 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/user/home/ANDROID_BISECT",
- "/tmp/stable/clang.real",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/tmp/stable/clang.real",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/tmp/stable/clang.real",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/android_golden/clang_path.json b/compiler_wrapper/testdata/android_golden/clang_path.json
deleted file mode 100644
index 5686b381..00000000
--- a/compiler_wrapper/testdata/android_golden/clang_path.json
+++ /dev/null
@@ -1,230 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang++.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "a/b/c/d/e/f/g/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "a/b/c/d/e/f/g/clang.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "symlinked/clang_other",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "symlinked/clang.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/pathenv/clang.real",
- "args": [
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc",
- "--gomacc-path",
- "/tmp/stable/gomacc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "/tmp/stable/clang.real",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/android_golden/compile_with_fallback.json b/compiler_wrapper/testdata/android_golden/compile_with_fallback.json
deleted file mode 100644
index 509583ae..00000000
--- a/compiler_wrapper/testdata/android_golden/compile_with_fallback.json
+++ /dev/null
@@ -1,115 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH=fallback_compiler",
- "ANDROID_LLVM_STDERR_REDIRECT=/tmp/stable/fallback_stderr",
- "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc",
- "-fno-color-diagnostics",
- "-a",
- "-b"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH=fallback_compiler",
- "ANDROID_LLVM_STDERR_REDIRECT=/tmp/stable/fallback_stderr",
- "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc",
- "-fno-color-diagnostics",
- "-a",
- "-b"
- ]
- },
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "fallback_compiler/clang",
- "args": [
- "main.cc"
- ],
- "env_updates": [
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH="
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH=fallback_compiler",
- "ANDROID_LLVM_STDERR_REDIRECT=/tmp/stable/fallback_stderr",
- "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b"
- ],
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "main.cc"
- ]
- },
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang.real",
- "args": [
- "main.cc",
- "-fno-color-diagnostics",
- "-a",
- "-b"
- ]
- },
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "fallback_compiler/clang",
- "args": [
- "main.cc"
- ],
- "env_updates": [
- "ANDROID_LLVM_PREBUILT_COMPILER_PATH="
- ]
- },
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json b/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json
deleted file mode 100644
index b9b1509f..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json
+++ /dev/null
@@ -1,130 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/tmp/sysroot_bisect",
- "/tmp/stable/clang",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/tmp/stable/clang",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/tmp/stable/clang",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json
deleted file mode 100644
index 18a54945..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json
+++ /dev/null
@@ -1,281 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json
deleted file mode 100644
index d46586f2..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json
+++ /dev/null
@@ -1,32 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json
deleted file mode 100644
index 812686c7..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json
+++ /dev/null
@@ -1,272 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json
deleted file mode 100644
index 61537a59..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json
+++ /dev/null
@@ -1,419 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang++",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "CLANG=somepath/clang"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "somepath/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/somedir/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/somedir/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/somedir/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/a/b/c/d/e/f/g/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/a/b/c/d/e/f/g/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "somedir/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/somedir/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/pathenv/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json
deleted file mode 100644
index be1a2922..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json
+++ /dev/null
@@ -1,266 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,--no-undefined",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fsanitize=kernel-address",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,-z,defs",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fsanitize=kernel-address",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=1",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fsanitize=kernel-address",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=2",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fsanitize=kernel-address",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fsanitize=address",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json
deleted file mode 100644
index 40a84449..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json
+++ /dev/null
@@ -1,235 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-mno-movbe",
- "-pass-exit-codes",
- "-Wclobbered",
- "-Wno-psabi",
- "-Wlogical-op",
- "-Wmissing-parameter-type",
- "-Wold-style-declaration",
- "-Woverride-init",
- "-Wunsafe-loop-optimizations",
- "-Wstrict-aliasing=abc",
- "-finline-limit=abc",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=cpp",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-#warnings",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=maybe-uninitialized",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-error=uninitialized",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-error=unused-variable",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-unused-variable",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wunused-variable",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-only=-someflag",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-someflag",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json b/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json
deleted file mode 100644
index b6588995..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json
+++ /dev/null
@@ -1,268 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerrorclang-tidy failed"
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json
deleted file mode 100644
index c3c316bf..00000000
--- a/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json
+++ /dev/null
@@ -1,146 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc",
- "-Wno-error"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "main.cc",
- "-Wno-error"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json
deleted file mode 100644
index 62afbbaa..00000000
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json
+++ /dev/null
@@ -1,26 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json
deleted file mode 100644
index 6c88c344..00000000
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json
+++ /dev/null
@@ -1,218 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-eabi-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv7m-cros-eabi-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv8m-cros-eabi-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json
deleted file mode 100644
index b846d47f..00000000
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json
+++ /dev/null
@@ -1,155 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/pathenv/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json
deleted file mode 100644
index e1470474..00000000
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json
+++ /dev/null
@@ -1,80 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-march=goldmont",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont-plus",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-march=goldmont-plus",
- "main.cc"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=skylake",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-march=skylake",
- "main.cc"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_golden/bisect.json
deleted file mode 100644
index f9a503f3..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/bisect.json
+++ /dev/null
@@ -1,178 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/tmp/sysroot_bisect",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json
deleted file mode 100644
index 847d0e5f..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json
+++ /dev/null
@@ -1,434 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json
deleted file mode 100644
index 15241001..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json
+++ /dev/null
@@ -1,416 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json
deleted file mode 100644
index 86cead04..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json
+++ /dev/null
@@ -1,605 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang++",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "CLANG=somepath/clang"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "somepath/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "somedir/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json
deleted file mode 100644
index 39094948..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json
+++ /dev/null
@@ -1,387 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,--no-undefined",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,-z,defs",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=1",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=2",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=fuzzer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-fno-omit-frame-pointer",
- "-fsanitize=address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fprofile-instr-generate",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json
deleted file mode 100644
index c9ad6e46..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json
+++ /dev/null
@@ -1,347 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-mno-movbe",
- "-pass-exit-codes",
- "-Wclobbered",
- "-Wno-psabi",
- "-Wlogical-op",
- "-Wmissing-parameter-type",
- "-Wold-style-declaration",
- "-Woverride-init",
- "-Wunsafe-loop-optimizations",
- "-Wstrict-aliasing=abc",
- "-finline-limit=abc",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=cpp",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-#warnings",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=maybe-uninitialized",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-error=uninitialized",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-error=unused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-unused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wunused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-only=-someflag",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-someflag",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json
deleted file mode 100644
index ff68d4bd..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json
+++ /dev/null
@@ -1,310 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-noccache",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=someNonExistingPath"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-nopie",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7a-cros-linux-gnueabihf-clang",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7a-cros-linux-gnueabihf",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7a-cros-linux-gnueabihf"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7a-cros-linux-gnueabihf",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "--sysroot=xyz",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "--sysroot=xyz",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json
deleted file mode 100644
index 03d0c437..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json
+++ /dev/null
@@ -1,343 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerrorclang-tidy failed"
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json
deleted file mode 100644
index f80d9b65..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json
+++ /dev/null
@@ -1,226 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json
deleted file mode 100644
index b9778247..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json
+++ /dev/null
@@ -1,253 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json
deleted file mode 100644
index a037dd10..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json
+++ /dev/null
@@ -1,329 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-eabi-gcc.real",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-win-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-eabi-gcc.real",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-win-gnu-gcc.real",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-eabi-gcc.real",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-win-gnu-gcc.real",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json
deleted file mode 100644
index 24ad65ae..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json
+++ /dev/null
@@ -1,233 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./symlinked/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/pathenv/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json
deleted file mode 100644
index a63aa25c..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json
+++ /dev/null
@@ -1,320 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,--no-undefined",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,-z,defs",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=1",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=2",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=fuzzer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=address",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fsanitize=address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fprofile-instr-generate",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json
deleted file mode 100644
index b7a56ce4..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json
+++ /dev/null
@@ -1,119 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-march=silvermont",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont-plus",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-march=silvermont",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=skylake",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-march=corei7",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json
deleted file mode 100644
index c52003b7..00000000
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json
+++ /dev/null
@@ -1,256 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-noccache",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=someNonExistingPath"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-nopie",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7a-cros-linux-gnueabihf-gcc",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7a-cros-linux-gnueabihf-gcc.real",
- "--sysroot=/usr/armv7a-cros-linux-gnueabihf",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-D_FORTIFY_SOURCE=2",
- "-mthumb",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7a-cros-linux-gnueabihf",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "--sysroot=xyz",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "--sysroot=xyz",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json
deleted file mode 100644
index 56b7b3ed..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json
+++ /dev/null
@@ -1,193 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/tmp/sysroot_bisect",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json
deleted file mode 100644
index 09f39ebd..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json
+++ /dev/null
@@ -1,665 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang++",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "CLANG=somepath/clang"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "somepath/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "somedir/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json
deleted file mode 100644
index d4b1e970..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json
+++ /dev/null
@@ -1,383 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerrorclang-tidy failed"
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json
deleted file mode 100644
index e197de4c..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json
+++ /dev/null
@@ -1,251 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json
deleted file mode 100644
index a4305412..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json
+++ /dev/null
@@ -1,273 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-reorder-init-list",
- "-Wno-final-dtor-non-final-class",
- "-Wno-implicit-int-float-conversion",
- "-Wno-return-stack-address",
- "-Werror=poison-system-directories",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json
deleted file mode 100644
index 24ad65ae..00000000
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json
+++ /dev/null
@@ -1,233 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./symlinked/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/pathenv/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json
deleted file mode 100644
index f3061817..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json
+++ /dev/null
@@ -1,157 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/tmp/sysroot_bisect",
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json
deleted file mode 100644
index 8062e8a5..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json
+++ /dev/null
@@ -1,528 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang++",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "CLANG=somepath/clang"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "somepath/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/somedir/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/somedir/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "a/b/usr/bin/clang",
- "args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "a/b/usr/bin/clang",
- "args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "somedir/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json
deleted file mode 100644
index 03d0c437..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json
+++ /dev/null
@@ -1,343 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerrorclang-tidy failed"
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json
deleted file mode 100644
index 5510eec8..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json
+++ /dev/null
@@ -1,191 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json
deleted file mode 100644
index ff7b3c9d..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json
+++ /dev/null
@@ -1,241 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-grecord-gcc-switches",
- "-fno-addrsig",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json
deleted file mode 100644
index 694b921a..00000000
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json
+++ /dev/null
@@ -1,197 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/pathenv/x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-fno-reorder-blocks-and-partition",
- "-Wno-unused-local-typedefs",
- "-Wno-maybe-uninitialized",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json b/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json
deleted file mode 100644
index 4bc696d3..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json
+++ /dev/null
@@ -1,154 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "/tmp/sysroot_bisect",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "BISECT_STAGE=someBisectStage",
- "BISECT_DIR=someBisectDir",
- "HOME=/user/home"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/env",
- "args": [
- "python",
- "-c",
- "\nimport bisect_driver\nimport shlex\nimport sys\n\ndef ExpandArgs(args, target):\n\tfor arg in args:\n\t\tif arg[0] == '@':\n\t\t\twith open(arg[1:], 'rb') as f:\n\t\t\t\tExpandArgs(shlex.split(f.read()), target)\n\t\telse:\n\t\t\ttarget.append(arg)\n\treturn target\n\nstage = sys.argv[1]\ndir = sys.argv[2]\nexecargs = ExpandArgs(sys.argv[3:], [])\n\nsys.exit(bisect_driver.bisect_driver(stage, dir, execargs))\n",
- "someBisectStage",
- "someBisectDir",
- "/usr/bin/ccache",
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json
deleted file mode 100644
index d7b5258d..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json
+++ /dev/null
@@ -1,366 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "-ftrapv",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "-ftrapv",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json
deleted file mode 100644
index e78a420a..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json
+++ /dev/null
@@ -1,348 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-eabi"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv8m-cros-win-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json
deleted file mode 100644
index 2a30100c..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json
+++ /dev/null
@@ -1,509 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang++",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang++",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "CLANG=somepath/clang"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "somepath/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-path=/somedir",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/somedir/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-resource-dir=someResourceDir",
- "--gcc-toolchain=/usr",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-Ba/b/bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "somedir/x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json
deleted file mode 100644
index 386e82d6..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json
+++ /dev/null
@@ -1,330 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,--no-undefined",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,-z,defs",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=1",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=2",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fsanitize=kernel-address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=fuzzer",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fno-experimental-new-pass-manager",
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fsanitize=address",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fsanitize=address",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fprofile-instr-generate",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json
deleted file mode 100644
index c8d16bfe..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json
+++ /dev/null
@@ -1,291 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-mno-movbe",
- "-pass-exit-codes",
- "-Wclobbered",
- "-Wno-psabi",
- "-Wlogical-op",
- "-Wmissing-parameter-type",
- "-Wold-style-declaration",
- "-Woverride-init",
- "-Wunsafe-loop-optimizations",
- "-Wstrict-aliasing=abc",
- "-finline-limit=abc",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=cpp",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-#warnings",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=maybe-uninitialized",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-error=uninitialized",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-error=unused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-unused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wunused-variable",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Xclang-only=-someflag",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-someflag",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json
deleted file mode 100644
index d0788a6b..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json
+++ /dev/null
@@ -1,267 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-noccache",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=someNonExistingPath"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-nopie",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7a-cros-linux-gnueabihf-clang",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/armv7a-cros-linux-gnueabihf",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-mthumb",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-B../../bin",
- "-target",
- "armv7a-cros-linux-gnueabihf"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7a-cros-linux-gnueabihf",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "--sysroot=xyz",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "--sysroot=xyz",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json
deleted file mode 100644
index 22dd9ddc..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json
+++ /dev/null
@@ -1,279 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerrorclang-tidy failed"
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "WITH_TIDY=1",
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--print-resource-dir"
- ]
- },
- "stdout": "someResourceDir"
- },
- {
- "cmd": {
- "path": "../../usr/bin/clang-tidy",
- "args": [
- "-checks=*,google*,-bugprone-narrowing-conversions,-cppcoreguidelines-*,-fuchsia-*,-google-build-using-namespace,-google-default-arguments,-google-explicit-constructor,-google-readability*,-google-runtime-int,-google-runtime-references,-hicpp-avoid-c-arrays,-hicpp-braces-around-statements,-hicpp-no-array-decay,-hicpp-signed-bitwise,-hicpp-uppercase-literal-suffix,-hicpp-use-auto,-llvm-namespace-comment,-misc-non-private-member-variables-in-classes,-misc-unused-parameters,-modernize-*,-readability-*",
- "main.cc",
- "--",
- "-resource-dir=someResourceDir",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json
deleted file mode 100644
index a99c1067..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json
+++ /dev/null
@@ -1,186 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "FORCE_DISABLE_WERROR=1"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "main.cc"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stderr": "-Werror originalerror",
- "exitcode": 1
- },
- {
- "cmd": {
- "path": "ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-Wno-error"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json
deleted file mode 100644
index 81b81a2f..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json
+++ /dev/null
@@ -1,209 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-clang-syntax",
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "../../usr/bin/clang",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "main.cc",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu",
- "-fsyntax-only",
- "-stdlib=libstdc++"
- ]
- }
- },
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json
deleted file mode 100644
index 5efa5ed6..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json
+++ /dev/null
@@ -1,297 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-eabi-gcc.real",
- "--sysroot=/usr/x86_64-cros-eabi",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-win-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-win-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/armv7m-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-eabi-gcc.real",
- "--sysroot=/usr/armv7m-cros-eabi",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7m-cros-win-gnu-gcc.real",
- "--sysroot=/usr/armv7m-cros-win-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/armv8m-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-eabi-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-eabi-gcc.real",
- "--sysroot=/usr/armv8m-cros-eabi",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-eabi",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv8m-cros-win-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv8m-cros-win-gnu-gcc.real",
- "--sysroot=/usr/armv8m-cros-win-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-mthumb",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv8m-cros-win-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json
deleted file mode 100644
index 92a261ce..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json
+++ /dev/null
@@ -1,209 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- },
- "stdout": "somemessage",
- "stderr": "someerror",
- "exitcode": 1
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "/tmp/stable/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./a/b/c/d/e/f/g/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./symlinked/x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./symlinked/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "PATH=/tmp/stable/pathenv"
- ],
- "wrapper": {
- "cmd": {
- "path": "x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "/tmp/stable/pathenv/x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json
deleted file mode 100644
index 7091f608..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json
+++ /dev/null
@@ -1,288 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,--no-undefined",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-Wl,-z,defs",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=1",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=kernel-address",
- "-D_FORTIFY_SOURCE=2",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=kernel-address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=fuzzer",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=fuzzer",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=address",
- "-fprofile-instr-generate",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fsanitize=address",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fsanitize=address",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-fprofile-instr-generate",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fprofile-instr-generate",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json
deleted file mode 100644
index 6e519429..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json
+++ /dev/null
@@ -1,107 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-march=silvermont",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=goldmont-plus",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-march=silvermont",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-march=skylake",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-march=corei7",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json
deleted file mode 100644
index a3cfc34a..00000000
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json
+++ /dev/null
@@ -1,237 +0,0 @@
-[
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-noccache",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc.real",
- "args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=someNonExistingPath"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "env": [
- "GOMACC_PATH=/tmp/stable/gomacc"
- ],
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/gomacc",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-nopie",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./armv7a-cros-linux-gnueabihf-gcc",
- "args": [
- "-D__KERNEL__",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./armv7a-cros-linux-gnueabihf-gcc.real",
- "--sysroot=/usr/armv7a-cros-linux-gnueabihf",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "-mthumb",
- "-fno-stack-protector",
- "-D__KERNEL__",
- "main.cc"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/armv7a-cros-linux-gnueabihf",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-gcc",
- "args": [
- "--sysroot=xyz",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "./x86_64-cros-linux-gnu-gcc.real",
- "-Wno-maybe-uninitialized",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wtrampolines",
- "--sysroot=xyz",
- "main.cc",
- "-mno-movbe"
- ],
- "env_updates": [
- "CCACHE_BASEDIR=/usr/x86_64-cros-linux-gnu",
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002"
- ]
- }
- }
- ]
- }
-]
diff --git a/compiler_wrapper/testutil_test.go b/compiler_wrapper/testutil_test.go
deleted file mode 100644
index c0e49fdd..00000000
--- a/compiler_wrapper/testutil_test.go
+++ /dev/null
@@ -1,336 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "bytes"
- "flag"
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "strings"
- "testing"
-)
-
-var crosRootDirFlag = flag.String("crosroot", "", "root dir of the chrome os toolchain")
-var androidPrebuiltsDirFlag = flag.String("androidprebuilts", "", "prebuilts dir of android")
-
-const mainCc = "main.cc"
-const clangAndroid = "./clang"
-const clangX86_64 = "./x86_64-cros-linux-gnu-clang"
-const gccX86_64 = "./x86_64-cros-linux-gnu-gcc"
-const gccX86_64Eabi = "./x86_64-cros-eabi-gcc"
-const gccArmV7 = "./armv7m-cros-linux-gnu-gcc"
-const gccArmV7Eabi = "./armv7m-cros-eabi-gcc"
-const gccArmV8 = "./armv8m-cros-linux-gnu-gcc"
-const gccArmV8Eabi = "./armv8m-cros-eabi-gcc"
-
-type testContext struct {
- t *testing.T
- wd string
- tempDir string
- env []string
- cfg *config
- inputCmd *command
- lastCmd *command
- cmdCount int
- cmdMock func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error
- stdinBuffer bytes.Buffer
- stdoutBuffer bytes.Buffer
- stderrBuffer bytes.Buffer
-}
-
-func withTestContext(t *testing.T, work func(ctx *testContext)) {
- t.Parallel()
- tempDir, err := ioutil.TempDir("", "compiler_wrapper")
- if err != nil {
- t.Fatalf("Unable to create the temp dir. Error: %s", err)
- }
- defer os.RemoveAll(tempDir)
-
- ctx := testContext{
- t: t,
- wd: tempDir,
- tempDir: tempDir,
- env: nil,
- cfg: &config{},
- }
- ctx.updateConfig(&config{})
-
- work(&ctx)
-}
-
-var _ env = (*testContext)(nil)
-
-func (ctx *testContext) getenv(key string) (string, bool) {
- for i := len(ctx.env) - 1; i >= 0; i-- {
- entry := ctx.env[i]
- if strings.HasPrefix(entry, key+"=") {
- return entry[len(key)+1:], true
- }
- }
- return "", false
-}
-
-func (ctx *testContext) environ() []string {
- return ctx.env
-}
-
-func (ctx *testContext) getwd() string {
- return ctx.wd
-}
-
-func (ctx *testContext) stdin() io.Reader {
- return &ctx.stdinBuffer
-}
-
-func (ctx *testContext) stdout() io.Writer {
- return &ctx.stdoutBuffer
-}
-
-func (ctx *testContext) stdoutString() string {
- return ctx.stdoutBuffer.String()
-}
-
-func (ctx *testContext) stderr() io.Writer {
- return &ctx.stderrBuffer
-}
-
-func (ctx *testContext) stderrString() string {
- return ctx.stderrBuffer.String()
-}
-
-func (ctx *testContext) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
- // Keep calling the old wrapper when we are comparing the output of the
- // old wrapper to the new wrapper.
- if isCompareToOldWrapperCmd(cmd) {
- // Make sure we have a PATH in the env as the old wrapper needs that.
- pathFound := false
- for _, arg := range ctx.env {
- if arg == "PATH" {
- pathFound = true
- break
- }
- }
- if !pathFound {
- ctx.env = append(ctx.env, "PATH=")
- }
- return runCmd(ctx, cmd, nil, stdout, stderr)
- }
- ctx.cmdCount++
- ctx.lastCmd = cmd
- if ctx.cmdMock != nil {
- return ctx.cmdMock(cmd, stdin, stdout, stderr)
- }
- return nil
-}
-
-func (ctx *testContext) exec(cmd *command) error {
- ctx.cmdCount++
- ctx.lastCmd = cmd
- if ctx.cmdMock != nil {
- return ctx.cmdMock(cmd, ctx.stdin(), ctx.stdout(), ctx.stderr())
- }
- return nil
-}
-
-func (ctx *testContext) must(exitCode int) *command {
- if exitCode != 0 {
- ctx.t.Fatalf("expected no error, but got exit code %d. Stderr: %s",
- exitCode, ctx.stderrString())
- }
- return ctx.lastCmd
-}
-
-func (ctx *testContext) mustFail(exitCode int) string {
- if exitCode == 0 {
- ctx.t.Fatalf("expected an error, but got none")
- }
- return ctx.stderrString()
-}
-
-func (ctx *testContext) updateConfig(cfg *config) {
- *ctx.cfg = *cfg
- ctx.cfg.mockOldWrapperCmds = true
- ctx.cfg.newWarningsDir = filepath.Join(ctx.tempDir, "fatal_clang_warnings")
- if strings.HasPrefix(ctx.cfg.oldWrapperPath, "$CHROOT") {
- if *crosRootDirFlag != "" && ctx.cfg.oldWrapperPath != "" {
- ctx.cfg.oldWrapperPath = strings.Replace(ctx.cfg.oldWrapperPath, "$CHROOT", *crosRootDirFlag, -1)
- } else {
- ctx.cfg.oldWrapperPath = ""
- }
- } else if strings.HasPrefix(ctx.cfg.oldWrapperPath, "$ANDROID_PREBUILTS") {
- if *androidPrebuiltsDirFlag != "" && ctx.cfg.oldWrapperPath != "" {
- ctx.cfg.oldWrapperPath = strings.Replace(ctx.cfg.oldWrapperPath, "$ANDROID_PREBUILTS", *androidPrebuiltsDirFlag, -1)
- } else {
- ctx.cfg.oldWrapperPath = ""
- }
- }
-}
-
-func (ctx *testContext) newCommand(path string, args ...string) *command {
- // Create an empty wrapper at the given path.
- // Needed as we are resolving symlinks which stats the wrapper file.
- ctx.writeFile(path, "")
- return &command{
- Path: path,
- Args: args,
- }
-}
-
-func (ctx *testContext) writeFile(fullFileName string, fileContent string) {
- if !filepath.IsAbs(fullFileName) {
- fullFileName = filepath.Join(ctx.tempDir, fullFileName)
- }
- if err := os.MkdirAll(filepath.Dir(fullFileName), 0777); err != nil {
- ctx.t.Fatal(err)
- }
- if err := ioutil.WriteFile(fullFileName, []byte(fileContent), 0777); err != nil {
- ctx.t.Fatal(err)
- }
-}
-
-func (ctx *testContext) symlink(oldname string, newname string) {
- if !filepath.IsAbs(oldname) {
- oldname = filepath.Join(ctx.tempDir, oldname)
- }
- if !filepath.IsAbs(newname) {
- newname = filepath.Join(ctx.tempDir, newname)
- }
- if err := os.MkdirAll(filepath.Dir(newname), 0777); err != nil {
- ctx.t.Fatal(err)
- }
- if err := os.Symlink(oldname, newname); err != nil {
- ctx.t.Fatal(err)
- }
-}
-
-func (ctx *testContext) readAllString(r io.Reader) string {
- if r == nil {
- return ""
- }
- bytes, err := ioutil.ReadAll(r)
- if err != nil {
- ctx.t.Fatal(err)
- }
- return string(bytes)
-}
-
-func verifyPath(cmd *command, expectedRegex string) error {
- compiledRegex := regexp.MustCompile(matchFullString(expectedRegex))
- if !compiledRegex.MatchString(cmd.Path) {
- return fmt.Errorf("path does not match %s. Actual %s", expectedRegex, cmd.Path)
- }
- return nil
-}
-
-func verifyArgCount(cmd *command, expectedCount int, expectedRegex string) error {
- compiledRegex := regexp.MustCompile(matchFullString(expectedRegex))
- count := 0
- for _, arg := range cmd.Args {
- if compiledRegex.MatchString(arg) {
- count++
- }
- }
- if count != expectedCount {
- return fmt.Errorf("expected %d matches for arg %s. All args: %s",
- expectedCount, expectedRegex, cmd.Args)
- }
- return nil
-}
-
-func verifyArgOrder(cmd *command, expectedRegexes ...string) error {
- compiledRegexes := []*regexp.Regexp{}
- for _, regex := range expectedRegexes {
- compiledRegexes = append(compiledRegexes, regexp.MustCompile(matchFullString(regex)))
- }
- expectedArgIndex := 0
- for _, arg := range cmd.Args {
- if expectedArgIndex == len(compiledRegexes) {
- break
- } else if compiledRegexes[expectedArgIndex].MatchString(arg) {
- expectedArgIndex++
- }
- }
- if expectedArgIndex != len(expectedRegexes) {
- return fmt.Errorf("expected args %s in order. All args: %s",
- expectedRegexes, cmd.Args)
- }
- return nil
-}
-
-func verifyEnvUpdate(cmd *command, expectedRegex string) error {
- compiledRegex := regexp.MustCompile(matchFullString(expectedRegex))
- for _, update := range cmd.EnvUpdates {
- if compiledRegex.MatchString(update) {
- return nil
- }
- }
- return fmt.Errorf("expected at least one match for env update %s. All env updates: %s",
- expectedRegex, cmd.EnvUpdates)
-}
-
-func verifyNoEnvUpdate(cmd *command, expectedRegex string) error {
- compiledRegex := regexp.MustCompile(matchFullString(expectedRegex))
- updates := cmd.EnvUpdates
- for _, update := range updates {
- if compiledRegex.MatchString(update) {
- return fmt.Errorf("expected no match for env update %s. All env updates: %s",
- expectedRegex, cmd.EnvUpdates)
- }
- }
- return nil
-}
-
-func hasInternalError(stderr string) bool {
- return strings.Contains(stderr, "Internal error")
-}
-
-func verifyInternalError(stderr string) error {
- if !hasInternalError(stderr) {
- return fmt.Errorf("expected an internal error. Got: %s", stderr)
- }
- if ok, _ := regexp.MatchString(`\w+.go:\d+`, stderr); !ok {
- return fmt.Errorf("expected a source line reference. Got: %s", stderr)
- }
- return nil
-}
-
-func verifyNonInternalError(stderr string, expectedRegex string) error {
- if hasInternalError(stderr) {
- return fmt.Errorf("expected a non internal error. Got: %s", stderr)
- }
- if ok, _ := regexp.MatchString(`\w+.go:\d+`, stderr); ok {
- return fmt.Errorf("expected no source line reference. Got: %s", stderr)
- }
- if ok, _ := regexp.MatchString(matchFullString(expectedRegex), strings.TrimSpace(stderr)); !ok {
- return fmt.Errorf("expected stderr matching %s. Got: %s", expectedRegex, stderr)
- }
- return nil
-}
-
-func matchFullString(regex string) string {
- return "^" + regex + "$"
-}
-
-func newExitCodeError(exitCode int) error {
- // It's actually hard to create an error that represents a command
- // with exit code. Using a real command instead.
- tmpCmd := exec.Command("/bin/sh", "-c", fmt.Sprintf("exit %d", exitCode))
- return tmpCmd.Run()
-}
-
-func isCompareToOldWrapperCmd(cmd *command) bool {
- for _, arg := range cmd.Args {
- if strings.Contains(arg, compareToOldWrapperFilePattern) {
- return true
- }
- }
- return false
-}
diff --git a/compiler_wrapper/thumb_flags.go b/compiler_wrapper/thumb_flags.go
deleted file mode 100644
index 0edaf4ff..00000000
--- a/compiler_wrapper/thumb_flags.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "strings"
-)
-
-func processThumbCodeFlags(builder *commandBuilder) {
- arch := builder.target.arch
- if builder.target.abi != "eabi" && (strings.HasPrefix(arch, "armv7") || strings.HasPrefix(arch, "armv8")) {
- // ARM32 specfic:
- // 1. Generate thumb codes by default. GCC is configured with
- // --with-mode=thumb and defaults to thumb mode already. This
- // changes the default behavior of clang and doesn't affect GCC.
- // 2. Do not force frame pointers on ARM32 (https://crbug.com/693137).
- builder.addPreUserArgs("-mthumb")
- builder.transformArgs(func(arg builderArg) string {
- if !arg.fromUser && arg.value == "-fno-omit-frame-pointer" {
- return ""
- }
- return arg.value
- })
- }
-}
diff --git a/compiler_wrapper/thumb_flags_test.go b/compiler_wrapper/thumb_flags_test.go
deleted file mode 100644
index 2e8f7e66..00000000
--- a/compiler_wrapper/thumb_flags_test.go
+++ /dev/null
@@ -1,113 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestAddThumbFlagForArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7, mainCc)))
- if err := verifyArgOrder(cmd, "-mthumb", mainCc); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV8, mainCc)))
- if err := verifyArgOrder(cmd, "-mthumb", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitThumbFlagForNonArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgCount(cmd, 0, "-mthumb"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitThumbFlagForEabiArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7Eabi, mainCc)))
- if err := verifyArgCount(cmd, 0, "-mthumb"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV8Eabi, mainCc)))
- if err := verifyArgCount(cmd, 0, "-mthumb"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestRemoveNoOmitFramePointerFlagForArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initNoOmitFramePointerConfig(ctx.cfg)
-
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7, mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV8, mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepNoOmitFramePointerFlagForNonArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initNoOmitFramePointerConfig(ctx.cfg)
-
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgCount(cmd, 1, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepNoOmitFramePointerFlagForEabiArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- initNoOmitFramePointerConfig(ctx.cfg)
-
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7Eabi, mainCc)))
- if err := verifyArgCount(cmd, 1, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
-
- cmd = ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV8Eabi, mainCc)))
- if err := verifyArgCount(cmd, 1, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepNoOmitFramePointIfGivenByUser(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7, "-fno-omit-frame-pointer", mainCc)))
- if err := verifyArgCount(cmd, 1, "-fno-omit-frame-pointer"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func initNoOmitFramePointerConfig(cfg *config) {
- cfg.commonFlags = []string{"-fno-omit-frame-pointer"}
-}
diff --git a/compiler_wrapper/unsupported_flags.go b/compiler_wrapper/unsupported_flags.go
deleted file mode 100644
index 48fee2f5..00000000
--- a/compiler_wrapper/unsupported_flags.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-func checkUnsupportedFlags(cmd *command) error {
- for _, arg := range cmd.Args {
- if arg == "-fstack-check" {
- return newUserErrorf(`option %q is not supported; crbug/485492`, arg)
- }
- }
- return nil
-}
diff --git a/compiler_wrapper/unsupported_flags_test.go b/compiler_wrapper/unsupported_flags_test.go
deleted file mode 100644
index a32eb521..00000000
--- a/compiler_wrapper/unsupported_flags_test.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestErrorOnFstatCheckFlag(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fstack-check", mainCc)))
- if err := verifyNonInternalError(stderr,
- `option "-fstack-check" is not supported; crbug/485492`); err != nil {
- t.Fatal(err)
- }
- })
-}
diff --git a/compiler_wrapper/x64_flags.go b/compiler_wrapper/x64_flags.go
deleted file mode 100644
index 40505cf8..00000000
--- a/compiler_wrapper/x64_flags.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "strings"
-)
-
-func processX86Flags(builder *commandBuilder) {
- arch := builder.target.arch
- if strings.HasPrefix(arch, "x86_64") || startswithI86(arch) {
- builder.addPostUserArgs("-mno-movbe")
- }
-}
-
-// Returns true if s starts with i.86.
-func startswithI86(s string) bool {
- return len(s) >= 4 && s[0] == 'i' && s[2:4] == "86"
-}
diff --git a/compiler_wrapper/x64_flags_test.go b/compiler_wrapper/x64_flags_test.go
deleted file mode 100644
index fd93728f..00000000
--- a/compiler_wrapper/x64_flags_test.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "testing"
-)
-
-func TestAddNoMovbeFlagOnX86(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyArgOrder(cmd, mainCc, "-mno-movbe"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddNoMovbeFlagOnI686(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand("./i686-cros-linux-gnu-gcc", mainCc)))
- if err := verifyArgOrder(cmd, mainCc, "-mno-movbe"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestDoNotAddNoMovbeFlagOnArm(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccArmV7, mainCc)))
- if err := verifyArgCount(cmd, 0, "-mno-movbe"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/crb/crb_driver.py b/crb/crb_driver.py
index c6403462..8c767fb8 100755
--- a/crb/crb_driver.py
+++ b/crb/crb_driver.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/cros_utils/buildbot_json.py b/cros_utils/buildbot_json.py
index 42a27744..8a9d9cb8 100755
--- a/cros_utils/buildbot_json.py
+++ b/cros_utils/buildbot_json.py
@@ -316,7 +316,7 @@ class NonAddressableNodeList(VirtualNodeList): # pylint: disable=W0223
@property
def cached_children(self):
if self.parent.cached_data is not None:
- for i in range(len(self.parent.cached_data[self.subkey])):
+ for i in xrange(len(self.parent.cached_data[self.subkey])):
yield self[i]
@property
@@ -352,7 +352,7 @@ class NonAddressableNodeList(VirtualNodeList): # pylint: disable=W0223
def __iter__(self):
"""Enables 'for i in obj:'. It returns children."""
if self.data:
- for i in range(len(self.data)):
+ for i in xrange(len(self.data)):
yield self[i]
def __getitem__(self, key):
@@ -868,7 +868,7 @@ class Builds(AddressableNodeList):
# Only cache keys here.
self.cache_keys()
if self._keys:
- for i in range(max(self._keys), -1, -1):
+ for i in xrange(max(self._keys), -1, -1):
yield self[i]
def cache_keys(self):
diff --git a/cros_utils/buildbot_utils.py b/cros_utils/buildbot_utils.py
index 35dc3ac6..911ea03e 100644
--- a/cros_utils/buildbot_utils.py
+++ b/cros_utils/buildbot_utils.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Utilities for launching and accessing ChromeOS buildbots."""
from __future__ import print_function
@@ -19,9 +17,9 @@ from cros_utils import logger
INITIAL_SLEEP_TIME = 7200 # 2 hours; wait time before polling buildbot.
SLEEP_TIME = 600 # 10 minutes; time between polling of buildbot.
-# Some of our slower builders (llvm-next) are taking more
-# than 11 hours. So, increase this TIME_OUT to 12 hours.
-TIME_OUT = 43200 # Decide the build is dead or will never finish
+# Some of our slower builders (llmv-next) are taking more
+# than 8 hours. So, increase this TIME_OUT to 9 hours.
+TIME_OUT = 32400 # Decide the build is dead or will never finish
class BuildbotTimeout(Exception):
@@ -61,10 +59,6 @@ def PeekTrybotImage(chromeos_root, buildbucket_id):
results = json.loads(out)[buildbucket_id]
- # Handle the case where the tryjob failed to launch correctly.
- if results['artifacts_url'] is None:
- return (results['status'], '')
-
return (results['status'], results['artifacts_url'].rstrip('/'))
@@ -116,7 +110,7 @@ def SubmitTryjob(chromeos_root,
# Launch buildbot with appropriate flags.
build = buildbot_name
- command = ('cros_sdk -- cros tryjob --yes --json --nochromesdk %s %s %s' %
+ command = ('cros tryjob --yes --json --nochromesdk %s %s %s' %
(tryjob_flags, patch_arg, build))
print('CMD: %s' % command)
_, out, _ = RunCommandInPath(chromeos_root, command)
@@ -251,10 +245,5 @@ def GetLatestImage(chromeos_root, path):
candidates.sort(reverse=True)
for c in candidates:
build = '%s/R%d-%d.%d.%d' % (path, c[0], c[1], c[2], c[3])
- # Blacklist "R79-12384.0.0" image released by mistake.
- # TODO(crbug.com/992242): Remove the filter by 2019-09-05.
- if c == [79, 12384, 0, 0]:
- continue
-
if DoesImageExist(chromeos_root, build):
return build
diff --git a/cros_utils/buildbot_utils_unittest.py b/cros_utils/buildbot_utils_unittest.py
index bfba8d78..c57b2d32 100755
--- a/cros_utils/buildbot_utils_unittest.py
+++ b/cros_utils/buildbot_utils_unittest.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
+
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Unittest for buildbot_utils.py."""
from __future__ import print_function
@@ -36,19 +34,9 @@ class TrybotTest(unittest.TestCase):
tryjob_out = (
'[{"buildbucket_id": "8952721143823688176", "build_config": '
'"cave-llvm-toolchain-tryjob", "url": '
- # pylint: disable=line-too-long
'"http://cros-goldeneye/chromeos/healthmonitoring/buildDetails?buildbucketId=8952721143823688176"}]'
)
- GSUTILS_LS = '\n'.join([
- 'gs://chromeos-image-archive/{0}/R78-12421.0.0/',
- 'gs://chromeos-image-archive/{0}/R78-12422.0.0/',
- 'gs://chromeos-image-archive/{0}/R78-12423.0.0/',
- # "R79-12384.0.0" image should be blacklisted.
- # TODO(crbug.com/992242): Remove the filter by 2019-09-05.
- 'gs://chromeos-image-archive/{0}/R79-12384.0.0/',
- ])
-
buildresult_out = (
'{"8952721143823688176": {"status": "pass", "artifacts_url":'
'"gs://chromeos-image-archive/trybot-elm-release-tryjob/R67-10468.0.0-'
@@ -124,26 +112,6 @@ class TrybotTest(unittest.TestCase):
buildbucket_id = buildbot_utils.ParseTryjobBuildbucketId(self.tryjob_out)
self.assertEqual(buildbucket_id, self.buildbucket_id)
- def testGetLatestImageValid(self):
- with patch.object(command_executer.CommandExecuter,
- 'ChrootRunCommandWOutput') as mocked_run:
- with patch.object(buildbot_utils, 'DoesImageExist') as mocked_imageexist:
- IMAGE_DIR = 'lulu-release'
- mocked_run.return_value = (0, self.GSUTILS_LS.format(IMAGE_DIR), '')
- mocked_imageexist.return_value = True
- image = buildbot_utils.GetLatestImage('', IMAGE_DIR)
- self.assertEqual(image, '{0}/R78-12423.0.0'.format(IMAGE_DIR))
-
- def testGetLatestImageInvalid(self):
- with patch.object(command_executer.CommandExecuter,
- 'ChrootRunCommandWOutput') as mocked_run:
- with patch.object(buildbot_utils, 'DoesImageExist') as mocked_imageexist:
- IMAGE_DIR = 'kefka-release'
- mocked_run.return_value = (0, self.GSUTILS_LS.format(IMAGE_DIR), '')
- mocked_imageexist.return_value = False
- image = buildbot_utils.GetLatestImage('', IMAGE_DIR)
- self.assertIsNone(image)
-
if __name__ == '__main__':
unittest.main()
diff --git a/cros_utils/command_executer.py b/cros_utils/command_executer.py
index 08e4e6ae..ae1b2962 100644
--- a/cros_utils/command_executer.py
+++ b/cros_utils/command_executer.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Utilities to run commands in outside/inside chroot and on the board."""
from __future__ import print_function
@@ -68,7 +66,6 @@ class CommandExecuter(object):
command_timeout=None,
terminated_timeout=10,
print_to_console=True,
- env=None,
except_handler=lambda p, e: None):
"""Run a command.
@@ -107,8 +104,7 @@ class CommandExecuter(object):
stderr=subprocess.PIPE,
shell=True,
preexec_fn=os.setsid,
- executable='/bin/bash',
- env=env)
+ executable='/bin/bash')
full_stdout = ''
full_stderr = ''
@@ -128,9 +124,9 @@ class CommandExecuter(object):
if command_terminator and command_terminator.IsTerminated():
os.killpg(os.getpgid(p.pid), signal.SIGTERM)
if self.logger:
- self.logger.LogError(
- 'Command received termination request. '
- 'Killed child process group.', print_to_console)
+ self.logger.LogError('Command received termination request. '
+ 'Killed child process group.',
+ print_to_console)
break
l = my_poll.poll(100)
@@ -160,19 +156,18 @@ class CommandExecuter(object):
elif (terminated_timeout is not None and
time.time() - terminated_time > terminated_timeout):
if self.logger:
- self.logger.LogWarning(
- 'Timeout of %s seconds reached since '
- 'process termination.' % terminated_timeout, print_to_console)
+ self.logger.LogWarning('Timeout of %s seconds reached since '
+ 'process termination.' %
+ terminated_timeout, print_to_console)
break
if (command_timeout is not None and
time.time() - started_time > command_timeout):
os.killpg(os.getpgid(p.pid), signal.SIGTERM)
if self.logger:
- self.logger.LogWarning(
- 'Timeout of %s seconds reached since process'
- 'started. Killed child process group.' % command_timeout,
- print_to_console)
+ self.logger.LogWarning('Timeout of %s seconds reached since process'
+ 'started. Killed child process group.' %
+ command_timeout, print_to_console)
break
if out == err == '':
@@ -349,8 +344,7 @@ class CommandExecuter(object):
command_timeout=None,
terminated_timeout=10,
print_to_console=True,
- cros_sdk_options='',
- env=None):
+ cros_sdk_options=''):
"""Runs a command within the chroot.
Returns triplet (returncode, stdout, stderr).
@@ -377,9 +371,8 @@ class CommandExecuter(object):
# the chroot already exists. We want the final returned output to skip
# the output from chroot creation steps.
if return_output:
- ret = self.RunCommand(
- 'cd %s; cros_sdk %s -- true' % (chromeos_root, cros_sdk_options),
- env=env)
+ ret = self.RunCommand('cd %s; cros_sdk %s -- true' % (chromeos_root,
+ cros_sdk_options))
if ret:
return (ret, '', '')
@@ -394,8 +387,7 @@ class CommandExecuter(object):
command_terminator=command_terminator,
command_timeout=command_timeout,
terminated_timeout=terminated_timeout,
- print_to_console=print_to_console,
- env=env)
+ print_to_console=print_to_console)
os.remove(command_file)
return ret
@@ -457,15 +449,15 @@ class CommandExecuter(object):
src = src + '/'
dest = dest + '/'
- if src_cros or dest_cros:
+ if src_cros == True or dest_cros == True:
if self.logger:
- self.logger.LogFatalIf(
- src_cros == dest_cros, 'Only one of src_cros and desc_cros can '
- 'be True.')
+ self.logger.LogFatalIf(src_cros == dest_cros,
+ 'Only one of src_cros and desc_cros can '
+ 'be True.')
self.logger.LogFatalIf(not chromeos_root, 'chromeos_root not given!')
elif src_cros == dest_cros or not chromeos_root:
sys.exit(1)
- if src_cros:
+ if src_cros == True:
cros_machine = src_machine
else:
cros_machine = dest_machine
@@ -475,7 +467,7 @@ class CommandExecuter(object):
'ssh -p ${FLAGS_ssh_port}' + ' -o StrictHostKeyChecking=no' +
' -o UserKnownHostsFile=$(mktemp)' + ' -i $TMP_PRIVATE_KEY')
rsync_prefix = "\nrsync -r -e \"%s\" " % ssh_command
- if dest_cros:
+ if dest_cros == True:
command += rsync_prefix + '%s root@%s:%s' % (src, dest_machine, dest)
return self.RunCommand(
command,
@@ -662,7 +654,6 @@ class MockCommandExecuter(CommandExecuter):
command_timeout=None,
terminated_timeout=10,
print_to_console=True,
- env=None,
except_handler=lambda p, e: None):
assert not command_timeout
cmd = str(cmd)
diff --git a/cros_utils/command_executer_unittest.py b/cros_utils/command_executer_unittest.py
index 4da4a4ac..f039ebc5 100755
--- a/cros_utils/command_executer_unittest.py
+++ b/cros_utils/command_executer_unittest.py
@@ -1,6 +1,4 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
"""Unittest for command_executer.py."""
from __future__ import print_function
diff --git a/cros_utils/contextlib3.py b/cros_utils/contextlib3.py
deleted file mode 100644
index 9fabbf6e..00000000
--- a/cros_utils/contextlib3.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Random utilties from Python3's contextlib."""
-
-from __future__ import division
-from __future__ import print_function
-
-import sys
-
-
-class ExitStack(object):
- """https://docs.python.org/3/library/contextlib.html#contextlib.ExitStack"""
-
- def __init__(self):
- self._stack = []
- self._is_entered = False
-
- def _assert_is_entered(self):
- # Strictly, entering has no effect on the operations that call this.
- # However, if you're trying to e.g. push things to an ExitStack that hasn't
- # yet been entered, that's likely a bug.
- assert self._is_entered, 'ExitStack op performed before entering'
-
- def __enter__(self):
- self._is_entered = True
- return self
-
- def _perform_exit(self, exc_type, exc, exc_traceback):
- # I suppose a better name for this is
- # `take_exception_handling_into_our_own_hands`, but that's harder to type.
- exception_handled = False
- while self._stack:
- fn = self._stack.pop()
- # The except clause below is meant to run as-if it's a `finally` block,
- # but `finally` blocks don't have easy access to exceptions currently in
- # flight. Hence, we do need to catch things like KeyboardInterrupt,
- # SystemExit, ...
- # pylint: disable=bare-except
- try:
- # If an __exit__ handler returns a truthy value, we should assume that
- # it handled the exception appropriately. Otherwise, we need to keep it
- # with us. (PEP 343)
- if fn(exc_type, exc, exc_traceback):
- exc_type, exc, exc_traceback = None, None, None
- exception_handled = True
- except:
- # Python2 doesn't appear to have the notion of 'exception causes',
- # which is super unfortunate. In the case:
- #
- # @contextlib.contextmanager
- # def foo()
- # try:
- # yield
- # finally:
- # raise ValueError
- #
- # with foo():
- # assert False
- #
- # ...Python will only note the ValueError; nothing about the failing
- # assertion is printed.
- #
- # I guess on the bright side, that means we don't have to fiddle with
- # __cause__s/etc.
- exc_type, exc, exc_traceback = sys.exc_info()
- exception_handled = True
-
- if not exception_handled:
- return False
-
- # Something changed. We either need to raise for ourselves, or note that
- # the exception has been suppressed.
- if exc_type is not None:
- raise exc_type, exc, exc_traceback
-
- # Otherwise, the exception was suppressed. Go us!
- return True
-
- def __exit__(self, exc_type, exc, exc_traceback):
- return self._perform_exit(exc_type, exc, exc_traceback)
-
- def close(self):
- """Unwinds the exit stack, unregistering all events"""
- self._perform_exit(None, None, None)
-
- def enter_context(self, cm):
- """Enters the given context manager, and registers it to be exited."""
- self._assert_is_entered()
-
- # The spec specifically notes that we should take __exit__ prior to calling
- # __enter__.
- exit_cleanup = cm.__exit__
- result = cm.__enter__()
- self._stack.append(exit_cleanup)
- return result
-
- # pylint complains about `exit` being redefined. `exit` is the documented
- # name of this param, and renaming it would break portability if someone
- # decided to `push(exit=foo)`, so just ignore the lint.
- # pylint: disable=redefined-builtin
- def push(self, exit):
- """Like `enter_context`, but won't enter the value given."""
- self._assert_is_entered()
- self._stack.append(exit.__exit__)
-
- def callback(self, callback, *args, **kwargs):
- """Performs the given callback on exit"""
- self._assert_is_entered()
-
- def fn(_exc_type, _exc, _exc_traceback):
- callback(*args, **kwargs)
-
- self._stack.append(fn)
diff --git a/cros_utils/contextlib3_test.py b/cros_utils/contextlib3_test.py
deleted file mode 100755
index 76c010f2..00000000
--- a/cros_utils/contextlib3_test.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for contextlib3"""
-
-from __future__ import division
-from __future__ import print_function
-
-import contextlib
-import unittest
-
-import contextlib3
-
-
-class SomeException(Exception):
- """Just an alternative to ValueError in the Exception class hierarchy."""
- pass
-
-
-class TestExitStack(unittest.TestCase):
- """Tests contextlib3.ExitStack"""
-
- def test_exceptions_in_exit_override_exceptions_in_with(self):
-
- @contextlib.contextmanager
- def raise_exit():
- raised = False
- try:
- yield
- except Exception:
- raised = True
- raise ValueError
- finally:
- self.assertTrue(raised)
-
- # (As noted in comments in contextlib3, this behavior is consistent with
- # how python2 works. Namely, if __exit__ raises, the exception from
- # __exit__ overrides the inner exception)
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(raise_exit())
- raise SomeException()
-
- def test_raising_in_exit_doesnt_block_later_exits(self):
- exited = []
-
- @contextlib.contextmanager
- def raise_exit():
- try:
- yield
- finally:
- exited.append('raise')
- raise ValueError
-
- @contextlib.contextmanager
- def push_exit():
- try:
- yield
- finally:
- exited.append('push')
-
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(push_exit())
- stack.enter_context(raise_exit())
- self.assertEqual(exited, ['raise', 'push'])
-
- exited = []
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(push_exit())
- stack.enter_context(raise_exit())
- raise SomeException()
- self.assertEqual(exited, ['raise', 'push'])
-
- def test_push_doesnt_enter_the_context(self):
- exited = []
-
- test_self = self
-
- class Manager(object):
- """A simple ContextManager for testing purposes"""
-
- def __enter__(self):
- test_self.fail('context manager was entered :(')
-
- def __exit__(self, *args, **kwargs):
- exited.append(1)
-
- with contextlib3.ExitStack() as stack:
- stack.push(Manager())
- self.assertEqual(exited, [])
- self.assertEqual(exited, [1])
-
- def test_callbacks_are_run_properly(self):
- callback_was_run = []
-
- def callback(arg, some_kwarg=None):
- self.assertEqual(arg, 41)
- self.assertEqual(some_kwarg, 42)
- callback_was_run.append(1)
-
- with contextlib3.ExitStack() as stack:
- stack.callback(callback, 41, some_kwarg=42)
- self.assertEqual(callback_was_run, [])
- self.assertEqual(callback_was_run, [1])
-
- callback_was_run = []
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.callback(callback, 41, some_kwarg=42)
- raise ValueError()
- self.assertEqual(callback_was_run, [1])
-
- def test_finallys_are_run(self):
- finally_run = []
-
- @contextlib.contextmanager
- def append_on_exit():
- try:
- yield
- finally:
- finally_run.append(0)
-
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(append_on_exit())
- raise ValueError()
- self.assertEqual(finally_run, [0])
-
- def test_unwinding_happens_in_reverse_order(self):
- exit_runs = []
-
- @contextlib.contextmanager
- def append_things(start_push, end_push):
- exit_runs.append(start_push)
- try:
- yield
- finally:
- exit_runs.append(end_push)
-
- with contextlib3.ExitStack() as stack:
- stack.enter_context(append_things(1, 4))
- stack.enter_context(append_things(2, 3))
- self.assertEqual(exit_runs, [1, 2, 3, 4])
-
- exit_runs = []
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(append_things(1, 4))
- stack.enter_context(append_things(2, 3))
- raise ValueError
- self.assertEqual(exit_runs, [1, 2, 3, 4])
-
- def test_exceptions_are_propagated(self):
-
- @contextlib.contextmanager
- def die_on_regular_exit():
- yield
- self.fail('Unreachable in theory')
-
- with self.assertRaises(ValueError):
- with contextlib3.ExitStack() as stack:
- stack.enter_context(die_on_regular_exit())
- raise ValueError()
-
- def test_exceptions_can_be_blocked(self):
-
- @contextlib.contextmanager
- def block():
- try:
- yield
- except Exception:
- pass
-
- with contextlib3.ExitStack() as stack:
- stack.enter_context(block())
- raise ValueError()
-
- def test_objects_are_returned_from_enter_context(self):
-
- @contextlib.contextmanager
- def yield_arg(arg):
- yield arg
-
- with contextlib3.ExitStack() as stack:
- val = stack.enter_context(yield_arg(1))
- self.assertEqual(val, 1)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/cros_utils/email_sender.py b/cros_utils/email_sender.py
index 0d2bd651..e5a20ad2 100755
--- a/cros_utils/email_sender.py
+++ b/cros_utils/email_sender.py
@@ -9,7 +9,6 @@ from email import encoders as Encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
-import getpass
import os
import smtplib
import tempfile
@@ -87,7 +86,7 @@ class EmailSender(object):
ce = command_executer.GetCommandExecuter(log_level='none')
if not email_from:
- email_from = getpass.getuser() + '@google.com'
+ email_from = os.path.basename(__file__)
to_list = ','.join(email_to)
@@ -107,14 +106,13 @@ class EmailSender(object):
subject = subject.replace("'", "'\\''")
if msg_type == 'html':
- command = ("sendgmr --to='%s' --from='%s' --subject='%s' "
- "--html_file='%s' --body_file=/dev/null" %
- (to_list, email_from, subject, body_filename))
+ command = ("sendgmr --to='%s' --subject='%s' --html_file='%s' "
+ '--body_file=/dev/null' % (to_list, subject, body_filename))
else:
- command = (
- "sendgmr --to='%s' --from='%s' --subject='%s' "
- "--body_file='%s'" % (to_list, email_from, subject, body_filename))
-
+ command = ("sendgmr --to='%s' --subject='%s' --body_file='%s'" %
+ (to_list, subject, body_filename))
+ if email_from:
+ command += ' --from=%s' % email_from
if email_cc:
cc_list = ','.join(email_cc)
command += " --cc='%s'" % cc_list
diff --git a/cros_utils/locks.py b/cros_utils/locks.py
index 4ecbe0a9..cb96368e 100644
--- a/cros_utils/locks.py
+++ b/cros_utils/locks.py
@@ -1,29 +1,24 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
"""Utilities for locking machines."""
from __future__ import print_function
import time
-import lock_machine
+import afe_lock_machine
import logger
def AcquireLock(machines, chromeos_root, timeout=1200):
- """Acquire lock for machine(s) with timeout."""
+ """Acquire lock for machine(s) with timeout, using AFE server for locking."""
start_time = time.time()
locked = True
sleep_time = min(10, timeout / 10.0)
while True:
try:
- lock_machine.LockManager(machines, False,
- chromeos_root).UpdateMachines(True)
+ afe_lock_machine.AFELockManager(machines, False, chromeos_root,
+ None).UpdateMachines(True)
break
except Exception as e:
if time.time() - start_time > timeout:
@@ -37,13 +32,13 @@ def AcquireLock(machines, chromeos_root, timeout=1200):
def ReleaseLock(machines, chromeos_root):
- """Release locked machine(s)."""
+ """Release locked machine(s), using AFE server for locking."""
unlocked = True
try:
- lock_machine.LockManager(machines, False,
- chromeos_root).UpdateMachines(False)
+ afe_lock_machine.AFELockManager(machines, False, chromeos_root,
+ None).UpdateMachines(False)
except Exception as e:
unlocked = False
- logger.GetLogger().LogWarning(
- 'Could not unlock %s. %s' % (repr(machines), str(e)))
+ logger.GetLogger().LogWarning('Could not unlock %s. %s' %
+ (repr(machines), str(e)))
return unlocked
diff --git a/cros_utils/misc.py b/cros_utils/misc.py
index 58076f40..f9034b89 100644
--- a/cros_utils/misc.py
+++ b/cros_utils/misc.py
@@ -161,11 +161,19 @@ def GetBuildImageCommand(board, dev=False):
def GetSetupBoardCommand(board,
+ gcc_version=None,
+ binutils_version=None,
usepkg=None,
force=None):
"""Get setup_board command."""
options = []
+ if gcc_version:
+ options.append('--gcc_version=%s' % gcc_version)
+
+ if binutils_version:
+ options.append('--binutils_version=%s' % binutils_version)
+
if usepkg:
options.append('--usepkg')
else:
@@ -174,9 +182,10 @@ def GetSetupBoardCommand(board,
if force:
options.append('--force')
- options.append('--accept-licenses=@CHROMEOS')
+ options.append('--accept_licenses=@CHROMEOS')
- return 'setup_board --board=%s %s' % (board, ' '.join(options))
+ return ('%s/setup_board --board=%s %s' % (CHROMEOS_SCRIPTS_DIR, board,
+ ' '.join(options)))
def CanonicalizePath(path):
diff --git a/cros_utils/no_pseudo_terminal_test.py b/cros_utils/no_pseudo_terminal_test.py
index 41d71539..43eabb13 100755..100644
--- a/cros_utils/no_pseudo_terminal_test.py
+++ b/cros_utils/no_pseudo_terminal_test.py
@@ -1,10 +1,3 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
"""Test to ensure we're not touching /dev/ptmx when running commands."""
from __future__ import print_function
@@ -25,9 +18,9 @@ class NoPsuedoTerminalTest(unittest.TestCase):
def _AttachStraceToSelf(self, output_file):
"""Attaches strace to the current process."""
- args = ['sudo', 'strace', '-o', output_file, '-p', str(os.getpid())]
+ args = ['strace', '-o', output_file, '-p', str(os.getpid())]
print(args)
- self._strace_process = subprocess.Popen(args, preexec_fn=os.setpgrp)
+ self._strace_process = subprocess.Popen(args)
# Wait until we see some activity.
start_time = time.time()
while time.time() - start_time < self.STRACE_TIMEOUT:
@@ -38,12 +31,9 @@ class NoPsuedoTerminalTest(unittest.TestCase):
def _KillStraceProcess(self):
"""Kills strace that was started by _AttachStraceToSelf()."""
- pgid = os.getpgid(self._strace_process.pid)
- args = ['sudo', 'kill', str(pgid)]
- if subprocess.call(args) == 0:
- os.waitpid(pgid, 0)
- return True
- return False
+ self._strace_process.terminate()
+ self._strace_process.wait()
+ return True
def testNoPseudoTerminalWhenRunningCommand(self):
"""Test to make sure we're not touching /dev/ptmx when running commands."""
diff --git a/cros_utils/tabulator.py b/cros_utils/tabulator.py
index 59e4d426..6936d35f 100644
--- a/cros_utils/tabulator.py
+++ b/cros_utils/tabulator.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Table generating, analyzing and printing functions.
This defines several classes that are used to generate, analyze and print
@@ -88,17 +86,6 @@ def _StripNone(results):
return res
-def _RemoveMinMax(cell, values):
- if len(values) < 3:
- print('WARNING: Values count is less than 3, not ignoring min/max values')
- print('WARNING: Cell name:', cell.name, 'Values:', values)
- return values
-
- values.remove(min(values))
- values.remove(max(values))
- return values
-
-
class TableGenerator(object):
"""Creates a table from a list of list of dicts.
@@ -165,7 +152,17 @@ class TableGenerator(object):
def GetTable(self, number_of_rows=sys.maxint):
"""Returns a table from a list of list of dicts.
- Examples:
+ The list of list of dicts is passed into the constructor of TableGenerator.
+ This method converts that into a canonical list of lists which represents a
+ table of values.
+
+ Args:
+ number_of_rows: Maximum number of rows to return from the table.
+
+ Returns:
+ A list of lists which is the table.
+
+ Example:
We have the following runs:
[[{"k1": "v1", "k2": "v2"}, {"k1": "v3"}],
[{"k1": "v4", "k4": "v5"}]]
@@ -178,16 +175,6 @@ class TableGenerator(object):
["k4", [], ["v5"]]]
The returned table can then be processed further by other classes in this
module.
-
- The list of list of dicts is passed into the constructor of TableGenerator.
- This method converts that into a canonical list of lists which represents a
- table of values.
-
- Args:
- number_of_rows: Maximum number of rows to return from the table.
-
- Returns:
- A list of lists which is the table.
"""
keys = self._GetKeys()
header = [self._key_name] + self._labels
@@ -200,7 +187,7 @@ class TableGenerator(object):
v = []
for run in run_list:
if k in run:
- if isinstance(run[k], list):
+ if type(run[k]) is list:
val = run[k][0]
unit = run[k][1]
else:
@@ -220,192 +207,6 @@ class TableGenerator(object):
return table
-class SamplesTableGenerator(TableGenerator):
- """Creates a table with only samples from the results
-
- The main public function is called GetTable().
-
- Different than TableGenerator, self._runs is now a dict of {benchmark: runs}
- We are expecting there is 'samples' in `runs`.
- """
-
- def __init__(self, run_keyvals, label_list, iter_counts, weights):
- TableGenerator.__init__(
- self, run_keyvals, label_list, key_name='Benchmarks')
- self._iter_counts = iter_counts
- self._weights = weights
-
- def _GetKeys(self):
- keys = self._runs.keys()
- return self._SortKeys(keys)
-
- def GetTable(self, number_of_rows=sys.maxint):
- """Returns a tuple, which contains three args:
-
- 1) a table from a list of list of dicts.
- 2) updated benchmark_results run_keyvals with composite benchmark
- 3) updated benchmark_results iter_count with composite benchmark
-
- The dict of list of list of dicts is passed into the constructor of
- SamplesTableGenerator.
- This method converts that into a canonical list of lists which
- represents a table of values.
-
- Examples:
- We have the following runs:
- {bench1: [[{"samples": "v1"}, {"samples": "v2"}],
- [{"samples": "v3"}, {"samples": "v4"}]]
- bench2: [[{"samples": "v21"}, None],
- [{"samples": "v22"}, {"samples": "v23"}]]}
- and weights of benchmarks:
- {bench1: w1, bench2: w2}
- and the following labels:
- ["vanilla", "modified"]
- it will return:
- [["Benchmark", "Weights", "vanilla", "modified"]
- ["bench1", w1,
- ((2, 0), ["v1*w1", "v2*w1"]), ((2, 0), ["v3*w1", "v4*w1"])]
- ["bench2", w2,
- ((1, 1), ["v21*w2", None]), ((2, 0), ["v22*w2", "v23*w2"])]
- ["Composite Benchmark", N/A,
- ((1, 1), ["v1*w1+v21*w2", None]),
- ((2, 0), ["v3*w1+v22*w2", "v4*w1+ v23*w2"])]]
- The returned table can then be processed further by other classes in this
- module.
-
- Args:
- number_of_rows: Maximum number of rows to return from the table.
-
- Returns:
- A list of lists which is the table.
- """
- keys = self._GetKeys()
- header = [self._key_name, 'Weights'] + self._labels
- table = [header]
- rows = 0
- iterations = 0
-
- for k in keys:
- bench_runs = self._runs[k]
- unit = None
- all_runs_empty = all(not dict for label in bench_runs for dict in label)
- if all_runs_empty:
- cell = Cell()
- cell.string_value = 'Benchmark %s contains no result.' + \
- ' Is the benchmark name valid?' % k
- table.append([cell])
- else:
- row = [k]
- row.append(self._weights[k])
- for run_list in bench_runs:
- run_pass = 0
- run_fail = 0
- v = []
- for run in run_list:
- if 'samples' in run:
- if isinstance(run['samples'], list):
- val = run['samples'][0] * self._weights[k]
- unit = run['samples'][1]
- else:
- val = run['samples'] * self._weights[k]
- v.append(val)
- run_pass += 1
- else:
- v.append(None)
- run_fail += 1
- one_tuple = ((run_pass, run_fail), v)
- if iterations != 0 and iterations != run_pass + run_fail:
- raise ValueError('Iterations of each benchmark run ' \
- 'are not the same')
- iterations = run_pass + run_fail
- row.append(one_tuple)
- if unit:
- keyname = row[0] + ' (%s) ' % unit
- row[0] = keyname
- table.append(row)
- rows += 1
- if rows == number_of_rows:
- break
-
- k = 'Composite Benchmark'
- if k in keys:
- raise RuntimeError('Composite benchmark already exists in results')
-
- # Create a new composite benchmark row at the bottom of the summary table
- # The new row will be like the format in example:
- # ["Composite Benchmark", N/A,
- # ((1, 1), ["v1*w1+v21*w2", None]),
- # ((2, 0), ["v3*w1+v22*w2", "v4*w1+ v23*w2"])]]
- # First we will create a row of [key, weight, [[0] * iterations] * labels]
- row = [None] * len(header)
- row[0] = '%s (samples)' % k
- row[1] = 'N/A'
- for label_index in range(2, len(row)):
- row[label_index] = [0] * iterations
-
- for cur_row in table[1:]:
- # Iterate through each benchmark
- if len(cur_row) > 1:
- for label_index in range(2, len(cur_row)):
- # Iterate through each run in a single benchmark
- # each result should look like ((pass, fail), [values_list])
- bench_runs = cur_row[label_index][1]
- for index in range(iterations):
- # Accumulate each run result to composite benchmark run
- # If any run fails, then we set this run for composite benchmark
- # to None so that we know it fails.
- if bench_runs[index] and row[label_index][index] != None:
- row[label_index][index] += bench_runs[index]
- else:
- row[label_index][index] = None
- else:
- # One benchmark totally fails, no valid data will be in final result
- for label_index in range(2, len(row)):
- row[label_index] = [None] * iterations
- break
- # Calculate pass and fail count for composite benchmark
- for label_index in range(2, len(row)):
- run_pass = 0
- run_fail = 0
- for run in row[label_index]:
- if run:
- run_pass += 1
- else:
- run_fail += 1
- row[label_index] = ((run_pass, run_fail), row[label_index])
- table.append(row)
-
- # Now that we have the table genearted, we want to store this new composite
- # benchmark into the benchmark_result in ResultReport object.
- # This will be used to generate a full table which contains our composite
- # benchmark.
- # We need to create composite benchmark result and add it to keyvals in
- # benchmark_results.
- v = []
- for label in row[2:]:
- # each label's result looks like ((pass, fail), [values])
- benchmark_runs = label[1]
- # List of values of each label
- single_run_list = []
- for run in benchmark_runs:
- # Result of each run under the same label is a dict of keys.
- # Here the only key we will add for composite benchmark is the
- # weighted_samples we added up.
- one_dict = {}
- if run:
- one_dict[u'weighted_samples'] = [run, u'samples']
- one_dict['retval'] = 0
- else:
- one_dict['retval'] = 1
- single_run_list.append(one_dict)
- v.append(single_run_list)
-
- self._runs[k] = v
- self._iter_counts[k] = iterations
-
- return (table, self._runs, self._iter_counts)
-
-
class Result(object):
"""A class that respresents a single result.
@@ -522,8 +323,8 @@ class NonEmptyCountResult(Result):
len_values = len(values)
len_baseline_values = len(baseline_values)
tmp_cell = Cell()
- tmp_cell.value = 1.0 + (
- float(cell.value - base_value) / (max(len_values, len_baseline_values)))
+ tmp_cell.value = 1.0 + (float(cell.value - base_value) /
+ (max(len_values, len_baseline_values)))
f.Compute(tmp_cell)
cell.bgcolor = tmp_cell.bgcolor
@@ -541,13 +342,7 @@ class StringMeanResult(Result):
class AmeanResult(StringMeanResult):
"""Arithmetic mean."""
- def __init__(self, ignore_min_max=False):
- super(AmeanResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
cell.value = numpy.mean(values)
@@ -556,11 +351,6 @@ class RawResult(Result):
pass
-class IterationResult(Result):
- """Iteration result."""
- pass
-
-
class MinResult(Result):
"""Minimum."""
@@ -597,26 +387,14 @@ class NumericalResult(Result):
class StdResult(NumericalResult):
"""Standard deviation."""
- def __init__(self, ignore_min_max=False):
- super(StdResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
cell.value = numpy.std(values)
class CoeffVarResult(NumericalResult):
"""Standard deviation / Mean"""
- def __init__(self, ignore_min_max=False):
- super(CoeffVarResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
if numpy.mean(values) != 0.0:
noise = numpy.abs(numpy.std(values) / numpy.mean(values))
else:
@@ -649,14 +427,7 @@ class ComparisonResult(Result):
class PValueResult(ComparisonResult):
"""P-value."""
- def __init__(self, ignore_min_max=False):
- super(PValueResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
- baseline_values = _RemoveMinMax(cell, baseline_values)
if len(values) < 2 or len(baseline_values) < 2:
cell.value = float('nan')
return
@@ -671,13 +442,6 @@ class KeyAwareComparisonResult(ComparisonResult):
"""Automatic key aware comparison."""
def _IsLowerBetter(self, key):
- # Units in histograms should include directions
- if 'smallerIsBetter' in key:
- return True
- if 'biggerIsBetter' in key:
- return False
-
- # For units in chartjson:
# TODO(llozano): Trying to guess direction by looking at the name of the
# test does not seem like a good idea. Test frameworks should provide this
# info explicitly. I believe Telemetry has this info. Need to find it out.
@@ -704,7 +468,7 @@ class KeyAwareComparisonResult(ComparisonResult):
'dropped_percent', '(ms)', '(seconds)', '--ms',
'--average_num_missing_tiles', '--experimental_jank',
'--experimental_mean_frame', '--experimental_median_frame_time',
- '--total_deferred_image_decode_count', '--seconds', 'samples'
+ '--total_deferred_image_decode_count', '--seconds'
]
return any([l in key for l in lower_is_better_keys])
@@ -718,14 +482,7 @@ class KeyAwareComparisonResult(ComparisonResult):
class AmeanRatioResult(KeyAwareComparisonResult):
"""Ratio of arithmetic means of values vs. baseline values."""
- def __init__(self, ignore_min_max=False):
- super(AmeanRatioResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
- baseline_values = _RemoveMinMax(cell, baseline_values)
if numpy.mean(baseline_values) != 0:
cell.value = numpy.mean(values) / numpy.mean(baseline_values)
elif numpy.mean(values) != 0:
@@ -739,14 +496,7 @@ class AmeanRatioResult(KeyAwareComparisonResult):
class GmeanRatioResult(KeyAwareComparisonResult):
"""Ratio of geometric means of values vs. baseline values."""
- def __init__(self, ignore_min_max=False):
- super(GmeanRatioResult, self).__init__()
- self.ignore_min_max = ignore_min_max
-
def _ComputeFloat(self, cell, values, baseline_values):
- if self.ignore_min_max:
- values = _RemoveMinMax(cell, values)
- baseline_values = _RemoveMinMax(cell, baseline_values)
if self._GetGmean(baseline_values) != 0:
cell.value = self._GetGmean(values) / self._GetGmean(baseline_values)
elif self._GetGmean(values) != 0:
@@ -867,13 +617,6 @@ class PValueFormat(Format):
power=1)
-class WeightFormat(Format):
- """Formatting for weight in cwp mode."""
-
- def _ComputeFloat(self, cell):
- cell.string_value = '%0.4f' % float(cell.value)
-
-
class StorageFormat(Format):
"""Format the cell as a storage number.
@@ -923,7 +666,8 @@ class PercentFormat(Format):
def _ComputeFloat(self, cell):
cell.string_value = '%+1.1f%%' % ((float(cell.value) - 1) * 100)
- cell.color = self._GetColor(cell.value, Color(255, 0, 0, 0),
+ cell.color = self._GetColor(cell.value,
+ Color(255, 0, 0, 0),
Color(0, 0, 0, 0), Color(0, 255, 0, 0))
@@ -936,7 +680,8 @@ class RatioFormat(Format):
def _ComputeFloat(self, cell):
cell.string_value = '%+1.1f%%' % ((cell.value - 1) * 100)
- cell.color = self._GetColor(cell.value, Color(255, 0, 0, 0),
+ cell.color = self._GetColor(cell.value,
+ Color(255, 0, 0, 0),
Color(0, 0, 0, 0), Color(0, 255, 0, 0))
@@ -952,7 +697,8 @@ class ColorBoxFormat(Format):
def _ComputeFloat(self, cell):
cell.string_value = '--'
- bgcolor = self._GetColor(cell.value, Color(255, 0, 0, 0),
+ bgcolor = self._GetColor(cell.value,
+ Color(255, 0, 0, 0),
Color(255, 255, 255, 0), Color(0, 255, 0, 0))
cell.bgcolor = bgcolor
cell.color = bgcolor
@@ -1031,19 +777,15 @@ class TableFormatter(object):
formats to apply to the table and returns a table of cells.
"""
- def __init__(self, table, columns, samples_table=False):
+ def __init__(self, table, columns):
"""The constructor takes in a table and a list of columns.
Args:
table: A list of lists of values.
- columns: A list of column containing what to produce and how to format
- it.
- samples_table: A flag to check whether we are generating a table of
- samples in CWP apporximation mode.
+ columns: A list of column containing what to produce and how to format it.
"""
self._table = table
self._columns = columns
- self._samples_table = samples_table
self._table_columns = []
self._out_table = []
@@ -1052,48 +794,30 @@ class TableFormatter(object):
all_failed = False
for row in self._table[1:]:
- # If we are generating samples_table, the second value will be weight
- # rather than values.
- start_col = 2 if self._samples_table else 1
# It does not make sense to put retval in the summary table.
if str(row[0]) == 'retval' and table_type == 'summary':
# Check to see if any runs passed, and update all_failed.
all_failed = True
- for values in row[start_col:]:
+ for values in row[1:]:
if 0 in values:
all_failed = False
continue
key = Cell()
key.string_value = str(row[0])
out_row = [key]
- if self._samples_table:
- # Add one column for weight if in samples_table mode
- weight = Cell()
- weight.value = row[1]
- f = WeightFormat()
- f.Compute(weight)
- out_row.append(weight)
baseline = None
- for results in row[start_col:]:
- column_start = 0
- values = None
- # If generating sample table, we will split a tuple of iterations info
- # from the results
- if isinstance(results, tuple):
- it, values = results
- column_start = 1
- cell = Cell()
- cell.string_value = '[%d: %d]' % (it[0], it[1])
- out_row.append(cell)
- if not row_index:
- self._table_columns.append(self._columns[0])
- else:
- values = results
- # Parse each column
- for column in self._columns[column_start:]:
+ for values in row[1:]:
+ for column in self._columns:
cell = Cell()
cell.name = key.string_value
- if not column.result.NeedsBaseline() or baseline is not None:
+ if column.result.NeedsBaseline():
+ if baseline is not None:
+ column.result.Compute(cell, values, baseline)
+ column.fmt.Compute(cell)
+ out_row.append(cell)
+ if not row_index:
+ self._table_columns.append(column)
+ else:
column.result.Compute(cell, values, baseline)
column.fmt.Compute(cell)
out_row.append(cell)
@@ -1129,13 +853,8 @@ class TableFormatter(object):
"""Generate Column name at the top of table."""
key = Cell()
key.header = True
- key.string_value = 'Keys' if not self._samples_table else 'Benchmarks'
+ key.string_value = 'Keys'
header = [key]
- if self._samples_table:
- weight = Cell()
- weight.header = True
- weight.string_value = 'Weights'
- header.append(weight)
for column in self._table_columns:
cell = Cell()
cell.header = True
@@ -1196,7 +915,7 @@ class TableFormatter(object):
# Put the number of pass/fail iterations in the image label header.
if column_position > 0 and retval_row:
retval_values = retval_row[column_position]
- if isinstance(retval_values, list):
+ if type(retval_values) is list:
passes, fails = self.GetPassesAndFails(retval_values)
cell.string_value = str(label) + ' (pass:%d fail:%d)' % (passes,
fails)
@@ -1205,13 +924,9 @@ class TableFormatter(object):
else:
cell.string_value = str(label)
if top_header:
- if not self._samples_table or (self._samples_table and
- len(top_header) == 2):
- cell.colspan = base_colspan
+ cell.colspan = base_colspan
if len(top_header) > 1:
- if not self._samples_table or (self._samples_table and
- len(top_header) > 2):
- cell.colspan = compare_colspan
+ cell.colspan = compare_colspan
top_header.append(cell)
column_position = column_position + 1
self._out_table = [top_header] + self._out_table
@@ -1420,12 +1135,8 @@ class TablePrinter(object):
def GetSimpleTable(table, out_to=TablePrinter.CONSOLE):
"""Prints a simple table.
- This is used by code that has a very simple list-of-lists and wants to
- produce a table with ameans, a percentage ratio of ameans and a colorbox.
-
- Examples:
- GetSimpleConsoleTable([["binary", "b1", "b2"],["size", "300", "400"]])
- will produce a colored table that can be printed to the console.
+ This is used by code that has a very simple list-of-lists and wants to produce
+ a table with ameans, a percentage ratio of ameans and a colorbox.
Args:
table: a list of lists.
@@ -1433,6 +1144,10 @@ def GetSimpleTable(table, out_to=TablePrinter.CONSOLE):
Returns:
A string version of the table that can be printed to the console.
+
+ Example:
+ GetSimpleConsoleTable([["binary", "b1", "b2"],["size", "300", "400"]])
+ will produce a colored table that can be printed to the console.
"""
columns = [
Column(AmeanResult(), Format()),
@@ -1470,15 +1185,15 @@ def GetComplexTable(runs, labels, out_to=TablePrinter.CONSOLE):
tg = TableGenerator(runs, labels, TableGenerator.SORT_BY_VALUES_DESC)
table = tg.GetTable()
columns = [
- Column(LiteralResult(), Format(), 'Literal'),
- Column(AmeanResult(), Format()),
- Column(StdResult(), Format()),
- Column(CoeffVarResult(), CoeffVarFormat()),
- Column(NonEmptyCountResult(), Format()),
- Column(AmeanRatioResult(), PercentFormat()),
- Column(AmeanRatioResult(), RatioFormat()),
- Column(GmeanRatioResult(), RatioFormat()),
- Column(PValueResult(), PValueFormat())
+ Column(LiteralResult(), Format(), 'Literal'), Column(
+ AmeanResult(), Format()), Column(StdResult(), Format()), Column(
+ CoeffVarResult(), CoeffVarFormat()), Column(
+ NonEmptyCountResult(), Format()),
+ Column(AmeanRatioResult(), PercentFormat()), Column(
+ AmeanRatioResult(), RatioFormat()), Column(GmeanRatioResult(),
+ RatioFormat()), Column(
+ PValueResult(),
+ PValueFormat())
]
tf = TableFormatter(table, columns)
cell_table = tf.GetCellTable()
@@ -1498,29 +1213,27 @@ if __name__ == '__main__':
'k8': 'PASS',
'k9': 'PASS',
'k10': '0'
- },
- {
- 'k1': '13',
- 'k2': '14',
- 'k3': '15',
- 'ms_1': '10',
- 'k8': 'PASS',
- 'k9': 'FAIL',
- 'k10': '0'
- }],
- [{
- 'k1': '50',
- 'k2': '51',
- 'k3': '52',
- 'k4': '53',
- 'k5': '35',
- 'k6': '45',
- 'ms_1': '200',
- 'ms_2': '20',
- 'k7': 'FAIL',
- 'k8': 'PASS',
- 'k9': 'PASS'
- }]]
+ }, {
+ 'k1': '13',
+ 'k2': '14',
+ 'k3': '15',
+ 'ms_1': '10',
+ 'k8': 'PASS',
+ 'k9': 'FAIL',
+ 'k10': '0'
+ }], [{
+ 'k1': '50',
+ 'k2': '51',
+ 'k3': '52',
+ 'k4': '53',
+ 'k5': '35',
+ 'k6': '45',
+ 'ms_1': '200',
+ 'ms_2': '20',
+ 'k7': 'FAIL',
+ 'k8': 'PASS',
+ 'k9': 'PASS'
+ }]]
labels = ['vanilla', 'modified']
t = GetComplexTable(runs, labels, TablePrinter.CONSOLE)
print(t)
diff --git a/cros_utils/tabulator_test.py b/cros_utils/tabulator_test.py
index 33c8da25..21cd1e73 100644
--- a/cros_utils/tabulator_test.py
+++ b/cros_utils/tabulator_test.py
@@ -1,7 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright 2012 Google Inc. All Rights Reserved.
"""Tests for the tabulator module."""
from __future__ import print_function
@@ -73,22 +70,14 @@ class TabulatorTest(unittest.TestCase):
b = tabulator.Result()._GetGmean(a)
self.assertTrue(b >= 0.99e+308 and b <= 1.01e+308)
- def testIgnoreMinMax(self):
- amr = tabulator.AmeanResult(ignore_min_max=True)
- cell = tabulator.Cell()
- values = [1, 2]
- amr.Compute(cell, values, None)
- self.assertTrue(cell.value == 1.5)
- values = [1, 2, 8]
- amr.Compute(cell, values, None)
- self.assertTrue(cell.value == 2)
-
def testTableGenerator(self):
- # yapf: disable
- runs = [[{'k1': '10', 'k2': '12'},
- {'k1': '13', 'k2': '14', 'k3': '15'}],
- [{'k1': '50', 'k2': '51', 'k3': '52', 'k4': '53'}]]
- # yapf: enable
+ runs = [[{'k1': '10',
+ 'k2': '12'}, {'k1': '13',
+ 'k2': '14',
+ 'k3': '15'}], [{'k1': '50',
+ 'k2': '51',
+ 'k3': '52',
+ 'k4': '53'}]]
labels = ['vanilla', 'modified']
tg = tabulator.TableGenerator(runs, labels)
table = tg.GetTable()
@@ -114,52 +103,6 @@ class TabulatorTest(unittest.TestCase):
table = tf.GetCellTable()
self.assertTrue(table)
- def testSamplesTableGenerator(self):
- # yapf: disable
- keyvals = {
- 'bench1': [[{'samples': 1}, {'samples': 2}],
- [{'samples': 3}, {'samples': 4}]],
- 'bench2': [[{'samples': 5}, {}],
- [{'samples': 6}, {'samples': 7}]]
- }
- # yapf: enable
- weights = {'bench1': 0.2, 'bench2': 0.7}
- iter_counts = {'bench1': 2, 'bench2': 2}
- labels = ['vanilla', 'modified']
- tg = tabulator.SamplesTableGenerator(keyvals, labels, iter_counts, weights)
- (table, new_keyvals, new_iter_counts) = tg.GetTable()
-
- columns = [
- tabulator.Column(tabulator.IterationResult(), tabulator.Format()),
- tabulator.Column(tabulator.AmeanResult(), tabulator.Format()),
- tabulator.Column(tabulator.AmeanRatioResult(),
- tabulator.PercentFormat()),
- ]
- # This is the function to load column info.
- tf = tabulator.TableFormatter(table, columns, samples_table=True)
- # This is the function where to do all weighting calculation.
- cell_table = tf.GetCellTable('summary')
- self.assertTrue(cell_table)
-
- header = table.pop(0)
- self.assertTrue(header == ['Benchmarks', 'Weights', 'vanilla', 'modified'])
- row = table.pop(0)
- # yapf: disable
- self.assertTrue(row == ['bench1', 0.2,
- ((2, 0), [1 * 0.2, 2 * 0.2]),
- ((2, 0), [3 * 0.2, 4 * 0.2])])
- row = table.pop(0)
- self.assertTrue(row == ['bench2', 0.7,
- ((1, 1), [5 * 0.7, None]),
- ((2, 0), [6 * 0.7, 7 * 0.7])])
- row = table.pop(0)
- self.assertTrue(row == ['Composite Benchmark (samples)', 'N/A',
- ((1, 1), [1 * 0.2 + 5 * 0.7, None]),
- ((2, 0), [3 * 0.2 + 6 * 0.7, 4 * 0.2 + 7 * 0.7])])
- # yapf: enable
- self.assertTrue('Composite Benchmark' in new_keyvals.keys())
- self.assertTrue('Composite Benchmark' in new_iter_counts.keys())
-
def testColspan(self):
simple_table = [
['binary', 'b1', 'b2', 'b3'],
diff --git a/crosperf/benchmark.py b/crosperf/benchmark.py
index 5c11e27e..60ac778b 100644
--- a/crosperf/benchmark.py
+++ b/crosperf/benchmark.py
@@ -1,16 +1,10 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Define a type that wraps a Benchmark instance."""
-
-from __future__ import division
from __future__ import print_function
import math
-#FIXME(denik): Fix the import in chroot.
-#pylint: disable=import-error
from scipy import stats
# See crbug.com/673558 for how these are estimated.
@@ -23,7 +17,6 @@ _estimated_stddev = {
'dromaeo.domcoremodify': 0.011,
'graphics_WebGLAquarium': 0.008,
'page_cycler_v2.typical_25': 0.021,
- 'loading.desktop': 0.021, # Copied from page_cycler initially
}
@@ -63,10 +56,7 @@ class Benchmark(object):
suite='',
show_all_results=False,
retries=0,
- run_local=False,
- cwp_dso='',
- weight=0,
- turbostat=True):
+ run_local=False):
self.name = name
#For telemetry, this is the benchmark name.
self.test_name = test_name
@@ -84,6 +74,3 @@ class Benchmark(object):
if run_local and self.suite != 'telemetry_Crosperf':
raise RuntimeError('run_local is only supported by telemetry_Crosperf.')
self.run_local = run_local
- self.cwp_dso = cwp_dso
- self.weight = weight
- self.turbostat = turbostat
diff --git a/crosperf/benchmark_run.py b/crosperf/benchmark_run.py
index 6512b8ea..bba71a36 100644
--- a/crosperf/benchmark_run.py
+++ b/crosperf/benchmark_run.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Module of benchmark runs."""
from __future__ import print_function
@@ -31,18 +29,8 @@ STATUS_PENDING = 'PENDING'
class BenchmarkRun(threading.Thread):
"""The benchmarkrun class."""
- def __init__(self,
- name,
- benchmark,
- label,
- iteration,
- cache_conditions,
- machine_manager,
- logger_to_use,
- log_level,
- share_cache,
- dut_config,
- enable_aslr=False):
+ def __init__(self, name, benchmark, label, iteration, cache_conditions,
+ machine_manager, logger_to_use, log_level, share_cache):
threading.Thread.__init__(self)
self.name = name
self._logger = logger_to_use
@@ -55,8 +43,7 @@ class BenchmarkRun(threading.Thread):
self.retval = None
self.run_completed = False
self.machine_manager = machine_manager
- self.suite_runner = SuiteRunner(
- dut_config, self._logger, self.log_level, enable_aslr=enable_aslr)
+ self.suite_runner = SuiteRunner(self._logger, self.log_level)
self.machine = None
self.cache_conditions = cache_conditions
self.runs_complete = 0
@@ -85,7 +72,7 @@ class BenchmarkRun(threading.Thread):
self.label.board, self.cache_conditions, self._logger,
self.log_level, self.label, self.share_cache,
self.benchmark.suite, self.benchmark.show_all_results,
- self.benchmark.run_local, self.benchmark.cwp_dso)
+ self.benchmark.run_local)
self.result = self.cache.ReadResult()
self.cache_hit = (self.result is not None)
@@ -108,8 +95,7 @@ class BenchmarkRun(threading.Thread):
err = 'No cache hit.'
self.result = Result.CreateFromRun(
self._logger, self.log_level, self.label, self.machine, output, err,
- retval, self.benchmark.test_name, self.benchmark.suite,
- self.benchmark.cwp_dso)
+ retval, self.benchmark.test_name, self.benchmark.suite)
else:
self._logger.LogOutput('%s: No cache hit.' % self.name)
@@ -118,7 +104,6 @@ class BenchmarkRun(threading.Thread):
self.machine = self.AcquireMachine()
self.cache.machine = self.machine
self.result = self.RunTest(self.machine)
- # TODO(denik): Add Top5 report into html.
self.cache.remote = self.machine.name
self.label.chrome_version = self.machine_manager.GetChromeVersion(
@@ -181,9 +166,9 @@ class BenchmarkRun(threading.Thread):
machine = self.machine_manager.AcquireMachine(self.label)
if machine:
- self._logger.LogOutput(
- '%s: Machine %s acquired at %s' % (self.name, machine.name,
- datetime.datetime.now()))
+ self._logger.LogOutput('%s: Machine %s acquired at %s' %
+ (self.name, machine.name,
+ datetime.datetime.now()))
break
time.sleep(10)
return machine
@@ -205,7 +190,7 @@ class BenchmarkRun(threading.Thread):
raise SyntaxError('perf_args must start with either record or stat')
extra_test_args = [
'--profiler=custom_perf',
- ('--profiler_args=\'perf_options="%s"\'' % perf_args)
+ ("--profiler_args='perf_options=\"%s\"'" % perf_args)
]
return ' '.join(extra_test_args)
else:
@@ -226,8 +211,7 @@ class BenchmarkRun(threading.Thread):
self.run_completed = True
return Result.CreateFromRun(self._logger, self.log_level, self.label,
self.machine, out, err, retval,
- self.benchmark.test_name, self.benchmark.suite,
- self.benchmark.cwp_dso)
+ self.benchmark.test_name, self.benchmark.suite)
def SetCacheConditions(self, cache_conditions):
self.cache_conditions = cache_conditions
@@ -260,7 +244,7 @@ class MockBenchmarkRun(BenchmarkRun):
self.label.board, self.cache_conditions, self._logger,
self.log_level, self.label, self.share_cache,
self.benchmark.suite, self.benchmark.show_all_results,
- self.benchmark.run_local, self.benchmark.cwp_dso)
+ self.benchmark.run_local)
self.result = self.cache.ReadResult()
self.cache_hit = (self.result is not None)
diff --git a/crosperf/benchmark_run_unittest.py b/crosperf/benchmark_run_unittest.py
index 51b287cf..74757ac2 100755
--- a/crosperf/benchmark_run_unittest.py
+++ b/crosperf/benchmark_run_unittest.py
@@ -1,22 +1,20 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Testing of benchmark_run."""
from __future__ import print_function
+import mock
import unittest
import inspect
-import mock
+from cros_utils import logger
import benchmark_run
-from cros_utils import logger
from suite_runner import MockSuiteRunner
from suite_runner import SuiteRunner
from label import MockLabel
@@ -50,10 +48,8 @@ class BenchmarkRunTest(unittest.TestCase):
self.test_label = MockLabel(
'test1',
- 'build',
'image1',
'autotest_dir',
- 'debug_dir',
'/tmp/test_benchmark_run',
'x86-alex',
'chromeos2-row1-rack4-host9.cros',
@@ -61,8 +57,7 @@ class BenchmarkRunTest(unittest.TestCase):
cache_dir='',
cache_only=False,
log_level='average',
- compiler='gcc',
- skylab=False)
+ compiler='gcc')
self.test_cache_conditions = [
CacheConditions.CACHE_FILE_EXISTS, CacheConditions.CHECKSUMS_MATCH
@@ -75,10 +70,8 @@ class BenchmarkRunTest(unittest.TestCase):
def testDryRun(self):
my_label = MockLabel(
'test1',
- 'build',
'image1',
'autotest_dir',
- 'debug_dir',
'/tmp/test_benchmark_run',
'x86-alex',
'chromeos2-row1-rack4-host9.cros',
@@ -86,8 +79,7 @@ class BenchmarkRunTest(unittest.TestCase):
cache_dir='',
cache_only=False,
log_level='average',
- compiler='gcc',
- skylab=False)
+ compiler='gcc')
logging_level = 'average'
m = MockMachineManager('/tmp/chromeos_root', 0, logging_level, '')
@@ -100,16 +92,8 @@ class BenchmarkRunTest(unittest.TestCase):
False, # rm_chroot_tmp
'', # perf_args
suite='telemetry_Crosperf') # suite
- dut_conf = {
- 'cooldown_time': 5,
- 'cooldown_temp': 45,
- 'governor': 'powersave',
- 'cpu_usage': 'big_only',
- 'cpu_freq_pct': 80,
- }
b = benchmark_run.MockBenchmarkRun('test run', bench, my_label, 1, [], m,
- logger.GetLogger(), logging_level, '',
- dut_conf)
+ logger.GetLogger(), logging_level, '')
b.cache = MockResultsCache()
b.suite_runner = MockSuiteRunner()
b.start()
@@ -118,8 +102,7 @@ class BenchmarkRunTest(unittest.TestCase):
# since the last time this test was updated:
args_list = [
'self', 'name', 'benchmark', 'label', 'iteration', 'cache_conditions',
- 'machine_manager', 'logger_to_use', 'log_level', 'share_cache',
- 'dut_config', 'enable_aslr'
+ 'machine_manager', 'logger_to_use', 'log_level', 'share_cache'
]
arg_spec = inspect.getargspec(benchmark_run.BenchmarkRun.__init__)
self.assertEqual(len(arg_spec.args), len(args_list))
@@ -137,30 +120,30 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
def MockLogOutput(msg, print_to_console=False):
- """Helper function for test_run."""
+ 'Helper function for test_run.'
del print_to_console
self.log_output.append(msg)
def MockLogError(msg, print_to_console=False):
- """Helper function for test_run."""
+ 'Helper function for test_run.'
del print_to_console
self.log_error.append(msg)
def MockRecordStatus(msg):
- """Helper function for test_run."""
+ 'Helper function for test_run.'
self.status.append(msg)
def FakeReadCache():
- """Helper function for test_run."""
+ 'Helper function for test_run.'
br.cache = mock.Mock(spec=ResultsCache)
self.called_ReadCache = True
return 0
def FakeReadCacheSucceed():
- """Helper function for test_run."""
+ 'Helper function for test_run.'
br.cache = mock.Mock(spec=ResultsCache)
br.result = mock.Mock(spec=Result)
br.result.out = 'result.out stuff'
@@ -170,29 +153,29 @@ class BenchmarkRunTest(unittest.TestCase):
return 0
def FakeReadCacheException():
- """Helper function for test_run."""
+ 'Helper function for test_run.'
raise RuntimeError('This is an exception test; it is supposed to happen')
def FakeAcquireMachine():
- """Helper function for test_run."""
+ 'Helper function for test_run.'
mock_machine = MockCrosMachine('chromeos1-row3-rack5-host7.cros',
'chromeos', 'average')
return mock_machine
def FakeRunTest(_machine):
- """Helper function for test_run."""
+ 'Helper function for test_run.'
mock_result = mock.Mock(spec=Result)
mock_result.retval = 0
return mock_result
def FakeRunTestFail(_machine):
- """Helper function for test_run."""
+ 'Helper function for test_run.'
mock_result = mock.Mock(spec=Result)
mock_result.retval = 1
return mock_result
def ResetTestValues():
- """Helper function for test_run."""
+ 'Helper function for test_run.'
self.log_output = []
self.log_error = []
self.status = []
@@ -277,14 +260,14 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
def GetLastEventPassed():
- """Helper function for test_terminate_pass"""
+ 'Helper function for test_terminate_pass'
return benchmark_run.STATUS_SUCCEEDED
def RecordStub(status):
- """Helper function for test_terminate_pass"""
+ 'Helper function for test_terminate_pass'
self.status = status
self.status = benchmark_run.STATUS_SUCCEEDED
@@ -304,14 +287,14 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
def GetLastEventFailed():
- """Helper function for test_terminate_fail"""
+ 'Helper function for test_terminate_fail'
return benchmark_run.STATUS_FAILED
def RecordStub(status):
- """Helper function for test_terminate_fail"""
+ 'Helper function for test_terminate_fail'
self.status = status
self.status = benchmark_run.STATUS_SUCCEEDED
@@ -331,7 +314,7 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
br.terminated = True
self.assertRaises(Exception, br.AcquireMachine)
@@ -348,10 +331,10 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
def MockLogError(err_msg):
- """Helper function for test_get_extra_autotest_args"""
+ 'Helper function for test_get_extra_autotest_args'
self.err_msg = err_msg
self.mock_logger.LogError = MockLogError
@@ -363,8 +346,8 @@ class BenchmarkRunTest(unittest.TestCase):
result = br.GetExtraAutotestArgs()
self.assertEqual(
result,
- '--profiler=custom_perf --profiler_args=\'perf_options="record -a -e '
- 'cycles"\'')
+ "--profiler=custom_perf --profiler_args='perf_options=\"record -a -e "
+ "cycles\"'")
self.test_benchmark.suite = 'telemetry'
result = br.GetExtraAutotestArgs()
@@ -387,7 +370,7 @@ class BenchmarkRunTest(unittest.TestCase):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
self.status = []
@@ -402,9 +385,9 @@ class BenchmarkRunTest(unittest.TestCase):
br.RunTest(mock_machine)
self.assertTrue(br.run_completed)
- self.assertEqual(
- self.status,
- [benchmark_run.STATUS_IMAGING, benchmark_run.STATUS_RUNNING])
+ self.assertEqual(self.status, [
+ benchmark_run.STATUS_IMAGING, benchmark_run.STATUS_RUNNING
+ ])
self.assertEqual(br.machine_manager.ImageMachine.call_count, 1)
br.machine_manager.ImageMachine.assert_called_with(mock_machine,
@@ -416,13 +399,13 @@ class BenchmarkRunTest(unittest.TestCase):
self.assertEqual(mock_result.call_count, 1)
mock_result.assert_called_with(
self.mock_logger, 'average', self.test_label, None, "{'Score':100}", '',
- 0, 'page_cycler.netsim.top_10', 'telemetry_Crosperf', '')
+ 0, 'page_cycler.netsim.top_10', 'telemetry_Crosperf')
def test_set_cache_conditions(self):
br = benchmark_run.BenchmarkRun(
'test_run', self.test_benchmark, self.test_label, 1,
self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ 'average', '')
phony_cache_conditions = [123, 456, True, False]
diff --git a/crosperf/benchmark_unittest.py b/crosperf/benchmark_unittest.py
index 6c0c22f6..24c364c0 100755
--- a/crosperf/benchmark_unittest.py
+++ b/crosperf/benchmark_unittest.py
@@ -1,19 +1,15 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2014 Google Inc. All Rights Reserved
"""Unit tests for the Crosperf Benchmark class."""
from __future__ import print_function
import inspect
-import unittest
-
from benchmark import Benchmark
+import unittest
+
class BenchmarkTestCase(unittest.TestCase):
"""Individual tests for the Benchmark class."""
@@ -59,8 +55,7 @@ class BenchmarkTestCase(unittest.TestCase):
# this test was updated.
args_list = [
'self', 'name', 'test_name', 'test_args', 'iterations', 'rm_chroot_tmp',
- 'perf_args', 'suite', 'show_all_results', 'retries', 'run_local',
- 'cwp_dso', 'weight', 'turbostat'
+ 'perf_args', 'suite', 'show_all_results', 'retries', 'run_local'
]
arg_spec = inspect.getargspec(Benchmark.__init__)
self.assertEqual(len(arg_spec.args), len(args_list))
diff --git a/crosperf/crosperf.py b/crosperf/crosperf.py
index fddd18b9..b78c8b9e 100755
--- a/crosperf/crosperf.py
+++ b/crosperf/crosperf.py
@@ -1,19 +1,15 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright 2011 Google Inc. All Rights Reserved.
"""The driver script for running performance benchmarks on ChromeOS."""
from __future__ import print_function
-import argparse
import atexit
+import argparse
import os
import signal
import sys
-
from experiment_runner import ExperimentRunner
from experiment_runner import MockExperimentRunner
from experiment_factory import ExperimentFactory
@@ -90,11 +86,6 @@ def RunCrosperf(argv):
dest='log_dir',
default='',
help='The log_dir, default is under <crosperf_logs>/logs')
- parser.add_argument(
- '--no_hwp',
- default=False,
- action='store_true',
- help='Disable intel_pstate on Intel CPU with HWP support.')
SetupParserOptions(parser)
options, args = parser.parse_known_args(argv)
@@ -121,8 +112,6 @@ def RunCrosperf(argv):
experiment_file.GetGlobalSettings().SetField('name', experiment_name)
experiment = ExperimentFactory().GetExperiment(experiment_file,
working_directory, log_dir)
- if options.no_hwp:
- experiment.intel_pstate = 'no_hwp'
json_report = experiment_file.GetGlobalSettings().GetField('json_report')
@@ -141,11 +130,14 @@ def RunCrosperf(argv):
def Main(argv):
try:
RunCrosperf(argv)
- except Exception:
+ except Exception as ex:
# Flush buffers before exiting to avoid out of order printing
sys.stdout.flush()
- # Raise exception prints out traceback
- raise
+ sys.stderr.flush()
+ print('Crosperf error: %s' % repr(ex))
+ sys.stdout.flush()
+ sys.stderr.flush()
+ sys.exit(1)
if __name__ == '__main__':
diff --git a/crosperf/crosperf_test.py b/crosperf/crosperf_test.py
new file mode 100755
index 00000000..085efafe
--- /dev/null
+++ b/crosperf/crosperf_test.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python2
+
+# Copyright 2011 Google Inc. All Rights Reserved.
+"""Test for crosperf."""
+
+from __future__ import print_function
+
+import os
+import tempfile
+import unittest
+import crosperf
+from cros_utils.file_utils import FileUtils
+
+EXPERIMENT_FILE_1 = """
+ board: x86-alex
+ remote: chromeos-alex3
+
+ benchmark: PageCycler {
+ iterations: 3
+ }
+
+ image1 {
+ chromeos_image: /usr/local/google/cros_image1.bin
+ }
+
+ image2 {
+ chromeos_image: /usr/local/google/cros_image2.bin
+ }
+ """
+
+
+class CrosPerfTest(unittest.TestCase):
+ """Class to test Crosperf."""
+
+ def testDryRun(self):
+ filehandle, filename = tempfile.mkstemp()
+ os.write(filehandle, EXPERIMENT_FILE_1)
+ crosperf.Main(['', filename, '--dry_run'])
+ os.remove(filename)
+
+
+if __name__ == '__main__':
+ FileUtils.Configure(True)
+ unittest.main()
diff --git a/crosperf/crosperf_unittest.py b/crosperf/crosperf_unittest.py
index 4f40d139..b361f15b 100755
--- a/crosperf/crosperf_unittest.py
+++ b/crosperf/crosperf_unittest.py
@@ -1,22 +1,14 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2014 Google Inc. All Rights Reserved.
"""Unittest for crosperf."""
-from __future__ import division
from __future__ import print_function
import argparse
-import io
-import os
-import tempfile
-import unittest
+import StringIO
-import mock
+import unittest
import crosperf
import settings_factory
@@ -45,16 +37,10 @@ class CrosperfTest(unittest.TestCase):
"""Crosperf test class."""
def setUp(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_1)
+ input_file = StringIO.StringIO(EXPERIMENT_FILE_1)
self.exp_file = experiment_file.ExperimentFile(input_file)
- def testDryRun(self):
- filehandle, filename = tempfile.mkstemp()
- os.write(filehandle, EXPERIMENT_FILE_1)
- crosperf.Main(['', filename, '--dry_run'])
- os.remove(filename)
-
- def testConvertOptionsToSettings(self):
+ def test_convert_options_to_settings(self):
parser = argparse.ArgumentParser()
parser.add_argument(
'-l',
@@ -69,24 +55,13 @@ class CrosperfTest(unittest.TestCase):
settings = crosperf.ConvertOptionsToSettings(options)
self.assertIsNotNone(settings)
self.assertIsInstance(settings, settings_factory.GlobalSettings)
- self.assertEqual(len(settings.fields), 37)
+ self.assertEqual(len(settings.fields), 25)
self.assertTrue(settings.GetField('rerun'))
argv = ['crosperf/crosperf.py', 'temp.exp']
options, _ = parser.parse_known_args(argv)
settings = crosperf.ConvertOptionsToSettings(options)
self.assertFalse(settings.GetField('rerun'))
- def testExceptionPrintTraceback(self):
- """Test the main function can print traceback in exception."""
-
- def mock_RunCrosperf(*_args, **_kwargs):
- return 10 / 0
-
- with mock.patch('crosperf.RunCrosperf', new=mock_RunCrosperf):
- with self.assertRaises(ZeroDivisionError) as context:
- crosperf.Main([])
- self.assertEqual('division by zero', str(context.exception))
-
if __name__ == '__main__':
unittest.main()
diff --git a/crosperf/default-telemetry-results.json b/crosperf/default-telemetry-results.json
index b5ea45a6..240664b5 100644
--- a/crosperf/default-telemetry-results.json
+++ b/crosperf/default-telemetry-results.json
@@ -16,10 +16,6 @@
"percentage_smooth__percentage_smooth",
"percentage_smooth__summary"
],
- "loading.desktop@@typical": [
- "timeToFirstContentfulPaint__cache_temperature:cold",
- "timeToFirstContentfulPaint__cache_temperature:warm"
- ],
"page_cycler_v2.intl_es_fr_pt-BR": [
"cold_times__page_load_time",
"warm_times__page_load_time",
@@ -68,15 +64,14 @@
"warm@@timeToOnload_avg__summary"
],
"speedometer": [
- "RunsPerMinute",
- "Total"
+ "Total__Total",
+ "Total__summary"
],
"speedometer2": [
- "RunsPerMinute",
- "Total"
+ "Total__summary"
],
"octane": [
- "Total.Score"
+ "Total__Score"
],
"jsgamebench": [
"Score__Score"
@@ -148,24 +143,13 @@
"warm@@timeToOnload_avg__summary"
],
"kraken": [
- "Total"
+ "Total__Total",
+ "Total__summary"
],
"jetstream": [
- "Score"
- ],
- "jetstream2": [
- "Score"
+ "Score__summary"
],
"cros_ui_smoothness": [
- "ui_percentage_smooth"
- ],
- "rendering.desktop": [
- "Event.Latency.ScrollUpdate.Touch.TimeToScrollUpdateSwapBegin4_avg"
- ],
- "rendering.desktop@@aquarium$": [
- "avg_surface_fps"
- ],
- "rendering.desktop@@aquarium_20k$": [
- "avg_surface_fps"
+ "ui_percentage_smooth__summary"
]
}
diff --git a/crosperf/default_remotes b/crosperf/default_remotes
index 7b59c2af..c3a8cc75 100644
--- a/crosperf/default_remotes
+++ b/crosperf/default_remotes
@@ -1,6 +1,11 @@
+daisy : chromeos2-row9-rack9-host7.cros
+peach_pit : chromeos2-row9-rack10-host13.cros chromeos2-row9-rack10-host15.cros chromeos2-row9-rack10-host17.cros
+peppy : chromeos2-row9-rack10-host19.cros chromeos2-row9-rack10-host21.cros chromeos2-row9-rack9-host1.cros
+squawks : chromeos2-row9-rack10-host7.cros chromeos2-row9-rack10-host9.cros chromeos2-row9-rack10-host11.cros
elm : chromeos2-row9-rack8-host19.cros chromeos2-row9-rack8-host21.cros
bob : chromeos2-row9-rack7-host1.cros chromeos2-row9-rack7-host3.cros
chell : chromeos2-row9-rack8-host3.cros chromeos2-row9-rack8-host5.cros
+falco : chromeos2-row9-rack8-host13.cros chromeos2-row9-rack8-host11.cros
kefka : chromeos2-row9-rack9-host21.cros chromeos2-row9-rack8-host1.cros
lulu : chromeos2-row9-rack8-host9.cros chromeos2-row9-rack8-host7.cros
nautilus : chromeos2-row9-rack7-host11.cros chromeos2-row9-rack7-host9.cros
diff --git a/crosperf/download_images.py b/crosperf/download_images.py
index e02c5817..ad0a812b 100644
--- a/crosperf/download_images.py
+++ b/crosperf/download_images.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2014-2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Download images from Cloud Storage."""
from __future__ import print_function
@@ -14,7 +12,7 @@ import test_flag
from cros_utils import command_executer
-GS_UTIL = 'src/chromium/depot_tools/gsutil.py'
+GS_UTIL = 'chromium/tools/depot_tools/gsutil.py'
class MissingImage(Exception):
@@ -134,17 +132,19 @@ class ImageDownloader(object):
if retval != 0:
print('(Warning: Could not remove file chromiumos_test_image.tar.xz .)')
- def DownloadSingleFile(self, chromeos_root, build_id, package_file_name):
+ def DownloadSingleAutotestFile(self, chromeos_root, build_id,
+ package_file_name):
# Verify if package files exist
status = 0
- gs_package_name = (
- 'gs://chromeos-image-archive/%s/%s' % (build_id, package_file_name))
+ gs_package_name = ('gs://chromeos-image-archive/%s/%s' %
+ (build_id, package_file_name))
gsutil_cmd = os.path.join(chromeos_root, GS_UTIL)
if not test_flag.GetTestMode():
cmd = '%s ls %s' % (gsutil_cmd, gs_package_name)
status = self._ce.RunCommand(cmd)
if status != 0:
- raise MissingFile('Cannot find package file: %s.' % package_file_name)
+ raise MissingFile(
+ 'Cannot find autotest package file: %s.' % package_file_name)
if self.log_level == 'average':
self._logger.LogOutput('Preparing to download %s package to local '
@@ -167,16 +167,16 @@ class ImageDownloader(object):
if status != 0 or not os.path.exists(package_path):
raise MissingFile('Cannot download package: %s .' % package_path)
- def UncompressSingleFile(self, chromeos_root, build_id, package_file_name,
- uncompress_cmd):
+ def UncompressSingleAutotestFile(self, chromeos_root, build_id,
+ package_file_name, uncompress_cmd):
# Uncompress file
download_path = os.path.join(chromeos_root, 'chroot/tmp', build_id)
- command = (
- 'cd %s ; %s %s' % (download_path, uncompress_cmd, package_file_name))
+ command = ('cd %s ; %s %s' % (download_path, uncompress_cmd,
+ package_file_name))
if self.log_level != 'verbose':
self._logger.LogOutput('CMD: %s' % command)
- print('(Uncompressing file %s .)' % package_file_name)
+ print('(Uncompressing autotest file %s .)' % package_file_name)
retval = self._ce.RunCommand(command)
if retval != 0:
raise MissingFile('Cannot uncompress file: %s.' % package_file_name)
@@ -184,17 +184,17 @@ class ImageDownloader(object):
command = ('cd %s ; rm -f %s' % (download_path, package_file_name))
if self.log_level != 'verbose':
self._logger.LogOutput('CMD: %s' % command)
- print('(Removing processed file %s .)' % package_file_name)
+ print('(Removing processed autotest file %s .)' % package_file_name)
# try removing file, its ok to have an error, print if encountered
retval = self._ce.RunCommand(command)
if retval != 0:
print('(Warning: Could not remove file %s .)' % package_file_name)
- def VerifyFileExists(self, chromeos_root, build_id, package_file):
+ def VerifyAutotestFilesExist(self, chromeos_root, build_id, package_file):
# Quickly verify if the files are there
status = 0
- gs_package_name = (
- 'gs://chromeos-image-archive/%s/%s' % (build_id, package_file))
+ gs_package_name = ('gs://chromeos-image-archive/%s/%s' % (build_id,
+ package_file))
gsutil_cmd = os.path.join(chromeos_root, GS_UTIL)
if not test_flag.GetTestMode():
cmd = '%s ls %s' % (gsutil_cmd, gs_package_name)
@@ -223,8 +223,8 @@ class ImageDownloader(object):
if not os.path.exists(autotest_path):
# Quickly verify if the files are present on server
# If not, just exit with warning
- status = self.VerifyFileExists(chromeos_root, build_id,
- autotest_packages_name)
+ status = self.VerifyAutotestFilesExist(chromeos_root, build_id,
+ autotest_packages_name)
if status != 0:
default_autotest_dir = '~/trunk/src/third_party/autotest/files'
print(
@@ -233,18 +233,19 @@ class ImageDownloader(object):
return default_autotest_dir
# Files exist on server, download and uncompress them
- self.DownloadSingleFile(chromeos_root, build_id, autotest_packages_name)
- self.DownloadSingleFile(chromeos_root, build_id,
- autotest_server_package_name)
- self.DownloadSingleFile(chromeos_root, build_id,
- autotest_control_files_name)
-
- self.UncompressSingleFile(chromeos_root, build_id, autotest_packages_name,
- 'tar -xf ')
- self.UncompressSingleFile(chromeos_root, build_id,
- autotest_server_package_name, 'tar -jxf ')
- self.UncompressSingleFile(chromeos_root, build_id,
- autotest_control_files_name, 'tar -xf ')
+ self.DownloadSingleAutotestFile(chromeos_root, build_id,
+ autotest_packages_name)
+ self.DownloadSingleAutotestFile(chromeos_root, build_id,
+ autotest_server_package_name)
+ self.DownloadSingleAutotestFile(chromeos_root, build_id,
+ autotest_control_files_name)
+
+ self.UncompressSingleAutotestFile(chromeos_root, build_id,
+ autotest_packages_name, 'tar -xvf ')
+ self.UncompressSingleAutotestFile(
+ chromeos_root, build_id, autotest_server_package_name, 'tar -jxvf ')
+ self.UncompressSingleAutotestFile(
+ chromeos_root, build_id, autotest_control_files_name, 'tar -xvf ')
# Rename created autotest directory to autotest_files
command = ('cd %s ; mv autotest autotest_files' % download_path)
if self.log_level != 'verbose':
@@ -256,44 +257,7 @@ class ImageDownloader(object):
return autotest_rel_path
- def DownloadDebugFile(self, chromeos_root, build_id):
- # Download autest package files (3 files)
- debug_archive_name = 'debug.tgz'
-
- download_path = os.path.join(chromeos_root, 'chroot/tmp', build_id)
- # Debug directory relative path wrt chroot
- debug_rel_path = os.path.join('/tmp', build_id, 'debug_files')
- # Debug path to download files
- debug_path = os.path.join(chromeos_root, 'chroot/tmp', build_id,
- 'debug_files')
-
- if not os.path.exists(debug_path):
- # Quickly verify if the file is present on server
- # If not, just exit with warning
- status = self.VerifyFileExists(chromeos_root, build_id,
- debug_archive_name)
- if status != 0:
- self._logger.LogOutput('WARNING: Could not find debug archive on gs')
- return ''
-
- # File exists on server, download and uncompress it
- self.DownloadSingleFile(chromeos_root, build_id, debug_archive_name)
-
- self.UncompressSingleFile(chromeos_root, build_id, debug_archive_name,
- 'tar -xf ')
- # Rename created autotest directory to autotest_files
- command = ('cd %s ; mv debug debug_files' % download_path)
- if self.log_level != 'verbose':
- self._logger.LogOutput('CMD: %s' % command)
- print('(Moving downloaded debug files to debug_files)')
- retval = self._ce.RunCommand(command)
- if retval != 0:
- raise MissingFile('Could not create directory debug_files')
-
- return debug_rel_path
-
- def Run(self, chromeos_root, xbuddy_label, autotest_path, debug_path,
- perf_args):
+ def Run(self, chromeos_root, xbuddy_label, autotest_path):
build_id = self.GetBuildID(chromeos_root, xbuddy_label)
image_name = ('gs://chromeos-image-archive/%s/chromiumos_test_image.tar.xz'
% build_id)
@@ -317,7 +281,4 @@ class ImageDownloader(object):
if autotest_path == '':
autotest_path = self.DownloadAutotestFiles(chromeos_root, build_id)
- if debug_path == '' and perf_args:
- debug_path = self.DownloadDebugFile(chromeos_root, build_id)
-
- return image_path, autotest_path, debug_path
+ return image_path, autotest_path
diff --git a/crosperf/download_images_unittest.py b/crosperf/download_images_unittest.py
index 68a84676..349a2dbb 100755
--- a/crosperf/download_images_unittest.py
+++ b/crosperf/download_images_unittest.py
@@ -1,9 +1,6 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+#
+# Copyright 2014 Google Inc. All Rights Reserved
"""Download image unittest."""
from __future__ import print_function
@@ -30,7 +27,6 @@ class ImageDownloaderTestcast(unittest.TestCase):
self.called_uncompress_image = False
self.called_get_build_id = False
self.called_download_autotest_files = False
- self.called_download_debug_file = False
@mock.patch.object(os, 'makedirs')
@mock.patch.object(os.path, 'exists')
@@ -68,7 +64,7 @@ class ImageDownloaderTestcast(unittest.TestCase):
# Verify we called RunCommand once, with proper arguments.
self.assertEqual(mock_cmd_exec.RunCommand.call_count, 1)
expected_args = (
- '/usr/local/home/chromeos/src/chromium/depot_tools/gsutil.py '
+ '/usr/local/home/chromeos/chromium/tools/depot_tools/gsutil.py '
'cp gs://chromeos-image-archive/lumpy-release/R36-5814.0.0/'
'chromiumos_test_image.tar.xz '
'/usr/local/home/chromeos/chroot/tmp/lumpy-release/R36-5814.0.0')
@@ -130,8 +126,8 @@ class ImageDownloaderTestcast(unittest.TestCase):
# 2nd arg must be exception handler
except_handler_string = 'RunCommandExceptionHandler.HandleException'
self.assertTrue(
- except_handler_string in repr(mock_cmd_exec.RunCommand.call_args_list[0]
- [1]))
+ except_handler_string in repr(
+ mock_cmd_exec.RunCommand.call_args_list[0][1]))
# Call 2, should have 2 arguments
self.assertEqual(len(mock_cmd_exec.RunCommand.call_args_list[1]), 2)
@@ -164,17 +160,13 @@ class ImageDownloaderTestcast(unittest.TestCase):
test_chroot = '/usr/local/home/chromeos'
test_build_id = 'remote/lumpy/latest-dev'
test_empty_autotest_path = ''
- test_empty_debug_path = ''
test_autotest_path = '/tmp/autotest'
- test_debug_path = '/tmp/debug'
- perf_args = '-a'
# Set values to test/check.
self.called_download_image = False
self.called_uncompress_image = False
self.called_get_build_id = False
self.called_download_autotest_files = False
- self.called_download_debug_file = False
# Define fake stub functions for Run to call
def FakeGetBuildID(unused_root, unused_xbuddy_label):
@@ -205,12 +197,6 @@ class ImageDownloaderTestcast(unittest.TestCase):
self.called_download_autotest_files = True
return 'autotest'
- def FakeDownloadDebugFile(root, build_id):
- if root or build_id:
- pass
- self.called_download_debug_file = True
- return 'debug'
-
# Initialize downloader
downloader = download_images.ImageDownloader(logger_to_use=MOCK_LOGGER)
@@ -219,58 +205,46 @@ class ImageDownloaderTestcast(unittest.TestCase):
downloader.UncompressImage = FakeUncompressImage
downloader.DownloadImage = GoodDownloadImage
downloader.DownloadAutotestFiles = FakeDownloadAutotestFiles
- downloader.DownloadDebugFile = FakeDownloadDebugFile
# Call Run.
- image_path, autotest_path, debug_path = downloader.Run(
- test_chroot, test_build_id, test_empty_autotest_path,
- test_empty_debug_path, perf_args)
+ image_path, autotest_path = downloader.Run(test_chroot, test_build_id,
+ test_empty_autotest_path)
# Make sure it called both _DownloadImage and _UncompressImage
self.assertTrue(self.called_download_image)
self.assertTrue(self.called_uncompress_image)
# Make sure it called DownloadAutotestFiles
self.assertTrue(self.called_download_autotest_files)
- # Make sure it called DownloadDebugFile
- self.assertTrue(self.called_download_debug_file)
- # Make sure it returned an image and autotest path returned from this call
+ # Make sure it returned an image and autotest path returned from this call
self.assertTrue(image_path == 'chromiumos_test_image.bin')
self.assertTrue(autotest_path == 'autotest')
- self.assertTrue(debug_path == 'debug')
- # Call Run with a non-empty autotest and debug path
+ # Call Run with a non-empty autotest path
self.called_download_autotest_files = False
- self.called_download_debug_file = False
- image_path, autotest_path, debug_path = downloader.Run(
- test_chroot, test_build_id, test_autotest_path, test_debug_path,
- perf_args)
+ image_path, autotest_path = downloader.Run(test_chroot, test_build_id,
+ test_autotest_path)
# Verify that downloadAutotestFiles was not called
self.assertFalse(self.called_download_autotest_files)
# Make sure it returned the specified autotest path returned from this call
self.assertTrue(autotest_path == test_autotest_path)
- # Make sure it returned the specified debug path returned from this call
- self.assertTrue(debug_path == test_debug_path)
# Reset values; Now use fake stub that simulates DownloadImage failing.
self.called_download_image = False
self.called_uncompress_image = False
self.called_download_autotest_files = False
- self.called_download_debug_file = False
downloader.DownloadImage = BadDownloadImage
# Call Run again.
self.assertRaises(download_images.MissingImage, downloader.Run, test_chroot,
- test_autotest_path, test_debug_path, test_build_id,
- perf_args)
+ test_autotest_path, test_build_id)
# Verify that UncompressImage and downloadAutotestFiles were not called,
# since _DownloadImage "failed"
self.assertTrue(self.called_download_image)
self.assertFalse(self.called_uncompress_image)
self.assertFalse(self.called_download_autotest_files)
- self.assertFalse(self.called_download_debug_file)
if __name__ == '__main__':
diff --git a/crosperf/experiment.py b/crosperf/experiment.py
index 1d87b6e4..987318a5 100644
--- a/crosperf/experiment.py
+++ b/crosperf/experiment.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""The experiment setting module."""
from __future__ import print_function
@@ -10,6 +8,7 @@ from __future__ import print_function
import os
import time
+import afe_lock_machine
from threading import Lock
from cros_utils import logger
@@ -28,8 +27,7 @@ class Experiment(object):
def __init__(self, name, remote, working_directory, chromeos_root,
cache_conditions, labels, benchmarks, experiment_file, email_to,
acquire_timeout, log_dir, log_level, share_cache,
- results_directory, locks_directory, cwp_dso, enable_aslr,
- ignore_min_max, skylab, dut_config):
+ results_directory, locks_directory):
self.name = name
self.working_directory = working_directory
self.remote = remote
@@ -50,22 +48,18 @@ class Experiment(object):
self.num_run_complete = 0
self.share_cache = share_cache
self.active_threads = []
+ # If locks_directory (self.lock_dir) not blank, we will use the file
+ # locking mechanism; if it is blank then we will use the AFE server
+ # locking mechanism.
self.locks_dir = locks_directory
self.locked_machines = []
- self.lock_mgr = None
- self.cwp_dso = cwp_dso
- self.enable_aslr = enable_aslr
- self.ignore_min_max = ignore_min_max
- self.skylab = skylab
- self.l = logger.GetLogger(log_dir)
- self.intel_pstate = dut_config['intel_pstate']
+ if not remote:
+ raise RuntimeError('No remote hosts specified')
if not self.benchmarks:
raise RuntimeError('No benchmarks specified')
if not self.labels:
raise RuntimeError('No labels specified')
- if not remote and not self.skylab:
- raise RuntimeError('No remote hosts specified')
# We need one chromeos_root to run the benchmarks in, but it doesn't
# matter where it is, unless the ABIs are different.
@@ -94,13 +88,22 @@ class Experiment(object):
if not self.remote:
raise RuntimeError('No machine available for running experiment.')
- # Initialize checksums for all machines, ignore errors at this time.
- # The checksum will be double checked, and image will be flashed after
- # duts are locked/leased.
- self.SetCheckSums()
+ for label in labels:
+ # We filter out label remotes that are not reachable (not in
+ # self.remote). So each label.remote is a sublist of experiment.remote.
+ label.remote = [r for r in label.remote if r in self.remote]
+ try:
+ self.machine_manager.ComputeCommonCheckSum(label)
+ except BadChecksum:
+ # Force same image on all machines, then we do checksum again. No
+ # bailout if checksums still do not match.
+ self.machine_manager.ForceSameImageToAllMachines(label)
+ self.machine_manager.ComputeCommonCheckSum(label)
+
+ self.machine_manager.ComputeCommonCheckSumString(label)
self.start_time = None
- self.benchmark_runs = self._GenerateBenchmarkRuns(dut_config)
+ self.benchmark_runs = self._GenerateBenchmarkRuns()
self._schedv2 = None
self._internal_counter_lock = Lock()
@@ -111,12 +114,12 @@ class Experiment(object):
def schedv2(self):
return self._schedv2
- def _GenerateBenchmarkRuns(self, dut_config):
+ def _GenerateBenchmarkRuns(self):
"""Generate benchmark runs from labels and benchmark defintions."""
benchmark_runs = []
for label in self.labels:
for benchmark in self.benchmarks:
- for iteration in range(1, benchmark.iterations + 1):
+ for iteration in xrange(1, benchmark.iterations + 1):
benchmark_run_name = '%s: %s (%s)' % (label.name, benchmark.name,
iteration)
@@ -127,29 +130,10 @@ class Experiment(object):
benchmark_run.BenchmarkRun(benchmark_run_name, benchmark, label,
iteration, self.cache_conditions,
self.machine_manager, logger_to_use,
- self.log_level, self.share_cache,
- dut_config, self.enable_aslr))
+ self.log_level, self.share_cache))
return benchmark_runs
- def SetCheckSums(self, forceSameImage=False):
- for label in self.labels:
- # We filter out label remotes that are not reachable (not in
- # self.remote). So each label.remote is a sublist of experiment.remote.
- label.remote = [r for r in label.remote if r in self.remote]
- try:
- self.machine_manager.ComputeCommonCheckSum(label)
- except BadChecksum:
- # Force same image on all machines, then we do checksum again. No
- # bailout if checksums still do not match.
- # TODO (zhizhouy): Need to figure out how flashing image will influence
- # the new checksum.
- if forceSameImage:
- self.machine_manager.ForceSameImageToAllMachines(label)
- self.machine_manager.ComputeCommonCheckSum(label)
-
- self.machine_manager.ComputeCommonCheckSumString(label)
-
def Build(self):
pass
@@ -212,19 +196,18 @@ class Experiment(object):
# We are using the file locks mechanism, so call machine_manager.Cleanup
# to unlock everything.
self.machine_manager.Cleanup()
-
- if test_flag.GetTestMode() or not self.locked_machines:
- return
-
- # If we locked any machines earlier, make sure we unlock them now.
- if self.lock_mgr:
- machine_states = self.lock_mgr.GetMachineStates('unlock')
- self.lock_mgr.CheckMachineLocks(machine_states, 'unlock')
- unlocked_machines = self.lock_mgr.UpdateMachines(False)
- failed_machines = [
- m for m in self.locked_machines if m not in unlocked_machines
- ]
- if failed_machines:
- raise RuntimeError(
- 'These machines are not unlocked correctly: %s' % failed_machines)
- self.lock_mgr = None
+ else:
+ if test_flag.GetTestMode():
+ return
+
+ all_machines = self.locked_machines
+ if not all_machines:
+ return
+
+ # If we locked any machines earlier, make sure we unlock them now.
+ lock_mgr = afe_lock_machine.AFELockManager(
+ all_machines, '', self.labels[0].chromeos_root, None)
+ machine_states = lock_mgr.GetMachineStates('unlock')
+ for k, state in machine_states.iteritems():
+ if state['locked']:
+ lock_mgr.UpdateLockInAFE(False, k)
diff --git a/crosperf/experiment_factory.py b/crosperf/experiment_factory.py
index 5b4d4b0d..bd25c785 100644
--- a/crosperf/experiment_factory.py
+++ b/crosperf/experiment_factory.py
@@ -1,20 +1,15 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""A module to generate experiments."""
from __future__ import print_function
import os
import re
import socket
-import sys
from benchmark import Benchmark
import config
-from cros_utils import logger
-from cros_utils import command_executer
from experiment import Experiment
from label import Label
from label import MockLabel
@@ -51,7 +46,10 @@ telemetry_toolchain_old_perf_tests = [
'tab_switching.top_10',
]
telemetry_toolchain_perf_tests = [
- 'octane', 'kraken', 'speedometer', 'speedometer2', 'jetstream2'
+ 'octane',
+ 'kraken',
+ 'speedometer',
+ 'speedometer2',
]
graphics_perf_tests = [
'graphics_GLBench',
@@ -60,22 +58,22 @@ graphics_perf_tests = [
'graphics_WebGLAquarium',
'graphics_WebGLPerformance',
]
-# TODO: disable rendering.desktop by default as the benchmark is
-# currently in a bad state
-# page_cycler_v2.typical_25 is deprecated and the recommend replacement is
-# loading.desktop@@typical (crbug.com/916340)
telemetry_crosbolt_perf_tests = [
'octane',
'kraken',
+ 'speedometer',
'speedometer2',
'jetstream',
- 'loading.desktop',
- #'rendering.desktop',
+ 'cros_ui_smoothness',
]
-
crosbolt_perf_tests = [
'graphics_WebGLAquarium',
- 'tast.video.PlaybackPerfVP91080P30FPS',
+ 'video_PlaybackPerf.h264',
+ 'video_PlaybackPerf.vp9',
+ 'video_WebRtcPerf',
+ 'BootPerfServerCrosPerf',
+ 'power_Resume',
+ 'build_RootFilesystemSize',
]
# 'cheets_AntutuTest',
@@ -84,12 +82,6 @@ crosbolt_perf_tests = [
# 'cheets_LinpackTest',
#]
-dso_list = [
- 'all',
- 'chrome',
- 'kallsyms',
-]
-
class ExperimentFactory(object):
"""Factory class for building an Experiment, given an ExperimentFile as input.
@@ -101,14 +93,12 @@ class ExperimentFactory(object):
def AppendBenchmarkSet(self, benchmarks, benchmark_list, test_args,
iterations, rm_chroot_tmp, perf_args, suite,
- show_all_results, retries, run_local, cwp_dso, weight,
- turbostat):
+ show_all_results, retries, run_local):
"""Add all the tests in a set to the benchmarks list."""
for test_name in benchmark_list:
- telemetry_benchmark = Benchmark(test_name, test_name, test_args,
- iterations, rm_chroot_tmp, perf_args,
- suite, show_all_results, retries,
- run_local, cwp_dso, weight, turbostat)
+ telemetry_benchmark = Benchmark(
+ test_name, test_name, test_args, iterations, rm_chroot_tmp, perf_args,
+ suite, show_all_results, retries, run_local)
benchmarks.append(telemetry_benchmark)
def GetExperiment(self, experiment_file, working_directory, log_dir):
@@ -116,16 +106,6 @@ class ExperimentFactory(object):
global_settings = experiment_file.GetGlobalSettings()
experiment_name = global_settings.GetField('name')
board = global_settings.GetField('board')
- chromeos_root = global_settings.GetField('chromeos_root')
- log_level = global_settings.GetField('logging_level')
- if log_level not in ('quiet', 'average', 'verbose'):
- log_level = 'verbose'
-
- skylab = global_settings.GetField('skylab')
- # Check whether skylab tool is installed correctly for skylab mode.
- if skylab and not self.CheckSkylabTool(chromeos_root, log_level):
- sys.exit(0)
-
remote = global_settings.GetField('remote')
# This is used to remove the ",' from the remote if user
# add them to the remote string.
@@ -135,6 +115,7 @@ class ExperimentFactory(object):
c = re.sub('["\']', '', i)
new_remote.append(c)
remote = new_remote
+ chromeos_root = global_settings.GetField('chromeos_root')
rm_chroot_tmp = global_settings.GetField('rm_chroot_tmp')
perf_args = global_settings.GetField('perf_args')
acquire_timeout = global_settings.GetField('acquire_timeout')
@@ -143,38 +124,19 @@ class ExperimentFactory(object):
config.AddConfig('no_email', global_settings.GetField('no_email'))
share_cache = global_settings.GetField('share_cache')
results_dir = global_settings.GetField('results_dir')
- # Warn user that option use_file_locks is deprecated.
use_file_locks = global_settings.GetField('use_file_locks')
- if use_file_locks:
- l = logger.GetLogger()
- l.LogWarning('Option use_file_locks is deprecated, please remove it '
- 'from your experiment settings.')
locks_dir = global_settings.GetField('locks_dir')
- # If not specified, set the locks dir to the default locks dir in
+ # If we pass a blank locks_dir to the Experiment, it will use the AFE server
+ # lock mechanism. So if the user specified use_file_locks, but did not
+ # specify a locks dir, set the locks dir to the default locks dir in
# file_lock_machine.
- if not locks_dir:
+ if use_file_locks and not locks_dir:
locks_dir = file_lock_machine.Machine.LOCKS_DIR
- if not os.path.exists(locks_dir):
- raise RuntimeError('Cannot access default lock directory. '
- 'Please run prodaccess or specify a local directory')
chrome_src = global_settings.GetField('chrome_src')
show_all_results = global_settings.GetField('show_all_results')
- cwp_dso = global_settings.GetField('cwp_dso')
- if cwp_dso and not cwp_dso in dso_list:
- raise RuntimeError('The DSO specified is not supported')
- enable_aslr = global_settings.GetField('enable_aslr')
- ignore_min_max = global_settings.GetField('ignore_min_max')
- turbostat_opt = global_settings.GetField('turbostat')
- dut_config = {
- 'intel_pstate': global_settings.GetField('intel_pstate'),
- 'cooldown_time': global_settings.GetField('cooldown_time'),
- 'cooldown_temp': global_settings.GetField('cooldown_temp'),
- 'governor': global_settings.GetField('governor'),
- 'cpu_usage': global_settings.GetField('cpu_usage'),
- 'cpu_freq_pct': global_settings.GetField('cpu_freq_pct'),
- 'top_interval': global_settings.GetField('top_interval'),
- }
-
+ log_level = global_settings.GetField('logging_level')
+ if log_level not in ('quiet', 'average', 'verbose'):
+ log_level = 'verbose'
# Default cache hit conditions. The image checksum in the cache and the
# computed checksum of the image must match. Also a cache file must exist.
cache_conditions = [
@@ -194,138 +156,32 @@ class ExperimentFactory(object):
# inherited and/or merged with the global settings values.
benchmarks = []
all_benchmark_settings = experiment_file.GetSettings('benchmark')
-
- # Check if there is duplicated benchmark name
- benchmark_names = {}
- # Check if in cwp_dso mode, all benchmarks should have same iterations
- cwp_dso_iterations = 0
-
for benchmark_settings in all_benchmark_settings:
benchmark_name = benchmark_settings.name
test_name = benchmark_settings.GetField('test_name')
if not test_name:
test_name = benchmark_name
test_args = benchmark_settings.GetField('test_args')
-
- # Rename benchmark name if 'story-filter' or 'story-tag-filter' specified
- # in test_args. Make sure these two tags only appear once.
- story_count = 0
- for arg in test_args.split():
- if '--story-filter=' in arg or '--story-tag-filter=' in arg:
- story_count += 1
- if story_count > 1:
- raise RuntimeError('Only one story or story-tag filter allowed in '
- 'a single benchmark run')
- # Rename benchmark name with an extension of 'story'-option
- benchmark_name = '%s@@%s' % (benchmark_name, arg.split('=')[-1])
-
- # Check for duplicated benchmark name after renaming
- if not benchmark_name in benchmark_names:
- benchmark_names[benchmark_name] = True
- else:
- raise SyntaxError("Duplicate benchmark name: '%s'." % benchmark_name)
-
iterations = benchmark_settings.GetField('iterations')
- if cwp_dso:
- if cwp_dso_iterations != 0 and iterations != cwp_dso_iterations:
- raise RuntimeError('Iterations of each benchmark run are not the ' \
- 'same')
- cwp_dso_iterations = iterations
-
suite = benchmark_settings.GetField('suite')
retries = benchmark_settings.GetField('retries')
run_local = benchmark_settings.GetField('run_local')
- weight = benchmark_settings.GetField('weight')
- if weight:
- if not cwp_dso:
- raise RuntimeError('Weight can only be set when DSO specified')
- if suite != 'telemetry_Crosperf':
- raise RuntimeError('CWP approximation weight only works with '
- 'telemetry_Crosperf suite')
- if run_local:
- raise RuntimeError('run_local must be set to False to use CWP '
- 'approximation')
- if weight < 0:
- raise RuntimeError('Weight should be a float >=0')
- elif cwp_dso:
- raise RuntimeError('With DSO specified, each benchmark should have a '
- 'weight')
if suite == 'telemetry_Crosperf':
if test_name == 'all_perfv2':
- self.AppendBenchmarkSet(
- benchmarks,
- telemetry_perfv2_tests,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- suite,
- show_all_results,
- retries,
- run_local,
- cwp_dso,
- weight,
- turbostat=turbostat_opt)
+ self.AppendBenchmarkSet(benchmarks, telemetry_perfv2_tests, test_args,
+ iterations, rm_chroot_tmp, perf_args, suite,
+ show_all_results, retries, run_local)
elif test_name == 'all_pagecyclers':
- self.AppendBenchmarkSet(
- benchmarks,
- telemetry_pagecycler_tests,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- suite,
- show_all_results,
- retries,
- run_local,
- cwp_dso,
- weight,
- turbostat=turbostat_opt)
- elif test_name == 'all_crosbolt_perf':
- self.AppendBenchmarkSet(
- benchmarks,
- telemetry_crosbolt_perf_tests,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- 'telemetry_Crosperf',
- show_all_results,
- retries,
- run_local,
- cwp_dso,
- weight,
- turbostat=turbostat_opt)
- self.AppendBenchmarkSet(
- benchmarks,
- crosbolt_perf_tests,
- '',
- iterations,
- rm_chroot_tmp,
- perf_args,
- '',
- show_all_results,
- retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight,
- turbostat=turbostat_opt)
+ self.AppendBenchmarkSet(benchmarks, telemetry_pagecycler_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, suite, show_all_results, retries,
+ run_local)
elif test_name == 'all_toolchain_perf':
- self.AppendBenchmarkSet(
- benchmarks,
- telemetry_toolchain_perf_tests,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- suite,
- show_all_results,
- retries,
- run_local,
- cwp_dso,
- weight,
- turbostat=turbostat_opt)
+ self.AppendBenchmarkSet(benchmarks, telemetry_toolchain_perf_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, suite, show_all_results, retries,
+ run_local)
# Add non-telemetry toolchain-perf benchmarks:
benchmarks.append(
Benchmark(
@@ -338,36 +194,38 @@ class ExperimentFactory(object):
'',
show_all_results,
retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight,
- turbostat=turbostat_opt))
+ run_local=False))
elif test_name == 'all_toolchain_perf_old':
+ self.AppendBenchmarkSet(benchmarks,
+ telemetry_toolchain_old_perf_tests, test_args,
+ iterations, rm_chroot_tmp, perf_args, suite,
+ show_all_results, retries, run_local)
+ else:
+ benchmark = Benchmark(test_name, test_name, test_args, iterations,
+ rm_chroot_tmp, perf_args, suite,
+ show_all_results, retries, run_local)
+ benchmarks.append(benchmark)
+ else:
+ if test_name == 'all_graphics_perf':
self.AppendBenchmarkSet(
benchmarks,
- telemetry_toolchain_old_perf_tests,
- test_args,
+ graphics_perf_tests,
+ '',
iterations,
rm_chroot_tmp,
perf_args,
- suite,
+ '',
show_all_results,
retries,
- run_local,
- cwp_dso,
- weight,
- turbostat=turbostat_opt)
- else:
- benchmark = Benchmark(benchmark_name, test_name, test_args,
- iterations, rm_chroot_tmp, perf_args, suite,
- show_all_results, retries, run_local, cwp_dso,
- weight, turbostat_opt)
- benchmarks.append(benchmark)
- else:
- if test_name == 'all_graphics_perf':
+ run_local=False)
+ elif test_name == 'all_crosbolt_perf':
+ self.AppendBenchmarkSet(benchmarks, telemetry_crosbolt_perf_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, 'telemetry_Crosperf',
+ show_all_results, retries, run_local)
self.AppendBenchmarkSet(
benchmarks,
- graphics_perf_tests,
+ crosbolt_perf_tests,
'',
iterations,
rm_chroot_tmp,
@@ -375,10 +233,7 @@ class ExperimentFactory(object):
'',
show_all_results,
retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight,
- turbostat=turbostat_opt)
+ run_local=False)
else:
# Add the single benchmark.
benchmark = Benchmark(
@@ -391,10 +246,7 @@ class ExperimentFactory(object):
suite,
show_all_results,
retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight,
- turbostat=turbostat_opt)
+ run_local=False)
benchmarks.append(benchmark)
if not benchmarks:
@@ -409,9 +261,7 @@ class ExperimentFactory(object):
for label_settings in all_label_settings:
label_name = label_settings.name
image = label_settings.GetField('chromeos_image')
- build = label_settings.GetField('build')
autotest_path = label_settings.GetField('autotest_path')
- debug_path = label_settings.GetField('debug_path')
chromeos_root = label_settings.GetField('chromeos_root')
my_remote = label_settings.GetField('remote')
compiler = label_settings.GetField('compiler')
@@ -421,27 +271,21 @@ class ExperimentFactory(object):
c = re.sub('["\']', '', i)
new_remote.append(c)
my_remote = new_remote
-
- if image:
- if skylab:
- raise RuntimeError('In skylab mode, local image should not be used.')
- if build:
- raise RuntimeError('Image path and build are provided at the same '
- 'time, please use only one of them.')
- else:
- if not build:
+ if image == '':
+ build = label_settings.GetField('build')
+ if len(build) == 0:
raise RuntimeError("Can not have empty 'build' field!")
- image, autotest_path, debug_path = label_settings.GetXbuddyPath(
- build, autotest_path, debug_path, board, chromeos_root, log_level,
- perf_args)
+ image, autotest_path = label_settings.GetXbuddyPath(
+ build, autotest_path, board, chromeos_root, log_level)
cache_dir = label_settings.GetField('cache_dir')
chrome_src = label_settings.GetField('chrome_src')
# TODO(yunlian): We should consolidate code in machine_manager.py
# to derermine whether we are running from within google or not
- if ('corp.google.com' in socket.gethostname() and not my_remote and
- not skylab):
+ if ('corp.google.com' in socket.gethostname() and
+ (not my_remote or
+ my_remote == remote and global_settings.GetField('board') != board)):
my_remote = self.GetDefaultRemotes(board)
if global_settings.GetField('same_machine') and len(my_remote) > 1:
raise RuntimeError('Only one remote is allowed when same_machine '
@@ -450,14 +294,13 @@ class ExperimentFactory(object):
image_args = label_settings.GetField('image_args')
if test_flag.GetTestMode():
# pylint: disable=too-many-function-args
- label = MockLabel(label_name, build, image, autotest_path, debug_path,
- chromeos_root, board, my_remote, image_args,
- cache_dir, cache_only, log_level, compiler, skylab,
- chrome_src)
+ label = MockLabel(label_name, image, autotest_path, chromeos_root,
+ board, my_remote, image_args, cache_dir, cache_only,
+ log_level, compiler, chrome_src)
else:
- label = Label(label_name, build, image, autotest_path, debug_path,
- chromeos_root, board, my_remote, image_args, cache_dir,
- cache_only, log_level, compiler, skylab, chrome_src)
+ label = Label(label_name, image, autotest_path, chromeos_root, board,
+ my_remote, image_args, cache_dir, cache_only, log_level,
+ compiler, chrome_src)
labels.append(label)
if not labels:
@@ -466,15 +309,11 @@ class ExperimentFactory(object):
email = global_settings.GetField('email')
all_remote += list(set(my_remote))
all_remote = list(set(all_remote))
- if skylab:
- for remote in all_remote:
- self.CheckRemotesInSkylab(remote)
experiment = Experiment(experiment_name, all_remote, working_directory,
chromeos_root, cache_conditions, labels, benchmarks,
experiment_file.Canonicalize(), email,
acquire_timeout, log_dir, log_level, share_cache,
- results_dir, locks_dir, cwp_dso, enable_aslr,
- ignore_min_max, skylab, dut_config)
+ results_dir, locks_dir)
return experiment
@@ -493,31 +332,7 @@ class ExperimentFactory(object):
raise RuntimeError('There is no remote for {0}'.format(board))
except IOError:
# TODO: rethrow instead of throwing different exception.
- raise RuntimeError(
- 'IOError while reading file {0}'.format(default_remotes_file))
+ raise RuntimeError('IOError while reading file {0}'
+ .format(default_remotes_file))
else:
- raise RuntimeError('There is no remote for {0}'.format(board))
-
- def CheckRemotesInSkylab(self, remote):
- # TODO: (AI:zhizhouy) need to check whether a remote is a local or lab
- # machine. If not lab machine, raise an error.
- pass
-
- def CheckSkylabTool(self, chromeos_root, log_level):
- SKYLAB_PATH = '/usr/local/bin/skylab'
- if os.path.exists(SKYLAB_PATH):
- return True
- l = logger.GetLogger()
- l.LogOutput('Skylab tool not installed, trying to install it.')
- ce = command_executer.GetCommandExecuter(l, log_level=log_level)
- setup_lab_tools = os.path.join(chromeos_root, 'chromeos-admin', 'lab-tools',
- 'setup_lab_tools')
- cmd = '%s' % setup_lab_tools
- status = ce.RunCommand(cmd)
- if status != 0:
- raise RuntimeError('Skylab tool not installed correctly, please try to '
- 'manually install it from %s' % setup_lab_tools)
- l.LogOutput('Skylab is installed at %s, please login before first use. '
- 'Login by running "skylab login" and follow instructions.' %
- SKYLAB_PATH)
- return False
+ raise RuntimeError('There is not remote for {0}'.format(board))
diff --git a/crosperf/experiment_factory_unittest.py b/crosperf/experiment_factory_unittest.py
index 32813357..b7d3420b 100755
--- a/crosperf/experiment_factory_unittest.py
+++ b/crosperf/experiment_factory_unittest.py
@@ -1,29 +1,24 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Unit test for experiment_factory.py"""
from __future__ import print_function
-import io
-import os
+import StringIO
import socket
-import unittest
-
import mock
+import unittest
-from cros_utils import command_executer
from cros_utils.file_utils import FileUtils
+from experiment_factory import ExperimentFactory
from experiment_file import ExperimentFile
import test_flag
import benchmark
import experiment_factory
-from experiment_factory import ExperimentFactory
import settings_factory
EXPERIMENT_FILE_1 = """
@@ -34,11 +29,6 @@ EXPERIMENT_FILE_1 = """
iterations: 3
}
- benchmark: webrtc {
- iterations: 1
- test_args: --story-filter=datachannel
- }
-
image1 {
chromeos_image: /usr/local/google/cros_image1.bin
}
@@ -48,31 +38,6 @@ EXPERIMENT_FILE_1 = """
}
"""
-EXPERIMENT_FILE_2 = """
- board: x86-alex
- remote: chromeos-alex3
-
- cwp_dso: kallsyms
-
- benchmark: Octane {
- iterations: 1
- suite: telemetry_Crosperf
- run_local: False
- weight: 0.8
- }
-
- benchmark: Kraken {
- iterations: 1
- suite: telemetry_Crosperf
- run_local: False
- weight: 0.2
- }
-
- image1 {
- chromeos_image: /usr/local/google/cros_image1.bin
- }
- """
-
# pylint: disable=too-many-function-args
@@ -83,183 +48,47 @@ class ExperimentFactoryTest(unittest.TestCase):
self.append_benchmark_call_args = []
def testLoadExperimentFile1(self):
- experiment_file = ExperimentFile(io.BytesIO(EXPERIMENT_FILE_1))
+ experiment_file = ExperimentFile(StringIO.StringIO(EXPERIMENT_FILE_1))
exp = ExperimentFactory().GetExperiment(
experiment_file, working_directory='', log_dir='')
self.assertEqual(exp.remote, ['chromeos-alex3'])
- self.assertEqual(len(exp.benchmarks), 2)
+ self.assertEqual(len(exp.benchmarks), 1)
self.assertEqual(exp.benchmarks[0].name, 'PageCycler')
self.assertEqual(exp.benchmarks[0].test_name, 'PageCycler')
self.assertEqual(exp.benchmarks[0].iterations, 3)
- self.assertEqual(exp.benchmarks[1].name, 'webrtc@@datachannel')
- self.assertEqual(exp.benchmarks[1].test_name, 'webrtc')
- self.assertEqual(exp.benchmarks[1].iterations, 1)
self.assertEqual(len(exp.labels), 2)
self.assertEqual(exp.labels[0].chromeos_image,
'/usr/local/google/cros_image1.bin')
self.assertEqual(exp.labels[0].board, 'x86-alex')
- def testLoadExperimentFile2CWP(self):
- experiment_file = ExperimentFile(io.BytesIO(EXPERIMENT_FILE_2))
- exp = ExperimentFactory().GetExperiment(
- experiment_file, working_directory='', log_dir='')
- self.assertEqual(exp.cwp_dso, 'kallsyms')
- self.assertEqual(len(exp.benchmarks), 2)
- self.assertEqual(exp.benchmarks[0].weight, 0.8)
- self.assertEqual(exp.benchmarks[1].weight, 0.2)
-
- def testDuplecateBenchmark(self):
- mock_experiment_file = ExperimentFile(io.BytesIO(''))
- mock_experiment_file.all_settings = []
- benchmark_settings1 = settings_factory.BenchmarkSettings('name')
- mock_experiment_file.all_settings.append(benchmark_settings1)
- benchmark_settings2 = settings_factory.BenchmarkSettings('name')
- mock_experiment_file.all_settings.append(benchmark_settings2)
-
- with self.assertRaises(SyntaxError):
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
-
- def testCWPExceptions(self):
- mock_experiment_file = ExperimentFile(io.BytesIO(''))
- mock_experiment_file.all_settings = []
- global_settings = settings_factory.GlobalSettings('test_name')
-
- # Test 1: DSO type not supported
- global_settings.SetField('cwp_dso', 'test')
- self.assertEqual(global_settings.GetField('cwp_dso'), 'test')
- mock_experiment_file.global_settings = global_settings
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('The DSO specified is not supported', str(msg.exception))
-
- # Test 2: No weight after DSO specified
- global_settings.SetField('cwp_dso', 'kallsyms')
- mock_experiment_file.global_settings = global_settings
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('With DSO specified, each benchmark should have a weight',
- str(msg.exception))
-
- # Test 3: Weight is set, but no dso specified
- global_settings.SetField('cwp_dso', '')
- mock_experiment_file.global_settings = global_settings
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- benchmark_settings.SetField('weight', '0.8')
- mock_experiment_file.all_settings = []
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('Weight can only be set when DSO specified',
- str(msg.exception))
-
- # Test 4: cwp_dso only works for telemetry_Crosperf benchmarks
- global_settings.SetField('cwp_dso', 'kallsyms')
- mock_experiment_file.global_settings = global_settings
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- benchmark_settings.SetField('weight', '0.8')
- mock_experiment_file.all_settings = []
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual(
- 'CWP approximation weight only works with '
- 'telemetry_Crosperf suite', str(msg.exception))
-
- # Test 5: cwp_dso does not work for local run
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- benchmark_settings.SetField('weight', '0.8')
- benchmark_settings.SetField('suite', 'telemetry_Crosperf')
- benchmark_settings.SetField('run_local', 'True')
- mock_experiment_file.all_settings = []
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('run_local must be set to False to use CWP approximation',
- str(msg.exception))
-
- # Test 6: weight should be float >=0
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- benchmark_settings.SetField('weight', '-1.2')
- benchmark_settings.SetField('suite', 'telemetry_Crosperf')
- benchmark_settings.SetField('run_local', 'False')
- mock_experiment_file.all_settings = []
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('Weight should be a float >=0', str(msg.exception))
-
- # Test 7: more than one story tag in test_args
- benchmark_settings = settings_factory.BenchmarkSettings('name')
- benchmark_settings.SetField('test_args',
- '--story-filter=a --story-tag-filter=b')
- benchmark_settings.SetField('weight', '1.2')
- benchmark_settings.SetField('suite', 'telemetry_Crosperf')
- mock_experiment_file.all_settings = []
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual(
- 'Only one story or story-tag filter allowed in a single '
- 'benchmark run', str(msg.exception))
-
- # Test 8: Iterations of each benchmark run are not same in cwp mode
- mock_experiment_file.all_settings = []
- benchmark_settings = settings_factory.BenchmarkSettings('name1')
- benchmark_settings.SetField('iterations', '4')
- benchmark_settings.SetField('weight', '1.2')
- benchmark_settings.SetField('suite', 'telemetry_Crosperf')
- benchmark_settings.SetField('run_local', 'False')
- mock_experiment_file.all_settings.append(benchmark_settings)
- benchmark_settings = settings_factory.BenchmarkSettings('name2')
- benchmark_settings.SetField('iterations', '3')
- benchmark_settings.SetField('weight', '1.2')
- benchmark_settings.SetField('suite', 'telemetry_Crosperf')
- benchmark_settings.SetField('run_local', 'False')
- mock_experiment_file.all_settings.append(benchmark_settings)
- with self.assertRaises(RuntimeError) as msg:
- ef = ExperimentFactory()
- ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual('Iterations of each benchmark run are not the same',
- str(msg.exception))
-
def test_append_benchmark_set(self):
ef = ExperimentFactory()
bench_list = []
ef.AppendBenchmarkSet(bench_list, experiment_factory.telemetry_perfv2_tests,
'', 1, False, '', 'telemetry_Crosperf', False, 0,
- False, '', 0, False)
+ False)
self.assertEqual(
len(bench_list), len(experiment_factory.telemetry_perfv2_tests))
- self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
+ self.assertTrue(type(bench_list[0]) is benchmark.Benchmark)
bench_list = []
- ef.AppendBenchmarkSet(
- bench_list, experiment_factory.telemetry_pagecycler_tests, '', 1, False,
- '', 'telemetry_Crosperf', False, 0, False, '', 0, False)
+ ef.AppendBenchmarkSet(bench_list,
+ experiment_factory.telemetry_pagecycler_tests, '', 1,
+ False, '', 'telemetry_Crosperf', False, 0, False)
self.assertEqual(
len(bench_list), len(experiment_factory.telemetry_pagecycler_tests))
- self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
+ self.assertTrue(type(bench_list[0]) is benchmark.Benchmark)
bench_list = []
- ef.AppendBenchmarkSet(
- bench_list, experiment_factory.telemetry_toolchain_perf_tests, '', 1,
- False, '', 'telemetry_Crosperf', False, 0, False, '', 0, False)
+ ef.AppendBenchmarkSet(bench_list,
+ experiment_factory.telemetry_toolchain_perf_tests, '',
+ 1, False, '', 'telemetry_Crosperf', False, 0, False)
self.assertEqual(
len(bench_list), len(experiment_factory.telemetry_toolchain_perf_tests))
- self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
+ self.assertTrue(type(bench_list[0]) is benchmark.Benchmark)
@mock.patch.object(socket, 'gethostname')
def test_get_experiment(self, mock_socket):
@@ -280,17 +109,13 @@ class ExperimentFactoryTest(unittest.TestCase):
return []
return ['fake_chromeos_machine1.cros', 'fake_chromeos_machine2.cros']
- def FakeGetXbuddyPath(build, autotest_dir, debug_dir, board, chroot,
- log_level, perf_args):
+ def FakeGetXbuddyPath(build, autotest_dir, board, chroot, log_level):
autotest_path = autotest_dir
if not autotest_path:
autotest_path = 'fake_autotest_path'
- debug_path = debug_dir
- if not debug_path and perf_args:
- debug_path = 'fake_debug_path'
if not build or not board or not chroot or not log_level:
- return '', autotest_path, debug_path
- return 'fake_image_path', autotest_path, debug_path
+ return '', autotest_path
+ return 'fake_image_path', autotest_path
ef = ExperimentFactory()
ef.AppendBenchmarkSet = FakeAppendBenchmarkSet
@@ -302,7 +127,7 @@ class ExperimentFactoryTest(unittest.TestCase):
label_settings.GetXbuddyPath = FakeGetXbuddyPath
- mock_experiment_file = ExperimentFile(io.BytesIO(''))
+ mock_experiment_file = ExperimentFile(StringIO.StringIO(''))
mock_experiment_file.all_settings = []
test_flag.SetTestMode(True)
@@ -333,16 +158,16 @@ class ExperimentFactoryTest(unittest.TestCase):
self.assertEqual(exp.log_level, 'average')
self.assertEqual(len(exp.benchmarks), 1)
- self.assertEqual(exp.benchmarks[0].name, 'bench_test')
+ self.assertEqual(exp.benchmarks[0].name, 'kraken')
self.assertEqual(exp.benchmarks[0].test_name, 'kraken')
self.assertEqual(exp.benchmarks[0].iterations, 1)
self.assertEqual(exp.benchmarks[0].suite, 'telemetry_Crosperf')
self.assertFalse(exp.benchmarks[0].show_all_results)
self.assertEqual(len(exp.labels), 1)
- self.assertEqual(
- exp.labels[0].chromeos_image, 'chromeos/src/build/images/lumpy/latest/'
- 'chromiumos_test_image.bin')
+ self.assertEqual(exp.labels[0].chromeos_image,
+ 'chromeos/src/build/images/lumpy/latest/'
+ 'chromiumos_test_image.bin')
self.assertEqual(exp.labels[0].autotest_path, '/tmp/autotest')
self.assertEqual(exp.labels[0].board, 'lumpy')
@@ -350,9 +175,9 @@ class ExperimentFactoryTest(unittest.TestCase):
test_flag.SetTestMode(True)
label_settings.SetField('remote', 'chromeos1.cros chromeos2.cros')
exp = ef.GetExperiment(mock_experiment_file, '', '')
- self.assertEqual(
- exp.remote,
- ['chromeos1.cros', 'chromeos2.cros', '123.45.67.89', '123.45.76.80'])
+ self.assertEqual(exp.remote, [
+ 'chromeos1.cros', 'chromeos2.cros', '123.45.67.89', '123.45.76.80'
+ ])
# Third test: Automatic fixing of bad logging_level param:
global_settings.SetField('logging_level', 'really loud!')
@@ -388,14 +213,13 @@ class ExperimentFactoryTest(unittest.TestCase):
self.assertEqual(len(exp.labels), 2)
self.assertEqual(exp.labels[1].chromeos_image, 'fake_image_path')
self.assertEqual(exp.labels[1].autotest_path, 'fake_autotest_path')
- self.assertEqual(
- exp.remote,
- ['fake_chromeos_machine1.cros', 'fake_chromeos_machine2.cros'])
+ self.assertEqual(exp.remote, [
+ 'fake_chromeos_machine1.cros', 'fake_chromeos_machine2.cros'
+ ])
def test_get_default_remotes(self):
board_list = [
- 'elm', 'bob', 'chell', 'kefka', 'lulu', 'nautilus', 'snappy',
- 'veyron_minnie'
+ 'lumpy', 'elm', 'parrot', 'daisy', 'peach_pit', 'peppy', 'squawks'
]
ef = ExperimentFactory()
@@ -410,33 +234,6 @@ class ExperimentFactoryTest(unittest.TestCase):
else:
self.assertGreaterEqual(len(remotes), 2)
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommand')
- @mock.patch.object(os.path, 'exists')
- def test_check_skylab_tool(self, mock_exists, mock_runcmd):
- ef = ExperimentFactory()
- chromeos_root = '/tmp/chromeos'
- log_level = 'average'
-
- mock_exists.return_value = True
- ret = ef.CheckSkylabTool(chromeos_root, log_level)
- self.assertTrue(ret)
-
- mock_exists.return_value = False
- mock_runcmd.return_value = 1
- with self.assertRaises(RuntimeError) as err:
- ef.CheckSkylabTool(chromeos_root, log_level)
- self.assertEqual(mock_runcmd.call_count, 1)
- self.assertEqual(
- err.exception.message, 'Skylab tool not installed '
- 'correctly, please try to manually install it from '
- '/tmp/chromeos/chromeos-admin/lab-tools/setup_lab_tools')
-
- mock_runcmd.return_value = 0
- mock_runcmd.call_count = 0
- ret = ef.CheckSkylabTool(chromeos_root, log_level)
- self.assertEqual(mock_runcmd.call_count, 1)
- self.assertFalse(ret)
-
if __name__ == '__main__':
FileUtils.Configure(True)
diff --git a/crosperf/experiment_file.py b/crosperf/experiment_file.py
index 41a2b809..57eb52dc 100644
--- a/crosperf/experiment_file.py
+++ b/crosperf/experiment_file.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""The experiment file module. It manages the input file of crosperf."""
from __future__ import print_function
@@ -99,7 +97,7 @@ class ExperimentFile(object):
field = self._ParseField(reader)
settings.SetField(field[0], field[1], field[2])
elif ExperimentFile._CLOSE_SETTINGS_RE.match(line):
- return settings, settings_type
+ return settings
raise EOFError('Unexpected EOF while parsing settings block.')
@@ -114,15 +112,11 @@ class ExperimentFile(object):
if not line:
continue
elif ExperimentFile._OPEN_SETTINGS_RE.match(line):
- new_settings, settings_type = self._ParseSettings(reader)
- # We will allow benchmarks with duplicated settings name for now.
- # Further decision will be made when parsing benchmark details in
- # ExperimentFactory.GetExperiment().
- if settings_type != 'benchmark':
- if new_settings.name in settings_names:
- raise SyntaxError(
- "Duplicate settings name: '%s'." % new_settings.name)
- settings_names[new_settings.name] = True
+ new_settings = self._ParseSettings(reader)
+ if new_settings.name in settings_names:
+ raise SyntaxError(
+ "Duplicate settings name: '%s'." % new_settings.name)
+ settings_names[new_settings.name] = True
self.all_settings.append(new_settings)
elif ExperimentFile._FIELD_VALUE_RE.match(line):
field = self._ParseField(reader)
@@ -166,22 +160,11 @@ class ExperimentFile(object):
autotest_path = ''
if autotest_field.assigned:
autotest_path = autotest_field.GetString()
- debug_field = settings.fields['debug_path']
- debug_path = ''
- if debug_field.assigned:
- debug_path = autotest_field.GetString()
- perf_args_field = self.global_settings.fields['perf_args']
- perf_args = ''
- if perf_args_field.assigned:
- perf_args = perf_args_field.GetString()
- image_path, autotest_path, debug_path = settings.GetXbuddyPath(
- value, autotest_path, debug_path, board, chromeos_root,
- 'quiet', perf_args)
+ image_path, autotest_path = settings.GetXbuddyPath(
+ value, autotest_path, board, chromeos_root, 'quiet')
res += '\t#actual_image: %s\n' % image_path
if not autotest_field.assigned:
res += '\t#actual_autotest_path: %s\n' % autotest_path
- if not debug_field.assigned:
- res += '\t#actual_debug_path: %s\n' % debug_path
res += '}\n\n'
diff --git a/crosperf/experiment_file_unittest.py b/crosperf/experiment_file_unittest.py
index 12b68223..d4a02107 100755
--- a/crosperf/experiment_file_unittest.py
+++ b/crosperf/experiment_file_unittest.py
@@ -1,16 +1,12 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""The unittest of experiment_file."""
from __future__ import print_function
-
-import io
+import StringIO
import unittest
-
from experiment_file import ExperimentFile
EXPERIMENT_FILE_1 = """
@@ -69,75 +65,6 @@ EXPERIMENT_FILE_3 = """
}
"""
-EXPERIMENT_FILE_4 = """
- board: x86-alex
- remote: chromeos-alex3
- iterations: 3
-
- benchmark: webrtc {
- test_args: --story-filter=datachannel
- }
-
- benchmark: webrtc {
- test_args: --story-tag-filter=smoothness
- }
-
- image1 {
- chromeos_image:/usr/local/google/cros_image1.bin
- }
- """
-
-DUT_CONFIG_EXPERIMENT_FILE_GOOD = """
- board: kevin64
- remote: chromeos-kevin.cros
- turbostat: False
- intel_pstate: no_hwp
- cooldown_temp: 38
- cooldown_time: 5
- governor: powersave
- cpu_usage: exclusive_cores
- cpu_freq_pct: 50
- top_interval: 5
-
- benchmark: speedometer {
- iterations: 3
- suite: telemetry_Crosperf
- }
-
- image1 {
- chromeos_image:/usr/local/google/cros_image1.bin
- }
- """
-
-DUT_CONFIG_EXPERIMENT_FILE_BAD_GOV = """
- board: kevin64
- remote: chromeos-kevin.cros
- intel_pstate: active
- governor: misspelled_governor
-
- benchmark: speedometer2 {
- iterations: 3
- suite: telemetry_Crosperf
- }
- """
-
-DUT_CONFIG_EXPERIMENT_FILE_BAD_CPUUSE = """
- board: kevin64
- remote: chromeos-kevin.cros
- turbostat: False
- governor: ondemand
- cpu_usage: unknown
-
- benchmark: speedometer2 {
- iterations: 3
- suite: telemetry_Crosperf
- }
-
- image1 {
- chromeos_image:/usr/local/google/cros_image1.bin
- }
- """
-
OUTPUT_FILE = """board: x86-alex
remote: chromeos-alex3
perf_args: record -a -e cycles
@@ -161,7 +88,7 @@ class ExperimentFileTest(unittest.TestCase):
"""The main class for Experiment File test."""
def testLoadExperimentFile1(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_1)
+ input_file = StringIO.StringIO(EXPERIMENT_FILE_1)
experiment_file = ExperimentFile(input_file)
global_settings = experiment_file.GetGlobalSettings()
self.assertEqual(global_settings.GetField('remote'), ['chromeos-alex3'])
@@ -181,7 +108,7 @@ class ExperimentFileTest(unittest.TestCase):
self.assertEqual(label_settings[0].GetField('remote'), ['chromeos-alex3'])
def testOverrideSetting(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_2)
+ input_file = StringIO.StringIO(EXPERIMENT_FILE_2)
experiment_file = ExperimentFile(input_file)
global_settings = experiment_file.GetGlobalSettings()
self.assertEqual(global_settings.GetField('remote'), ['chromeos-alex3'])
@@ -194,59 +121,15 @@ class ExperimentFileTest(unittest.TestCase):
self.assertEqual(benchmark_settings[1].GetField('iterations'), 2)
def testDuplicateLabel(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_3)
+ input_file = StringIO.StringIO(EXPERIMENT_FILE_3)
self.assertRaises(Exception, ExperimentFile, input_file)
- def testDuplicateBenchmark(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_4)
- experiment_file = ExperimentFile(input_file)
- benchmark_settings = experiment_file.GetSettings('benchmark')
- self.assertEqual(benchmark_settings[0].name, 'webrtc')
- self.assertEqual(benchmark_settings[0].GetField('test_args'),
- '--story-filter=datachannel')
- self.assertEqual(benchmark_settings[1].name, 'webrtc')
- self.assertEqual(benchmark_settings[1].GetField('test_args'),
- '--story-tag-filter=smoothness')
-
def testCanonicalize(self):
- input_file = io.BytesIO(EXPERIMENT_FILE_1)
+ input_file = StringIO.StringIO(EXPERIMENT_FILE_1)
experiment_file = ExperimentFile(input_file)
res = experiment_file.Canonicalize()
self.assertEqual(res, OUTPUT_FILE)
- def testLoadDutConfigExperimentFile_Good(self):
- input_file = io.BytesIO(DUT_CONFIG_EXPERIMENT_FILE_GOOD)
- experiment_file = ExperimentFile(input_file)
- global_settings = experiment_file.GetGlobalSettings()
- self.assertEqual(global_settings.GetField('turbostat'), False)
- self.assertEqual(global_settings.GetField('intel_pstate'), 'no_hwp')
- self.assertEqual(global_settings.GetField('governor'), 'powersave')
- self.assertEqual(global_settings.GetField('cpu_usage'), 'exclusive_cores')
- self.assertEqual(global_settings.GetField('cpu_freq_pct'), 50)
- self.assertEqual(global_settings.GetField('cooldown_time'), 5)
- self.assertEqual(global_settings.GetField('cooldown_temp'), 38)
- self.assertEqual(global_settings.GetField('top_interval'), 5)
-
- def testLoadDutConfigExperimentFile_WrongGovernor(self):
- input_file = io.BytesIO(DUT_CONFIG_EXPERIMENT_FILE_BAD_GOV)
- with self.assertRaises(RuntimeError) as msg:
- ExperimentFile(input_file)
- self.assertRegexpMatches(
- str(msg.exception), 'governor: misspelled_governor')
- self.assertRegexpMatches(
- str(msg.exception), "Invalid enum value for field 'governor'."
- r' Must be one of \(performance, powersave, userspace, ondemand,'
- r' conservative, schedutils, sched, interactive\)')
-
- def testLoadDutConfigExperimentFile_WrongCpuUsage(self):
- input_file = io.BytesIO(DUT_CONFIG_EXPERIMENT_FILE_BAD_CPUUSE)
- with self.assertRaises(RuntimeError) as msg:
- ExperimentFile(input_file)
- self.assertRegexpMatches(str(msg.exception), 'cpu_usage: unknown')
- self.assertRegexpMatches(
- str(msg.exception), "Invalid enum value for field 'cpu_usage'."
- r' Must be one of \(all, big_only, little_only, exclusive_cores\)')
-
if __name__ == '__main__':
unittest.main()
diff --git a/crosperf/experiment_files/README.md b/crosperf/experiment_files/README
index 8c1fe200..d9c96870 100644
--- a/crosperf/experiment_files/README.md
+++ b/crosperf/experiment_files/README
@@ -1,20 +1,16 @@
-# Experiment files
-
To use these experiment files, replace the board, remote and images
placeholders and run crosperf on them.
Further information about crosperf:
https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/crosperf-cros-image-performance-comparison-tool
-The final experiment file should look something like the following (but with
+The final experiment file should look something like the following (but with
different actual values for the fields):
-```
board: lumpy
remote: 123.45.67.089
# Add images you want to test:
-
my_image {
chromeos_image: /usr/local/chromeos/src/build/images/lumpy/chromiumos_test_image.bin
}
@@ -25,15 +21,14 @@ vanilla_image {
}
# Paste experiment benchmarks here. Example, I pasted
-# `page_cycler_v2.morejs` here.
+# page_cycler_v2.morejs here.
-# This experiment just runs a short autotest which measures the performance
-# of Telemetry's `page_cycler_v2.morejs`. In addition, it profiles cycles.
+# This experiment just runs a short autotest which measures the performance of
+# Telemetry's page_cycler_v2.morejs. In addition, it profiles
-perf_args: record -e cycles
+perg_args: record -e cycles
benchmark: page_cycler_v2.morejs {
suite: telemetry_Crosperf
iterations: 1
}
-```
diff --git a/crosperf/experiment_files/dut_config.exp b/crosperf/experiment_files/dut_config.exp
deleted file mode 100644
index fb81ba89..00000000
--- a/crosperf/experiment_files/dut_config.exp
+++ /dev/null
@@ -1,66 +0,0 @@
-# This experiment template shows how to run Telemetry tests (using autotest)
-# with explicitly specified DUT configurations.
-#
-# You should replace all the placeholders, marked by angle-brackets,
-# with the appropriate actual values.
-
-name: dut_config_telemetry_crosperf_example
-board: <your-board-goes-here>
-
-# Note: You can specify multiple remotes, to run your tests in parallel on
-# multiple machines. e.g. "remote: test-machine-1.com test-machine2.come
-# test-machine3.com"
-remote: <your-remote-goes-here>
-
-# DUT configuration parameters. All are optional.
-#
-# Run turbostat process in background. Default: True.
-turbostat: <True|False>
-# Run top process in background with specified interval of sampling in
-# seconds, type float. 0 - don't run top.
-# Default: 0
-# Recommended values 1-5 (Lower number provides more accurate data).
-# NOTE: Running top with interval 1-5 sec has insignificant
-# performance impact (performance degradation does not exceed 0.3%).
-top_interval: <interval_in_seconds_float>
-# One of Intel Pstate modes defined in kernel command line:
-# active, passive, no_hwp.
-intel_pstate: <active|passive|no_hwp>
-# Wait until CPU cools down to a specified temperature
-# in Celsius or cooldown_time timeout reaches zero
-# (whichever happens first). Default: 40.
-cooldown_temp: <temperature-threshold-for-cooldown>
-# Timeout specified in minutes for CPU cooling down
-# to cooldown_temp temperature. Zero value disables cooldown.
-# Default: 0.
-cooldown_time: <time-to-cooldown-in-minutes>
-# CPU governor.
-# See: https://www.kernel.org/doc/Documentation/cpu-freq/governors.txt
-# for available values (they might differ for ARM and Intel).
-governor: <one-of-scaling_available_governors-values>
-# Restrict CPU usage to predefined "models":
-# all, big_only, little_only, exclusive_cores.
-cpu_usage: <usage-model>
-# Setup CPU frequency as percent of max_freq.
-# Default: 100
-cpu_freq_pct: <0-100>
-
-# The example below will run Telemetry toolchain performance benchmarks.
-# The exact list of benchmarks that will be run can be seen in
-# crosperf/experiment_factory.py
-benchmark: all_toolchain_perf {
- suite: telemetry_Crosperf
- run_local: False
- iterations: 1
-}
-
-# NOTE: You must specify at least one image; you may specify more than one.
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-vanilla_image {
- chromeos_image:<path-to-your-chroot>/src/build/images/<board>/vanilla-image/chromiumos_test_image.bin
-}
-
-# Replace the chromeos image below with the actual path to your test image.
-test_image {
- chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
-}
diff --git a/crosperf/experiment_files/enable_aslr.exp b/crosperf/experiment_files/enable_aslr.exp
deleted file mode 100644
index 5f8f654e..00000000
--- a/crosperf/experiment_files/enable_aslr.exp
+++ /dev/null
@@ -1,37 +0,0 @@
-# This example experiment file shows how to run a Telemetry test,
-# using autotest (via "suite: telemetry_Crosperf"), and also enable
-# ASLR. Note that ASLR is diabled by default
-# This turns on ASLR on the machine and runs the Telemetry's
-# "run_benchmark" for the specified test,
-#
-#
-# You should replace all the placeholders, marked by angle-brackets,
-# with the appropriate actual values.
-
-name: basic_telemetry_crosperf_example
-board: <your-board-goes-here>
-
-enable_aslr: True
-
-# Note: You can specify multiple remotes, to run your tests in parallel on
-# multiple machines. e.g. "remote: test-machine-1.com test-machine2.come
-# test-machine3.com"
-remote: <your-remote-goes-here>
-
-# Replace "octane" below with the name of the Telemetry benchmark you
-# want to run.
-benchmark: octane {
- suite: telemetry_Crosperf
- iterations: 1
-}
-
-# NOTE: You must specify at least one image; you may specify more than one.
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-vanilla_image {
- chromeos_image:<path-to-your-chroot>/src/build/images/<board>/vanilla-image/chromiumos_test_image.bin
-}
-
-# Replace the chromeos image below with the actual path to your test image.
-test_image {
- chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
-}
diff --git a/crosperf/experiment_runner.py b/crosperf/experiment_runner.py
index cb6e9785..b583743b 100644
--- a/crosperf/experiment_runner.py
+++ b/crosperf/experiment_runner.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""The experiment runner module."""
from __future__ import print_function
@@ -11,7 +9,7 @@ import os
import shutil
import time
-import lock_machine
+import afe_lock_machine
import test_flag
from cros_utils import command_executer
@@ -99,68 +97,65 @@ class ExperimentRunner(object):
if m not in locked_machines:
l.remote.remove(m)
- def _GetMachineType(self, lock_mgr, machine):
- """Get where is the machine from.
-
- Returns:
- The location of the machine: local, skylab or afe
- """
- # We assume that lab machine always starts with chromeos*, and local
- # machines are ip address.
- if 'chromeos' in machine:
- if lock_mgr.CheckMachineInSkylab(machine):
- return 'skylab'
- else:
- return 'afe'
- return 'local'
-
def _LockAllMachines(self, experiment):
"""Attempt to globally lock all of the machines requested for run.
- This method tries to lock all machines requested for this crosperf run
- in three different modes automatically, to prevent any other crosperf runs
- from being able to update/use the machines while this experiment is
- running:
- - Skylab machines: Use skylab lease-dut mechanism to lease
- - AFE machines: Use AFE lock mechanism to lock
- - Local machines: Use file lock mechanism to lock
+ This method will use the AFE server to globally lock all of the machines
+ requested for this crosperf run, to prevent any other crosperf runs from
+ being able to update/use the machines while this experiment is running.
"""
if test_flag.GetTestMode():
self.locked_machines = self._GetMachineList()
- experiment.locked_machines = self.locked_machines
+ self._experiment.locked_machines = self.locked_machines
else:
- experiment.lock_mgr = lock_machine.LockManager(
+ lock_mgr = afe_lock_machine.AFELockManager(
self._GetMachineList(),
'',
experiment.labels[0].chromeos_root,
- experiment.locks_dir,
- log=self.l,
- )
- for m in experiment.lock_mgr.machines:
- machine_type = self._GetMachineType(experiment.lock_mgr, m)
- if machine_type == 'local':
- experiment.lock_mgr.AddMachineToLocal(m)
- elif machine_type == 'skylab':
- experiment.lock_mgr.AddMachineToSkylab(m)
- machine_states = experiment.lock_mgr.GetMachineStates('lock')
- experiment.lock_mgr.CheckMachineLocks(machine_states, 'lock')
- self.locked_machines = experiment.lock_mgr.UpdateMachines(True)
- experiment.locked_machines = self.locked_machines
+ None,
+ log=self.l,)
+ for m in lock_mgr.machines:
+ if not lock_mgr.MachineIsKnown(m):
+ lock_mgr.AddLocalMachine(m)
+ machine_states = lock_mgr.GetMachineStates('lock')
+ lock_mgr.CheckMachineLocks(machine_states, 'lock')
+ self.locked_machines = lock_mgr.UpdateMachines(True)
+ self._experiment.locked_machines = self.locked_machines
self._UpdateMachineList(self.locked_machines)
- experiment.machine_manager.RemoveNonLockedMachines(self.locked_machines)
+ self._experiment.machine_manager.RemoveNonLockedMachines(
+ self.locked_machines)
if len(self.locked_machines) == 0:
raise RuntimeError('Unable to lock any machines.')
+ def _UnlockAllMachines(self, experiment):
+ """Attempt to globally unlock all of the machines requested for run.
+
+ The method will use the AFE server to globally unlock all of the machines
+ requested for this crosperf run.
+ """
+ if not self.locked_machines or test_flag.GetTestMode():
+ return
+
+ lock_mgr = afe_lock_machine.AFELockManager(
+ self.locked_machines,
+ '',
+ experiment.labels[0].chromeos_root,
+ None,
+ log=self.l,)
+ machine_states = lock_mgr.GetMachineStates('unlock')
+ lock_mgr.CheckMachineLocks(machine_states, 'unlock')
+ lock_mgr.UpdateMachines(False)
+
def _ClearCacheEntries(self, experiment):
for br in experiment.benchmark_runs:
cache = ResultsCache()
- cache.Init(
- br.label.chromeos_image, br.label.chromeos_root,
- br.benchmark.test_name, br.iteration, br.test_args, br.profiler_args,
- br.machine_manager, br.machine, br.label.board, br.cache_conditions,
- br.logger(), br.log_level, br.label, br.share_cache,
- br.benchmark.suite, br.benchmark.show_all_results,
- br.benchmark.run_local, br.benchmark.cwp_dso)
+ cache.Init(br.label.chromeos_image, br.label.chromeos_root,
+ br.benchmark.test_name, br.iteration, br.test_args,
+ br.profiler_args, br.machine_manager, br.machine,
+ br.label.board, br.cache_conditions,
+ br.logger(), br.log_level, br.label, br.share_cache,
+ br.benchmark.suite, br.benchmark.show_all_results,
+ br.benchmark.run_local)
cache_dir = cache.GetCacheDirForWrite()
if os.path.exists(cache_dir):
self.l.LogOutput('Removing cache dir: %s' % cache_dir)
@@ -168,10 +163,8 @@ class ExperimentRunner(object):
def _Run(self, experiment):
try:
- self._LockAllMachines(experiment)
- # Calculate all checksums of avaiable/locked machines, to ensure same
- # label has same machines for testing
- experiment.SetCheckSums(forceSameImage=True)
+ if not experiment.locks_dir:
+ self._LockAllMachines(experiment)
if self._using_schedv2:
schedv2 = Schedv2(experiment)
experiment.set_schedv2(schedv2)
@@ -215,7 +208,8 @@ class ExperimentRunner(object):
experiment.Terminate()
raise
finally:
- experiment.Cleanup()
+ if not experiment.locks_dir:
+ self._UnlockAllMachines(experiment)
def _PrintTable(self, experiment):
self.l.LogOutput(TextResultsReport.FromExperiment(experiment).GetReport())
@@ -282,24 +276,11 @@ class ExperimentRunner(object):
self.l.LogOutput('Storing results of each benchmark run.')
for benchmark_run in experiment.benchmark_runs:
if benchmark_run.result:
- benchmark_run_name = ''.join(
- ch for ch in benchmark_run.name if ch.isalnum())
+ benchmark_run_name = filter(str.isalnum, benchmark_run.name)
benchmark_run_path = os.path.join(results_directory, benchmark_run_name)
benchmark_run.result.CopyResultsTo(benchmark_run_path)
benchmark_run.result.CleanUp(benchmark_run.benchmark.rm_chroot_tmp)
- topstats_file = os.path.join(results_directory, 'topstats.log')
- self.l.LogOutput('Storing top5 statistics of each benchmark run into %s.' %
- topstats_file)
- with open(topstats_file, 'w') as top_fd:
- for benchmark_run in experiment.benchmark_runs:
- if benchmark_run.result:
- # Header with benchmark run name.
- top_fd.write('%s\n' % str(benchmark_run))
- # Formatted string with top statistics.
- top_fd.write(benchmark_run.result.FormatStringTop5())
- top_fd.write('\n\n')
-
def Run(self):
try:
self._Run(self._experiment)
diff --git a/crosperf/experiment_runner_unittest.py b/crosperf/experiment_runner_unittest.py
index 2ec11ccd..4809894f 100755
--- a/crosperf/experiment_runner_unittest.py
+++ b/crosperf/experiment_runner_unittest.py
@@ -1,21 +1,18 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Tests for the experiment runner module."""
from __future__ import print_function
+import StringIO
import getpass
-import io
import os
-import time
-import unittest
import mock
+import unittest
import experiment_runner
import experiment_status
@@ -108,7 +105,7 @@ class ExperimentRunnerTest(unittest.TestCase):
def make_fake_experiment(self):
test_flag.SetTestMode(True)
- experiment_file = ExperimentFile(io.BytesIO(EXPERIMENT_FILE_1))
+ experiment_file = ExperimentFile(StringIO.StringIO(EXPERIMENT_FILE_1))
experiment = ExperimentFactory().GetExperiment(
experiment_file, working_directory='', log_dir='')
return experiment
@@ -140,18 +137,12 @@ class ExperimentRunnerTest(unittest.TestCase):
cmd_exec=self.mock_cmd_exec)
self.assertEqual(er.STATUS_TIME_DELAY, 30)
- @mock.patch.object(time, 'time')
- @mock.patch.object(time, 'sleep')
@mock.patch.object(experiment_status.ExperimentStatus, 'GetStatusString')
@mock.patch.object(experiment_status.ExperimentStatus, 'GetProgressString')
- def test_run(self, mock_progress_string, mock_status_string, mock_sleep,
- mock_time):
+ def test_run(self, mock_progress_string, mock_status_string):
self.run_count = 0
self.is_complete_count = 0
- mock_sleep.return_value = None
- # pylint: disable=range-builtin-not-iterating
- mock_time.side_effect = range(1, 50, 1)
def reset():
self.run_count = 0
@@ -163,7 +154,7 @@ class ExperimentRunnerTest(unittest.TestCase):
def FakeIsComplete():
self.is_complete_count += 1
- if self.is_complete_count < 6:
+ if self.is_complete_count < 3:
return False
else:
return True
@@ -313,11 +304,11 @@ class ExperimentRunnerTest(unittest.TestCase):
self.assertEqual(mock_html_report.call_count, 1)
self.assertEqual(len(mock_emailer.call_args), 2)
self.assertEqual(mock_emailer.call_args[0],
- (['jane.doe@google.com', 'john.smith@google.com'
- ], ': image1 vs. image2',
+ (['jane.doe@google.com',
+ 'john.smith@google.com'], ': image1 vs. image2',
"<pre style='font-size: 13px'>This is a fake text "
'report.\nResults are stored in _results.\n</pre>'))
- self.assertTrue(isinstance(mock_emailer.call_args[1], dict))
+ self.assertTrue(type(mock_emailer.call_args[1]) is dict)
self.assertEqual(len(mock_emailer.call_args[1]), 2)
self.assertTrue('attachments' in mock_emailer.call_args[1].keys())
self.assertEqual(mock_emailer.call_args[1]['msg_type'], 'html')
@@ -349,7 +340,7 @@ class ExperimentRunnerTest(unittest.TestCase):
], ': image1 vs. image2',
"<pre style='font-size: 13px'>This is a fake text "
'report.\nResults are stored in _results.\n</pre>'))
- self.assertTrue(isinstance(mock_emailer.call_args[1], dict))
+ self.assertTrue(type(mock_emailer.call_args[1]) is dict)
self.assertEqual(len(mock_emailer.call_args[1]), 2)
self.assertTrue('attachments' in mock_emailer.call_args[1].keys())
self.assertEqual(mock_emailer.call_args[1]['msg_type'], 'html')
@@ -376,7 +367,7 @@ class ExperimentRunnerTest(unittest.TestCase):
(['john.smith@google.com'], ': image1 vs. image2',
"<pre style='font-size: 13px'>This is a fake text "
'report.\nResults are stored in _results.\n</pre>'))
- self.assertTrue(isinstance(mock_emailer.call_args[1], dict))
+ self.assertTrue(type(mock_emailer.call_args[1]) is dict)
self.assertEqual(len(mock_emailer.call_args[1]), 2)
self.assertTrue('attachments' in mock_emailer.call_args[1].keys())
self.assertEqual(mock_emailer.call_args[1]['msg_type'], 'html')
@@ -407,17 +398,14 @@ class ExperimentRunnerTest(unittest.TestCase):
@mock.patch.object(TextResultsReport, 'FromExperiment')
@mock.patch.object(Result, 'CopyResultsTo')
@mock.patch.object(Result, 'CleanUp')
- @mock.patch.object(Result, 'FormatStringTop5')
- @mock.patch('__builtin__.open', new_callable=mock.mock_open)
- def test_store_results(self, mock_open, mock_top5, mock_cleanup, mock_copy,
- _mock_text_report, mock_report, mock_writefile,
- mock_mkdir, mock_rmdir):
+ def test_store_results(self, mock_cleanup, mock_copy, _mock_text_report,
+ mock_report, mock_writefile, mock_mkdir, mock_rmdir):
self.mock_logger.Reset()
self.exp.results_directory = '/usr/local/crosperf-results'
bench_run = self.exp.benchmark_runs[5]
- bench_path = '/usr/local/crosperf-results/' + ''.join(
- ch for ch in bench_run.name if ch.isalnum())
+ bench_path = '/usr/local/crosperf-results/' + filter(
+ str.isalnum, bench_run.name)
self.assertEqual(len(self.exp.benchmark_runs), 6)
er = experiment_runner.ExperimentRunner(
@@ -437,8 +425,6 @@ class ExperimentRunnerTest(unittest.TestCase):
self.assertEqual(mock_mkdir.call_count, 0)
self.assertEqual(mock_rmdir.call_count, 0)
self.assertEqual(self.mock_logger.LogOutputCount, 0)
- self.assertEqual(mock_open.call_count, 0)
- self.assertEqual(mock_top5.call_count, 0)
# Test 2. _terminated is false; everything works properly.
fake_result = Result(self.mock_logger, self.exp.labels[0], 'average',
@@ -448,9 +434,9 @@ class ExperimentRunnerTest(unittest.TestCase):
er._terminated = False
er._StoreResults(self.exp)
self.assertEqual(mock_cleanup.call_count, 6)
- mock_cleanup.assert_called_with(bench_run.benchmark.rm_chroot_tmp)
+ mock_cleanup.called_with(bench_run.benchmark.rm_chroot_tmp)
self.assertEqual(mock_copy.call_count, 6)
- mock_copy.assert_called_with(bench_path)
+ mock_copy.called_with(bench_path)
self.assertEqual(mock_writefile.call_count, 3)
self.assertEqual(len(mock_writefile.call_args_list), 3)
first_args = mock_writefile.call_args_list[0]
@@ -460,27 +446,16 @@ class ExperimentRunnerTest(unittest.TestCase):
self.assertEqual(second_args[0][0],
'/usr/local/crosperf-results/results.html')
self.assertEqual(mock_mkdir.call_count, 1)
- mock_mkdir.assert_called_with('/usr/local/crosperf-results')
+ mock_mkdir.called_with('/usr/local/crosperf-results')
self.assertEqual(mock_rmdir.call_count, 1)
- mock_rmdir.assert_called_with('/usr/local/crosperf-results')
- self.assertEqual(self.mock_logger.LogOutputCount, 5)
+ mock_rmdir.called_with('/usr/local/crosperf-results')
+ self.assertEqual(self.mock_logger.LogOutputCount, 4)
self.assertEqual(self.mock_logger.output_msgs, [
'Storing experiment file in /usr/local/crosperf-results.',
'Storing results report in /usr/local/crosperf-results.',
'Storing email message body in /usr/local/crosperf-results.',
- 'Storing results of each benchmark run.',
- 'Storing top5 statistics of each benchmark run into'
- ' /usr/local/crosperf-results/topstats.log.',
+ 'Storing results of each benchmark run.'
])
- self.assertEqual(mock_open.call_count, 1)
- # Check write to a topstats.log file.
- mock_open.assert_called_with('/usr/local/crosperf-results/topstats.log',
- 'w')
- mock_open().write.assert_called()
-
- # Check top5 calls with no arguments.
- top5calls = [mock.call()] * 6
- self.assertEqual(mock_top5.call_args_list, top5calls)
if __name__ == '__main__':
diff --git a/crosperf/label.py b/crosperf/label.py
index bebc2706..d993c15c 100644
--- a/crosperf/label.py
+++ b/crosperf/label.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""The label of benchamrks."""
from __future__ import print_function
@@ -20,10 +18,8 @@ class Label(object):
def __init__(self,
name,
- build,
chromeos_image,
autotest_path,
- debug_path,
chromeos_root,
board,
remote,
@@ -32,7 +28,6 @@ class Label(object):
cache_only,
log_level,
compiler,
- skylab=False,
chrome_src=None):
self.image_type = self._GetImageType(chromeos_image)
@@ -43,10 +38,8 @@ class Label(object):
chromeos_image = os.path.expanduser(chromeos_image)
self.name = name
- self.build = build
self.chromeos_image = chromeos_image
self.autotest_path = autotest_path
- self.debug_path = debug_path
self.board = board
self.remote = remote
self.image_args = image_args
@@ -55,15 +48,14 @@ class Label(object):
self.log_level = log_level
self.chrome_version = ''
self.compiler = compiler
- self.skylab = skylab
if not chromeos_root:
if self.image_type == 'local':
chromeos_root = FileUtils().ChromeOSRootFromImage(chromeos_image)
if not chromeos_root:
- raise RuntimeError(
- "No ChromeOS root given for label '%s' and could "
- "not determine one from image path: '%s'." % (name, chromeos_image))
+ raise RuntimeError("No ChromeOS root given for label '%s' and could "
+ "not determine one from image path: '%s'." %
+ (name, chromeos_image))
else:
chromeos_root = FileUtils().CanonicalizeChromeOSRoot(chromeos_root)
if not chromeos_root:
@@ -126,10 +118,8 @@ class MockLabel(object):
def __init__(self,
name,
- build,
chromeos_image,
autotest_path,
- debug_path,
chromeos_root,
board,
remote,
@@ -138,13 +128,10 @@ class MockLabel(object):
cache_only,
log_level,
compiler,
- skylab=False,
chrome_src=None):
self.name = name
- self.build = build
self.chromeos_image = chromeos_image
self.autotest_path = autotest_path
- self.debug_path = debug_path
self.board = board
self.remote = remote
self.cache_dir = cache_dir
@@ -159,7 +146,6 @@ class MockLabel(object):
self.checksum = ''
self.log_level = log_level
self.compiler = compiler
- self.skylab = skylab
self.chrome_version = 'Fake Chrome Version 50'
def _GetImageType(self, chromeos_image):
diff --git a/crosperf/machine_manager.py b/crosperf/machine_manager.py
index ea3d105a..b9dda148 100644
--- a/crosperf/machine_manager.py
+++ b/crosperf/machine_manager.py
@@ -1,7 +1,6 @@
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Machine Manager module."""
from __future__ import print_function
@@ -185,10 +184,10 @@ class MachineManager(object):
This class contains methods and calls to lock, unlock and image
machines and distribute machines to each benchmark run. The assumption is
that all of the machines for the experiment have been globally locked
- in the ExperimentRunner, but the machines still need to be locally
- locked/unlocked (allocated to benchmark runs) to prevent multiple benchmark
- runs within the same experiment from trying to use the same machine at the
- same time.
+ (using an AFE server) in the ExperimentRunner, but the machines still need
+ to be locally locked/unlocked (allocated to benchmark runs) to prevent
+ multiple benchmark runs within the same experiment from trying to use the
+ same machine at the same time.
"""
def __init__(self,
@@ -255,8 +254,7 @@ class MachineManager(object):
image_chromeos.__file__, '--no_lock',
'--chromeos_root=%s' % chromeos_root,
'--image=%s' % label.chromeos_image,
- '--image_args=%s' % label.image_args,
- '--remote=%s' % machine.name,
+ '--image_args=%s' % label.image_args, '--remote=%s' % machine.name,
'--logging_level=%s' % self.log_level
]
if label.board:
@@ -403,10 +401,10 @@ class MachineManager(object):
self.acquire_timeout -= sleep_time
if self.acquire_timeout < 0:
- self.logger.LogFatal('Could not acquire any of the '
- "following machines: '%s'" % ', '.join(
- machine.name for machine in machines))
-
+ self.logger.LogFatal(
+ 'Could not acquire any of the '
+ "following machines: '%s'" % ', '.join(machine.name
+ for machine in machines))
### for m in self._machines:
### if (m.locked and time.time() - m.released_time < 10 and
diff --git a/crosperf/machine_manager_unittest.py b/crosperf/machine_manager_unittest.py
index 0f64a714..b267d698 100755
--- a/crosperf/machine_manager_unittest.py
+++ b/crosperf/machine_manager_unittest.py
@@ -1,10 +1,6 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright 2012 Google Inc. All Rights Reserved.
"""Unittest for machine_manager."""
from __future__ import print_function
@@ -52,14 +48,14 @@ class MyMachineManager(machine_manager.MachineManager):
CHROMEOS_ROOT = '/tmp/chromeos-root'
MACHINE_NAMES = ['lumpy1', 'lumpy2', 'lumpy3', 'daisy1', 'daisy2']
-LABEL_LUMPY = label.MockLabel('lumpy', 'build', 'lumpy_chromeos_image',
- 'autotest_dir', 'debug_dir', CHROMEOS_ROOT,
- 'lumpy', ['lumpy1', 'lumpy2', 'lumpy3', 'lumpy4'],
- '', '', False, 'average', 'gcc', False, None)
-LABEL_MIX = label.MockLabel('mix', 'build', 'chromeos_image', 'autotest_dir',
- 'debug_dir', CHROMEOS_ROOT, 'mix',
- ['daisy1', 'daisy2', 'lumpy3', 'lumpy4'], '', '',
- False, 'average', 'gcc', False, None)
+LABEL_LUMPY = label.MockLabel(
+ 'lumpy', 'lumpy_chromeos_image', 'autotest_dir', CHROMEOS_ROOT, 'lumpy',
+ ['lumpy1', 'lumpy2', 'lumpy3', 'lumpy4'], '', '', False, 'average,'
+ 'gcc', None)
+LABEL_MIX = label.MockLabel('mix', 'chromeos_image', 'autotest_dir',
+ CHROMEOS_ROOT, 'mix',
+ ['daisy1', 'daisy2', 'lumpy3',
+ 'lumpy4'], '', '', False, 'average', 'gcc', None)
class MachineManagerTest(unittest.TestCase):
@@ -251,6 +247,8 @@ class MachineManagerTest(unittest.TestCase):
@mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommandWOutput')
def test_try_to_lock_machine(self, mock_cros_runcmd):
+ self.assertRaises(self.mm._TryToLockMachine, None)
+
mock_cros_runcmd.return_value = [0, 'false_lock_checksum', '']
self.mock_cmd_exec.CrosRunCommandWOutput = mock_cros_runcmd
self.mm._machines = []
@@ -456,7 +454,7 @@ class MachineManagerTest(unittest.TestCase):
suite='telemetry_Crosperf') # suite
test_run = MockBenchmarkRun('test run', bench, LABEL_LUMPY, 1, [], self.mm,
- mock_logger, 'verbose', '', {}, False)
+ mock_logger, 'verbose', '')
self.mm._machines = [
self.mock_lumpy1, self.mock_lumpy2, self.mock_lumpy3, self.mock_daisy1,
@@ -486,9 +484,9 @@ class MachineManagerTest(unittest.TestCase):
def test_get_all_cpu_info(self):
info = self.mm.GetAllCPUInfo([LABEL_LUMPY, LABEL_MIX])
- self.assertEqual(
- info, 'lumpy\n-------------------\nlumpy_cpu_info\n\n\nmix\n-'
- '------------------\ndaisy_cpu_info\n\n\n')
+ self.assertEqual(info,
+ 'lumpy\n-------------------\nlumpy_cpu_info\n\n\nmix\n-'
+ '------------------\ndaisy_cpu_info\n\n\n')
MEMINFO_STRING = """MemTotal: 3990332 kB
diff --git a/crosperf/mock_instance.py b/crosperf/mock_instance.py
index 4271d8fd..758108fa 100644
--- a/crosperf/mock_instance.py
+++ b/crosperf/mock_instance.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""This contains some mock instances for testing."""
from __future__ import print_function
@@ -13,10 +11,8 @@ from label import MockLabel
perf_args = 'record -a -e cycles'
label1 = MockLabel(
'test1',
- 'build1',
'image1',
'autotest_dir',
- 'debug_dir',
'/tmp/test_benchmark_run',
'x86-alex',
'chromeos-alex1',
@@ -24,16 +20,12 @@ label1 = MockLabel(
cache_dir='',
cache_only=False,
log_level='average',
- compiler='gcc',
- skylab=False,
- chrome_src=None)
+ compiler='gcc')
label2 = MockLabel(
'test2',
- 'build2',
'image2',
'autotest_dir',
- 'debug_dir',
'/tmp/test_benchmark_run_2',
'x86-alex',
'chromeos-alex2',
@@ -41,9 +33,7 @@ label2 = MockLabel(
cache_dir='',
cache_only=False,
log_level='average',
- compiler='gcc',
- skylab=False,
- chrome_src=None)
+ compiler='gcc')
benchmark1 = Benchmark('benchmark1', 'autotest_name_1', 'autotest_args', 2, '',
perf_args, '', '')
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index 977e3e22..04e6590b 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -1,22 +1,17 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Module to deal with result cache."""
-from __future__ import division
from __future__ import print_function
-import collections
import glob
import hashlib
-import heapq
-import json
import os
import pickle
import re
import tempfile
+import json
from cros_utils import command_executer
from cros_utils import misc
@@ -55,9 +50,6 @@ class Result(object):
self.perf_data_files = []
self.perf_report_files = []
self.results_file = []
- self.turbostat_log_file = ''
- self.cpustats_log_file = ''
- self.top_log_file = ''
self.chrome_version = ''
self.err = None
self.chroot_results_dir = ''
@@ -65,35 +57,8 @@ class Result(object):
self.keyvals = None
self.board = None
self.suite = None
- self.cwp_dso = ''
self.retval = None
self.out = None
- self.top_cmds = []
-
- def GetTopCmds(self):
- """Get the list of top commands consuming CPU on the machine."""
- return self.top_cmds
-
- def FormatStringTop5(self):
- """Get formatted top5 string.
-
- Get the formatted string with top5 commands consuming CPU on DUT machine.
- """
- format_list = [
- 'Top 5 commands with highest CPU usage:',
- # Header.
- '%20s %9s %6s %s' % ('COMMAND', 'AVG CPU%', 'COUNT', 'HIGHEST 5'),
- '-' * 50,
- ]
- if self.top_cmds:
- for topcmd in self.top_cmds[:5]:
- print_line = '%20s %9.2f %6s %s' % (topcmd['cmd'], topcmd['cpu_avg'],
- topcmd['count'], topcmd['top5'])
- format_list.append(print_line)
- else:
- format_list.append('[NO DATA FROM THE TOP LOG]')
- format_list.append('-' * 50)
- return '\n'.join(format_list)
def CopyFilesTo(self, dest_dir, files_to_copy):
file_index = 0
@@ -182,7 +147,7 @@ class Result(object):
results_dict = {}
for k in keyvals_dict:
# We don't want these lines in our reports; they add no useful data.
- if not k or k == 'telemetry_Crosperf':
+ if k == '' or k == 'telemetry_Crosperf':
continue
val = keyvals_dict[k]
units = units_dict[k]
@@ -221,50 +186,6 @@ class Result(object):
keyvals_dict = self.AppendTelemetryUnits(keyvals_dict, units_dict)
return keyvals_dict
- def GetSamples(self):
- samples = 0
- for perf_data_file in self.perf_data_files:
- chroot_perf_data_file = misc.GetInsideChrootPath(self.chromeos_root,
- perf_data_file)
- perf_path = os.path.join(self.chromeos_root, 'chroot', 'usr/bin/perf')
- perf_file = '/usr/sbin/perf'
- if os.path.exists(perf_path):
- perf_file = '/usr/bin/perf'
-
- # For each perf.data, we want to collect sample count for specific DSO.
- # We specify exact match for known DSO type, and every sample for `all`.
- exact_match = ''
- if self.cwp_dso == 'all':
- exact_match = '""'
- elif self.cwp_dso == 'chrome':
- exact_match = '" chrome "'
- elif self.cwp_dso == 'kallsyms':
- exact_match = '"[kernel.kallsyms]"'
- else:
- # This will need to be updated once there are more DSO types supported,
- # if user want an exact match for the field they want.
- exact_match = '"%s"' % self.cwp_dso
-
- command = ('%s report -n -s dso -i %s 2> /dev/null | grep %s' %
- (perf_file, chroot_perf_data_file, exact_match))
- _, result, _ = self.ce.ChrootRunCommandWOutput(self.chromeos_root,
- command)
- # Accumulate the sample count for all matched fields.
- # Each line looks like this:
- # 45.42% 237210 chrome
- # And we want the second number which is the sample count.
- sample = 0
- try:
- for line in result.split('\n'):
- attr = line.split()
- if len(attr) == 3 and '%' in attr[0]:
- sample += int(attr[1])
- except:
- raise RuntimeError('Cannot parse perf dso result')
-
- samples += sample
- return [samples, u'samples']
-
def GetResultsDir(self):
mo = re.search(r'Results placed in (\S+)', self.out)
if mo:
@@ -274,7 +195,7 @@ class Result(object):
def FindFilesInResultsDir(self, find_args):
if not self.results_dir:
- return ''
+ return None
command = 'find %s %s' % (self.results_dir, find_args)
ret, out, _ = self.ce.RunCommandWOutput(command, print_to_console=False)
@@ -283,8 +204,6 @@ class Result(object):
return out
def GetResultsFile(self):
- if self.suite == 'telemetry_Crosperf':
- return self.FindFilesInResultsDir('-name histograms.json').splitlines()
return self.FindFilesInResultsDir('-name results-chart.json').splitlines()
def GetPerfDataFiles(self):
@@ -296,38 +215,10 @@ class Result(object):
def GetDataMeasurementsFiles(self):
result = self.FindFilesInResultsDir('-name perf_measurements').splitlines()
if not result:
- if self.suite == 'telemetry_Crosperf':
- result = \
- self.FindFilesInResultsDir('-name histograms.json').splitlines()
- else:
- result = \
- self.FindFilesInResultsDir('-name results-chart.json').splitlines()
+ result = \
+ self.FindFilesInResultsDir('-name results-chart.json').splitlines()
return result
- def GetTurbostatFile(self):
- """Get turbostat log path string."""
- return self.FindFilesInResultsDir('-name turbostat.log').split('\n')[0]
-
- def GetCpustatsFile(self):
- """Get cpustats log path string."""
- return self.FindFilesInResultsDir('-name cpustats.log').split('\n')[0]
-
- def GetTopFile(self):
- """Get cpustats log path string."""
- return self.FindFilesInResultsDir('-name top.log').split('\n')[0]
-
- def _CheckDebugPath(self, option, path):
- relative_path = path[1:]
- out_chroot_path = os.path.join(self.chromeos_root, 'chroot', relative_path)
- if os.path.exists(out_chroot_path):
- if option == 'kallsyms':
- path = os.path.join(path, 'System.map-*')
- return '--' + option + ' ' + path
- else:
- print('** WARNING **: --%s option not applied, %s does not exist' %
- (option, out_chroot_path))
- return ''
-
def GeneratePerfReportFiles(self):
perf_report_files = []
for perf_data_file in self.perf_data_files:
@@ -347,37 +238,15 @@ class Result(object):
if os.path.exists(perf_path):
perf_file = '/usr/bin/perf'
- debug_path = self.label.debug_path
-
- if debug_path:
- symfs = '--symfs ' + debug_path
- vmlinux = '--vmlinux ' + os.path.join(debug_path, 'boot', 'vmlinux')
- kallsyms = ''
- print('** WARNING **: --kallsyms option not applied, no System.map-* '
- 'for downloaded image.')
- else:
- if self.label.image_type != 'local':
- print('** WARNING **: Using local debug info in /build, this may '
- 'not match the downloaded image.')
- build_path = os.path.join('/build', self.board)
- symfs = self._CheckDebugPath('symfs', build_path)
- vmlinux_path = os.path.join(build_path, 'usr/lib/debug/boot/vmlinux')
- vmlinux = self._CheckDebugPath('vmlinux', vmlinux_path)
- kallsyms_path = os.path.join(build_path, 'boot')
- kallsyms = self._CheckDebugPath('kallsyms', kallsyms_path)
-
- command = ('%s report -n %s %s %s -i %s --stdio > %s' %
- (perf_file, symfs, vmlinux, kallsyms, chroot_perf_data_file,
- chroot_perf_report_file))
- if self.log_level != 'verbose':
- self._logger.LogOutput('Generating perf report...\nCMD: %s' % command)
- exit_code = self.ce.ChrootRunCommand(self.chromeos_root, command)
- if exit_code == 0:
- if self.log_level != 'verbose':
- self._logger.LogOutput('Perf report generated successfully.')
- else:
- raise RuntimeError(
- 'Perf report not generated correctly. CMD: %s' % command)
+ command = ('%s report '
+ '-n '
+ '--symfs /build/%s '
+ '--vmlinux /build/%s/usr/lib/debug/boot/vmlinux '
+ '--kallsyms /build/%s/boot/System.map-* '
+ '-i %s --stdio '
+ '> %s' % (perf_file, self.board, self.board, self.board,
+ chroot_perf_data_file, chroot_perf_report_file))
+ self.ce.ChrootRunCommand(self.chromeos_root, command)
# Add a keyval to the dictionary for the events captured.
perf_report_files.append(
@@ -397,14 +266,13 @@ class Result(object):
value = str(misc.UnitToNumber(num_events))
self.keyvals[key] = value
- def PopulateFromRun(self, out, err, retval, test, suite, cwp_dso):
+ def PopulateFromRun(self, out, err, retval, test, suite):
self.board = self.label.board
self.out = out
self.err = err
self.retval = retval
self.test_name = test
self.suite = suite
- self.cwp_dso = cwp_dso
self.chroot_results_dir = self.GetResultsDir()
self.results_dir = misc.GetOutsideChrootPath(self.chromeos_root,
self.chroot_results_dir)
@@ -412,30 +280,28 @@ class Result(object):
self.perf_data_files = self.GetPerfDataFiles()
# Include all perf.report data in table.
self.perf_report_files = self.GeneratePerfReportFiles()
- self.turbostat_log_file = self.GetTurbostatFile()
- self.cpustats_log_file = self.GetCpustatsFile()
- self.top_log_file = self.GetTopFile()
# TODO(asharif): Do something similar with perf stat.
# Grab keyvals from the directory.
self.ProcessResults()
- def ProcessChartResults(self):
+ def ProcessJsonResults(self):
# Open and parse the json results file generated by telemetry/test_that.
if not self.results_file:
raise IOError('No results file found.')
filename = self.results_file[0]
if not filename.endswith('.json'):
raise IOError('Attempt to call json on non-json file: %s' % filename)
+
if not os.path.exists(filename):
- raise IOError('%s does not exist' % filename)
+ return {}
keyvals = {}
with open(filename, 'r') as f:
raw_dict = json.load(f)
if 'charts' in raw_dict:
raw_dict = raw_dict['charts']
- for k, field_dict in raw_dict.items():
+ for k, field_dict in raw_dict.iteritems():
for item in field_dict:
keyname = k + '__' + item
value_dict = field_dict[item]
@@ -451,388 +317,28 @@ class Result(object):
result = sum(values) / float(len(values))
else:
result = values
- else:
- continue
units = value_dict['units']
new_value = [result, units]
keyvals[keyname] = new_value
return keyvals
- def ProcessTurbostatResults(self):
- """Given turbostat_log_file non-null parse cpu stats from file.
-
- Returns:
- Dictionary of 'cpufreq', 'cputemp' where each
- includes dictionary 'all': [list_of_values]
-
- Example of the output of turbostat_log.
- ----------------------
- CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
- - 329 12.13 2723 2393 10975 77
- 0 336 12.41 2715 2393 6328 77
- 2 323 11.86 2731 2393 4647 69
- CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
- - 1940 67.46 2884 2393 39920 83
- 0 1827 63.70 2877 2393 21184 83
- """
- cpustats = {}
- read_data = ''
- with open(self.turbostat_log_file) as f:
- read_data = f.readlines()
-
- if not read_data:
- self._logger.LogOutput('WARNING: Turbostat output file is empty.')
- return {}
-
- # First line always contains the header.
- stats = read_data[0].split()
-
- # Mandatory parameters.
- if 'CPU' not in stats:
- self._logger.LogOutput(
- 'WARNING: Missing data for CPU# in Turbostat output.')
- return {}
- if 'Bzy_MHz' not in stats:
- self._logger.LogOutput(
- 'WARNING: Missing data for Bzy_MHz in Turbostat output.')
- return {}
- cpu_index = stats.index('CPU')
- cpufreq_index = stats.index('Bzy_MHz')
- cpufreq = cpustats.setdefault('cpufreq', {'all': []})
-
- # Optional parameters.
- cputemp_index = -1
- if 'CoreTmp' in stats:
- cputemp_index = stats.index('CoreTmp')
- cputemp = cpustats.setdefault('cputemp', {'all': []})
-
- # Parse data starting from the second line ignoring repeating headers.
- for st in read_data[1:]:
- # Data represented by int or float separated by spaces.
- numbers = st.split()
- if not all(word.replace('.', '', 1).isdigit() for word in numbers[1:]):
- # Skip the line if data mismatch.
- continue
- if numbers[cpu_index] != '-':
- # Ignore Core-specific statistics which starts with Core number.
- # Combined statistics for all core has "-" CPU identifier.
- continue
-
- cpufreq['all'].append(int(numbers[cpufreq_index]))
- if cputemp_index != -1:
- cputemp['all'].append(int(numbers[cputemp_index]))
- return cpustats
-
- def ProcessTopResults(self):
- """Given self.top_log_file process top log data.
-
- Returns:
- List of dictionaries with the following keyvals:
- 'cmd': command name (string),
- 'cpu_avg': average cpu usage (float),
- 'count': number of occurrences (int),
- 'top5': up to 5 highest cpu usages (descending list of floats)
-
- Example of the top log:
- PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
- 4102 chronos 12 -8 3454472 238300 118188 R 41.8 6.1 0:08.37 chrome
- 375 root 0 -20 0 0 0 S 5.9 0.0 0:00.17 kworker
- 617 syslog 20 0 25332 8372 7888 S 5.9 0.2 0:00.77 systemd
-
- PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
- 5745 chronos 20 0 5438580 139328 67988 R 122.8 3.6 0:04.26 chrome
- 912 root -51 0 0 0 0 S 2.0 0.0 0:01.04 irq/cro
- 121 root 20 0 0 0 0 S 1.0 0.0 0:00.45 spi5
- """
- all_data = ''
- with open(self.top_log_file) as f:
- all_data = f.read()
-
- if not all_data:
- self._logger.LogOutput('WARNING: Top log file is empty.')
- return []
-
- top_line_regex = re.compile(
- r"""
- ^\s*(?P<pid>\d+)\s+ # Group 1: PID
- \S+\s+\S+\s+-?\d+\s+ # Ignore: user, prio, nice
- \d+\s+\d+\s+\d+\s+ # Ignore: virt/res/shared mem
- \S+\s+ # Ignore: state
- (?P<cpu_use>\d+\.\d+)\s+ # Group 2: CPU usage
- \d+\.\d+\s+\d+:\d+\.\d+\s+ # Ignore: mem usage, time
- (?P<cmd>\S+)$ # Group 3: command
- """, re.VERBOSE)
- # Page represents top log data per one measurement within time interval
- # 'top_interval'.
- # Pages separated by empty line.
- pages = all_data.split('\n\n')
- # Snapshots are structured representation of the pages.
- snapshots = []
- for page in pages:
- if not page:
- continue
-
- # Snapshot list will contain all processes (command duplicates are
- # allowed).
- snapshot = []
- for line in page.splitlines():
- match = top_line_regex.match(line)
- if match:
- # Top line is valid, collect data.
- process = {
- # NOTE: One command may be represented by multiple processes.
- 'cmd': match.group('cmd'),
- 'pid': int(match.group('pid')),
- 'cpu_use': float(match.group('cpu_use')),
- }
-
- # Filter out processes with 0 CPU usage and top command.
- if process['cpu_use'] > 0 and process['cmd'] != 'top':
- snapshot.append(process)
-
- # If page contained meaningful data add snapshot to the list.
- if snapshot:
- snapshots.append(snapshot)
-
- # Define threshold of CPU usage when Chrome is busy, i.e. benchmark is
- # running.
- # Ideally it should be 100% but it will be hardly reachable with 1 core.
- # Statistics on DUT with 2-6 cores shows that chrome load of 100%, 95% and
- # 90% equally occurs in 72-74% of all top log snapshots.
- # Further decreasing of load threshold leads to a shifting percent of
- # "high load" snapshots which might include snapshots when benchmark is
- # not running.
- # On 1-core DUT 90% chrome cpu load occurs in 55%, 95% in 33% and 100% in 2%
- # of snapshots accordingly.
- CHROME_HIGH_CPU_LOAD = 90
- # Number of snapshots where chrome is heavily used.
- high_load_snapshots = 0
- # Total CPU use per process in ALL active snapshots.
- cmd_total_cpu_use = collections.defaultdict(float)
- # Top CPU usages per command.
- cmd_top5_cpu_use = collections.defaultdict(list)
- # List of Top Commands to be returned.
- topcmds = []
-
- for snapshot_processes in snapshots:
- # CPU usage per command in one snapshot.
- cmd_cpu_use_per_snapshot = collections.defaultdict(float)
- for process in snapshot_processes:
- cmd = process['cmd']
- cpu_use = process['cpu_use']
-
- # Collect CPU usage per command.
- cmd_cpu_use_per_snapshot[cmd] += cpu_use
-
- if cmd_cpu_use_per_snapshot.setdefault('chrome',
- 0.0) > CHROME_HIGH_CPU_LOAD:
- # Combined CPU usage of "chrome" command exceeds "High load" threshold
- # which means DUT is busy running a benchmark.
- high_load_snapshots += 1
- for cmd, cpu_use in cmd_cpu_use_per_snapshot.items():
- # Update total CPU usage.
- cmd_total_cpu_use[cmd] += cpu_use
-
- # Add cpu_use into command top cpu usages, sorted in descending order.
- heapq.heappush(cmd_top5_cpu_use[cmd], round(cpu_use, 1))
-
- for consumer, usage in sorted(
- cmd_total_cpu_use.items(), key=lambda x: x[1], reverse=True):
- # Iterate through commands by descending order of total CPU usage.
- topcmd = {
- 'cmd': consumer,
- 'cpu_avg': usage / high_load_snapshots,
- 'count': len(cmd_top5_cpu_use[consumer]),
- 'top5': heapq.nlargest(5, cmd_top5_cpu_use[consumer]),
- }
- topcmds.append(topcmd)
-
- return topcmds
-
- def ProcessCpustatsResults(self):
- """Given cpustats_log_file non-null parse cpu data from file.
-
- Returns:
- Dictionary of 'cpufreq', 'cputemp' where each
- includes dictionary of parameter: [list_of_values]
-
- Example of cpustats.log output.
- ----------------------
- /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000
- /sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 2016000
- little-cpu 41234
- big-cpu 51234
-
- If cores share the same policy their frequencies may always match
- on some devices.
- To make report concise we should eliminate redundancy in the output.
- Function removes cpuN data if it duplicates data from other cores.
- """
-
- cpustats = {}
- read_data = ''
- with open(self.cpustats_log_file) as f:
- read_data = f.readlines()
-
- if not read_data:
- self._logger.LogOutput('WARNING: Cpustats output file is empty.')
- return {}
-
- cpufreq_regex = re.compile(r'^[/\S]+/(cpu\d+)/[/\S]+\s+(\d+)$')
- cputemp_regex = re.compile(r'^([^/\s]+)\s+(\d+)$')
-
- for st in read_data:
- match = cpufreq_regex.match(st)
- if match:
- cpu = match.group(1)
- # CPU frequency comes in kHz.
- freq_khz = int(match.group(2))
- freq_mhz = freq_khz / 1000
- # cpufreq represents a dictionary with CPU frequency-related
- # data from cpustats.log.
- cpufreq = cpustats.setdefault('cpufreq', {})
- cpu_n_freq = cpufreq.setdefault(cpu, [])
- cpu_n_freq.append(freq_mhz)
- else:
- match = cputemp_regex.match(st)
- if match:
- therm_type = match.group(1)
- # The value is int, uCelsius unit.
- temp_uc = float(match.group(2))
- # Round to XX.X float.
- temp_c = round(temp_uc / 1000, 1)
- # cputemp represents a dictionary with temperature measurements
- # from cpustats.log.
- cputemp = cpustats.setdefault('cputemp', {})
- therm_type = cputemp.setdefault(therm_type, [])
- therm_type.append(temp_c)
-
- # Remove duplicate statistics from cpustats.
- pruned_stats = {}
- for cpukey, cpuparam in cpustats.items():
- # Copy 'cpufreq' and 'cputemp'.
- pruned_params = pruned_stats.setdefault(cpukey, {})
- for paramkey, paramvalue in sorted(cpuparam.items()):
- # paramvalue is list of all measured data.
- if paramvalue not in pruned_params.values():
- pruned_params[paramkey] = paramvalue
-
- return pruned_stats
-
- def ProcessHistogramsResults(self):
- # Open and parse the json results file generated by telemetry/test_that.
- if not self.results_file:
- raise IOError('No results file found.')
- filename = self.results_file[0]
- if not filename.endswith('.json'):
- raise IOError('Attempt to call json on non-json file: %s' % filename)
- if not os.path.exists(filename):
- raise IOError('%s does not exist' % filename)
-
- keyvals = {}
- with open(filename) as f:
- histograms = json.load(f)
- value_map = {}
- # Gets generic set values.
- for obj in histograms:
- if 'type' in obj and obj['type'] == 'GenericSet':
- value_map[obj['guid']] = obj['values']
-
- for obj in histograms:
- if 'name' not in obj or 'sampleValues' not in obj:
- continue
- metric_name = obj['name']
- vals = obj['sampleValues']
- if isinstance(vals, list):
- # Remove None elements from the list
- vals = [val for val in vals if val is not None]
- if vals:
- result = float(sum(vals)) / len(vals)
- else:
- result = 0
- else:
- result = vals
- unit = obj['unit']
- diagnostics = obj['diagnostics']
- # for summaries of benchmarks
- key = metric_name
- if key not in keyvals:
- keyvals[key] = [[result], unit]
- else:
- keyvals[key][0].append(result)
- # TODO: do we need summaries of stories?
- # for summaries of story tags
- if 'storyTags' in diagnostics:
- guid = diagnostics['storyTags']
- if guid not in value_map:
- raise RuntimeError('Unrecognized storyTags in %s ' % (obj))
- for story_tag in value_map[guid]:
- key = metric_name + '__' + story_tag
- if key not in keyvals:
- keyvals[key] = [[result], unit]
- else:
- keyvals[key][0].append(result)
- # calculate summary
- for key in keyvals:
- vals = keyvals[key][0]
- unit = keyvals[key][1]
- result = float(sum(vals)) / len(vals)
- keyvals[key] = [result, unit]
- return keyvals
-
def ProcessResults(self, use_cache=False):
# Note that this function doesn't know anything about whether there is a
# cache hit or miss. It should process results agnostic of the cache hit
# state.
- if (self.results_file and self.suite == 'telemetry_Crosperf' and
- 'histograms.json' in self.results_file[0]):
- self.keyvals = self.ProcessHistogramsResults()
- elif (self.results_file and self.suite != 'telemetry_Crosperf' and
- 'results-chart.json' in self.results_file[0]):
- self.keyvals = self.ProcessChartResults()
+ if self.results_file and self.results_file[0].find(
+ 'results-chart.json') != -1:
+ self.keyvals = self.ProcessJsonResults()
else:
if not use_cache:
print('\n ** WARNING **: Had to use deprecated output-method to '
'collect results.\n')
self.keyvals = self.GetKeyvals()
self.keyvals['retval'] = self.retval
- # If we are in CWP approximation mode, we want to collect DSO samples
- # for each perf.data file
- if self.cwp_dso and self.retval == 0:
- self.keyvals['samples'] = self.GetSamples()
- # If the samples count collected from perf file is 0, we will treat
- # it as a failed run.
- if self.keyvals['samples'][0] == 0:
- del self.keyvals['samples']
- self.keyvals['retval'] = 1
# Generate report from all perf.data files.
# Now parse all perf report files and include them in keyvals.
self.GatherPerfResults()
- cpustats = {}
- # Turbostat output has higher priority of processing.
- if self.turbostat_log_file:
- cpustats = self.ProcessTurbostatResults()
- # Process cpustats output only if turbostat has no data.
- if not cpustats and self.cpustats_log_file:
- cpustats = self.ProcessCpustatsResults()
- if self.top_log_file:
- self.top_cmds = self.ProcessTopResults()
-
- for param_key, param in cpustats.items():
- for param_type, param_values in param.items():
- val_avg = sum(param_values) / len(param_values)
- val_min = min(param_values)
- val_max = max(param_values)
- # Average data is always included.
- self.keyvals['_'.join([param_key, param_type, 'avg'])] = val_avg
- # Insert min/max results only if they deviate
- # from average.
- if val_min != val_avg:
- self.keyvals['_'.join([param_key, param_type, 'min'])] = val_min
- if val_max != val_avg:
- self.keyvals['_'.join([param_key, param_type, 'max'])] = val_max
-
def GetChromeVersionFromCache(self, cache_dir):
# Read chrome_version from keys file, if present.
chrome_version = ''
@@ -848,10 +354,9 @@ class Result(object):
break
return chrome_version
- def PopulateFromCacheDir(self, cache_dir, test, suite, cwp_dso):
+ def PopulateFromCacheDir(self, cache_dir, test, suite):
self.test_name = test
self.suite = suite
- self.cwp_dso = cwp_dso
# Read in everything from the cache directory.
with open(os.path.join(cache_dir, RESULTS_FILE), 'r') as f:
self.out = pickle.load(f)
@@ -859,8 +364,8 @@ class Result(object):
self.retval = pickle.load(f)
# Untar the tarball to a temporary directory
- self.temp_dir = tempfile.mkdtemp(
- dir=os.path.join(self.chromeos_root, 'chroot', 'tmp'))
+ self.temp_dir = tempfile.mkdtemp(dir=os.path.join(self.chromeos_root,
+ 'chroot', 'tmp'))
command = ('cd %s && tar xf %s' %
(self.temp_dir, os.path.join(cache_dir, AUTOTEST_TARBALL)))
@@ -932,8 +437,8 @@ class Result(object):
if ret:
command = 'rm -rf {0}'.format(temp_dir)
self.ce.RunCommand(command)
- raise RuntimeError(
- 'Could not move dir %s to dir %s' % (temp_dir, cache_dir))
+ raise RuntimeError('Could not move dir %s to dir %s' % (temp_dir,
+ cache_dir))
@classmethod
def CreateFromRun(cls,
@@ -945,13 +450,12 @@ class Result(object):
err,
retval,
test,
- suite='telemetry_Crosperf',
- cwp_dso=''):
+ suite='telemetry_Crosperf'):
if suite == 'telemetry':
result = TelemetryResult(logger, label, log_level, machine)
else:
result = cls(logger, label, log_level, machine)
- result.PopulateFromRun(out, err, retval, test, suite, cwp_dso)
+ result.PopulateFromRun(out, err, retval, test, suite)
return result
@classmethod
@@ -962,14 +466,13 @@ class Result(object):
machine,
cache_dir,
test,
- suite='telemetry_Crosperf',
- cwp_dso=''):
+ suite='telemetry_Crosperf'):
if suite == 'telemetry':
result = TelemetryResult(logger, label, log_level, machine)
else:
result = cls(logger, label, log_level, machine)
try:
- result.PopulateFromCacheDir(cache_dir, test, suite, cwp_dso)
+ result.PopulateFromCacheDir(cache_dir, test, suite)
except RuntimeError as e:
logger.LogError('Exception while using cache: %s' % e)
@@ -984,7 +487,7 @@ class TelemetryResult(Result):
super(TelemetryResult, self).__init__(logger, label, log_level, machine,
cmd_exec)
- def PopulateFromRun(self, out, err, retval, test, suite, cwp_dso):
+ def PopulateFromRun(self, out, err, retval, test, suite):
self.out = out
self.err = err
self.retval = retval
@@ -1018,16 +521,15 @@ class TelemetryResult(Result):
fields = line.split(',')
if len(fields) != len(labels):
continue
- for i in range(1, len(labels)):
+ for i in xrange(1, len(labels)):
key = '%s %s' % (fields[0], labels[i])
value = fields[i]
self.keyvals[key] = value
self.keyvals['retval'] = self.retval
- def PopulateFromCacheDir(self, cache_dir, test, suite, cwp_dso):
+ def PopulateFromCacheDir(self, cache_dir, test, suite):
self.test_name = test
self.suite = suite
- self.cwp_dso = cwp_dso
with open(os.path.join(cache_dir, RESULTS_FILE), 'r') as f:
self.out = pickle.load(f)
self.err = pickle.load(f)
@@ -1094,12 +596,11 @@ class ResultsCache(object):
self.log_level = None
self.show_all = None
self.run_local = None
- self.cwp_dso = None
def Init(self, chromeos_image, chromeos_root, test_name, iteration, test_args,
profiler_args, machine_manager, machine, board, cache_conditions,
logger_to_use, log_level, label, share_cache, suite,
- show_all_results, run_local, cwp_dso):
+ show_all_results, run_local):
self.chromeos_image = chromeos_image
self.chromeos_root = chromeos_root
self.test_name = test_name
@@ -1119,7 +620,6 @@ class ResultsCache(object):
self.log_level = log_level
self.show_all = show_all_results
self.run_local = run_local
- self.cwp_dso = cwp_dso
def GetCacheDirForRead(self):
matching_dirs = []
@@ -1154,7 +654,7 @@ class ResultsCache(object):
else:
cache_path = [os.path.join(SCRATCH_DIR, cache_dir)]
- if self.share_cache:
+ if len(self.share_cache):
for path in [x.strip() for x in self.share_cache.split(',')]:
if os.path.exists(path):
cache_path.append(os.path.join(path, cache_dir))
@@ -1219,7 +719,7 @@ class ResultsCache(object):
self._logger.LogOutput('Trying to read from cache dir: %s' % cache_dir)
result = Result.CreateFromCacheHit(self._logger, self.log_level, self.label,
self.machine, cache_dir, self.test_name,
- self.suite, self.cwp_dso)
+ self.suite)
if not result:
return None
@@ -1250,7 +750,7 @@ class MockResultsCache(ResultsCache):
class MockResult(Result):
"""Class for mock testing, corresponding to Result class."""
- def PopulateFromRun(self, out, err, retval, test, suite, cwp_dso):
+ def PopulateFromRun(self, out, err, retval, test, suite):
self.out = out
self.err = err
self.retval = retval
diff --git a/crosperf/results_cache_unittest.py b/crosperf/results_cache_unittest.py
index 7ce04221..a2480d21 100755
--- a/crosperf/results_cache_unittest.py
+++ b/crosperf/results_cache_unittest.py
@@ -1,21 +1,17 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Module of result cache unittest."""
from __future__ import print_function
+import mock
import os
-import shutil
import tempfile
import unittest
-import mock
-
import image_checksummer
import machine_manager
import test_flag
@@ -29,7 +25,6 @@ from cros_utils import command_executer
from cros_utils import logger
from cros_utils import misc
-# pylint: disable=line-too-long
OUTPUT = """CMD (True): ./test_that.sh\
--remote=172.17.128.241 --board=lumpy LibCBench
CMD (None): cd /usr/local/google/home/yunlian/gd/src/build/images/lumpy/latest/../../../../..; cros_sdk -- ./in_chroot_cmd6X7Cxu.sh
@@ -159,246 +154,8 @@ keyvals = {
'b_string_strstr___abcdefghijklmnopqrstuvwxyz__': '0.0134553343333'
}
-TURBOSTAT_LOG_OUTPUT = \
-"""CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 329 12.13 2723 2393 10975 77
-0 336 12.41 2715 2393 6328 77
-2 323 11.86 2731 2393 4647 69
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 1940 67.46 2884 2393 39920 83
-0 1827 63.70 2877 2393 21184 83
-2 2053 71.22 2891 2393 18736 67
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 1927 66.02 2927 2393 48946 84
-0 1880 64.47 2925 2393 24457 84
-2 1973 67.57 2928 2393 24489 69
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 1899 64.84 2937 2393 42540 72
-0 2135 72.82 2940 2393 23615 65
-2 1663 56.85 2934 2393 18925 72
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 1908 65.24 2932 2393 43172 75
-0 1876 64.25 2928 2393 20743 75
-2 1939 66.24 2936 2393 22429 69
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 1553 53.12 2933 2393 35488 46
-0 1484 50.80 2929 2393 18246 46
-2 1623 55.44 2936 2393 17242 45
-CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
-- 843 29.83 2832 2393 28161 47
-0 827 29.35 2826 2393 16093 47
-2 858 30.31 2838 2393 12068 46
-"""
-TURBOSTAT_DATA = {
- 'cpufreq': {
- 'all': [2723, 2884, 2927, 2937, 2932, 2933, 2832]
- },
- 'cputemp': {
- 'all': [77, 83, 84, 72, 75, 46, 47]
- },
-}
-
-TOP_LOG = \
-"""
- PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
- 4102 chronos 12 -8 3454472 238300 118188 R 41.8 6.1 0:08.37 chrome
- 4204 chronos 12 -8 2492716 205728 179016 S 11.8 5.3 0:03.89 chrome
- 4890 root 20 0 3396 2064 1596 R 11.8 0.1 0:00.03 top
- 375 root 0 -20 0 0 0 S 5.9 0.0 0:00.17 kworker/u13
- 617 syslog 20 0 25332 8372 7888 S 5.9 0.2 0:00.77 sys-journal
-
- PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
- 5745 chronos 20 0 5438580 139328 67988 R 122.8 3.6 0:04.26 chrome
- 912 root -51 0 0 0 0 S 2.0 0.0 0:01.04 irq/cros-ec
- 121 root 20 0 0 0 0 S 1.0 0.0 0:00.45 spi5
- 4811 root 20 0 6808 4084 3492 S 1.0 0.1 0:00.02 sshd
- 4890 root 20 0 3364 2148 1596 R 1.0 0.1 0:00.36 top
- 5205 chronos 12 -8 3673780 240928 130864 S 1.0 6.2 0:07.30 chrome
-
-
- PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
- 5745 chronos 20 0 5434484 139432 63892 R 107.9 3.6 0:05.35 chrome
- 5713 chronos 20 0 5178652 103120 50372 S 17.8 2.6 0:01.13 chrome
- 7 root 20 0 0 0 0 S 1.0 0.0 0:00.73 rcu_preempt
- 855 root 20 0 0 0 0 S 1.0 0.0 0:00.01 kworker/4:2
-"""
-TOP_DATA = [
- {
- 'cmd': 'chrome',
- 'cpu_avg': 124.75,
- 'count': 2,
- 'top5': [125.7, 123.8],
- },
- {
- 'cmd': 'irq/cros-ec',
- 'cpu_avg': 1.0,
- 'count': 1,
- 'top5': [2.0],
- },
- {
- 'cmd': 'sshd',
- 'cpu_avg': 0.5,
- 'count': 1,
- 'top5': [1.0],
- },
- {
- 'cmd': 'spi5',
- 'cpu_avg': 0.5,
- 'count': 1,
- 'top5': [1.0],
- },
- {
- 'cmd': 'rcu_preempt',
- 'cpu_avg': 0.5,
- 'count': 1,
- 'top5': [1.0],
- },
- {
- 'cmd': 'kworker/4:2',
- 'cpu_avg': 0.5,
- 'count': 1,
- 'top5': [1.0],
- },
-]
-TOP_OUTPUT = \
-""" COMMAND AVG CPU% SEEN HIGHEST 5
- chrome 128.250000 6 [122.8, 107.9, 17.8, 5.0, 2.0]
- irq/230-cros-ec 1.000000 1 [2.0]
- sshd 0.500000 1 [1.0]
- irq/231-cros-ec 0.500000 1 [1.0]
- spi5 0.500000 1 [1.0]
- rcu_preempt 0.500000 1 [1.0]
- kworker/4:2 0.500000 1 [1.0]
-"""
-
-CPUSTATS_UNIQ_OUTPUT = \
-"""
-/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000
-/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000
-/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000
-soc-thermal 44444
-little-cpu 41234
-big-cpu 51234
-/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1500000
-/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1600000
-/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2012000
-soc-thermal 45456
-little-cpu 42555
-big-cpu 61724
-"""
-CPUSTATS_UNIQ_DATA = {
- 'cpufreq': {
- 'cpu0': [1512, 1500],
- 'cpu1': [1512, 1600],
- 'cpu3': [2016, 2012]
- },
- 'cputemp': {
- 'soc-thermal': [44.4, 45.5],
- 'little-cpu': [41.2, 42.6],
- 'big-cpu': [51.2, 61.7]
- }
-}
-CPUSTATS_DUPL_OUTPUT = \
-"""
-/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000
-/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000
-/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1512000
-/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000
-/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1500000
-/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1500000
-/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1500000
-/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000
-/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1614000
-/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1614000
-/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1614000
-/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 1982000
-"""
-CPUSTATS_DUPL_DATA = {
- 'cpufreq': {
- 'cpu0': [1512, 1500, 1614],
- 'cpu3': [2016, 2016, 1982]
- },
-}
-
TMP_DIR1 = '/tmp/tmpAbcXyz'
-HISTOGRAMSET = \
-"""
-[
- {
- "values": [
- "cache_temperature_cold",
- "typical",
- "cache_temperature:cold"
- ],
- "guid": "db6d463b-7c07-4873-b839-db0652ccb97e",
- "type": "GenericSet"
- },
- {
- "values": [
- "cache_temperature_warm",
- "typical",
- "cache_temperature:warm"
- ],
- "guid": "a270eb9d-3bb0-472a-951d-74ac3398b718",
- "type": "GenericSet"
- },
- {
- "sampleValues": [
- 1111.672
- ],
- "name": "timeToFirstContentfulPaint",
- "diagnostics": {
- "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718"
- },
- "unit": "ms_smallerIsBetter"
- },
- {
- "sampleValues": [
- 1146.459
- ],
- "name": "timeToFirstContentfulPaint",
- "diagnostics": {
- "storyTags": "db6d463b-7c07-4873-b839-db0652ccb97e"
- },
- "unit": "ms_smallerIsBetter"
- },
- {
- "sampleValues": [
- 888.328
- ],
- "name": "timeToFirstContentfulPaint",
- "diagnostics": {
- "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718"
- },
- "unit": "ms_smallerIsBetter"
- },
- {
- "sampleValues": [
- 853.541
- ],
- "name": "timeToFirstContentfulPaint",
- "diagnostics": {
- "storyTags": "db6d463b-7c07-4873-b839-db0652ccb97e"
- },
- "unit": "ms_smallerIsBetter"
- },
- {
- "sampleValues": [
- 400.000
- ],
- "name": "timeToFirstContentfulPaint",
- "diagnostics": {
- "storyTags": "a270eb9d-3bb0-472a-951d-74ac3398b718"
- },
- "unit": "ms_smallerIsBetter"
- }
-
-]
-"""
-
-# pylint: enable=line-too-long
-
class MockResult(Result):
"""Mock result class."""
@@ -431,22 +188,18 @@ class ResultTest(unittest.TestCase):
self.callGetNewKeyvals = False
self.callGetResultsFile = False
self.callGetPerfDataFiles = False
- self.callGetTurbostatFile = False
- self.callGetCpustatsFile = False
- self.callGetTopFile = False
self.args = None
self.callGatherPerfResults = False
self.mock_logger = mock.Mock(spec=logger.Logger)
self.mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
- self.mock_label = MockLabel('mock_label', 'build', 'chromeos_image',
- 'autotest_dir', 'debug_dir', '/tmp', 'lumpy',
- 'remote', 'image_args', 'cache_dir', 'average',
- 'gcc', False, None)
+ self.mock_label = MockLabel('mock_label', 'chromeos_image', 'autotest_dir',
+ '/tmp', 'lumpy', 'remote', 'image_args',
+ 'cache_dir', 'average', 'gcc', None)
def testCreateFromRun(self):
result = MockResult.CreateFromRun(logger.GetLogger(), 'average',
self.mock_label, 'remote1', OUTPUT, error,
- 0, True)
+ 0, True, 0)
self.assertEqual(result.keyvals, keyvals)
self.assertEqual(result.chroot_results_dir,
'/tmp/test_that.PO1234567/platform_LibCBench')
@@ -470,7 +223,7 @@ class ResultTest(unittest.TestCase):
mock_copyfiles.return_value = 0
- # test 1. dest_dir exists; CopyFiles returns 0.
+ #test 1. dest_dir exists; CopyFiles returns 0.
mock_isdir.return_value = True
self.result.CopyFilesTo(dest_dir, files)
self.assertEqual(mock_runcmd.call_count, 0)
@@ -484,7 +237,7 @@ class ResultTest(unittest.TestCase):
mock_runcmd.reset_mock()
mock_copyfiles.reset_mock()
- # test 2. dest_dir does not exist; CopyFiles returns 0.
+ #test 2. dest_dir does not exist; CopyFiles returns 0.
mock_isdir.return_value = False
self.result.CopyFilesTo(dest_dir, files)
self.assertEqual(mock_runcmd.call_count, 3)
@@ -495,7 +248,7 @@ class ResultTest(unittest.TestCase):
mock_runcmd.call_args_list[2])
self.assertEqual(mock_runcmd.call_args_list[0][0], ('mkdir -p /tmp/test',))
- # test 3. CopyFiles returns 1 (fails).
+ #test 3. CopyFiles returns 1 (fails).
mock_copyfiles.return_value = 1
self.assertRaises(Exception, self.result.CopyFilesTo, dest_dir, files)
@@ -515,10 +268,10 @@ class ResultTest(unittest.TestCase):
self.result.CopyResultsTo('/tmp/results/')
self.assertEqual(mockCopyFilesTo.call_count, 2)
self.assertEqual(len(mockCopyFilesTo.call_args_list), 2)
- self.assertEqual(mockCopyFilesTo.call_args_list[0][0],
- ('/tmp/results/', perf_data_files))
- self.assertEqual(mockCopyFilesTo.call_args_list[1][0],
- ('/tmp/results/', perf_report_files))
+ self.assertEqual(mockCopyFilesTo.call_args_list[0][0], ('/tmp/results/',
+ perf_data_files))
+ self.assertEqual(mockCopyFilesTo.call_args_list[1][0], ('/tmp/results/',
+ perf_report_files))
def test_get_new_keyvals(self):
kv_dict = {}
@@ -529,52 +282,50 @@ class ResultTest(unittest.TestCase):
self.result.GetDataMeasurementsFiles = FakeGetDataMeasurementsFiles
kv_dict2, udict = self.result.GetNewKeyvals(kv_dict)
- self.assertEqual(
- kv_dict2, {
- u'Box2D__Box2D': 4775,
- u'Mandreel__Mandreel': 6620,
- u'Gameboy__Gameboy': 9901,
- u'Crypto__Crypto': 8737,
- u'telemetry_page_measurement_results__num_errored': 0,
- u'telemetry_page_measurement_results__num_failed': 0,
- u'PdfJS__PdfJS': 6455,
- u'Total__Score': 7918,
- u'EarleyBoyer__EarleyBoyer': 14340,
- u'MandreelLatency__MandreelLatency': 5188,
- u'CodeLoad__CodeLoad': 6271,
- u'DeltaBlue__DeltaBlue': 14401,
- u'Typescript__Typescript': 9815,
- u'SplayLatency__SplayLatency': 7653,
- u'zlib__zlib': 16094,
- u'Richards__Richards': 10358,
- u'RegExp__RegExp': 1765,
- u'NavierStokes__NavierStokes': 9815,
- u'Splay__Splay': 4425,
- u'RayTrace__RayTrace': 16600
- })
- self.assertEqual(
- udict, {
- u'Box2D__Box2D': u'score',
- u'Mandreel__Mandreel': u'score',
- u'Gameboy__Gameboy': u'score',
- u'Crypto__Crypto': u'score',
- u'telemetry_page_measurement_results__num_errored': u'count',
- u'telemetry_page_measurement_results__num_failed': u'count',
- u'PdfJS__PdfJS': u'score',
- u'Total__Score': u'score',
- u'EarleyBoyer__EarleyBoyer': u'score',
- u'MandreelLatency__MandreelLatency': u'score',
- u'CodeLoad__CodeLoad': u'score',
- u'DeltaBlue__DeltaBlue': u'score',
- u'Typescript__Typescript': u'score',
- u'SplayLatency__SplayLatency': u'score',
- u'zlib__zlib': u'score',
- u'Richards__Richards': u'score',
- u'RegExp__RegExp': u'score',
- u'NavierStokes__NavierStokes': u'score',
- u'Splay__Splay': u'score',
- u'RayTrace__RayTrace': u'score'
- })
+ self.assertEqual(kv_dict2, {
+ u'Box2D__Box2D': 4775,
+ u'Mandreel__Mandreel': 6620,
+ u'Gameboy__Gameboy': 9901,
+ u'Crypto__Crypto': 8737,
+ u'telemetry_page_measurement_results__num_errored': 0,
+ u'telemetry_page_measurement_results__num_failed': 0,
+ u'PdfJS__PdfJS': 6455,
+ u'Total__Score': 7918,
+ u'EarleyBoyer__EarleyBoyer': 14340,
+ u'MandreelLatency__MandreelLatency': 5188,
+ u'CodeLoad__CodeLoad': 6271,
+ u'DeltaBlue__DeltaBlue': 14401,
+ u'Typescript__Typescript': 9815,
+ u'SplayLatency__SplayLatency': 7653,
+ u'zlib__zlib': 16094,
+ u'Richards__Richards': 10358,
+ u'RegExp__RegExp': 1765,
+ u'NavierStokes__NavierStokes': 9815,
+ u'Splay__Splay': 4425,
+ u'RayTrace__RayTrace': 16600
+ })
+ self.assertEqual(udict, {
+ u'Box2D__Box2D': u'score',
+ u'Mandreel__Mandreel': u'score',
+ u'Gameboy__Gameboy': u'score',
+ u'Crypto__Crypto': u'score',
+ u'telemetry_page_measurement_results__num_errored': u'count',
+ u'telemetry_page_measurement_results__num_failed': u'count',
+ u'PdfJS__PdfJS': u'score',
+ u'Total__Score': u'score',
+ u'EarleyBoyer__EarleyBoyer': u'score',
+ u'MandreelLatency__MandreelLatency': u'score',
+ u'CodeLoad__CodeLoad': u'score',
+ u'DeltaBlue__DeltaBlue': u'score',
+ u'Typescript__Typescript': u'score',
+ u'SplayLatency__SplayLatency': u'score',
+ u'zlib__zlib': u'score',
+ u'Richards__Richards': u'score',
+ u'RegExp__RegExp': u'score',
+ u'NavierStokes__NavierStokes': u'score',
+ u'Splay__Splay': u'score',
+ u'RayTrace__RayTrace': u'score'
+ })
def test_append_telemetry_units(self):
kv_dict = {
@@ -619,27 +370,26 @@ class ResultTest(unittest.TestCase):
}
results_dict = self.result.AppendTelemetryUnits(kv_dict, units_dict)
- self.assertEqual(
- results_dict, {
- u'Box2D__Box2D': [4775, u'score'],
- u'Splay__Splay': [4425, u'score'],
- u'Gameboy__Gameboy': [9901, u'score'],
- u'Crypto__Crypto': [8737, u'score'],
- u'PdfJS__PdfJS': [6455, u'score'],
- u'Total__Score': [7918, u'score'],
- u'EarleyBoyer__EarleyBoyer': [14340, u'score'],
- u'MandreelLatency__MandreelLatency': [5188, u'score'],
- u'DeltaBlue__DeltaBlue': [14401, u'score'],
- u'SplayLatency__SplayLatency': [7653, u'score'],
- u'Mandreel__Mandreel': [6620, u'score'],
- u'Richards__Richards': [10358, u'score'],
- u'zlib__zlib': [16094, u'score'],
- u'CodeLoad__CodeLoad': [6271, u'score'],
- u'Typescript__Typescript': [9815, u'score'],
- u'RegExp__RegExp': [1765, u'score'],
- u'RayTrace__RayTrace': [16600, u'score'],
- u'NavierStokes__NavierStokes': [9815, u'score']
- })
+ self.assertEqual(results_dict, {
+ u'Box2D__Box2D': [4775, u'score'],
+ u'Splay__Splay': [4425, u'score'],
+ u'Gameboy__Gameboy': [9901, u'score'],
+ u'Crypto__Crypto': [8737, u'score'],
+ u'PdfJS__PdfJS': [6455, u'score'],
+ u'Total__Score': [7918, u'score'],
+ u'EarleyBoyer__EarleyBoyer': [14340, u'score'],
+ u'MandreelLatency__MandreelLatency': [5188, u'score'],
+ u'DeltaBlue__DeltaBlue': [14401, u'score'],
+ u'SplayLatency__SplayLatency': [7653, u'score'],
+ u'Mandreel__Mandreel': [6620, u'score'],
+ u'Richards__Richards': [10358, u'score'],
+ u'zlib__zlib': [16094, u'score'],
+ u'CodeLoad__CodeLoad': [6271, u'score'],
+ u'Typescript__Typescript': [9815, u'score'],
+ u'RegExp__RegExp': [1765, u'score'],
+ u'RayTrace__RayTrace': [16600, u'score'],
+ u'NavierStokes__NavierStokes': [9815, u'score']
+ })
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(tempfile, 'mkdtemp')
@@ -720,19 +470,6 @@ class ResultTest(unittest.TestCase):
res = self.result.GetKeyvals()
self.assertEqual(res, {'Total': 10, 'first_time': 680})
- @mock.patch.object(misc, 'GetInsideChrootPath')
- @mock.patch.object(command_executer.CommandExecuter,
- 'ChrootRunCommandWOutput')
- def test_get_samples(self, mock_chrootruncmd, mock_getpath):
- fake_file = '/usr/chromeos/chroot/tmp/results/fake_file'
- self.result.perf_data_files = ['/tmp/results/perf.data']
- self.result.board = 'samus'
- mock_getpath.return_value = fake_file
- self.result.ce.ChrootRunCommandWOutput = mock_chrootruncmd
- mock_chrootruncmd.return_value = ['', '45.42% 237210 chrome ', '']
- samples = self.result.GetSamples()
- self.assertEqual(samples, [237210, u'samples'])
-
def test_get_results_dir(self):
self.result.out = ''
@@ -747,7 +484,7 @@ class ResultTest(unittest.TestCase):
self.result.results_dir = None
res = self.result.FindFilesInResultsDir('-name perf.data')
- self.assertEqual(res, '')
+ self.assertIsNone(res)
self.result.ce.RunCommand = mock_runcmd
self.result.results_dir = '/tmp/test_results'
@@ -797,268 +534,6 @@ class ResultTest(unittest.TestCase):
self.assertEqual(res, ['line1', 'line1'])
self.assertEqual(self.args, '-name perf_measurements')
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_turbostat_file_finds_single_log(self, mock_runcmd):
- """Expected behavior when a single log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, 'some/long/path/turbostat.log', '')
- found_single_log = self.result.GetTurbostatFile()
- self.assertEqual(found_single_log, 'some/long/path/turbostat.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_turbostat_file_finds_multiple_logs(self, mock_runcmd):
- """Error case when multiple files found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0,
- 'some/long/path/turbostat.log\nturbostat.log',
- '')
- found_first_logs = self.result.GetTurbostatFile()
- self.assertEqual(found_first_logs, 'some/long/path/turbostat.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_turbostat_file_finds_no_logs(self, mock_runcmd):
- """Error case when no log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, '', '')
- found_no_logs = self.result.GetTurbostatFile()
- self.assertEqual(found_no_logs, '')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_turbostat_file_with_failing_find(self, mock_runcmd):
- """Error case when file search returns an error."""
- self.result.results_dir = '/tmp/test_results'
- mock_runcmd.return_value = (-1, '', 'error')
- with self.assertRaises(RuntimeError):
- self.result.GetTurbostatFile()
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_top_file_finds_single_log(self, mock_runcmd):
- """Expected behavior when a single top log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, 'some/long/path/top.log', '')
- found_single_log = self.result.GetTopFile()
- self.assertEqual(found_single_log, 'some/long/path/top.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_top_file_finds_multiple_logs(self, mock_runcmd):
- """The case when multiple top files found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, 'some/long/path/top.log\ntop.log', '')
- found_first_logs = self.result.GetTopFile()
- self.assertEqual(found_first_logs, 'some/long/path/top.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_top_file_finds_no_logs(self, mock_runcmd):
- """Error case when no log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, '', '')
- found_no_logs = self.result.GetTopFile()
- self.assertEqual(found_no_logs, '')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_cpustats_file_finds_single_log(self, mock_runcmd):
- """Expected behavior when a single log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, 'some/long/path/cpustats.log', '')
- found_single_log = self.result.GetCpustatsFile()
- self.assertEqual(found_single_log, 'some/long/path/cpustats.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_cpustats_file_finds_multiple_logs(self, mock_runcmd):
- """The case when multiple files found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, 'some/long/path/cpustats.log\ncpustats.log',
- '')
- found_first_logs = self.result.GetCpustatsFile()
- self.assertEqual(found_first_logs, 'some/long/path/cpustats.log')
-
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_get_cpustats_file_finds_no_logs(self, mock_runcmd):
- """Error case when no log file found."""
- self.result.results_dir = '/tmp/test_results'
- self.result.ce.RunCommandWOutput = mock_runcmd
- mock_runcmd.return_value = (0, '', '')
- found_no_logs = self.result.GetCpustatsFile()
- self.assertEqual(found_no_logs, '')
-
- def test_process_turbostat_results_with_valid_data(self):
- """Normal case when log exists and contains valid data."""
- self.result.turbostat_log_file = '/tmp/somelogfile.log'
- with mock.patch('__builtin__.open',
- mock.mock_open(read_data=TURBOSTAT_LOG_OUTPUT)) as mo:
- cpustats = self.result.ProcessTurbostatResults()
- # Check that the log got opened and data were read/parsed.
- calls = [mock.call('/tmp/somelogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(cpustats, TURBOSTAT_DATA)
-
- def test_process_turbostat_results_from_empty_file(self):
- """Error case when log exists but file is empty."""
- self.result.turbostat_log_file = '/tmp/emptylogfile.log'
- with mock.patch('__builtin__.open', mock.mock_open(read_data='')) as mo:
- cpustats = self.result.ProcessTurbostatResults()
- # Check that the log got opened and parsed successfully and empty data
- # returned.
- calls = [mock.call('/tmp/emptylogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(cpustats, {})
-
- def test_process_turbostat_results_when_file_doesnt_exist(self):
- """Error case when file does not exist."""
- nonexistinglog = '/tmp/1'
- while os.path.exists(nonexistinglog):
- # Extend file path if it happens to exist.
- nonexistinglog = os.path.join(nonexistinglog, '1')
- self.result.turbostat_log_file = nonexistinglog
- # Allow the tested function to call a 'real' open and hopefully crash.
- with self.assertRaises(IOError):
- self.result.ProcessTurbostatResults()
-
- def test_process_cpustats_results_with_uniq_data(self):
- """Process cpustats log which has freq unique to each core.
-
- Testing normal case when frequency data vary between
- different cores.
- Expecting that data for all cores will be present in
- returned cpustats.
- """
- self.result.cpustats_log_file = '/tmp/somelogfile.log'
- with mock.patch('__builtin__.open',
- mock.mock_open(read_data=CPUSTATS_UNIQ_OUTPUT)) as mo:
- cpustats = self.result.ProcessCpustatsResults()
- # Check that the log got opened and data were read/parsed.
- calls = [mock.call('/tmp/somelogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(cpustats, CPUSTATS_UNIQ_DATA)
-
- def test_process_cpustats_results_with_dupl_data(self):
- """Process cpustats log where cores have duplicate freq.
-
- Testing normal case when frequency data on some cores
- are duplicated.
- Expecting that duplicated data is discarded in
- returned cpustats.
- """
- self.result.cpustats_log_file = '/tmp/somelogfile.log'
- with mock.patch('__builtin__.open',
- mock.mock_open(read_data=CPUSTATS_DUPL_OUTPUT)) as mo:
- cpustats = self.result.ProcessCpustatsResults()
- # Check that the log got opened and data were read/parsed.
- calls = [mock.call('/tmp/somelogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(cpustats, CPUSTATS_DUPL_DATA)
-
- def test_process_cpustats_results_from_empty_file(self):
- """Error case when log exists but file is empty."""
- self.result.cpustats_log_file = '/tmp/emptylogfile.log'
- with mock.patch('__builtin__.open', mock.mock_open(read_data='')) as mo:
- cpustats = self.result.ProcessCpustatsResults()
- # Check that the log got opened and parsed successfully and empty data
- # returned.
- calls = [mock.call('/tmp/emptylogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(cpustats, {})
-
- def test_process_top_results_with_valid_data(self):
- """Process top log with valid data."""
-
- self.result.top_log_file = '/tmp/fakelogfile.log'
- with mock.patch('__builtin__.open',
- mock.mock_open(read_data=TOP_LOG)) as mo:
- topproc = self.result.ProcessTopResults()
- # Check that the log got opened and data were read/parsed.
- calls = [mock.call('/tmp/fakelogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(topproc, TOP_DATA)
-
- def test_process_top_results_from_empty_file(self):
- """Error case when log exists but file is empty."""
- self.result.top_log_file = '/tmp/emptylogfile.log'
- with mock.patch('__builtin__.open', mock.mock_open(read_data='')) as mo:
- topcalls = self.result.ProcessTopResults()
- # Check that the log got opened and parsed successfully and empty data
- # returned.
- calls = [mock.call('/tmp/emptylogfile.log')]
- mo.assert_has_calls(calls)
- self.assertEqual(topcalls, [])
-
- def test_format_string_top5_cmds(self):
- """Test formatted string with top5 commands."""
- self.result.top_cmds = [
- {
- 'cmd': 'chrome',
- 'cpu_avg': 119.753453465,
- 'count': 44444,
- 'top5': [222.8, 217.9, 217.8, 191.0, 189.9],
- },
- {
- 'cmd': 'irq/230-cros-ec',
- 'cpu_avg': 10.000000000000001,
- 'count': 1000,
- 'top5': [11.5, 11.4, 11.3, 11.2, 11.1],
- },
- {
- 'cmd': 'powerd',
- 'cpu_avg': 2.0,
- 'count': 2,
- 'top5': [3.0, 1.0]
- },
- {
- 'cmd': 'cmd1',
- 'cpu_avg': 1.0,
- 'count': 1,
- 'top5': [1.0],
- },
- {
- 'cmd': 'cmd2',
- 'cpu_avg': 1.0,
- 'count': 1,
- 'top5': [1.0],
- },
- {
- 'cmd': 'not_for_print',
- 'cpu_avg': 1.0,
- 'count': 1,
- 'top5': [1.0],
- },
- ]
- form_str = self.result.FormatStringTop5()
- self.assertEqual(
- form_str, '\n'.join([
- 'Top 5 commands with highest CPU usage:',
- ' COMMAND AVG CPU% COUNT HIGHEST 5',
- '-' * 50,
- ' chrome 119.75 44444 '
- '[222.8, 217.9, 217.8, 191.0, 189.9]',
- ' irq/230-cros-ec 10.00 1000 '
- '[11.5, 11.4, 11.3, 11.2, 11.1]',
- ' powerd 2.00 2 [3.0, 1.0]',
- ' cmd1 1.00 1 [1.0]',
- ' cmd2 1.00 1 [1.0]',
- '-' * 50,
- ]))
-
- def test_format_string_top5_calls_no_data(self):
- """Test formatted string of top5 with no data."""
- self.result.top_cmds = []
- form_str = self.result.FormatStringTop5()
- self.assertEqual(
- form_str, '\n'.join([
- 'Top 5 commands with highest CPU usage:',
- ' COMMAND AVG CPU% COUNT HIGHEST 5',
- '-' * 50,
- '[NO DATA FROM THE TOP LOG]',
- '-' * 50,
- ]))
-
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(command_executer.CommandExecuter, 'ChrootRunCommand')
def test_generate_perf_report_files(self, mock_chrootruncmd, mock_getpath):
@@ -1067,33 +542,14 @@ class ResultTest(unittest.TestCase):
self.result.board = 'lumpy'
mock_getpath.return_value = fake_file
self.result.ce.ChrootRunCommand = mock_chrootruncmd
- mock_chrootruncmd.return_value = 0
- # Debug path not found
- self.result.label.debug_path = ''
tmp = self.result.GeneratePerfReportFiles()
self.assertEqual(tmp, ['/tmp/chroot%s' % fake_file])
self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
- ('/tmp', ('/usr/sbin/perf report -n '
- '-i %s --stdio > %s') % (fake_file, fake_file)))
-
- @mock.patch.object(misc, 'GetInsideChrootPath')
- @mock.patch.object(command_executer.CommandExecuter, 'ChrootRunCommand')
- def test_generate_perf_report_files_debug(self, mock_chrootruncmd,
- mock_getpath):
- fake_file = '/usr/chromeos/chroot/tmp/results/fake_file'
- self.result.perf_data_files = ['/tmp/results/perf.data']
- self.result.board = 'lumpy'
- mock_getpath.return_value = fake_file
- self.result.ce.ChrootRunCommand = mock_chrootruncmd
- mock_chrootruncmd.return_value = 0
- # Debug path found
- self.result.label.debug_path = '/tmp/debug'
- tmp = self.result.GeneratePerfReportFiles()
- self.assertEqual(tmp, ['/tmp/chroot%s' % fake_file])
- self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
- ('/tmp', ('/usr/sbin/perf report -n --symfs /tmp/debug '
- '--vmlinux /tmp/debug/boot/vmlinux '
- '-i %s --stdio > %s') % (fake_file, fake_file)))
+ ('/tmp',
+ ('/usr/sbin/perf report -n --symfs /build/lumpy '
+ '--vmlinux /build/lumpy/usr/lib/debug/boot/vmlinux '
+ '--kallsyms /build/lumpy/boot/System.map-* -i '
+ '%s --stdio > %s') % (fake_file, fake_file)))
@mock.patch.object(misc, 'GetOutsideChrootPath')
def test_populate_from_run(self, mock_getpath):
@@ -1114,18 +570,6 @@ class ResultTest(unittest.TestCase):
self.callGetPerfReportFiles = True
return []
- def FakeGetTurbostatFile():
- self.callGetTurbostatFile = True
- return []
-
- def FakeGetCpustatsFile():
- self.callGetCpustatsFile = True
- return []
-
- def FakeGetTopFile():
- self.callGetTopFile = True
- return []
-
def FakeProcessResults(show_results=False):
if show_results:
pass
@@ -1140,49 +584,35 @@ class ResultTest(unittest.TestCase):
self.callGetResultsFile = False
self.callGetPerfDataFiles = False
self.callGetPerfReportFiles = False
- self.callGetTurbostatFile = False
- self.callGetCpustatsFile = False
- self.callGetTopFile = False
self.callProcessResults = False
self.result.GetResultsDir = FakeGetResultsDir
self.result.GetResultsFile = FakeGetResultsFile
self.result.GetPerfDataFiles = FakeGetPerfDataFiles
self.result.GeneratePerfReportFiles = FakeGetPerfReportFiles
- self.result.GetTurbostatFile = FakeGetTurbostatFile
- self.result.GetCpustatsFile = FakeGetCpustatsFile
- self.result.GetTopFile = FakeGetTopFile
self.result.ProcessResults = FakeProcessResults
- self.result.PopulateFromRun(OUTPUT, '', 0, 'test', 'telemetry_Crosperf',
- 'chrome')
+ self.result.PopulateFromRun(OUTPUT, '', 0, 'test', 'telemetry_Crosperf')
self.assertTrue(self.callGetResultsDir)
self.assertTrue(self.callGetResultsFile)
self.assertTrue(self.callGetPerfDataFiles)
self.assertTrue(self.callGetPerfReportFiles)
- self.assertTrue(self.callGetTurbostatFile)
- self.assertTrue(self.callGetCpustatsFile)
- self.assertTrue(self.callGetTopFile)
self.assertTrue(self.callProcessResults)
- def FakeGetKeyvals(self, show_all=False):
- if show_all:
- return {'first_time': 680, 'Total': 10}
- else:
- return {'Total': 10}
-
def test_process_results(self):
+ def FakeGetKeyvals(show_all=False):
+ if show_all:
+ return {'first_time': 680, 'Total': 10}
+ else:
+ return {'Total': 10}
+
def FakeGatherPerfResults():
self.callGatherPerfResults = True
- def FakeGetSamples():
- return (1, 'samples')
-
- # Test 1
self.callGatherPerfResults = False
- self.result.GetKeyvals = self.FakeGetKeyvals
+ self.result.GetKeyvals = FakeGetKeyvals
self.result.GatherPerfResults = FakeGatherPerfResults
self.result.retval = 0
@@ -1191,161 +621,11 @@ class ResultTest(unittest.TestCase):
self.assertEqual(len(self.result.keyvals), 2)
self.assertEqual(self.result.keyvals, {'Total': 10, 'retval': 0})
- # Test 2
self.result.retval = 1
self.result.ProcessResults()
self.assertEqual(len(self.result.keyvals), 2)
self.assertEqual(self.result.keyvals, {'Total': 10, 'retval': 1})
- # Test 3
- self.result.cwp_dso = 'chrome'
- self.result.retval = 0
- self.result.GetSamples = FakeGetSamples
- self.result.ProcessResults()
- self.assertEqual(len(self.result.keyvals), 3)
- self.assertEqual(self.result.keyvals, {
- 'Total': 10,
- 'samples': (1, 'samples'),
- 'retval': 0
- })
-
- # Test 4. Parse output of benchmarks with multiple sotries in histogram
- # format
- self.result.suite = 'telemetry_Crosperf'
- self.result.results_file = [tempfile.mkdtemp() + '/histograms.json']
- with open(self.result.results_file[0], 'w') as f:
- f.write(HISTOGRAMSET)
- self.result.ProcessResults()
- shutil.rmtree(os.path.dirname(self.result.results_file[0]))
- # Verify the summary for the story is correct
- self.assertEqual(self.result.keyvals['timeToFirstContentfulPaint__typical'],
- [880.000, u'ms_smallerIsBetter'])
- # Veirfy the summary for a certain stroy tag is correct
- self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:cold'],
- [1000.000, u'ms_smallerIsBetter'])
- self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:warm'],
- [800.000, u'ms_smallerIsBetter'])
-
- @mock.patch.object(Result, 'ProcessCpustatsResults')
- @mock.patch.object(Result, 'ProcessTurbostatResults')
- def test_process_results_with_turbostat_log(self, mock_proc_turbo,
- mock_proc_cpustats):
- self.result.GetKeyvals = self.FakeGetKeyvals
-
- self.result.retval = 0
- self.result.turbostat_log_file = '/tmp/turbostat.log'
- mock_proc_turbo.return_value = {
- 'cpufreq': {
- 'all': [1, 2, 3]
- },
- 'cputemp': {
- 'all': [5.0, 6.0, 7.0]
- }
- }
- self.result.ProcessResults()
- mock_proc_turbo.assert_has_calls([mock.call()])
- mock_proc_cpustats.assert_not_called()
- self.assertEqual(len(self.result.keyvals), 8)
- self.assertEqual(
- self.result.keyvals, {
- 'Total': 10,
- 'cpufreq_all_avg': 2,
- 'cpufreq_all_max': 3,
- 'cpufreq_all_min': 1,
- 'cputemp_all_avg': 6.0,
- 'cputemp_all_min': 5.0,
- 'cputemp_all_max': 7.0,
- 'retval': 0
- })
-
- @mock.patch.object(Result, 'ProcessCpustatsResults')
- @mock.patch.object(Result, 'ProcessTurbostatResults')
- def test_process_results_with_cpustats_log(self, mock_proc_turbo,
- mock_proc_cpustats):
- self.result.GetKeyvals = self.FakeGetKeyvals
-
- self.result.retval = 0
- self.result.cpustats_log_file = '/tmp/cpustats.log'
- mock_proc_cpustats.return_value = {
- 'cpufreq': {
- 'cpu0': [100, 100, 100],
- 'cpu1': [4, 5, 6]
- },
- 'cputemp': {
- 'little': [20.2, 20.2, 20.2],
- 'big': [55.2, 66.1, 77.3]
- }
- }
- self.result.ProcessResults()
- mock_proc_turbo.assert_not_called()
- mock_proc_cpustats.assert_has_calls([mock.call()])
- self.assertEqual(len(self.result.keyvals), 10)
- self.assertEqual(
- self.result.keyvals, {
- 'Total': 10,
- 'cpufreq_cpu0_avg': 100,
- 'cpufreq_cpu1_avg': 5,
- 'cpufreq_cpu1_max': 6,
- 'cpufreq_cpu1_min': 4,
- 'cputemp_big_avg': 66.2,
- 'cputemp_big_max': 77.3,
- 'cputemp_big_min': 55.2,
- 'cputemp_little_avg': 20.2,
- 'retval': 0
- })
-
- @mock.patch.object(Result, 'ProcessCpustatsResults')
- @mock.patch.object(Result, 'ProcessTurbostatResults')
- def test_process_results_with_turbostat_and_cpustats_logs(
- self, mock_proc_turbo, mock_proc_cpustats):
- self.result.GetKeyvals = self.FakeGetKeyvals
-
- self.result.retval = 0
- self.result.turbostat_log_file = '/tmp/turbostat.log'
- self.result.cpustats_log_file = '/tmp/cpustats.log'
- mock_proc_turbo.return_value = {
- 'cpufreq': {
- 'all': [1, 2, 3]
- },
- 'cputemp': {
- 'all': [5.0, 6.0, 7.0]
- }
- }
- self.result.ProcessResults()
- mock_proc_turbo.assert_has_calls([mock.call()])
- mock_proc_cpustats.assert_not_called()
- self.assertEqual(len(self.result.keyvals), 8)
- self.assertEqual(
- self.result.keyvals, {
- 'Total': 10,
- 'cpufreq_all_avg': 2,
- 'cpufreq_all_max': 3,
- 'cpufreq_all_min': 1,
- 'cputemp_all_avg': 6.0,
- 'cputemp_all_min': 5.0,
- 'cputemp_all_max': 7.0,
- 'retval': 0
- })
-
- @mock.patch.object(Result, 'ProcessCpustatsResults')
- @mock.patch.object(Result, 'ProcessTurbostatResults')
- def test_process_results_without_cpu_data(self, mock_proc_turbo,
- mock_proc_cpustats):
- self.result.GetKeyvals = self.FakeGetKeyvals
-
- self.result.retval = 0
- self.result.turbostat_log_file = ''
- self.result.cpustats_log_file = ''
- self.result.ProcessResults()
- mock_proc_turbo.assert_not_called()
- mock_proc_cpustats.assert_not_called()
- self.assertEqual(len(self.result.keyvals), 2)
- self.assertEqual(self.result.keyvals, {'Total': 10, 'retval': 0})
-
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(command_executer.CommandExecuter,
'ChrootRunCommandWOutput')
@@ -1357,9 +637,6 @@ class ResultTest(unittest.TestCase):
pass
return self.tmpdir
- def FakeGetSamples():
- return [1, u'samples']
-
current_path = os.getcwd()
cache_dir = os.path.join(current_path, 'test_cache/test_input')
self.result.ce = command_executer.GetCommandExecuter(log_level='average')
@@ -1374,82 +651,40 @@ class ResultTest(unittest.TestCase):
tempfile.mkdtemp = FakeMkdtemp
self.result.PopulateFromCacheDir(cache_dir, 'sunspider',
- 'telemetry_Crosperf', '')
- self.assertEqual(
- self.result.keyvals, {
- u'Total__Total': [444.0, u'ms'],
- u'regexp-dna__regexp-dna': [16.2, u'ms'],
- u'telemetry_page_measurement_results__num_failed': [0, u'count'],
- u'telemetry_page_measurement_results__num_errored': [0, u'count'],
- u'string-fasta__string-fasta': [23.2, u'ms'],
- u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
- u'access-nsieve__access-nsieve': [7.9, u'ms'],
- u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
- u'string-validate-input__string-validate-input': [19.3, u'ms'],
- u'3d-raytrace__3d-raytrace': [24.7, u'ms'],
- u'3d-cube__3d-cube': [28.0, u'ms'],
- u'string-unpack-code__string-unpack-code': [46.7, u'ms'],
- u'date-format-tofte__date-format-tofte': [26.3, u'ms'],
- u'math-partial-sums__math-partial-sums': [22.0, u'ms'],
- '\telemetry_Crosperf': ['PASS', ''],
- u'crypto-aes__crypto-aes': [15.2, u'ms'],
- u'bitops-bitwise-and__bitops-bitwise-and': [8.4, u'ms'],
- u'crypto-md5__crypto-md5': [10.5, u'ms'],
- u'string-tagcloud__string-tagcloud': [52.8, u'ms'],
- u'access-nbody__access-nbody': [8.5, u'ms'],
- 'retval':
- 0,
- u'math-spectral-norm__math-spectral-norm': [6.6, u'ms'],
- u'math-cordic__math-cordic': [8.7, u'ms'],
- u'access-binary-trees__access-binary-trees': [4.5, u'ms'],
- u'controlflow-recursive__controlflow-recursive': [4.4, u'ms'],
- u'access-fannkuch__access-fannkuch': [17.8, u'ms'],
- u'string-base64__string-base64': [16.0, u'ms'],
- u'date-format-xparb__date-format-xparb': [20.9, u'ms'],
- u'3d-morph__3d-morph': [22.1, u'ms'],
- u'bitops-bits-in-byte__bitops-bits-in-byte': [9.1, u'ms']
- })
-
- self.result.GetSamples = FakeGetSamples
- self.result.PopulateFromCacheDir(cache_dir, 'sunspider',
- 'telemetry_Crosperf', 'chrome')
- self.assertEqual(
- self.result.keyvals, {
- u'Total__Total': [444.0, u'ms'],
- u'regexp-dna__regexp-dna': [16.2, u'ms'],
- u'telemetry_page_measurement_results__num_failed': [0, u'count'],
- u'telemetry_page_measurement_results__num_errored': [0, u'count'],
- u'string-fasta__string-fasta': [23.2, u'ms'],
- u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
- u'access-nsieve__access-nsieve': [7.9, u'ms'],
- u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
- u'string-validate-input__string-validate-input': [19.3, u'ms'],
- u'3d-raytrace__3d-raytrace': [24.7, u'ms'],
- u'3d-cube__3d-cube': [28.0, u'ms'],
- u'string-unpack-code__string-unpack-code': [46.7, u'ms'],
- u'date-format-tofte__date-format-tofte': [26.3, u'ms'],
- u'math-partial-sums__math-partial-sums': [22.0, u'ms'],
- '\telemetry_Crosperf': ['PASS', ''],
- u'crypto-aes__crypto-aes': [15.2, u'ms'],
- u'bitops-bitwise-and__bitops-bitwise-and': [8.4, u'ms'],
- u'crypto-md5__crypto-md5': [10.5, u'ms'],
- u'string-tagcloud__string-tagcloud': [52.8, u'ms'],
- u'access-nbody__access-nbody': [8.5, u'ms'],
- 'retval':
- 0,
- u'math-spectral-norm__math-spectral-norm': [6.6, u'ms'],
- u'math-cordic__math-cordic': [8.7, u'ms'],
- u'access-binary-trees__access-binary-trees': [4.5, u'ms'],
- u'controlflow-recursive__controlflow-recursive': [4.4, u'ms'],
- u'access-fannkuch__access-fannkuch': [17.8, u'ms'],
- u'string-base64__string-base64': [16.0, u'ms'],
- u'date-format-xparb__date-format-xparb': [20.9, u'ms'],
- u'3d-morph__3d-morph': [22.1, u'ms'],
- u'bitops-bits-in-byte__bitops-bits-in-byte': [9.1, u'ms'],
- u'samples': [1, u'samples']
- })
+ 'telemetry_Crosperf')
+ self.assertEqual(self.result.keyvals, {
+ u'Total__Total': [444.0, u'ms'],
+ u'regexp-dna__regexp-dna': [16.2, u'ms'],
+ u'telemetry_page_measurement_results__num_failed': [0, u'count'],
+ u'telemetry_page_measurement_results__num_errored': [0, u'count'],
+ u'string-fasta__string-fasta': [23.2, u'ms'],
+ u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
+ u'access-nsieve__access-nsieve': [7.9, u'ms'],
+ u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
+ u'string-validate-input__string-validate-input': [19.3, u'ms'],
+ u'3d-raytrace__3d-raytrace': [24.7, u'ms'],
+ u'3d-cube__3d-cube': [28.0, u'ms'],
+ u'string-unpack-code__string-unpack-code': [46.7, u'ms'],
+ u'date-format-tofte__date-format-tofte': [26.3, u'ms'],
+ u'math-partial-sums__math-partial-sums': [22.0, u'ms'],
+ '\telemetry_Crosperf': ['PASS', ''],
+ u'crypto-aes__crypto-aes': [15.2, u'ms'],
+ u'bitops-bitwise-and__bitops-bitwise-and': [8.4, u'ms'],
+ u'crypto-md5__crypto-md5': [10.5, u'ms'],
+ u'string-tagcloud__string-tagcloud': [52.8, u'ms'],
+ u'access-nbody__access-nbody': [8.5, u'ms'],
+ 'retval': 0,
+ u'math-spectral-norm__math-spectral-norm': [6.6, u'ms'],
+ u'math-cordic__math-cordic': [8.7, u'ms'],
+ u'access-binary-trees__access-binary-trees': [4.5, u'ms'],
+ u'controlflow-recursive__controlflow-recursive': [4.4, u'ms'],
+ u'access-fannkuch__access-fannkuch': [17.8, u'ms'],
+ u'string-base64__string-base64': [16.0, u'ms'],
+ u'date-format-xparb__date-format-xparb': [20.9, u'ms'],
+ u'3d-morph__3d-morph': [22.1, u'ms'],
+ u'bitops-bits-in-byte__bitops-bits-in-byte': [9.1, u'ms']
+ })
# Clean up after test.
tempfile.mkdtemp = save_real_mkdtemp
@@ -1663,10 +898,9 @@ class TelemetryResultTest(unittest.TestCase):
self.result = None
self.mock_logger = mock.Mock(spec=logger.Logger)
self.mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
- self.mock_label = MockLabel('mock_label', 'build', 'chromeos_image',
- 'autotest_dir', 'debug_dir', '/tmp', 'lumpy',
- 'remote', 'image_args', 'cache_dir', 'average',
- 'gcc', False, None)
+ self.mock_label = MockLabel('mock_label', 'chromeos_image', 'autotest_dir',
+ '/tmp', 'lumpy', 'remote', 'image_args',
+ 'cache_dir', 'average', 'gcc', None)
self.mock_machine = machine_manager.MockCrosMachine(
'falco.cros', '/tmp/chromeos', 'average')
@@ -1680,7 +914,7 @@ class TelemetryResultTest(unittest.TestCase):
self.mock_cmd_exec)
self.result.ProcessResults = FakeProcessResults
self.result.PopulateFromRun(OUTPUT, error, 3, 'fake_test',
- 'telemetry_Crosperf', '')
+ 'telemetry_Crosperf')
self.assertTrue(self.callFakeProcessResults)
self.assertEqual(self.result.out, OUTPUT)
self.assertEqual(self.result.err, error)
@@ -1693,7 +927,7 @@ class TelemetryResultTest(unittest.TestCase):
current_path = os.getcwd()
cache_dir = os.path.join(current_path,
'test_cache/test_puretelemetry_input')
- self.result.PopulateFromCacheDir(cache_dir, '', '', '')
+ self.result.PopulateFromCacheDir(cache_dir, '', '')
self.assertEqual(self.result.out.strip(), PURE_TELEMETRY_OUTPUT.strip())
self.assertEqual(self.result.err, '')
self.assertEqual(self.result.retval, 0)
@@ -1707,10 +941,9 @@ class ResultsCacheTest(unittest.TestCase):
super(ResultsCacheTest, self).__init__(*args, **kwargs)
self.fakeCacheReturnResult = None
self.mock_logger = mock.Mock(spec=logger.Logger)
- self.mock_label = MockLabel('mock_label', 'build', 'chromeos_image',
- 'autotest_dir', 'debug_dir', '/tmp', 'lumpy',
- 'remote', 'image_args', 'cache_dir', 'average',
- 'gcc', False, None)
+ self.mock_label = MockLabel('mock_label', 'chromeos_image', 'autotest_dir',
+ '/tmp', 'lumpy', 'remote', 'image_args',
+ 'cache_dir', 'average', 'gcc', None)
def setUp(self):
self.results_cache = ResultsCache()
@@ -1739,8 +972,7 @@ class ResultsCacheTest(unittest.TestCase):
'', # benchmark_run.share_cache
'telemetry_Crosperf',
True, # benchmark_run.show_all_results
- False, # benchmark_run.run_local
- '') # benchmark_run.cwp_dso
+ False) # benchmark_run.run_local
@mock.patch.object(image_checksummer.ImageChecksummer, 'Checksum')
def test_get_cache_dir_for_write(self, mock_checksum):
@@ -1764,11 +996,11 @@ class ResultsCacheTest(unittest.TestCase):
# Verify that the returned directory is correct (since the label
# contained a cache_dir, named 'cache_dir', that's what is expected in
# the result, rather than '~/cros_scratch').
- comp_path = os.path.join(
- os.getcwd(), 'cache_dir/54524606abaae4fdf7b02f49f7ae7127_'
- 'sunspider_1_fda29412ceccb72977516c4785d08e2c_'
- 'FakeImageChecksumabc123_FakeMachineChecksum'
- 'abc987__6')
+ comp_path = os.path.join(os.getcwd(),
+ 'cache_dir/54524606abaae4fdf7b02f49f7ae7127_'
+ 'sunspider_1_fda29412ceccb72977516c4785d08e2c_'
+ 'FakeImageChecksumabc123_FakeMachineChecksum'
+ 'abc987__6')
self.assertEqual(result_path, comp_path)
def test_form_cache_dir(self):
diff --git a/crosperf/results_organizer.py b/crosperf/results_organizer.py
index 4879caeb..bda0cc17 100644
--- a/crosperf/results_organizer.py
+++ b/crosperf/results_organizer.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Parse data from benchmark_runs for tabulator."""
from __future__ import print_function
@@ -46,7 +44,7 @@ def _GetMaxDup(data):
def _Repeat(func, times):
"""Returns the result of running func() n times."""
- return [func() for _ in range(times)]
+ return [func() for _ in xrange(times)]
def _DictWithReturnValues(retval, pass_fail):
@@ -171,7 +169,6 @@ def OrganizeResults(benchmark_runs, labels, benchmarks=None, json_report=False):
label_names = [label.name for label in labels]
label_indices = {name: i for i, name in enumerate(label_names)}
summary_file = _ReadSummaryFile(sys.argv[0])
-
if benchmarks is None:
benchmarks = []
@@ -185,30 +182,15 @@ def OrganizeResults(benchmark_runs, labels, benchmarks=None, json_report=False):
show_all_results = json_report or benchmark.show_all_results
if not show_all_results:
- summary_list = summary_file.get(benchmark.name)
+ summary_list = summary_file.get(benchmark.test_name)
if summary_list:
- for key in benchmark_run.result.keyvals.keys():
- if any(
- key.startswith(added_key)
- for added_key in ['retval', 'cpufreq', 'cputemp']):
- summary_list.append(key)
+ summary_list.append('retval')
else:
# Did not find test_name in json file; show everything.
show_all_results = True
- if benchmark_run.result.cwp_dso:
- # If we are in cwp approximation mode, we only care about samples
- if 'samples' in benchmark_run.result.keyvals:
- cur_dict['samples'] = benchmark_run.result.keyvals['samples']
- cur_dict['retval'] = benchmark_run.result.keyvals['retval']
- for key, value in benchmark_run.result.keyvals.items():
- if any(
- key.startswith(cpustat_keyword)
- for cpustat_keyword in ['cpufreq', 'cputemp']):
- cur_dict[key] = value
- else:
- for test_key in benchmark_run.result.keyvals:
- if show_all_results or test_key in summary_list:
- cur_dict[test_key] = benchmark_run.result.keyvals[test_key]
+ for test_key in benchmark_run.result.keyvals:
+ if show_all_results or test_key in summary_list:
+ cur_dict[test_key] = benchmark_run.result.keyvals[test_key]
# Occasionally Telemetry tests will not fail but they will not return a
# result, either. Look for those cases, and force them to be a fail.
# (This can happen if, for example, the test has been disabled.)
diff --git a/crosperf/results_organizer_unittest.py b/crosperf/results_organizer_unittest.py
index 39a8cce6..e7657373 100755
--- a/crosperf/results_organizer_unittest.py
+++ b/crosperf/results_organizer_unittest.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Testing of ResultsOrganizer
We create some labels, benchmark_runs and then create a ResultsOrganizer,
@@ -129,7 +127,7 @@ result = {
'': 'PASS',
'test': '6'
}]]
-} # yapf: disable
+}
class ResultOrganizerTest(unittest.TestCase):
@@ -140,21 +138,21 @@ class ResultOrganizerTest(unittest.TestCase):
benchmarks = [mock_instance.benchmark1, mock_instance.benchmark2]
benchmark_runs = [None] * 8
benchmark_runs[0] = BenchmarkRun('b1', benchmarks[0], labels[0], 1, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[1] = BenchmarkRun('b2', benchmarks[0], labels[0], 2, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[2] = BenchmarkRun('b3', benchmarks[0], labels[1], 1, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[3] = BenchmarkRun('b4', benchmarks[0], labels[1], 2, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[4] = BenchmarkRun('b5', benchmarks[1], labels[0], 1, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[5] = BenchmarkRun('b6', benchmarks[1], labels[0], 2, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[6] = BenchmarkRun('b7', benchmarks[1], labels[1], 1, '', '',
- '', 'average', '', {})
+ '', 'average', '')
benchmark_runs[7] = BenchmarkRun('b8', benchmarks[1], labels[1], 2, '', '',
- '', 'average', '', {})
+ '', 'average', '')
i = 0
for b in benchmark_runs:
diff --git a/crosperf/results_report.py b/crosperf/results_report.py
index edbdd4d7..fac044fb 100644
--- a/crosperf/results_report.py
+++ b/crosperf/results_report.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""A module to handle the report format."""
from __future__ import print_function
@@ -18,9 +16,7 @@ from cros_utils.tabulator import Cell
from cros_utils.tabulator import CoeffVarFormat
from cros_utils.tabulator import CoeffVarResult
from cros_utils.tabulator import Column
-from cros_utils.tabulator import SamplesTableGenerator
from cros_utils.tabulator import Format
-from cros_utils.tabulator import IterationResult
from cros_utils.tabulator import GmeanRatioResult
from cros_utils.tabulator import LiteralResult
from cros_utils.tabulator import MaxResult
@@ -56,12 +52,12 @@ def ParseChromeosImage(chromeos_image):
part after '/chroot/tmp' in the second case.
Args:
- chromeos_image: string containing the path to the chromeos_image that
- crosperf used for the test.
+ chromeos_image: string containing the path to the chromeos_image that
+ crosperf used for the test.
Returns:
- version, image: The results of parsing the input string, as explained
- above.
+ version, image: The results of parsing the input string, as explained
+ above.
"""
# Find the Chromeos Version, e.g. R45-2345.0.0.....
# chromeos_image should have been something like:
@@ -90,7 +86,7 @@ def _AppendUntilLengthIs(gen, the_list, target_len):
Uses `gen` to generate elements.
"""
- the_list.extend(gen() for _ in range(target_len - len(the_list)))
+ the_list.extend(gen() for _ in xrange(target_len - len(the_list)))
return the_list
@@ -99,7 +95,8 @@ def _FilterPerfReport(event_threshold, report):
def filter_dict(m):
return {
- fn_name: pct for fn_name, pct in m.iteritems() if pct >= event_threshold
+ fn_name: pct
+ for fn_name, pct in m.iteritems() if pct >= event_threshold
}
return {event: filter_dict(m) for event, m in report.iteritems()}
@@ -132,7 +129,7 @@ class _PerfTable(object):
self.perf_data = {}
for label in label_names:
for bench_name, bench_iterations in benchmark_names_and_iterations:
- for i in range(bench_iterations):
+ for i in xrange(bench_iterations):
report = read_perf_report(label, bench_name, i)
self._ProcessPerfReport(report, label, bench_name, i)
@@ -161,14 +158,6 @@ def _GetResultsTableHeader(ben_name, iterations):
return [[cell]]
-def _GetDSOHeader(cwp_dso):
- info = 'CWP_DSO: %s' % cwp_dso
- cell = Cell()
- cell.string_value = info
- cell.header = False
- return [[cell]]
-
-
def _ParseColumn(columns, iteration):
new_column = []
for column in columns:
@@ -177,7 +166,7 @@ def _ParseColumn(columns, iteration):
else:
new_column.extend(
Column(LiteralResult(i), Format(), str(i + 1))
- for i in range(iteration))
+ for i in xrange(iteration))
return new_column
@@ -234,20 +223,6 @@ def _GetPerfTables(benchmark_results, columns, table_type):
return tables
-def _GetSamplesTables(benchmark_results, columns, table_type):
- tables = []
- dso_header_table = _GetDSOHeader(benchmark_results.cwp_dso)
- tables.append(dso_header_table)
- (table, new_keyvals, iter_counts) = SamplesTableGenerator(
- benchmark_results.run_keyvals, benchmark_results.label_names,
- benchmark_results.iter_counts, benchmark_results.weights).GetTable()
- parsed_columns = _ParseColumn(columns, 1)
- tf = TableFormatter(table, parsed_columns, samples_table=True)
- cell_table = tf.GetCellTable(table_type)
- tables.append(cell_table)
- return (tables, new_keyvals, iter_counts)
-
-
class ResultsReport(object):
"""Class to handle the report format."""
MAX_COLOR_CODE = 255
@@ -256,52 +231,30 @@ class ResultsReport(object):
def __init__(self, results):
self.benchmark_results = results
- def _GetTablesWithColumns(self, columns, table_type, summary_type):
- if summary_type == 'perf':
- get_tables = _GetPerfTables
- elif summary_type == 'samples':
- get_tables = _GetSamplesTables
- else:
- get_tables = _GetTables
- ret = get_tables(self.benchmark_results, columns, table_type)
- # If we are generating a samples summary table, the return value of
- # get_tables will be a tuple, and we will update the benchmark_results for
- # composite benchmark so that full table can use it.
- if isinstance(ret, tuple):
- self.benchmark_results.run_keyvals = ret[1]
- self.benchmark_results.iter_counts = ret[2]
- ret = ret[0]
- return ret
+ def _GetTablesWithColumns(self, columns, table_type, perf):
+ get_tables = _GetPerfTables if perf else _GetTables
+ return get_tables(self.benchmark_results, columns, table_type)
def GetFullTables(self, perf=False):
- ignore_min_max = self.benchmark_results.ignore_min_max
columns = [
- Column(RawResult(), Format()),
- Column(MinResult(), Format()),
- Column(MaxResult(), Format()),
- Column(AmeanResult(ignore_min_max), Format()),
- Column(StdResult(ignore_min_max), Format(), 'StdDev'),
- Column(CoeffVarResult(ignore_min_max), CoeffVarFormat(), 'StdDev/Mean'),
- Column(GmeanRatioResult(ignore_min_max), RatioFormat(), 'GmeanSpeedup'),
- Column(PValueResult(ignore_min_max), PValueFormat(), 'p-value')
+ Column(RawResult(), Format()), Column(MinResult(), Format()), Column(
+ MaxResult(), Format()), Column(AmeanResult(), Format()), Column(
+ StdResult(), Format(), 'StdDev'),
+ Column(CoeffVarResult(), CoeffVarFormat(), 'StdDev/Mean'), Column(
+ GmeanRatioResult(), RatioFormat(), 'GmeanSpeedup'), Column(
+ PValueResult(), PValueFormat(), 'p-value')
]
return self._GetTablesWithColumns(columns, 'full', perf)
- def GetSummaryTables(self, summary_type=''):
- ignore_min_max = self.benchmark_results.ignore_min_max
- columns = []
- if summary_type == 'samples':
- columns += [Column(IterationResult(), Format(), 'Iterations [Pass:Fail]')]
- columns += [
- Column(
- AmeanResult(ignore_min_max), Format(),
- 'Weighted Samples Amean' if summary_type == 'samples' else ''),
- Column(StdResult(ignore_min_max), Format(), 'StdDev'),
- Column(CoeffVarResult(ignore_min_max), CoeffVarFormat(), 'StdDev/Mean'),
- Column(GmeanRatioResult(ignore_min_max), RatioFormat(), 'GmeanSpeedup'),
- Column(PValueResult(ignore_min_max), PValueFormat(), 'p-value')
+ def GetSummaryTables(self, perf=False):
+ columns = [
+ Column(AmeanResult(), Format()), Column(StdResult(), Format(),
+ 'StdDev'),
+ Column(CoeffVarResult(), CoeffVarFormat(), 'StdDev/Mean'), Column(
+ GmeanRatioResult(), RatioFormat(), 'GmeanSpeedup'), Column(
+ PValueResult(), PValueFormat(), 'p-value')
]
- return self._GetTablesWithColumns(columns, 'summary', summary_type)
+ return self._GetTablesWithColumns(columns, 'summary', perf)
def _PrintTable(tables, out_to):
@@ -358,8 +311,8 @@ class TextResultsReport(ResultsReport):
"""Generate the status table by the tabulator."""
table = [['', '']]
columns = [
- Column(LiteralResult(iteration=0), Format(), 'Status'),
- Column(LiteralResult(iteration=1), Format(), 'Failing Reason')
+ Column(LiteralResult(iteration=0), Format(), 'Status'), Column(
+ LiteralResult(iteration=1), Format(), 'Failing Reason')
]
for benchmark_run in self.experiment.benchmark_runs:
@@ -371,11 +324,6 @@ class TextResultsReport(ResultsReport):
cell_table = TableFormatter(table, columns).GetCellTable('status')
return [cell_table]
- def _GetTotalWaitCooldownTime(self):
- """Get cooldown wait time in seconds from experiment benchmark runs."""
- return sum(br.suite_runner.GetCooldownWaitTime()
- for br in self.experiment.benchmark_runs)
-
def GetReport(self):
"""Generate the report for email and console."""
output_type = 'EMAIL' if self.email else 'CONSOLE'
@@ -388,20 +336,15 @@ class TextResultsReport(ResultsReport):
title_contents = 'Results report'
sections.append(self._MakeTitle(title_contents))
- if not self.benchmark_results.cwp_dso:
- summary_table = _PrintTable(self.GetSummaryTables(), output_type)
- else:
- summary_table = _PrintTable(
- self.GetSummaryTables(summary_type='samples'), output_type)
+ summary_table = _PrintTable(self.GetSummaryTables(perf=False), output_type)
sections.append(self._MakeSection('Summary', summary_table))
if experiment is not None:
table = _PrintTable(self.GetStatusTable(), output_type)
sections.append(self._MakeSection('Benchmark Run Status', table))
- if not self.benchmark_results.cwp_dso:
- perf_table = _PrintTable(
- self.GetSummaryTables(summary_type='perf'), output_type)
+ perf_table = _PrintTable(self.GetSummaryTables(perf=True), output_type)
+ if perf_table:
sections.append(self._MakeSection('Perf Data', perf_table))
if experiment is not None:
@@ -411,9 +354,6 @@ class TextResultsReport(ResultsReport):
cpu_info = experiment.machine_manager.GetAllCPUInfo(experiment.labels)
sections.append(self._MakeSection('CPUInfo', cpu_info))
- waittime_str = '%d min' % (self._GetTotalWaitCooldownTime() // 60)
- sections.append(self._MakeSection('Cooldown wait time', waittime_str))
-
return '\n'.join(sections)
@@ -424,9 +364,8 @@ def _GetHTMLCharts(label_names, test_results):
# never add headers. We still need to pass it anyway.
table = TableGenerator(runs, label_names).GetTable()
columns = [
- Column(AmeanResult(), Format()),
- Column(MinResult(), Format()),
- Column(MaxResult(), Format())
+ Column(AmeanResult(), Format()), Column(MinResult(), Format()), Column(
+ MaxResult(), Format())
]
tf = TableFormatter(table, columns)
data_table = tf.GetCellTable('full', headers=False)
@@ -475,14 +414,9 @@ class HTMLResultsReport(ResultsReport):
chart_javascript = ''.join(chart.GetJavascript() for chart in charts)
chart_divs = ''.join(chart.GetDiv() for chart in charts)
- if not self.benchmark_results.cwp_dso:
- summary_table = self.GetSummaryTables()
- perf_table = self.GetSummaryTables(summary_type='perf')
- else:
- summary_table = self.GetSummaryTables(summary_type='samples')
- perf_table = None
+ summary_table = self.GetSummaryTables()
full_table = self.GetFullTables()
-
+ perf_table = self.GetSummaryTables(perf=True)
experiment_file = ''
if self.experiment is not None:
experiment_file = self.experiment.experiment_file
@@ -519,11 +453,8 @@ def ParseStandardPerfReport(report_data):
"""
# This function fails silently on its if it's handed a string (as opposed to a
# list of lines). So, auto-split if we do happen to get a string.
- if isinstance(report_data, str):
+ if isinstance(report_data, basestring):
report_data = report_data.splitlines()
- # When switching to python3 catch the case when bytes are passed.
- elif isinstance(report_data, bytes):
- raise TypeError()
# Samples: N{K,M,G} of event 'event-name'
samples_regex = re.compile(r"#\s+Samples: \d+\S? of event '([^']+)'")
@@ -609,10 +540,7 @@ class BenchmarkResults(object):
label_names,
benchmark_names_and_iterations,
run_keyvals,
- ignore_min_max=False,
- read_perf_report=None,
- cwp_dso=None,
- weights=None):
+ read_perf_report=None):
if read_perf_report is None:
def _NoPerfReport(*_args, **_kwargs):
@@ -624,10 +552,7 @@ class BenchmarkResults(object):
self.benchmark_names_and_iterations = benchmark_names_and_iterations
self.iter_counts = dict(benchmark_names_and_iterations)
self.run_keyvals = run_keyvals
- self.ignore_min_max = ignore_min_max
self.read_perf_report = read_perf_report
- self.cwp_dso = cwp_dso
- self.weights = dict(weights) if weights else None
@staticmethod
def FromExperiment(experiment, for_json_report=False):
@@ -635,15 +560,10 @@ class BenchmarkResults(object):
benchmark_names_and_iterations = [(benchmark.name, benchmark.iterations)
for benchmark in experiment.benchmarks]
run_keyvals = _ExperimentToKeyvals(experiment, for_json_report)
- ignore_min_max = experiment.ignore_min_max
read_perf_report = functools.partial(_ReadExperimentPerfReport,
experiment.results_directory)
- cwp_dso = experiment.cwp_dso
- weights = [(benchmark.name, benchmark.weight)
- for benchmark in experiment.benchmarks]
return BenchmarkResults(label_names, benchmark_names_and_iterations,
- run_keyvals, ignore_min_max, read_perf_report,
- cwp_dso, weights)
+ run_keyvals, read_perf_report)
def _GetElemByName(name, from_list):
diff --git a/crosperf/results_report_unittest.py b/crosperf/results_report_unittest.py
index ae51fda6..2a23aa78 100755
--- a/crosperf/results_report_unittest.py
+++ b/crosperf/results_report_unittest.py
@@ -1,22 +1,20 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Unittest for the results reporter."""
from __future__ import division
from __future__ import print_function
-import collections
-import io
-import os
-import unittest
+from StringIO import StringIO
+import collections
import mock
+import os
import test_flag
+import unittest
from benchmark_run import MockBenchmarkRun
from cros_utils import logger
@@ -83,7 +81,7 @@ def FakePath(ext):
def MakeMockExperiment(compiler='gcc'):
"""Mocks an experiment using the given compiler."""
- mock_experiment_file = io.BytesIO("""
+ mock_experiment_file = StringIO("""
board: x86-alex
remote: 127.0.0.1
perf_args: record -a -e cycles
@@ -128,30 +126,24 @@ def _InjectSuccesses(experiment, how_many, keyvals, for_benchmark=0,
machine_manager = MockMachineManager(
FakePath('chromeos_root'), 0, log_level, locks_dir)
machine_manager.AddMachine('testing_machine')
- machine = next(
- m for m in machine_manager.GetMachines() if m.name == 'testing_machine')
+ machine = next(m for m in machine_manager.GetMachines()
+ if m.name == 'testing_machine')
for label in experiment.labels:
def MakeSuccessfulRun(n):
run = MockBenchmarkRun('mock_success%d' % (n,), bench, label,
1 + n + num_runs, cache_conditions,
- machine_manager, log, log_level, share_cache, {})
+ machine_manager, log, log_level, share_cache)
mock_result = MockResult(log, label, log_level, machine)
mock_result.keyvals = keyvals
run.result = mock_result
return run
experiment.benchmark_runs.extend(
- MakeSuccessfulRun(n) for n in range(how_many))
+ MakeSuccessfulRun(n) for n in xrange(how_many))
return experiment
-def _InjectCooldownTime(experiment, cooldown_time):
- """Inject cooldown wait time in every benchmark run."""
- for br in experiment.benchmark_runs:
- br.suite_runner.cooldown_wait_time = cooldown_time
-
-
class TextResultsReportTest(unittest.TestCase):
"""Tests that the output of a text report contains the things we pass in.
@@ -164,18 +156,11 @@ class TextResultsReportTest(unittest.TestCase):
success_keyvals = {'retval': 0, 'machine': 'some bot', 'a_float': 3.96}
experiment = _InjectSuccesses(MakeMockExperiment(), num_success,
success_keyvals)
- # Set 120 sec cooldown time for every benchmark run.
- cooldown_time = 120
- _InjectCooldownTime(experiment, cooldown_time)
- text_report = TextResultsReport.FromExperiment(
- experiment, email=email).GetReport()
+ text_report = TextResultsReport.FromExperiment(experiment, email=email) \
+ .GetReport()
self.assertIn(str(success_keyvals['a_float']), text_report)
self.assertIn(success_keyvals['machine'], text_report)
self.assertIn(MockCrosMachine.CPUINFO_STRING, text_report)
- self.assertIn('Cooldown wait time', text_report)
- self.assertIn(
- '%d min' % (len(experiment.benchmark_runs) * cooldown_time // 60),
- text_report)
return text_report
def testOutput(self):
@@ -242,7 +227,7 @@ class HTMLResultsReportTest(unittest.TestCase):
_InjectSuccesses(MakeMockExperiment(), num_success, success_keyvals))
self.assertNotIn('no result', output.summary_table)
- # self.assertIn(success_keyvals['machine'], output.summary_table)
+ #self.assertIn(success_keyvals['machine'], output.summary_table)
self.assertIn('a_float', output.summary_table)
self.assertIn(str(success_keyvals['a_float']), output.summary_table)
self.assertIn('a_float', output.full_table)
@@ -334,11 +319,8 @@ class JSONResultsReportTest(unittest.TestCase):
def testFailedJSONReportOutputWithoutExperiment(self):
labels = ['label1']
- # yapf:disable
benchmark_names_and_iterations = [('bench1', 1), ('bench2', 2),
('bench3', 1), ('bench4', 0)]
- # yapf:enable
-
benchmark_keyvals = {
'bench1': [[{
'retval': 1,
@@ -431,7 +413,7 @@ class PerfReportParserTest(unittest.TestCase):
}
report_cycles = report['cycles']
self.assertEqual(len(report_cycles), 214)
- for k, v in known_cycles_percentages.items():
+ for k, v in known_cycles_percentages.iteritems():
self.assertIn(k, report_cycles)
self.assertEqual(v, report_cycles[k])
@@ -443,7 +425,7 @@ class PerfReportParserTest(unittest.TestCase):
}
report_instructions = report['instructions']
self.assertEqual(len(report_instructions), 492)
- for k, v in known_instrunctions_percentages.items():
+ for k, v in known_instrunctions_percentages.iteritems():
self.assertIn(k, report_instructions)
self.assertEqual(v, report_instructions[k])
diff --git a/crosperf/run_tests.sh b/crosperf/run_tests.sh
index d70fc99d..78a2b9fd 100755
--- a/crosperf/run_tests.sh
+++ b/crosperf/run_tests.sh
@@ -3,4 +3,30 @@
# Copyright 2011 Google Inc. All Rights Reserved.
# Author: raymes@google.com (Raymes Khoury)
-../run_tests_for.py .
+# Make sure the base toolchain-utils directory is in our PYTHONPATH before
+# trying to run this script.
+export PYTHONPATH+=":.."
+
+num_tests=0
+num_failed=0
+
+for test in $(find -name \*test.py); do
+ echo RUNNING: ${test}
+ ((num_tests++))
+ if ! ./${test} ; then
+ echo
+ echo "*** Test Failed! (${test}) ***"
+ echo
+ ((num_failed++))
+ fi
+done
+
+echo
+
+if [ ${num_failed} -eq 0 ] ; then
+ echo "ALL TESTS PASSED (${num_tests} ran)"
+ exit 0
+fi
+
+echo "${num_failed} TESTS FAILED (out of ${num_tests})"
+exit 1
diff --git a/crosperf/schedv2.py b/crosperf/schedv2.py
index 768d29d8..e661f307 100644
--- a/crosperf/schedv2.py
+++ b/crosperf/schedv2.py
@@ -1,24 +1,18 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Module to optimize the scheduling of benchmark_run tasks."""
-from __future__ import division
from __future__ import print_function
import sys
-import time
+import test_flag
import traceback
from collections import defaultdict
+from machine_image_manager import MachineImageManager
from threading import Lock
from threading import Thread
-
-import test_flag
-
-from machine_image_manager import MachineImageManager
from cros_utils import command_executer
from cros_utils import logger
@@ -49,164 +43,14 @@ class DutWorker(Thread):
# suite_runner.Terminate and updates timeline.
self._active_br.Terminate()
- def _kerncmd_update_needed(self, intel_pstate):
- """Check whether kernel cmdline update is needed.
-
- Args:
- intel_pstate: kernel command line argument (active, passive, no_hwp)
-
- Returns:
- True if update is needed.
- """
-
- ce = command_executer.GetCommandExecuter()
- good = 0
-
- # Check that dut platform supports hwp
- cmd = "grep -q '^flags.*hwp' /proc/cpuinfo"
- ret_code = ce.CrosRunCommand(
- cmd,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- if ret_code != good:
- # Intel hwp is not supported, update is not needed.
- return False
-
- kern_cmdline_cmd = 'grep -q "intel_pstate=%s" /proc/cmdline' % intel_pstate
- ret_code = ce.CrosRunCommand(
- kern_cmdline_cmd,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- self._logger.LogOutput('grep /proc/cmdline returned %d' % ret_code)
- if (intel_pstate and ret_code == good or
- not intel_pstate and ret_code != good):
- # No need to updated cmdline if:
- # 1. We are setting intel_pstate and we found it is already set.
- # 2. Not using intel_pstate and it is not in cmdline.
- return False
-
- # Otherwise we need to update intel_pstate.
- return True
-
- def _update_kerncmd_intel_pstate(self, intel_pstate):
- """Update kernel command line.
-
- Args:
- intel_pstate: kernel command line argument (active, passive, no_hwp)
- """
-
- ce = command_executer.GetCommandExecuter()
- good = 0
-
- # First phase is to remove rootfs verification to allow cmdline change.
- remove_verif_cmd = ' '.join([
- '/usr/share/vboot/bin/make_dev_ssd.sh',
- '--remove_rootfs_verification',
- '--partition %d',
- ])
- # Command for partition 2.
- verif_part2_failed = ce.CrosRunCommand(
- remove_verif_cmd % 2,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- # Command for partition 4
- # Some machines in the lab use partition 4 to boot from,
- # so cmdline should be update for both partitions.
- verif_part4_failed = ce.CrosRunCommand(
- remove_verif_cmd % 4,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- if verif_part2_failed or verif_part4_failed:
- self._logger.LogFatal(
- 'ERROR. Failed to update kernel cmdline on partition %d.\n'
- 'Remove verification failed with status %d' %
- (2 if verif_part2_failed else 4, verif_part2_failed or
- verif_part4_failed))
-
- ce.CrosRunCommand(
- 'reboot && exit',
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- # Give enough time for dut to complete reboot
- # TODO(denik): Replace with the function checking machine availability.
- time.sleep(30)
-
- # Second phase to update intel_pstate in kernel cmdline.
- kern_cmdline = '\n'.join([
- 'tmpfile=$(mktemp)',
- 'partnumb=%d',
- 'pstate=%s',
- # Store kernel cmdline in a temp file.
- '/usr/share/vboot/bin/make_dev_ssd.sh --partition ${partnumb}'
- ' --save_config ${tmpfile}',
- # Remove intel_pstate argument if present.
- "sed -i -r 's/ intel_pstate=[A-Za-z_]+//g' ${tmpfile}.${partnumb}",
- # Insert intel_pstate with a new value if it is set.
- '[[ -n ${pstate} ]] &&'
- ' sed -i -e \"s/ *$/ intel_pstate=${pstate}/\" ${tmpfile}.${partnumb}',
- # Save the change in kernel cmdline.
- # After completion we have to reboot.
- '/usr/share/vboot/bin/make_dev_ssd.sh --partition ${partnumb}'
- ' --set_config ${tmpfile}'
- ])
- kern_part2_cmdline_cmd = kern_cmdline % (2, intel_pstate)
- self._logger.LogOutput(
- 'Command to change kernel command line: %s' % kern_part2_cmdline_cmd)
- upd_part2_failed = ce.CrosRunCommand(
- kern_part2_cmdline_cmd,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- # Again here we are updating cmdline for partition 4
- # in addition to partition 2. Without this some machines
- # in the lab might fail.
- kern_part4_cmdline_cmd = kern_cmdline % (4, intel_pstate)
- self._logger.LogOutput(
- 'Command to change kernel command line: %s' % kern_part4_cmdline_cmd)
- upd_part4_failed = ce.CrosRunCommand(
- kern_part4_cmdline_cmd,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- if upd_part2_failed or upd_part4_failed:
- self._logger.LogFatal(
- 'ERROR. Failed to update kernel cmdline on partition %d.\n'
- 'intel_pstate update failed with status %d' %
- (2 if upd_part2_failed else 4, upd_part2_failed or upd_part4_failed))
-
- ce.CrosRunCommand(
- 'reboot && exit',
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- # Wait 30s after reboot.
- time.sleep(30)
-
- # Verification phase.
- # Check that cmdline was updated.
- # Throw an exception if not.
- kern_cmdline_cmd = 'grep -q "intel_pstate=%s" /proc/cmdline' % intel_pstate
- ret_code = ce.CrosRunCommand(
- kern_cmdline_cmd,
- chromeos_root=self._sched.get_labels(0).chromeos_root,
- machine=self._dut.name)
- if (intel_pstate and ret_code != good or
- not intel_pstate and ret_code == good):
- # Kernel cmdline doesn't match input intel_pstate.
- self._logger.LogFatal(
- 'ERROR. Failed to update kernel cmdline. '
- 'Final verification failed with status %d' % ret_code)
-
- self._logger.LogOutput('Kernel cmdline updated successfully.')
-
def run(self):
"""Do the "run-test->(optionally reimage)->run-test" chore.
Note - 'br' below means 'benchmark_run'.
"""
- intel_pstate = self._sched.get_experiment().intel_pstate
# Firstly, handle benchmarkruns that have cache hit.
br = self._sched.get_cached_benchmark_run()
- # Total wait time for cooling down.
- total_waittime = 0
while br:
try:
self._stat_annotation = 'finishing cached {}'.format(br)
@@ -238,22 +82,9 @@ class DutWorker(Thread):
'working thread {}.'.format(self))
break
else:
- self._logger.LogOutput('Update kernel cmdline if necessary '
- 'and reboot')
- if self._kerncmd_update_needed(intel_pstate):
- self._update_kerncmd_intel_pstate(intel_pstate)
-
- # When calculating cooldown wait time we assume that suite_runner is
- # never reused so we can sum up the values across all benchmark_runs.
- # If implementation changes causing the assert below to fail the
- # calculation should be adjusted accordingly.
- assert br.suite_runner.GetCooldownWaitTime() == 0
# Execute the br.
self._execute_benchmark_run(br)
- total_waittime += br.suite_runner.GetCooldownWaitTime()
finally:
- self._logger.LogOutput(
- 'Total wait time for cooldown: %d min' % (total_waittime // 60))
self._stat_annotation = 'finished'
# Thread finishes. Notify scheduler that I'm done.
self._sched.dut_worker_finished(self)
@@ -333,8 +164,8 @@ class DutWorker(Thread):
checksum = checksum.strip()
for l in self._sched.get_labels():
if l.checksum == checksum:
- self._logger.LogOutput("Dut '{}' is pre-installed with '{}'".format(
- self._dut.name, l))
+ self._logger.LogOutput(
+ "Dut '{}' is pre-installed with '{}'".format(self._dut.name, l))
self._dut.label = l
return
except RuntimeError:
@@ -353,10 +184,9 @@ class DutWorker(Thread):
return ('Worker thread "{}", label="{}", benchmark_run={}, '
'reimage={}, now {}'.format(
- self._dut.name,
- 'None' if self._dut.label is None else self._dut.label.name,
- self._stat_num_br_run, self._stat_num_reimage,
- self._stat_annotation))
+ self._dut.name, 'None' if self._dut.label is None else
+ self._dut.label.name, self._stat_num_br_run,
+ self._stat_num_reimage, self._stat_annotation))
class BenchmarkRunCacheReader(Thread):
@@ -453,19 +283,18 @@ class Schedv2(object):
# Split benchmarkruns set into segments. Each segment will be handled by
# a thread. Note, we use (x+3)/4 to mimic math.ceil(x/4).
- n_threads = max(2, min(20, (n_benchmarkruns + 3) // 4))
+ n_threads = max(2, min(20, (n_benchmarkruns + 3) / 4))
self._logger.LogOutput(('Starting {} threads to read cache status for '
'{} benchmark runs ...').format(
n_threads, n_benchmarkruns))
- benchmarkruns_per_thread = (n_benchmarkruns + n_threads - 1) // n_threads
+ benchmarkruns_per_thread = (n_benchmarkruns + n_threads - 1) / n_threads
benchmarkrun_segments = []
for i in range(n_threads - 1):
start = i * benchmarkruns_per_thread
end = (i + 1) * benchmarkruns_per_thread
benchmarkrun_segments.append(self._experiment.benchmark_runs[start:end])
- benchmarkrun_segments.append(
- self._experiment.benchmark_runs[(n_threads - 1) *
- benchmarkruns_per_thread:])
+ benchmarkrun_segments.append(self._experiment.benchmark_runs[(
+ n_threads - 1) * benchmarkruns_per_thread:])
# Assert: aggregation of benchmarkrun_segments equals to benchmark_runs.
assert sum(len(x) for x in benchmarkrun_segments) == n_benchmarkruns
@@ -483,9 +312,8 @@ class Schedv2(object):
x.join()
# Summarize.
- self._logger.LogOutput(
- 'Total {} cache hit out of {} benchmark_runs.'.format(
- len(self._cached_br_list), n_benchmarkruns))
+ self._logger.LogOutput('Total {} cache hit out of {} benchmark_runs.'.
+ format(len(self._cached_br_list), n_benchmarkruns))
def get_cached_run_list(self):
return self._cached_br_list
@@ -497,7 +325,7 @@ class Schedv2(object):
return self._experiment
def get_labels(self, i=None):
- if i is None:
+ if i == None:
return self._labels
return self._labels[i]
diff --git a/crosperf/schedv2_unittest.py b/crosperf/schedv2_unittest.py
index 4aced646..250968dc 100755
--- a/crosperf/schedv2_unittest.py
+++ b/crosperf/schedv2_unittest.py
@@ -1,19 +1,13 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright 2015 Google Inc. All Rights Reserved.
"""This contains the unit tests for the new Crosperf task scheduler."""
from __future__ import print_function
-import functools
-import io
-import unittest
-
import mock
+import unittest
+import StringIO
import benchmark_run
import test_flag
@@ -79,7 +73,7 @@ class Schedv2Test(unittest.TestCase):
Note - we mock out BenchmarkRun in this step.
"""
- experiment_file = ExperimentFile(io.BytesIO(expstr))
+ experiment_file = ExperimentFile(StringIO.StringIO(expstr))
experiment = ExperimentFactory().GetExperiment(
experiment_file, working_directory='', log_dir='')
return experiment
@@ -197,8 +191,8 @@ class Schedv2Test(unittest.TestCase):
self.assertEquals(len(my_schedv2.get_cached_run_list()), 30)
# The non-cache-hit brs are put into Schedv2._label_brl_map.
self.assertEquals(
- functools.reduce(lambda a, x: a + len(x[1]),
- my_schedv2.get_label_map().iteritems(), 0), 30)
+ reduce(lambda a, x: a + len(x[1]),
+ my_schedv2.get_label_map().iteritems(), 0), 30)
def test_nocachehit(self):
"""Test no cache-hit."""
@@ -215,8 +209,8 @@ class Schedv2Test(unittest.TestCase):
self.assertEquals(len(my_schedv2.get_cached_run_list()), 0)
# The non-cache-hit brs are put into Schedv2._label_brl_map.
self.assertEquals(
- functools.reduce(lambda a, x: a + len(x[1]),
- my_schedv2.get_label_map().iteritems(), 0), 60)
+ reduce(lambda a, x: a + len(x[1]),
+ my_schedv2.get_label_map().iteritems(), 0), 60)
if __name__ == '__main__':
diff --git a/crosperf/settings.py b/crosperf/settings.py
index 290abfc2..8d5a25fd 100644
--- a/crosperf/settings.py
+++ b/crosperf/settings.py
@@ -1,8 +1,4 @@
-#-*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2011 Google Inc. All Rights Reserved.
"""Module to get the settings from experiment file."""
from __future__ import print_function
@@ -43,8 +39,8 @@ class Settings(object):
def GetField(self, name):
"""Get the value of a field with a given name."""
if name not in self.fields:
- raise SyntaxError(
- "Field '%s' not a valid field in '%s' settings." % (name, self.name))
+ raise SyntaxError("Field '%s' not a valid field in '%s' settings." %
+ (name, self.name))
field = self.fields[name]
if not field.assigned and field.required:
raise SyntaxError("Required field '%s' not defined in '%s' settings." %
@@ -70,8 +66,8 @@ class Settings(object):
if not self.fields[name].assigned and self.fields[name].required:
raise SyntaxError('Field %s is invalid.' % name)
- def GetXbuddyPath(self, path_str, autotest_path, debug_path, board,
- chromeos_root, log_level, perf_args):
+ def GetXbuddyPath(self, path_str, autotest_path, board, chromeos_root,
+ log_level):
prefix = 'remote'
l = logger.GetLogger()
if (path_str.find('trybot') < 0 and path_str.find('toolchain') < 0 and
@@ -80,7 +76,6 @@ class Settings(object):
else:
xbuddy_path = '%s/%s' % (prefix, path_str)
image_downloader = ImageDownloader(l, log_level)
- # Returns three variables: image, autotest_path, debug_path
- return image_downloader.Run(
- misc.CanonicalizePath(chromeos_root), xbuddy_path, autotest_path,
- debug_path, perf_args)
+ image_and_autotest_path = image_downloader.Run(
+ misc.CanonicalizePath(chromeos_root), xbuddy_path, autotest_path)
+ return image_and_autotest_path
diff --git a/crosperf/settings_factory.py b/crosperf/settings_factory.py
index 9057703f..efbb534f 100644
--- a/crosperf/settings_factory.py
+++ b/crosperf/settings_factory.py
@@ -1,15 +1,11 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Setting files for global, benchmark and labels."""
from __future__ import print_function
from field import BooleanField
-from field import EnumField
-from field import FloatField
from field import IntegerField
from field import ListField
from field import TextField
@@ -55,11 +51,6 @@ class BenchmarkSettings(Settings):
'telemetry_Crosperf.',
required=False,
default=True))
- self.AddField(
- FloatField(
- 'weight',
- default=0.0,
- description='Weight of the benchmark for CWP approximation'))
class LabelSettings(Settings):
@@ -83,12 +74,6 @@ class LabelSettings(Settings):
'files.'))
self.AddField(
TextField(
- 'debug_path',
- required=False,
- description='Debug info directory relative to chroot which has '
- 'symbols and vmlinux that can be used by perf tool.'))
- self.AddField(
- TextField(
'chromeos_root',
description='The path to a chromeos checkout which '
'contains a src/scripts directory. Defaults to '
@@ -153,11 +138,6 @@ class GlobalSettings(Settings):
description='The target board for running '
'experiments on, e.g. x86-alex.'))
self.AddField(
- BooleanField(
- 'skylab',
- description='Whether to run experiments via skylab.',
- default=False))
- self.AddField(
ListField(
'remote',
description='A comma-separated list of IPs of '
@@ -202,8 +182,9 @@ class GlobalSettings(Settings):
BooleanField(
'use_file_locks',
default=False,
- description='DEPRECATED: Whether to use the file locks '
- 'or AFE server lock mechanism.'))
+ description='Whether to use the file locks '
+ 'mechanism (deprecated) instead of the AFE '
+ 'server lock mechanism.'))
self.AddField(
IntegerField(
'iterations',
@@ -285,9 +266,8 @@ class GlobalSettings(Settings):
'locks_dir',
default='',
description='An alternate directory to use for '
- 'storing/checking machine file locks for local machines. '
- 'By default the file locks directory is '
- '/google/data/rw/users/mo/mobiletc-prebuild/locks.\n'
+ 'storing/checking machine locks. Using this field '
+ 'automatically sets use_file_locks to True.\n'
'WARNING: If you use your own locks directory, '
'there is no guarantee that someone else might not '
'hold a lock on the same machine in a different '
@@ -306,116 +286,6 @@ class GlobalSettings(Settings):
default=0,
description='Number of times to retry a '
'benchmark run.'))
- self.AddField(
- TextField(
- 'cwp_dso',
- description='The DSO type that we want to use for '
- 'CWP approximation. This is used to run telemetry '
- 'benchmarks. Valid DSO types can be found from dso_list '
- 'in experiment_factory.py. The default value is set to '
- 'be empty.',
- required=False,
- default=''))
- self.AddField(
- BooleanField(
- 'enable_aslr',
- description='Enable ASLR on the machine to run the '
- 'benchmarks. ASLR is disabled by default',
- required=False,
- default=False))
- self.AddField(
- BooleanField(
- 'ignore_min_max',
- description='When doing math for the raw results, '
- 'ignore min and max values to reduce noise.',
- required=False,
- default=False))
- self.AddField(
- TextField(
- 'intel_pstate',
- description='Intel Pstate mode.\n'
- 'Supported modes: passive, no_hwp.\n'
- 'By default kernel works in active HWP mode if HWP is supported'
- " by CPU. This corresponds to a default intel_pstate=''",
- required=False,
- default=''))
- self.AddField(
- BooleanField(
- 'turbostat',
- description='Run turbostat process in the background'
- ' of a benchmark',
- required=False,
- default=True))
- self.AddField(
- FloatField(
- 'top_interval',
- description='Run top command in the background of a benchmark with'
- ' interval of sampling specified in seconds.\n'
- 'Recommended values 1-5. Lower number provides more accurate'
- ' data.\n'
- 'With 0 - do not run top.\n'
- 'NOTE: Running top with interval 1-5 sec has insignificant'
- ' performance impact (performance degradation does not exceed 0.3%,'
- ' measured on x86_64, ARM32, and ARM64).',
- required=False,
- default=0))
- self.AddField(
- IntegerField(
- 'cooldown_temp',
- required=False,
- default=40,
- description='Wait until CPU temperature goes down below'
- ' specified temperature in Celsius'
- ' prior starting a benchmark.'))
- self.AddField(
- IntegerField(
- 'cooldown_time',
- required=False,
- default=0,
- description='Wait specified time in minutes allowing'
- ' CPU to cool down. Zero value disables cooldown.'))
- self.AddField(
- EnumField(
- 'governor',
- options=[
- 'performance',
- 'powersave',
- 'userspace',
- 'ondemand',
- 'conservative',
- 'schedutils',
- 'sched',
- 'interactive',
- ],
- default='performance',
- required=False,
- description='Setup CPU governor for all cores.\n'
- 'For more details refer to:\n'
- 'https://www.kernel.org/doc/Documentation/cpu-freq/governors.txt'))
- self.AddField(
- EnumField(
- 'cpu_usage',
- options=[
- 'all',
- 'big_only',
- 'little_only',
- 'exclusive_cores',
- ],
- default='all',
- required=False,
- description='Restrict usage CPUs to decrease CPU interference.\n'
- 'all - no restrictions;\n'
- 'big-only, little-only - enable only big/little cores,'
- ' applicable only on ARM;\n'
- 'exclusive-cores - (for future use)'
- ' isolate cores for exclusive use of benchmark processes.'))
- self.AddField(
- IntegerField(
- 'cpu_freq_pct',
- required=False,
- default=100,
- description='Setup CPU frequency to a supported value less than'
- ' or equal to a percent of max_freq.'))
class SettingsFactory(object):
diff --git a/crosperf/settings_factory_unittest.py b/crosperf/settings_factory_unittest.py
index d80dbb12..1ff6a133 100755
--- a/crosperf/settings_factory_unittest.py
+++ b/crosperf/settings_factory_unittest.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""Unittest for crosperf."""
from __future__ import print_function
@@ -20,7 +18,7 @@ class BenchmarkSettingsTest(unittest.TestCase):
def test_init(self):
res = settings_factory.BenchmarkSettings('b_settings')
self.assertIsNotNone(res)
- self.assertEqual(len(res.fields), 7)
+ self.assertEqual(len(res.fields), 6)
self.assertEqual(res.GetField('test_name'), '')
self.assertEqual(res.GetField('test_args'), '')
self.assertEqual(res.GetField('iterations'), 0)
@@ -33,7 +31,7 @@ class LabelSettingsTest(unittest.TestCase):
def test_init(self):
res = settings_factory.LabelSettings('l_settings')
self.assertIsNotNone(res)
- self.assertEqual(len(res.fields), 10)
+ self.assertEqual(len(res.fields), 9)
self.assertEqual(res.GetField('chromeos_image'), '')
self.assertEqual(res.GetField('autotest_path'), '')
self.assertEqual(res.GetField('chromeos_root'), '')
@@ -50,10 +48,9 @@ class GlobalSettingsTest(unittest.TestCase):
def test_init(self):
res = settings_factory.GlobalSettings('g_settings')
self.assertIsNotNone(res)
- self.assertEqual(len(res.fields), 37)
+ self.assertEqual(len(res.fields), 25)
self.assertEqual(res.GetField('name'), '')
self.assertEqual(res.GetField('board'), '')
- self.assertEqual(res.GetField('skylab'), False)
self.assertEqual(res.GetField('remote'), None)
self.assertEqual(res.GetField('rerun_if_failed'), False)
self.assertEqual(res.GetField('rm_chroot_tmp'), False)
@@ -73,17 +70,6 @@ class GlobalSettingsTest(unittest.TestCase):
self.assertEqual(res.GetField('share_cache'), '')
self.assertEqual(res.GetField('results_dir'), '')
self.assertEqual(res.GetField('chrome_src'), '')
- self.assertEqual(res.GetField('cwp_dso'), '')
- self.assertEqual(res.GetField('enable_aslr'), False)
- self.assertEqual(res.GetField('ignore_min_max'), False)
- self.assertEqual(res.GetField('intel_pstate'), '')
- self.assertEqual(res.GetField('turbostat'), True)
- self.assertEqual(res.GetField('top_interval'), 0)
- self.assertEqual(res.GetField('cooldown_time'), 0)
- self.assertEqual(res.GetField('cooldown_temp'), 40)
- self.assertEqual(res.GetField('governor'), 'performance')
- self.assertEqual(res.GetField('cpu_usage'), 'all')
- self.assertEqual(res.GetField('cpu_freq_pct'), 100)
class SettingsFactoryTest(unittest.TestCase):
@@ -96,17 +82,17 @@ class SettingsFactoryTest(unittest.TestCase):
l_settings = settings_factory.SettingsFactory().GetSettings(
'label', 'label')
self.assertIsInstance(l_settings, settings_factory.LabelSettings)
- self.assertEqual(len(l_settings.fields), 10)
+ self.assertEqual(len(l_settings.fields), 9)
b_settings = settings_factory.SettingsFactory().GetSettings(
'benchmark', 'benchmark')
self.assertIsInstance(b_settings, settings_factory.BenchmarkSettings)
- self.assertEqual(len(b_settings.fields), 7)
+ self.assertEqual(len(b_settings.fields), 6)
g_settings = settings_factory.SettingsFactory().GetSettings(
'global', 'global')
self.assertIsInstance(g_settings, settings_factory.GlobalSettings)
- self.assertEqual(len(g_settings.fields), 37)
+ self.assertEqual(len(g_settings.fields), 25)
if __name__ == '__main__':
diff --git a/crosperf/settings_unittest.py b/crosperf/settings_unittest.py
index b9d87e9e..fea55c05 100755
--- a/crosperf/settings_unittest.py
+++ b/crosperf/settings_unittest.py
@@ -1,9 +1,6 @@
#!/usr/bin/env python2
-#-*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+#
+# Copyright 2014 Google Inc. All Rights Reserved.
"""unittest for settings."""
from __future__ import print_function
@@ -200,34 +197,27 @@ class TestSettings(unittest.TestCase):
official_str = 'lumpy-release/R34-5417.0.0'
xbuddy_str = 'latest-dev'
autotest_path = ''
- debug_path = ''
- perf_args = '-a'
- self.settings.GetXbuddyPath(trybot_str, autotest_path, debug_path, board,
- chromeos_root, log_level, perf_args)
+ self.settings.GetXbuddyPath(trybot_str, autotest_path, board, chromeos_root,
+ log_level)
self.assertEqual(mock_run.call_count, 1)
- self.assertEqual(mock_run.call_args_list[0][0], (
- '/tmp/chromeos',
- 'remote/trybot-lumpy-paladin/R34-5417.0.0-b1506',
- '',
- '',
- '-a',
- ))
+ self.assertEqual(mock_run.call_args_list[0][0],
+ ('/tmp/chromeos',
+ 'remote/trybot-lumpy-paladin/R34-5417.0.0-b1506', ''))
mock_run.reset_mock()
- self.settings.GetXbuddyPath(official_str, autotest_path, debug_path, board,
- chromeos_root, log_level, perf_args)
+ self.settings.GetXbuddyPath(official_str, autotest_path, board,
+ chromeos_root, log_level)
self.assertEqual(mock_run.call_count, 1)
- self.assertEqual(
- mock_run.call_args_list[0][0],
- ('/tmp/chromeos', 'remote/lumpy-release/R34-5417.0.0', '', '', '-a'))
+ self.assertEqual(mock_run.call_args_list[0][0],
+ ('/tmp/chromeos', 'remote/lumpy-release/R34-5417.0.0', ''))
mock_run.reset_mock()
- self.settings.GetXbuddyPath(xbuddy_str, autotest_path, debug_path, board,
- chromeos_root, log_level, perf_args)
+ self.settings.GetXbuddyPath(xbuddy_str, autotest_path, board, chromeos_root,
+ log_level)
self.assertEqual(mock_run.call_count, 1)
self.assertEqual(mock_run.call_args_list[0][0],
- ('/tmp/chromeos', 'remote/lumpy/latest-dev', '', '', '-a'))
+ ('/tmp/chromeos', 'remote/lumpy/latest-dev', ''))
if mock_logger:
return
diff --git a/crosperf/suite_runner.py b/crosperf/suite_runner.py
index 01e7114f..b4b669a8 100644
--- a/crosperf/suite_runner.py
+++ b/crosperf/suite_runner.py
@@ -1,24 +1,18 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2013~2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-
"""SuiteRunner defines the interface from crosperf to test script."""
-from __future__ import division
from __future__ import print_function
import os
-import re
-import shlex
import time
+import shlex
from cros_utils import command_executer
+import test_flag
TEST_THAT_PATH = '/usr/bin/test_that'
-# TODO: Need to check whether Skylab is installed and set up correctly.
-SKYLAB_PATH = '/usr/local/bin/skylab'
-GS_UTIL = 'src/chromium/depot_tools/gsutil.py'
AUTOTEST_DIR = '~/trunk/src/third_party/autotest/files'
CHROME_MOUNT_DIR = '/tmp/chrome_root'
@@ -53,121 +47,29 @@ class SuiteRunner(object):
"""This defines the interface from crosperf to test script."""
def __init__(self,
- dut_config,
logger_to_use=None,
log_level='verbose',
cmd_exec=None,
- cmd_term=None,
- enable_aslr=False):
+ cmd_term=None):
self.logger = logger_to_use
self.log_level = log_level
self._ce = cmd_exec or command_executer.GetCommandExecuter(
self.logger, log_level=self.log_level)
- # DUT command executer.
- # Will be initialized and used within Run.
self._ct = cmd_term or command_executer.CommandTerminator()
- self.enable_aslr = enable_aslr
- self.dut_config = dut_config
- self.cooldown_wait_time = 0
-
- def ResetCooldownWaitTime(self):
- self.cooldown_wait_time = 0
-
- def GetCooldownWaitTime(self):
- return self.cooldown_wait_time
-
- def DutWrapper(self, machine_name, chromeos_root):
- """Wrap DUT parameters inside.
-
- Eventially CommandExecuter will reqiure only one
- argument - command.
- """
-
- def RunCommandOnDut(command, ignore_status=False):
- ret, msg, err_msg = self._ce.CrosRunCommandWOutput(
- command, machine=machine_name, chromeos_root=chromeos_root)
-
- if ret:
- err_msg = ('Command execution on DUT %s failed.\n'
- 'Failing command: %s\n'
- 'returned %d\n'
- 'Error message: %s' % (machine_name, command, ret, err_msg))
- if ignore_status:
- self.logger.LogError(err_msg +
- '\n(Failure is considered non-fatal. Continue.)')
- else:
- self.logger.LogFatal(err_msg)
-
- return ret, msg, err_msg
-
- return RunCommandOnDut
def Run(self, machine, label, benchmark, test_args, profiler_args):
- if not label.skylab:
- # Initialize command executer on DUT.
- run_on_dut = self.DutWrapper(machine, label.chromeos_root)
for i in range(0, benchmark.retries + 1):
- if label.skylab:
- # TODO: need to migrate DisableASLR and PinGovernorExecutionFrequencies
- # since in skylab mode, we may not know the DUT until one is assigned
- # to the test. For telemetry_Crosperf run, we can move them into the
- # server test script, for client runs, need to figure out wrapper to do
- # it before running.
- ret_tup = self.Skylab_Run(label, benchmark, test_args, profiler_args)
+ self.PinGovernorExecutionFrequencies(machine, label.chromeos_root)
+ if benchmark.suite == 'telemetry':
+ self.DecreaseWaitTime(machine, label.chromeos_root)
+ ret_tup = self.Telemetry_Run(machine, label, benchmark, profiler_args)
+ elif benchmark.suite == 'telemetry_Crosperf':
+ self.DecreaseWaitTime(machine, label.chromeos_root)
+ ret_tup = self.Telemetry_Crosperf_Run(machine, label, benchmark,
+ test_args, profiler_args)
else:
- # Unless the user turns on ASLR in the flag, we first disable ASLR
- # before running the benchmarks
- if not self.enable_aslr:
- self.DisableASLR(run_on_dut)
-
- # CPU usage setup comes first where we enable/disable cores.
- self.SetupCpuUsage(run_on_dut)
- cpu_online_status = self.GetCpuOnline(run_on_dut)
- # List of online cores of type int (core number).
- online_cores = [
- core for core, status in cpu_online_status.items() if status
- ]
- if self.dut_config['cooldown_time']:
- # Setup power conservative mode for effective cool down.
- # Set ignore status since powersave may no be available
- # on all platforms and we are going to handle it.
- ret = self.SetCpuGovernor('powersave', run_on_dut, ignore_status=True)
- if ret:
- # "powersave" is not available, use "ondemand".
- # Still not a fatal error if it fails.
- ret = self.SetCpuGovernor(
- 'ondemand', run_on_dut, ignore_status=True)
- # TODO(denik): Run comparison test for 'powersave' and 'ondemand'
- # on scarlet and kevin64.
- # We might have to consider reducing freq manually to the min
- # if it helps to reduce waiting time.
- self.cooldown_wait_time += self.WaitCooldown(run_on_dut)
-
- # Setup CPU governor for the benchmark run.
- # It overwrites the previous governor settings.
- governor = self.dut_config['governor']
- # FIXME(denik): Pass online cores to governor setup.
- self.SetCpuGovernor(governor, run_on_dut, ignore_status=False)
-
- # Disable Turbo and Setup CPU freq should ALWAYS proceed governor setup
- # since governor may change:
- # - frequency;
- # - turbo/boost.
- self.DisableTurbo(run_on_dut)
- self.SetupCpuFreq(run_on_dut, online_cores)
- # FIXME(denik): Currently we are not recovering the previous cpufreq
- # settings since we do reboot/setup every time anyway.
- # But it may change in the future and then we have to recover the
- # settings.
-
- if benchmark.suite == 'telemetry_Crosperf':
- self.DecreaseWaitTime(run_on_dut)
- ret_tup = self.Telemetry_Crosperf_Run(machine, label, benchmark,
- test_args, profiler_args)
- else:
- ret_tup = self.Test_That_Run(machine, label, benchmark, test_args,
- profiler_args)
-
+ ret_tup = self.Test_That_Run(machine, label, benchmark, test_args,
+ profiler_args)
if ret_tup[0] != 0:
self.logger.LogOutput('benchmark %s failed. Retries left: %s' %
(benchmark.name, benchmark.retries - i))
@@ -181,273 +83,60 @@ class SuiteRunner(object):
break
return ret_tup
- def DisableASLR(self, run_on_dut):
- disable_aslr = ('set -e && '
- 'stop ui; '
- 'if [[ -e /proc/sys/kernel/randomize_va_space ]]; then '
- ' echo 0 > /proc/sys/kernel/randomize_va_space; '
- 'fi; '
- 'start ui ')
- if self.log_level == 'average':
- self.logger.LogOutput('Disable ASLR.')
- run_on_dut(disable_aslr)
-
- def SetCpuGovernor(self, governor, run_on_dut, ignore_status=False):
- set_gov_cmd = (
- 'for f in `ls -d /sys/devices/system/cpu/cpu*/cpufreq 2>/dev/null`; do '
- # Skip writing scaling_governor if cpu is offline.
- ' [[ -e ${f/cpufreq/online} ]] && grep -q 0 ${f/cpufreq/online} '
- ' && continue; '
- ' cd $f; '
- ' if [[ -e scaling_governor ]]; then '
- ' echo %s > scaling_governor; fi; '
- 'done; ')
- if self.log_level == 'average':
- self.logger.LogOutput('Setup CPU Governor: %s.' % governor)
- ret, _, _ = run_on_dut(set_gov_cmd % governor, ignore_status=ignore_status)
- return ret
-
- def DisableTurbo(self, run_on_dut):
- dis_turbo_cmd = (
+ def PinGovernorExecutionFrequencies(self, machine_name, chromeos_root):
+ """Set min and max frequencies to max static frequency."""
+ # pyformat: disable
+ set_cpu_freq = (
+ 'set -e && '
+ # Disable Turbo in Intel pstate driver
'if [[ -e /sys/devices/system/cpu/intel_pstate/no_turbo ]]; then '
' if grep -q 0 /sys/devices/system/cpu/intel_pstate/no_turbo; then '
' echo -n 1 > /sys/devices/system/cpu/intel_pstate/no_turbo; '
' fi; '
- 'fi; ')
+ 'fi; '
+ # Set governor to performance for each cpu
+ 'for f in /sys/devices/system/cpu/cpu*/cpufreq; do '
+ 'cd $f; '
+ 'echo performance > scaling_governor; '
+ # Uncomment rest of lines to enable setting frequency by crosperf
+ #'val=0; '
+ #'if [[ -e scaling_available_frequencies ]]; then '
+ # pylint: disable=line-too-long
+ #' val=`cat scaling_available_frequencies | tr " " "\\n" | sort -n -b -r`; '
+ #'else '
+ #' val=`cat scaling_max_freq | tr " " "\\n" | sort -n -b -r`; fi ; '
+ #'set -- $val; '
+ #'highest=$1; '
+ #'if [[ $# -gt 1 ]]; then '
+ #' case $highest in *1000) highest=$2;; esac; '
+ #'fi ;'
+ #'echo $highest > scaling_max_freq; '
+ #'echo $highest > scaling_min_freq; '
+ 'done'
+ )
+ # pyformat: enable
if self.log_level == 'average':
- self.logger.LogOutput('Disable Turbo.')
- run_on_dut(dis_turbo_cmd)
-
- def WaitCooldown(self, run_on_dut):
- waittime = 0
- timeout_in_sec = int(self.dut_config['cooldown_time']) * 60
- # Temperature from sensors come in uCelsius units.
- temp_in_ucels = int(self.dut_config['cooldown_temp']) * 1000
- sleep_interval = 30
-
- # Wait until any of two events occurs:
- # 1. CPU cools down to a specified temperature.
- # 2. Timeout cooldown_time expires.
- # For the case when targeted temperature is not reached within specified
- # timeout the benchmark is going to start with higher initial CPU temp.
- # In the worst case it may affect test results but at the same time we
- # guarantee the upper bound of waiting time.
- # TODO(denik): Report (or highlight) "high" CPU temperature in test results.
- # "high" should be calculated based on empirical data per platform.
- # Based on such reports we can adjust CPU configuration or
- # cooldown limits accordingly.
- while waittime < timeout_in_sec:
- _, temp_output, _ = run_on_dut(
- 'cat /sys/class/thermal/thermal_zone*/temp', ignore_status=True)
- if any(int(temp) > temp_in_ucels for temp in temp_output.split()):
- time.sleep(sleep_interval)
- waittime += sleep_interval
- else:
- # Exit the loop when:
- # 1. Reported temp numbers from all thermal sensors do not exceed
- # 'cooldown_temp' or
- # 2. No data from the sensors.
- break
-
- self.logger.LogOutput('Cooldown wait time: %.1f min' % (waittime / 60))
- return waittime
-
- def SetupCpuUsage(self, run_on_dut):
- """Setup CPU usage.
-
- Based on self.dut_config['cpu_usage'] configure CPU cores
- utilization.
- """
-
- if (self.dut_config['cpu_usage'] == 'big_only' or
- self.dut_config['cpu_usage'] == 'little_only'):
- _, arch, _ = run_on_dut('uname -m')
-
- if arch.lower().startswith('arm') or arch.lower().startswith('aarch64'):
- self.SetupArmCores(run_on_dut)
-
- def SetupArmCores(self, run_on_dut):
- """Setup ARM big/little cores."""
-
- # CPU implemeters/part numbers of big/LITTLE CPU.
- # Format: dict(CPU implementer: set(CPU part numbers))
- LITTLE_CORES = {
- '0x41': {
- '0xd01', # Cortex A32
- '0xd03', # Cortex A53
- '0xd04', # Cortex A35
- '0xd05', # Cortex A55
- },
- }
- BIG_CORES = {
- '0x41': {
- '0xd07', # Cortex A57
- '0xd08', # Cortex A72
- '0xd09', # Cortex A73
- '0xd0a', # Cortex A75
- '0xd0b', # Cortex A76
- },
- }
-
- # Values of CPU Implementer and CPU part number are exposed by cpuinfo.
- # Format:
- # =================
- # processor : 0
- # model name : ARMv8 Processor rev 4 (v8l)
- # BogoMIPS : 48.00
- # Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
- # CPU implementer : 0x41
- # CPU architecture: 8
- # CPU variant : 0x0
- # CPU part : 0xd03
- # CPU revision : 4
-
- _, cpuinfo, _ = run_on_dut('cat /proc/cpuinfo')
-
- # List of all CPU cores: 0, 1, ..
- proc_matches = re.findall(r'^processor\s*: (\d+)$', cpuinfo, re.MULTILINE)
- # List of all corresponding CPU implementers
- impl_matches = re.findall(r'^CPU implementer\s*: (0x[\da-f]+)$', cpuinfo,
- re.MULTILINE)
- # List of all corresponding CPU part numbers
- part_matches = re.findall(r'^CPU part\s*: (0x[\da-f]+)$', cpuinfo,
- re.MULTILINE)
- assert len(proc_matches) == len(impl_matches)
- assert len(part_matches) == len(impl_matches)
-
- all_cores = set(proc_matches)
- dut_big_cores = {
- core
- for core, impl, part in zip(proc_matches, impl_matches, part_matches)
- if impl in BIG_CORES and part in BIG_CORES[impl]
- }
- dut_lit_cores = {
- core
- for core, impl, part in zip(proc_matches, impl_matches, part_matches)
- if impl in LITTLE_CORES and part in LITTLE_CORES[impl]
- }
-
- if self.dut_config['cpu_usage'] == 'big_only':
- cores_to_enable = dut_big_cores
- cores_to_disable = all_cores - dut_big_cores
- elif self.dut_config['cpu_usage'] == 'little_only':
- cores_to_enable = dut_lit_cores
- cores_to_disable = all_cores - dut_lit_cores
- else:
- self.logger.LogError(
- 'cpu_usage=%s is not supported on ARM.\n'
- 'Ignore ARM CPU setup and continue.' % self.dut_config['cpu_usage'])
- return
-
- if cores_to_enable:
- cmd_enable_cores = ('echo 1 | tee /sys/devices/system/cpu/cpu{%s}/online'
- % ','.join(sorted(cores_to_enable)))
-
- cmd_disable_cores = ''
- if cores_to_disable:
- cmd_disable_cores = (
- 'echo 0 | tee /sys/devices/system/cpu/cpu{%s}/online' % ','.join(
- sorted(cores_to_disable)))
-
- run_on_dut('; '.join([cmd_enable_cores, cmd_disable_cores]))
- else:
- # If there are no cores enabled by dut_config then configuration
- # is invalid for current platform and should be ignored.
- self.logger.LogError(
- '"cpu_usage" is invalid for targeted platform.\n'
- 'dut_config[cpu_usage]=%s\n'
- 'dut big cores: %s\n'
- 'dut little cores: %s\n'
- 'Ignore ARM CPU setup and continue.' % (self.dut_config['cpu_usage'],
- dut_big_cores, dut_lit_cores))
-
- def GetCpuOnline(self, run_on_dut):
- """Get online status of CPU cores.
-
- Return dict of {int(cpu_num): <0|1>}.
- """
- get_cpu_online_cmd = ('paste -d" "'
- ' <(ls /sys/devices/system/cpu/cpu*/online)'
- ' <(cat /sys/devices/system/cpu/cpu*/online)')
- _, online_output_str, _ = run_on_dut(get_cpu_online_cmd)
-
- # Here is the output we expect to see:
- # -----------------
- # /sys/devices/system/cpu/cpu0/online 0
- # /sys/devices/system/cpu/cpu1/online 1
-
- cpu_online = {}
- cpu_online_match = re.compile(r'^[/\S]+/cpu(\d+)/[/\S]+\s+(\d+)$')
- for line in online_output_str.splitlines():
- match = cpu_online_match.match(line)
- if match:
- cpu = int(match.group(1))
- status = int(match.group(2))
- cpu_online[cpu] = status
- # At least one CPU has to be online.
- assert cpu_online
-
- return cpu_online
-
- def SetupCpuFreq(self, run_on_dut, online_cores):
- """Setup CPU frequency.
-
- Based on self.dut_config['cpu_freq_pct'] setup frequency of online CPU cores
- to a supported value which is less or equal to (freq_pct * max_freq / 100)
- limited by min_freq.
-
- NOTE: scaling_available_frequencies support is required.
- Otherwise the function has no effect.
- """
- freq_percent = self.dut_config['cpu_freq_pct']
- list_all_avail_freq_cmd = ('ls /sys/devices/system/cpu/cpu{%s}/cpufreq/'
- 'scaling_available_frequencies')
- # Ignore error to support general usage of frequency setup.
- # Not all platforms support scaling_available_frequencies.
- ret, all_avail_freq_str, _ = run_on_dut(
- list_all_avail_freq_cmd % ','.join(str(core) for core in online_cores),
- ignore_status=True)
- if ret or not all_avail_freq_str:
- # No scalable frequencies available for the core.
- return ret
- for avail_freq_path in all_avail_freq_str.split():
- # Get available freq from every scaling_available_frequency path.
- # Error is considered fatal in run_on_dut().
- _, avail_freq_str, _ = run_on_dut('cat ' + avail_freq_path)
- assert avail_freq_str
-
- all_avail_freq = sorted(
- int(freq_str) for freq_str in avail_freq_str.split())
- min_freq = all_avail_freq[0]
- max_freq = all_avail_freq[-1]
- # Calculate the frequency we are targeting.
- target_freq = round(max_freq * freq_percent / 100)
- # More likely it's not in the list of supported frequencies
- # and our goal is to find the one which is less or equal.
- # Default is min and we will try to maximize it.
- avail_ngt_target = min_freq
- # Find the largest not greater than the target.
- for next_largest in reversed(all_avail_freq):
- if next_largest <= target_freq:
- avail_ngt_target = next_largest
- break
-
- max_freq_path = avail_freq_path.replace('scaling_available_frequencies',
- 'scaling_max_freq')
- min_freq_path = avail_freq_path.replace('scaling_available_frequencies',
- 'scaling_min_freq')
- # With default ignore_status=False we expect 0 status or Fatal error.
- run_on_dut('echo %s | tee %s %s' % (avail_ngt_target, max_freq_path,
- min_freq_path))
-
- def DecreaseWaitTime(self, run_on_dut):
+ self.logger.LogOutput(
+ 'Pinning governor execution frequencies for %s' % machine_name)
+ ret = self._ce.CrosRunCommand(
+ set_cpu_freq, machine=machine_name, chromeos_root=chromeos_root)
+ self.logger.LogFatalIf(
+ ret, 'Could not pin frequencies on machine: %s' % machine_name)
+
+ def DecreaseWaitTime(self, machine_name, chromeos_root):
"""Change the ten seconds wait time for pagecycler to two seconds."""
FILE = '/usr/local/telemetry/src/tools/perf/page_sets/page_cycler_story.py'
- ret = run_on_dut('ls ' + FILE)
+ ret = self._ce.CrosRunCommand(
+ 'ls ' + FILE, machine=machine_name, chromeos_root=chromeos_root)
+ self.logger.LogFatalIf(
+ ret, 'Could not find {} on machine: {}'.format(FILE, machine_name))
if not ret:
sed_command = 'sed -i "s/_TTI_WAIT_TIME = 10/_TTI_WAIT_TIME = 2/g" '
- run_on_dut(sed_command + FILE)
+ ret = self._ce.CrosRunCommand(
+ sed_command + FILE, machine=machine_name, chromeos_root=chromeos_root)
+ self.logger.LogFatalIf(
+ ret, 'Could not modify {} on machine: {}'.format(FILE, machine_name))
def RestartUI(self, machine_name, chromeos_root):
command = 'stop ui; sleep 5; start ui'
@@ -497,105 +186,6 @@ class SuiteRunner(object):
command_terminator=self._ct,
cros_sdk_options='--no-ns-pid')
- def DownloadResult(self, label, task_id):
- gsutil_cmd = os.path.join(label.chromeos_root, GS_UTIL)
- result_dir = 'gs://chromeos-autotest-results/swarming-%s' % task_id
- download_path = os.path.join(label.chromeos_root, 'chroot/tmp')
- ls_command = '%s ls %s' % (gsutil_cmd,
- os.path.join(result_dir, 'autoserv_test'))
- cp_command = '%s -mq cp -r %s %s' % (gsutil_cmd, result_dir, download_path)
-
- # Server sometimes will not be able to generate the result directory right
- # after the test. Will try to access this gs location every 60s for 5 mins.
- t = 0
- RETRY_LIMIT = 5
- while t < RETRY_LIMIT:
- t += 1
- status = self._ce.RunCommand(ls_command, print_to_console=False)
- if status == 0:
- break
- if t < RETRY_LIMIT:
- self.logger.LogOutput('Result directory not generated yet, '
- 'retry (%d) in 60s.' % t)
- time.sleep(60)
- else:
- self.logger.LogOutput('No result directory for task %s' % task_id)
- return status
-
- # Wait for 60s to make sure server finished writing to gs location.
- time.sleep(60)
-
- status = self._ce.RunCommand(cp_command)
- if status != 0:
- self.logger.LogOutput('Cannot download results from task %s' % task_id)
- return status
-
- def Skylab_Run(self, label, benchmark, test_args, profiler_args):
- """Run the test via skylab.."""
- # Skylab by default uses cros_test_platform to start test.
- # We don't use it for now since we want to directly interact with dut.
- options = '-bb=false'
-
- if benchmark.suite != 'telemetry_Crosperf':
- options += ' -client-test'
- if label.board:
- options += ' -board=%s' % label.board
- if label.build:
- options += ' -image=%s' % label.build
- # TODO: now only put quota pool here, user need to be able to specify which
- # pool to use. Need to request feature to not use this option at all.
- options += ' -pool=DUT_POOL_QUOTA'
- if benchmark.suite == 'telemetry_Crosperf':
- if test_args:
- # Strip double quotes off args (so we can wrap them in single
- # quotes, to pass through to Telemetry).
- if test_args[0] == '"' and test_args[-1] == '"':
- test_args = test_args[1:-1]
- if profiler_args:
- test_args += GetProfilerArgs(profiler_args)
- test_args += ' run_local={} test={}'.format(
- benchmark.run_local,
- benchmark.test_name,
- )
- else:
- if profiler_args:
- self.logger.LogFatal('Client tests do not support profiler.')
- if test_args:
- options += ' -test-args="%s"' % test_args
-
- dimensions = ''
- for dut in label.remote:
- dimensions += ' -dim dut_name:%s' % dut.rstrip('.cros')
-
- command = (('%s create-test %s %s %s') % \
- (SKYLAB_PATH, dimensions, options, benchmark.test_name))
-
- if self.log_level != 'verbose':
- self.logger.LogOutput('Starting skylab test.')
- self.logger.LogOutput('CMD: %s' % command)
- ret_tup = self._ce.RunCommandWOutput(command, command_terminator=self._ct)
-
- if ret_tup[0] != 0:
- self.logger.LogOutput('Skylab test not created successfully.')
- return ret_tup
-
- # Std output of the command will look like:
- # Created Swarming task https://chromeos-swarming.appspot.com/task?id=12345
- # We want to parse it and get the id number of the task.
- task_id = ret_tup[1].strip().split('id=')[1]
-
- command = ('skylab wait-task -bb=false %s' % (task_id))
- if self.log_level != 'verbose':
- self.logger.LogOutput('Waiting for skylab test to finish.')
- self.logger.LogOutput('CMD: %s' % command)
-
- ret_tup = self._ce.RunCommandWOutput(command, command_terminator=self._ct)
- if '"success":true' in ret_tup[1]:
- if self.DownloadResult(label, task_id) == 0:
- result_dir = '\nResults placed in tmp/swarming-%s\n' % task_id
- return (ret_tup[0], result_dir, ret_tup[2])
- return ret_tup
-
def RemoveTelemetryTempFile(self, machine, chromeos_root):
filename = 'telemetry@%s' % machine
fullname = os.path.join(chromeos_root, 'chroot', 'tmp', filename)
@@ -620,8 +210,11 @@ class SuiteRunner(object):
autotest_dir_arg = '--autotest_dir %s' % label.autotest_path
profiler_args = GetProfilerArgs(profiler_args)
- # --fast avoids unnecessary copies of syslogs.
- fast_arg = '--fast'
+ fast_arg = ''
+ if not profiler_args:
+ # --fast works unless we are doing profiling (autotest limitation).
+ # --fast avoids unnecessary copies of syslogs.
+ fast_arg = '--fast'
args_string = ''
if test_args:
# Strip double quotes off args (so we can wrap them in single
@@ -630,19 +223,18 @@ class SuiteRunner(object):
test_args = test_args[1:-1]
args_string = "test_args='%s'" % test_args
- top_interval = self.dut_config['top_interval']
cmd = ('{} {} {} --board={} --args="{} run_local={} test={} '
- 'turbostat={} top_interval={} {}" {} telemetry_Crosperf'.format(
+ '{}" {} telemetry_Crosperf'.format(
TEST_THAT_PATH, autotest_dir_arg, fast_arg, label.board,
args_string, benchmark.run_local, benchmark.test_name,
- benchmark.turbostat, top_interval, profiler_args, machine))
+ profiler_args, machine))
# Use --no-ns-pid so that cros_sdk does not create a different
# process namespace and we can kill process created easily by their
# process group.
chrome_root_options = ('--no-ns-pid '
'--chrome_root={} --chrome_root_mount={} '
- 'FEATURES="-usersandbox" '
+ "FEATURES=\"-usersandbox\" "
'CHROME_ROOT={}'.format(label.chrome_src,
CHROME_MOUNT_DIR,
CHROME_MOUNT_DIR))
@@ -655,6 +247,41 @@ class SuiteRunner(object):
command_terminator=self._ct,
cros_sdk_options=chrome_root_options)
+ def Telemetry_Run(self, machine, label, benchmark, profiler_args):
+ telemetry_run_path = ''
+ if not os.path.isdir(label.chrome_src):
+ self.logger.LogFatal('Cannot find chrome src dir to' ' run telemetry.')
+ else:
+ telemetry_run_path = os.path.join(label.chrome_src, 'src/tools/perf')
+ if not os.path.exists(telemetry_run_path):
+ self.logger.LogFatal('Cannot find %s directory.' % telemetry_run_path)
+
+ if profiler_args:
+ self.logger.LogFatal('Telemetry does not support the perf profiler.')
+
+ # Check for and remove temporary file that may have been left by
+ # previous telemetry runs (and which might prevent this run from
+ # working).
+ if not test_flag.GetTestMode():
+ self.RemoveTelemetryTempFile(machine, label.chromeos_root)
+
+ rsa_key = os.path.join(
+ label.chromeos_root,
+ 'src/scripts/mod_for_test_scripts/ssh_keys/testing_rsa')
+
+ cmd = ('cd {0} && '
+ './run_measurement '
+ '--browser=cros-chrome '
+ '--output-format=csv '
+ '--remote={1} '
+ '--identity {2} '
+ '{3} {4}'.format(telemetry_run_path, machine, rsa_key,
+ benchmark.test_name, benchmark.test_args))
+ if self.log_level != 'verbose':
+ self.logger.LogOutput('Running test.')
+ self.logger.LogOutput('CMD: %s' % cmd)
+ return self._ce.RunCommandWOutput(cmd, print_to_console=False)
+
def CommandTerminator(self):
return self._ct
diff --git a/crosperf/suite_runner_unittest.py b/crosperf/suite_runner_unittest.py
index 8b336eda..d7b9e770 100755
--- a/crosperf/suite_runner_unittest.py
+++ b/crosperf/suite_runner_unittest.py
@@ -1,10 +1,6 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2014 Google Inc. All Rights Reserved.
"""Unittest for suite_runner."""
from __future__ import print_function
@@ -12,91 +8,18 @@ from __future__ import print_function
import os.path
import time
-import unittest
import mock
+import unittest
import suite_runner
import label
+import test_flag
from benchmark import Benchmark
from cros_utils import command_executer
from cros_utils import logger
-BIG_LITTLE_CPUINFO = """processor : 0
-model name : ARMv8 Processor rev 4 (v8l)
-BogoMIPS : 48.00
-Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 8
-CPU variant : 0x0
-CPU part : 0xd03
-CPU revision : 4
-
-processor : 1
-model name : ARMv8 Processor rev 4 (v8l)
-BogoMIPS : 48.00
-Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 8
-CPU variant : 0x0
-CPU part : 0xd03
-CPU revision : 4
-
-processor : 2
-model name : ARMv8 Processor rev 2 (v8l)
-BogoMIPS : 48.00
-Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 8
-CPU variant : 0x0
-CPU part : 0xd08
-CPU revision : 2
-"""
-LITTLE_ONLY_CPUINFO = """processor : 0
-model name : ARMv8 Processor rev 4 (v8l)
-BogoMIPS : 48.00
-Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 8
-CPU variant : 0x0
-CPU part : 0xd03
-CPU revision : 4
-
-processor : 1
-model name : ARMv8 Processor rev 4 (v8l)
-BogoMIPS : 48.00
-Features : half thumb fastmult vfp edsp neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 8
-CPU variant : 0x0
-CPU part : 0xd03
-CPU revision : 4
-"""
-
-NOT_BIG_LITTLE_CPUINFO = """processor : 0
-model name : ARMv7 Processor rev 1 (v7l)
-Features : swp half thumb fastmult vfp edsp thumbee neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 7
-CPU variant : 0x0
-CPU part : 0xc0d
-CPU revision : 1
-
-processor : 1
-model name : ARMv7 Processor rev 1 (v7l)
-Features : swp half thumb fastmult vfp edsp thumbee neon vfpv3 tls vfpv4
-CPU implementer : 0x41
-CPU architecture: 7
-CPU variant : 0x0
-CPU part : 0xc0d
-CPU revision : 1
-
-Hardware : Rockchip (Device Tree)
-Revision : 0000
-Serial : 0000000000000000
-"""
-
class SuiteRunnerTest(unittest.TestCase):
"""Class of SuiteRunner test."""
@@ -105,10 +28,9 @@ class SuiteRunnerTest(unittest.TestCase):
mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
mock_cmd_term = mock.Mock(spec=command_executer.CommandTerminator)
mock_logger = mock.Mock(spec=logger.Logger)
- mock_label = label.MockLabel('lumpy', 'build', 'lumpy_chromeos_image', '', '',
- '/tmp/chromeos', 'lumpy',
- ['lumpy1.cros', 'lumpy.cros2'], '', '', False,
- 'average', 'gcc', False, '')
+ mock_label = label.MockLabel(
+ 'lumpy', 'lumpy_chromeos_image', '', '/tmp/chromeos', 'lumpy',
+ ['lumpy1.cros', 'lumpy.cros2'], '', '', False, 'average', 'gcc', '')
telemetry_crosperf_bench = Benchmark(
'b1_test', # name
'octane', # test_name
@@ -140,20 +62,20 @@ class SuiteRunnerTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(SuiteRunnerTest, self).__init__(*args, **kwargs)
self.call_test_that_run = False
- self.skylab_run_args = []
+ self.pin_governor_args = []
self.test_that_args = []
self.telemetry_run_args = []
self.telemetry_crosperf_args = []
- self.call_skylab_run = False
self.call_telemetry_crosperf_run = False
- self.call_disable_aslr = False
+ self.call_pin_governor = False
+ self.call_telemetry_run = False
def setUp(self):
self.runner = suite_runner.SuiteRunner(
- {}, self.mock_logger, 'verbose', self.mock_cmd_exec, self.mock_cmd_term)
+ self.mock_logger, 'verbose', self.mock_cmd_exec, self.mock_cmd_term)
def test_get_profiler_args(self):
- input_str = ("--profiler=custom_perf --profiler_args='perf_options"
+ input_str = ('--profiler=custom_perf --profiler_args=\'perf_options'
'="record -a -e cycles,instructions"\'')
output_str = ("profiler=custom_perf profiler_args='record -a -e "
"cycles,instructions'")
@@ -163,24 +85,23 @@ class SuiteRunnerTest(unittest.TestCase):
def test_run(self):
def reset():
+ self.call_pin_governor = False
self.call_test_that_run = False
- self.call_skylab_run = False
+ self.call_telemetry_run = False
self.call_telemetry_crosperf_run = False
- self.call_disable_aslr = False
- self.skylab_run_args = []
+ self.pin_governor_args = []
self.test_that_args = []
self.telemetry_run_args = []
self.telemetry_crosperf_args = []
- def FakeDisableASLR(runner):
- # pylint fix for unused variable.
- del runner
- self.call_disable_aslr = True
+ def FakePinGovernor(machine, chroot):
+ self.call_pin_governor = True
+ self.pin_governor_args = [machine, chroot]
- def FakeSkylabRun(test_label, benchmark, test_args, profiler_args):
- self.skylab_run_args = [test_label, benchmark, test_args, profiler_args]
- self.call_skylab_run = True
- return 'Ran FakeSkylabRun'
+ def FakeTelemetryRun(machine, test_label, benchmark, profiler_args):
+ self.telemetry_run_args = [machine, test_label, benchmark, profiler_args]
+ self.call_telemetry_run = True
+ return 'Ran FakeTelemetryRun'
def FakeTelemetryCrosperfRun(machine, test_label, benchmark, test_args,
profiler_args):
@@ -198,621 +119,95 @@ class SuiteRunnerTest(unittest.TestCase):
self.call_test_that_run = True
return 'Ran FakeTestThatRun'
- def FakeRunner(command, ignore_status=False):
- # pylint fix for unused variable.
- del command, ignore_status
- return 0, '', ''
-
- self.runner.DisableASLR = FakeDisableASLR
- self.runner.Skylab_Run = FakeSkylabRun
+ self.runner.PinGovernorExecutionFrequencies = FakePinGovernor
+ self.runner.Telemetry_Run = FakeTelemetryRun
self.runner.Telemetry_Crosperf_Run = FakeTelemetryCrosperfRun
self.runner.Test_That_Run = FakeTestThatRun
- self.runner.SetupCpuUsage = mock.Mock()
- self.runner.SetupCpuFreq = mock.Mock()
- self.runner.DutWrapper = mock.Mock(return_value=FakeRunner)
- self.runner.DisableTurbo = mock.Mock()
- self.runner.SetCpuGovernor = mock.Mock()
- self.runner.WaitCooldown = mock.Mock(return_value=0)
- self.runner.GetCpuOnline = mock.Mock(return_value={0: 1, 1: 1, 2: 0})
- self.runner.dut_config['cooldown_time'] = 0
- self.runner.dut_config['governor'] = 'fake_governor'
- self.runner.dut_config['cpu_freq_pct'] = 65
+
machine = 'fake_machine'
test_args = ''
profiler_args = ''
-
reset()
- self.mock_label.skylab = True
self.runner.Run(machine, self.mock_label, self.telemetry_bench, test_args,
profiler_args)
- self.assertFalse(self.call_disable_aslr)
- self.assertTrue(self.call_skylab_run)
+ self.assertTrue(self.call_pin_governor)
+ self.assertTrue(self.call_telemetry_run)
self.assertFalse(self.call_test_that_run)
self.assertFalse(self.call_telemetry_crosperf_run)
- self.assertEqual(self.skylab_run_args,
- [self.mock_label, self.telemetry_bench, '', ''])
- self.runner.SetupCpuUsage.assert_not_called()
- self.runner.SetupCpuFreq.assert_not_called()
- self.runner.GetCpuOnline.assert_not_called()
- self.runner.DutWrapper.assert_not_called()
- self.runner.SetCpuGovernor.assert_not_called()
- self.runner.DisableTurbo.assert_not_called()
- self.runner.WaitCooldown.assert_not_called()
- self.mock_label.skylab = False
+ self.assertEqual(
+ self.telemetry_run_args,
+ ['fake_machine', self.mock_label, self.telemetry_bench, ''])
reset()
self.runner.Run(machine, self.mock_label, self.test_that_bench, test_args,
profiler_args)
- self.assertTrue(self.call_disable_aslr)
+ self.assertTrue(self.call_pin_governor)
+ self.assertFalse(self.call_telemetry_run)
self.assertTrue(self.call_test_that_run)
self.assertFalse(self.call_telemetry_crosperf_run)
self.assertEqual(
self.test_that_args,
['fake_machine', self.mock_label, self.test_that_bench, '', ''])
- self.runner.SetupCpuUsage.assert_called_once_with(FakeRunner)
- self.runner.SetupCpuFreq.assert_called_once_with(FakeRunner, [0, 1])
- self.runner.GetCpuOnline.assert_called_once_with(FakeRunner)
- self.runner.DutWrapper.assert_called_once_with(
- machine, self.mock_label.chromeos_root)
- self.runner.SetCpuGovernor.assert_called_once_with(
- 'fake_governor', FakeRunner, ignore_status=False)
- self.runner.DisableTurbo.assert_called_once_with(FakeRunner)
- self.runner.WaitCooldown.assert_not_called()
reset()
self.runner.Run(machine, self.mock_label, self.telemetry_crosperf_bench,
test_args, profiler_args)
- self.assertTrue(self.call_disable_aslr)
+ self.assertTrue(self.call_pin_governor)
+ self.assertFalse(self.call_telemetry_run)
self.assertFalse(self.call_test_that_run)
self.assertTrue(self.call_telemetry_crosperf_run)
self.assertEqual(self.telemetry_crosperf_args, [
'fake_machine', self.mock_label, self.telemetry_crosperf_bench, '', ''
])
- self.runner.DutWrapper.assert_called_with(machine,
- self.mock_label.chromeos_root)
-
- def test_run_with_cooldown(self):
-
- def FakeRunner(command, ignore_status=False):
- # pylint fix for unused variable.
- del command, ignore_status
- return 0, '', ''
-
- self.runner.DisableASLR = mock.Mock()
- self.runner.DutWrapper = mock.Mock(return_value=FakeRunner)
- self.runner.DisableTurbo = mock.Mock()
- self.runner.SetCpuGovernor = mock.Mock()
- self.runner.SetupCpuUsage = mock.Mock()
- self.runner.SetupCpuFreq = mock.Mock()
- self.runner.WaitCooldown = mock.Mock(return_value=0)
- self.runner.GetCpuOnline = mock.Mock(return_value={0: 0, 1: 1})
- self.runner.Telemetry_Crosperf_Run = mock.Mock(return_value=(0, '', ''))
- self.runner.dut_config['cooldown_time'] = 10
- self.runner.dut_config['governor'] = 'fake_governor'
- self.runner.dut_config['cpu_freq_pct'] = 75
-
- self.runner.Run('fake_machine', self.mock_label,
- self.telemetry_crosperf_bench, '', '')
-
- self.runner.WaitCooldown.assert_called_once_with(FakeRunner)
- self.runner.DisableASLR.assert_called_once()
- self.runner.Telemetry_Crosperf_Run.assert_called_once()
- self.runner.DisableTurbo.assert_called_once_with(FakeRunner)
- self.runner.SetupCpuUsage.assert_called_once_with(FakeRunner)
- self.runner.SetupCpuFreq.assert_called_once_with(FakeRunner, [1])
- self.runner.SetCpuGovernor.assert_called()
- self.runner.GetCpuOnline.assert_called_once_with(FakeRunner)
- self.assertGreater(self.runner.SetCpuGovernor.call_count, 1)
- self.assertEqual(
- self.runner.SetCpuGovernor.call_args,
- mock.call('fake_governor', FakeRunner, ignore_status=False))
-
- @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommandWOutput')
- def test_dut_wrapper(self, mock_cros_runcmd):
- self.mock_cmd_exec.CrosRunCommandWOutput = mock_cros_runcmd
- mock_cros_runcmd.return_value = (0, '', '')
- run_on_dut = self.runner.DutWrapper('lumpy.cros2', '/tmp/chromeos')
- mock_cros_runcmd.assert_not_called()
- run_on_dut('run command;')
- mock_cros_runcmd.assert_called_once_with(
- 'run command;', chromeos_root='/tmp/chromeos', machine='lumpy.cros2')
-
- @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommandWOutput')
- def test_dut_wrapper_fatal_error(self, mock_cros_runcmd):
- self.mock_cmd_exec.CrosRunCommandWOutput = mock_cros_runcmd
- # Command returns error 1.
- mock_cros_runcmd.return_value = (1, '', 'Error!')
- run_on_dut = self.runner.DutWrapper('lumpy.cros2', '/tmp/chromeos')
- mock_cros_runcmd.assert_not_called()
- run_on_dut('run command;')
- mock_cros_runcmd.assert_called_once_with(
- 'run command;', chromeos_root='/tmp/chromeos', machine='lumpy.cros2')
- # Error status causes log fatal.
- self.assertEqual(
- self.mock_logger.method_calls[-1],
- mock.call.LogFatal('Command execution on DUT lumpy.cros2 failed.\n'
- 'Failing command: run command;\nreturned 1\n'
- 'Error message: Error!'))
-
- @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommandWOutput')
- def test_dut_wrapper_ignore_error(self, mock_cros_runcmd):
- self.mock_cmd_exec.CrosRunCommandWOutput = mock_cros_runcmd
- # Command returns error 1.
- mock_cros_runcmd.return_value = (1, '', 'Error!')
- run_on_dut = self.runner.DutWrapper('lumpy.cros2', '/tmp/chromeos')
- run_on_dut('run command;', ignore_status=True)
- mock_cros_runcmd.assert_called_once_with(
- 'run command;', chromeos_root='/tmp/chromeos', machine='lumpy.cros2')
- # Error status is not fatal. LogError records the error message.
- self.assertEqual(
- self.mock_logger.method_calls[-1],
- mock.call.LogError('Command execution on DUT lumpy.cros2 failed.\n'
- 'Failing command: run command;\nreturned 1\n'
- 'Error message: Error!\n'
- '(Failure is considered non-fatal. Continue.)'))
-
- def test_disable_aslr(self):
- run_on_dut = mock.Mock()
- self.runner.DisableASLR(run_on_dut)
+
+ @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommand')
+ def test_pin_governor_execution_frequencies(self, mock_cros_runcmd):
+ self.mock_cmd_exec.CrosRunCommand = mock_cros_runcmd
+ self.runner.PinGovernorExecutionFrequencies('lumpy1.cros', '/tmp/chromeos')
+ self.assertEqual(mock_cros_runcmd.call_count, 1)
+ cmd = mock_cros_runcmd.call_args_list[0][0]
# pyformat: disable
- set_cpu_cmd = ('set -e && '
- 'stop ui; '
- 'if [[ -e /proc/sys/kernel/randomize_va_space ]]; then '
- ' echo 0 > /proc/sys/kernel/randomize_va_space; '
- 'fi; '
- 'start ui ')
- run_on_dut.assert_called_once_with(set_cpu_cmd)
-
- def test_set_cpu_governor(self):
- dut_runner = mock.Mock(return_value=(0, '', ''))
- self.runner.SetCpuGovernor('new_governor', dut_runner, ignore_status=False)
- set_cpu_cmd = (
- 'for f in `ls -d /sys/devices/system/cpu/cpu*/cpufreq 2>/dev/null`; do '
- # Skip writing scaling_governor if cpu is not online.
- ' [[ -e ${f/cpufreq/online} ]] && grep -q 0 ${f/cpufreq/online} '
- ' && continue; '
- ' cd $f; '
- ' if [[ -e scaling_governor ]]; then '
- ' echo %s > scaling_governor; fi; '
- 'done; ')
- dut_runner.assert_called_once_with(
- set_cpu_cmd % 'new_governor', ignore_status=False)
-
- def test_set_cpu_governor_propagate_error(self):
- dut_runner = mock.Mock(return_value=(1, '', 'Error.'))
- self.runner.SetCpuGovernor('non-exist_governor', dut_runner)
- set_cpu_cmd = (
- 'for f in `ls -d /sys/devices/system/cpu/cpu*/cpufreq 2>/dev/null`; do '
- # Skip writing scaling_governor if cpu is not online.
- ' [[ -e ${f/cpufreq/online} ]] && grep -q 0 ${f/cpufreq/online} '
- ' && continue; '
- ' cd $f; '
- ' if [[ -e scaling_governor ]]; then '
- ' echo %s > scaling_governor; fi; '
- 'done; ')
- # By default error status is fatal.
- dut_runner.assert_called_once_with(
- set_cpu_cmd % 'non-exist_governor', ignore_status=False)
-
- def test_set_cpu_governor_ignore_status(self):
- dut_runner = mock.Mock(return_value=(1, '', 'Error.'))
- ret_code = self.runner.SetCpuGovernor(
- 'non-exist_governor', dut_runner, ignore_status=True)
- set_cpu_cmd = (
- 'for f in `ls -d /sys/devices/system/cpu/cpu*/cpufreq 2>/dev/null`; do '
- # Skip writing scaling_governor if cpu is not online.
- ' [[ -e ${f/cpufreq/online} ]] && grep -q 0 ${f/cpufreq/online} '
- ' && continue; '
- ' cd $f; '
- ' if [[ -e scaling_governor ]]; then '
- ' echo %s > scaling_governor; fi; '
- 'done; ')
- dut_runner.assert_called_once_with(
- set_cpu_cmd % 'non-exist_governor', ignore_status=True)
- self.assertEqual(ret_code, 1)
-
- def test_disable_turbo(self):
- dut_runner = mock.Mock(return_value=(0, '', ''))
- self.runner.DisableTurbo(dut_runner)
set_cpu_cmd = (
+ 'set -e && '
# Disable Turbo in Intel pstate driver
'if [[ -e /sys/devices/system/cpu/intel_pstate/no_turbo ]]; then '
' if grep -q 0 /sys/devices/system/cpu/intel_pstate/no_turbo; then '
' echo -n 1 > /sys/devices/system/cpu/intel_pstate/no_turbo; '
' fi; '
- 'fi; ')
- dut_runner.assert_called_once_with(set_cpu_cmd)
-
- def test_get_cpu_online_two(self):
- """Test one digit CPU #."""
- dut_runner = mock.Mock(
- return_value=(0, '/sys/devices/system/cpu/cpu0/online 0\n'
- '/sys/devices/system/cpu/cpu1/online 1\n', ''))
- cpu_online = self.runner.GetCpuOnline(dut_runner)
- self.assertEqual(cpu_online, {0: 0, 1: 1})
-
- def test_get_cpu_online_twelve(self):
- """Test two digit CPU #."""
- dut_runner = mock.Mock(
- return_value=(0, '/sys/devices/system/cpu/cpu0/online 1\n'
- '/sys/devices/system/cpu/cpu1/online 0\n'
- '/sys/devices/system/cpu/cpu10/online 1\n'
- '/sys/devices/system/cpu/cpu11/online 1\n'
- '/sys/devices/system/cpu/cpu2/online 1\n'
- '/sys/devices/system/cpu/cpu3/online 0\n'
- '/sys/devices/system/cpu/cpu4/online 1\n'
- '/sys/devices/system/cpu/cpu5/online 0\n'
- '/sys/devices/system/cpu/cpu6/online 1\n'
- '/sys/devices/system/cpu/cpu7/online 0\n'
- '/sys/devices/system/cpu/cpu8/online 1\n'
- '/sys/devices/system/cpu/cpu9/online 0\n', ''))
- cpu_online = self.runner.GetCpuOnline(dut_runner)
- self.assertEqual(cpu_online, {
- 0: 1,
- 1: 0,
- 2: 1,
- 3: 0,
- 4: 1,
- 5: 0,
- 6: 1,
- 7: 0,
- 8: 1,
- 9: 0,
- 10: 1,
- 11: 1
- })
-
- def test_get_cpu_online_no_output(self):
- """Test error case, no output."""
- dut_runner = mock.Mock(return_value=(0, '', ''))
- with self.assertRaises(AssertionError):
- self.runner.GetCpuOnline(dut_runner)
-
- def test_get_cpu_online_command_error(self):
- """Test error case, command error."""
- dut_runner = mock.Mock(side_effect=AssertionError)
- with self.assertRaises(AssertionError):
- self.runner.GetCpuOnline(dut_runner)
-
- @mock.patch.object(suite_runner.SuiteRunner, 'SetupArmCores')
- def test_setup_cpu_usage_little_on_arm(self, mock_setup_arm):
- self.runner.SetupArmCores = mock_setup_arm
- dut_runner = mock.Mock(return_value=(0, 'armv7l', ''))
- self.runner.dut_config['cpu_usage'] = 'little_only'
- self.runner.SetupCpuUsage(dut_runner)
- self.runner.SetupArmCores.assert_called_once_with(dut_runner)
-
- @mock.patch.object(suite_runner.SuiteRunner, 'SetupArmCores')
- def test_setup_cpu_usage_big_on_aarch64(self, mock_setup_arm):
- self.runner.SetupArmCores = mock_setup_arm
- dut_runner = mock.Mock(return_value=(0, 'aarch64', ''))
- self.runner.dut_config['cpu_usage'] = 'big_only'
- self.runner.SetupCpuUsage(dut_runner)
- self.runner.SetupArmCores.assert_called_once_with(dut_runner)
-
- @mock.patch.object(suite_runner.SuiteRunner, 'SetupArmCores')
- def test_setup_cpu_usage_big_on_intel(self, mock_setup_arm):
- self.runner.SetupArmCores = mock_setup_arm
- dut_runner = mock.Mock(return_value=(0, 'x86_64', ''))
- self.runner.dut_config['cpu_usage'] = 'big_only'
- self.runner.SetupCpuUsage(dut_runner)
- # Check that SetupArmCores not called with invalid setup.
- self.runner.SetupArmCores.assert_not_called()
-
- @mock.patch.object(suite_runner.SuiteRunner, 'SetupArmCores')
- def test_setup_cpu_usage_all_on_intel(self, mock_setup_arm):
- self.runner.SetupArmCores = mock_setup_arm
- dut_runner = mock.Mock(return_value=(0, 'x86_64', ''))
- self.runner.dut_config['cpu_usage'] = 'all'
- self.runner.SetupCpuUsage(dut_runner)
- # Check that SetupArmCores not called in general case.
- self.runner.SetupArmCores.assert_not_called()
-
- def test_setup_arm_cores_big_on_big_little(self):
- dut_runner = mock.Mock(side_effect=[
- (0, BIG_LITTLE_CPUINFO, ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_usage'] = 'big_only'
- self.runner.SetupArmCores(dut_runner)
- dut_runner.assert_called_with(
- 'echo 1 | tee /sys/devices/system/cpu/cpu{2}/online; '
- 'echo 0 | tee /sys/devices/system/cpu/cpu{0,1}/online')
-
- def test_setup_arm_cores_little_on_big_little(self):
- dut_runner = mock.Mock(side_effect=[
- (0, BIG_LITTLE_CPUINFO, ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_usage'] = 'little_only'
- self.runner.SetupArmCores(dut_runner)
- dut_runner.assert_called_with(
- 'echo 1 | tee /sys/devices/system/cpu/cpu{0,1}/online; '
- 'echo 0 | tee /sys/devices/system/cpu/cpu{2}/online')
-
- def test_setup_arm_cores_invalid_config(self):
- dut_runner = mock.Mock(side_effect=[
- (0, LITTLE_ONLY_CPUINFO, ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_usage'] = 'big_only'
- self.runner.SetupArmCores(dut_runner)
- # Check that setup command is not sent when trying
- # to use 'big_only' on a platform with all little cores.
- dut_runner.assert_called_once_with('cat /proc/cpuinfo')
-
- def test_setup_arm_cores_not_big_little(self):
- dut_runner = mock.Mock(side_effect=[
- (0, NOT_BIG_LITTLE_CPUINFO, ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_usage'] = 'big_only'
- self.runner.SetupArmCores(dut_runner)
- # Check that setup command is not sent when trying
- # to use 'big_only' on a platform w/o support of big/little.
- dut_runner.assert_called_once_with('cat /proc/cpuinfo')
-
- def test_setup_arm_cores_unsupported_cpu_usage(self):
- dut_runner = mock.Mock(side_effect=[
- (0, BIG_LITTLE_CPUINFO, ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_usage'] = 'exclusive_cores'
- self.runner.SetupArmCores(dut_runner)
- # Check that setup command is not sent when trying to use
- # 'exclusive_cores' on ARM CPU setup.
- dut_runner.assert_called_once_with('cat /proc/cpuinfo')
-
- def test_setup_cpu_freq_single_full(self):
- online = [0]
- dut_runner = mock.Mock(side_effect=[
- (0,
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_available_frequencies\n',
- ''),
- (0, '1 2 3 4 5 6 7 8 9 10', ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_freq_pct'] = 100
- self.runner.SetupCpuFreq(dut_runner, online)
- self.assertGreaterEqual(dut_runner.call_count, 3)
- self.assertEqual(
- dut_runner.call_args,
- mock.call('echo 10 | tee '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq'))
-
- def test_setup_cpu_freq_middle(self):
- online = [0]
- dut_runner = mock.Mock(side_effect=[
- (0,
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_available_frequencies\n',
- ''),
- (0, '1 2 3 4 5 6 7 8 9 10', ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_freq_pct'] = 60
- self.runner.SetupCpuFreq(dut_runner, online)
- self.assertGreaterEqual(dut_runner.call_count, 2)
- self.assertEqual(
- dut_runner.call_args,
- mock.call('echo 6 | tee '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq'))
-
- def test_setup_cpu_freq_lowest(self):
- online = [0]
- dut_runner = mock.Mock(side_effect=[
- (0,
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_available_frequencies\n',
- ''),
- (0, '1 2 3 4 5 6 7 8 9 10', ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_freq_pct'] = 0
- self.runner.SetupCpuFreq(dut_runner, online)
- self.assertGreaterEqual(dut_runner.call_count, 2)
- self.assertEqual(
- dut_runner.call_args,
- mock.call('echo 1 | tee '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq'))
-
- def test_setup_cpu_freq_multiple_middle(self):
- online = [0, 1]
- dut_runner = mock.Mock(side_effect=[
- (0,
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_available_frequencies\n'
- '/sys/devices/system/cpu/cpu1/cpufreq/scaling_available_frequencies\n',
- ''),
- (0, '1 2 3 4 5 6 7 8 9 10', ''),
- (0, '', ''),
- (0, '1 4 6 8 10 12 14 16 18 20', ''),
- (0, '', ''),
- ])
- self.runner.dut_config['cpu_freq_pct'] = 70
- self.runner.SetupCpuFreq(dut_runner, online)
- self.assertEqual(dut_runner.call_count, 5)
- self.assertEqual(
- dut_runner.call_args_list[2],
- mock.call('echo 7 | tee '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq '
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq'))
- self.assertEqual(
- dut_runner.call_args_list[4],
- mock.call('echo 14 | tee '
- '/sys/devices/system/cpu/cpu1/cpufreq/scaling_max_freq '
- '/sys/devices/system/cpu/cpu1/cpufreq/scaling_min_freq'))
-
- def test_setup_cpu_freq_no_scaling_available(self):
- online = [0, 1]
- dut_runner = mock.Mock(return_value=(2, '', 'No such file or directory'))
- self.runner.dut_config['cpu_freq_pct'] = 50
- self.runner.SetupCpuFreq(dut_runner, online)
- dut_runner.assert_called_once()
- self.assertNotRegexpMatches(dut_runner.call_args_list[0][0][0],
- '^echo.*scaling_max_freq$')
-
- def test_setup_cpu_freq_multiple_no_access(self):
- online = [0, 1]
- dut_runner = mock.Mock(side_effect=[
- (0,
- '/sys/devices/system/cpu/cpu0/cpufreq/scaling_available_frequencies\n'
- '/sys/devices/system/cpu/cpu1/cpufreq/scaling_available_frequencies\n',
- ''),
- (0, '1 4 6 8 10 12 14 16 18 20', ''),
- AssertionError(),
- ])
- self.runner.dut_config['cpu_freq_pct'] = 30
- # Error status causes log fatal.
- with self.assertRaises(AssertionError):
- self.runner.SetupCpuFreq(dut_runner, online)
-
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_nowait(self, mock_sleep):
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(return_value=(0, '39000', ''))
- self.runner.dut_config['cooldown_time'] = 10
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- # Send command to DUT only once to check temperature
- # and make sure it does not exceed the threshold.
- dut_runner.assert_called_once()
- mock_sleep.assert_not_called()
- self.assertEqual(wait_time, 0)
-
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_needwait_once(self, mock_sleep):
- """Wait one iteration for cooldown.
-
- Set large enough timeout and changing temperature
- output. Make sure it exits when expected value
- received.
- Expect that WaitCooldown check temp twice.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(side_effect=[(0, '41000', ''), (0, '39999', '')])
- self.runner.dut_config['cooldown_time'] = 100
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- dut_runner.assert_called()
- self.assertEqual(dut_runner.call_count, 2)
- mock_sleep.assert_called()
- self.assertGreater(wait_time, 0)
-
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_needwait(self, mock_sleep):
- """Test exit by timeout.
-
- Send command to DUT checking the temperature and
- check repeatedly until timeout goes off.
- Output from temperature sensor never changes.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(return_value=(0, '41000', ''))
- self.runner.dut_config['cooldown_time'] = 60
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- dut_runner.assert_called()
- self.assertGreater(dut_runner.call_count, 2)
- mock_sleep.assert_called()
- self.assertGreater(wait_time, 0)
-
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_needwait_multtemp(self, mock_sleep):
- """Wait until all temps go down.
-
- Set large enough timeout and changing temperature
- output. Make sure it exits when expected value
- for all temperatures received.
- Expect 3 checks.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(side_effect=[
- (0, '41000\n20000\n30000\n45000', ''),
- (0, '39000\n20000\n30000\n41000', ''),
- (0, '39000\n20000\n30000\n31000', ''),
- ])
- self.runner.dut_config['cooldown_time'] = 100
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- dut_runner.assert_called()
- self.assertEqual(dut_runner.call_count, 3)
- mock_sleep.assert_called()
- self.assertGreater(wait_time, 0)
+ 'fi; '
+ # Set governor to performance for each cpu
+ 'for f in /sys/devices/system/cpu/cpu*/cpufreq; do '
+ 'cd $f; '
+ 'echo performance > scaling_governor; '
+ 'done'
+ )
+ # pyformat: enable
+ self.assertEqual(cmd, (set_cpu_cmd,))
@mock.patch.object(time, 'sleep')
- def test_wait_cooldown_thermal_error(self, mock_sleep):
- """Handle error status.
-
- Any error should be considered non-fatal.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(side_effect=[
- (1, '39000\n20000\n30000\n41000', 'Thermal error'),
- (1, '39000\n20000\n30000\n31000', 'Thermal error'),
- ])
- self.runner.dut_config['cooldown_time'] = 10
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- # Check that errors are ignored.
- dut_runner.assert_called_with(
- 'cat /sys/class/thermal/thermal_zone*/temp', ignore_status=True)
- self.assertEqual(dut_runner.call_count, 2)
- # Check that we are waiting even when an error is returned
- # as soon as data is coming.
- mock_sleep.assert_called()
- self.assertGreater(wait_time, 0)
-
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_thermal_no_output(self, mock_sleep):
- """Handle no output.
-
- Check handling of empty stdout.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(side_effect=[(1, '', 'Thermal error')])
- self.runner.dut_config['cooldown_time'] = 10
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- # Check that errors are ignored.
- dut_runner.assert_called_once_with(
- 'cat /sys/class/thermal/thermal_zone*/temp', ignore_status=True)
- # No wait.
- mock_sleep.assert_not_called()
- self.assertEqual(wait_time, 0)
+ @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommand')
+ def test_reboot_machine(self, mock_cros_runcmd, mock_sleep):
- @mock.patch.object(time, 'sleep')
- def test_wait_cooldown_thermal_ws_output(self, mock_sleep):
- """Handle whitespace output.
-
- Check handling of whitespace only.
- """
- mock_sleep.return_value = 0
- dut_runner = mock.Mock(side_effect=[(1, '\n', 'Thermal error')])
- self.runner.dut_config['cooldown_time'] = 10
- self.runner.dut_config['cooldown_temp'] = 40
- wait_time = self.runner.WaitCooldown(dut_runner)
- # Check that errors are ignored.
- dut_runner.assert_called_once_with(
- 'cat /sys/class/thermal/thermal_zone*/temp', ignore_status=True)
- # No wait.
- mock_sleep.assert_not_called()
- self.assertEqual(wait_time, 0)
+ def FakePinGovernor(machine_name, chromeos_root):
+ if machine_name or chromeos_root:
+ pass
- @mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommand')
- def test_restart_ui(self, mock_cros_runcmd):
self.mock_cmd_exec.CrosRunCommand = mock_cros_runcmd
+ self.runner.PinGovernorExecutionFrequencies = FakePinGovernor
self.runner.RestartUI('lumpy1.cros', '/tmp/chromeos')
- mock_cros_runcmd.assert_called_once_with(
- 'stop ui; sleep 5; start ui',
- chromeos_root='/tmp/chromeos',
- machine='lumpy1.cros')
+ self.assertEqual(mock_cros_runcmd.call_count, 1)
+ self.assertEqual(mock_cros_runcmd.call_args_list[0][0],
+ ('stop ui; sleep 5; start ui',))
@mock.patch.object(command_executer.CommandExecuter, 'CrosRunCommand')
@mock.patch.object(command_executer.CommandExecuter,
'ChrootRunCommandWOutput')
def test_test_that_run(self, mock_chroot_runcmd, mock_cros_runcmd):
+ def FakeRebootMachine(machine, chroot):
+ if machine or chroot:
+ pass
+
def FakeLogMsg(fd, termfd, msg, flush=True):
if fd or termfd or msg or flush:
pass
@@ -820,6 +215,7 @@ class SuiteRunnerTest(unittest.TestCase):
save_log_msg = self.real_logger.LogMsg
self.real_logger.LogMsg = FakeLogMsg
self.runner.logger = self.real_logger
+ self.runner.RebootMachine = FakeRebootMachine
raised_exception = False
try:
@@ -857,9 +253,8 @@ class SuiteRunnerTest(unittest.TestCase):
mock_isdir.return_value = True
mock_chroot_runcmd.return_value = 0
self.mock_cmd_exec.ChrootRunCommandWOutput = mock_chroot_runcmd
- profiler_args = ("--profiler=custom_perf --profiler_args='perf_options"
+ profiler_args = ('--profiler=custom_perf --profiler_args=\'perf_options'
'="record -a -e cycles,instructions"\'')
- self.runner.dut_config['top_interval'] = 3
res = self.runner.Telemetry_Crosperf_Run('lumpy1.cros', self.mock_label,
self.telemetry_crosperf_bench, '',
profiler_args)
@@ -870,11 +265,10 @@ class SuiteRunnerTest(unittest.TestCase):
self.assertEqual(args_list[0], '/tmp/chromeos')
self.assertEqual(args_list[1],
('/usr/bin/test_that --autotest_dir '
- '~/trunk/src/third_party/autotest/files --fast '
- '--board=lumpy --args=" run_local=False test=octane '
- 'turbostat=True top_interval=3 profiler=custom_perf '
- 'profiler_args=\'record -a -e cycles,instructions\'" '
- 'lumpy1.cros telemetry_Crosperf'))
+ '~/trunk/src/third_party/autotest/files '
+ ' --board=lumpy --args=" run_local=False test=octane '
+ 'profiler=custom_perf profiler_args=\'record -a -e '
+ 'cycles,instructions\'" lumpy1.cros telemetry_Crosperf'))
self.assertEqual(args_dict['cros_sdk_options'],
('--no-ns-pid --chrome_root= '
'--chrome_root_mount=/tmp/chrome_root '
@@ -882,59 +276,67 @@ class SuiteRunnerTest(unittest.TestCase):
self.assertEqual(args_dict['command_terminator'], self.mock_cmd_term)
self.assertEqual(len(args_dict), 2)
+ @mock.patch.object(os.path, 'isdir')
+ @mock.patch.object(os.path, 'exists')
@mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
- def test_skylab_run(self, mock_runcmd):
+ def test_telemetry_run(self, mock_runcmd, mock_exists, mock_isdir):
+
+ def FakeLogMsg(fd, termfd, msg, flush=True):
+ if fd or termfd or msg or flush:
+ pass
- def FakeDownloadResult(l, task_id):
- if l and task_id:
- self.assertEqual(task_id, '12345')
- return 0
+ save_log_msg = self.real_logger.LogMsg
+ self.real_logger.LogMsg = FakeLogMsg
+ mock_runcmd.return_value = 0
- mock_runcmd.return_value = \
- (0,
- '"success":true\nCreated Swarming task https://swarming/task?id=12345',
- '')
self.mock_cmd_exec.RunCommandWOutput = mock_runcmd
- self.mock_label.skylab = True
- self.runner.DownloadResult = FakeDownloadResult
- res = self.runner.Skylab_Run(self.mock_label, self.test_that_bench, '', '')
- ret_tup = (0, '\nResults placed in tmp/swarming-12345\n', '')
- self.assertEqual(res, ret_tup)
- self.assertEqual(mock_runcmd.call_count, 2)
-
- args_list = mock_runcmd.call_args_list[0][0]
- args_dict = mock_runcmd.call_args_list[0][1]
- self.assertEqual(args_list[0],
- ('/usr/local/bin/skylab create-test '
- '-dim dut_name:lumpy1 -dim dut_name:lumpy.cros2 '
- '-bb=false -client-test -board=lumpy -image=build '
- '-pool=DUT_POOL_QUOTA octane'))
- self.assertEqual(args_dict['command_terminator'], self.mock_cmd_term)
+ self.runner.logger = self.real_logger
- args_list = mock_runcmd.call_args_list[1][0]
- self.assertEqual(args_list[0], ('skylab wait-task -bb=false 12345'))
- self.assertEqual(args_dict['command_terminator'], self.mock_cmd_term)
+ profiler_args = ('--profiler=custom_perf --profiler_args=\'perf_options'
+ '="record -a -e cycles,instructions"\'')
- @mock.patch.object(time, 'sleep')
- @mock.patch.object(command_executer.CommandExecuter, 'RunCommand')
- def test_download_result(self, mock_runcmd, mock_sleep):
- mock_runcmd.return_value = 0
- mock_sleep.return_value = 0
- self.mock_cmd_exec.RunCommand = mock_runcmd
-
- self.runner.DownloadResult(self.mock_label, '12345')
-
- self.assertEqual(mock_runcmd.call_count, 2)
- cmd = mock_runcmd.call_args_list[0][0][0]
- self.assertEqual(cmd,
- ('/tmp/chromeos/src/chromium/depot_tools/gsutil.py ls '
- 'gs://chromeos-autotest-results/swarming-12345/'
- 'autoserv_test'))
- cmd = mock_runcmd.call_args_list[1][0][0]
- self.assertEqual(cmd,
- ('/tmp/chromeos/src/chromium/depot_tools/gsutil.py -mq '
- 'cp -r gs://chromeos-autotest-results/swarming-12345 '
- '/tmp/chromeos/chroot/tmp'))
+ raises_exception = False
+ mock_isdir.return_value = False
+ try:
+ self.runner.Telemetry_Run('lumpy1.cros', self.mock_label,
+ self.telemetry_bench, '')
+ except SystemExit:
+ raises_exception = True
+ self.assertTrue(raises_exception)
+
+ raises_exception = False
+ mock_isdir.return_value = True
+ mock_exists.return_value = False
+ try:
+ self.runner.Telemetry_Run('lumpy1.cros', self.mock_label,
+ self.telemetry_bench, '')
+ except SystemExit:
+ raises_exception = True
+ self.assertTrue(raises_exception)
+
+ raises_exception = False
+ mock_isdir.return_value = True
+ mock_exists.return_value = True
+ try:
+ self.runner.Telemetry_Run('lumpy1.cros', self.mock_label,
+ self.telemetry_bench, profiler_args)
+ except SystemExit:
+ raises_exception = True
+ self.assertTrue(raises_exception)
+
+ test_flag.SetTestMode(True)
+ res = self.runner.Telemetry_Run('lumpy1.cros', self.mock_label,
+ self.telemetry_bench, '')
+ self.assertEqual(res, 0)
+ self.assertEqual(mock_runcmd.call_count, 1)
+ self.assertEqual(
+ mock_runcmd.call_args_list[0][0],
+ (('cd src/tools/perf && ./run_measurement '
+ '--browser=cros-chrome --output-format=csv '
+ '--remote=lumpy1.cros --identity /tmp/chromeos/src/scripts'
+ '/mod_for_test_scripts/ssh_keys/testing_rsa octane '),))
+
+ self.real_logger.LogMsg = save_log_msg
if __name__ == '__main__':
diff --git a/cwp/bartlett/server.py b/cwp/bartlett/server.py
index 8fb7d61e..f6b35361 100755
--- a/cwp/bartlett/server.py
+++ b/cwp/bartlett/server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
# Author: mrdmnd@ (Matt Redmond)
# Based off of code in //depot/google3/experimental/mobile_gwp
diff --git a/debug_info_test/debug_info_test.py b/debug_info_test/debug_info_test.py
index 4839e69c..f5afd644 100755
--- a/debug_info_test/debug_info_test.py
+++ b/debug_info_test/debug_info_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/debug_info_test/exist_debug_info.whitelist b/debug_info_test/exist_debug_info.whitelist
index e0076fd6..417607c0 100644
--- a/debug_info_test/exist_debug_info.whitelist
+++ b/debug_info_test/exist_debug_info.whitelist
@@ -9,5 +9,4 @@
.*/usr/lib64/python2.7/site-packages/selenium/webdriver/firefox/.*/x_ignore_nofocus\.so\.debug
.*/lib.*/libiptc\.so.*\.debug
.*/autotest/.*\.debug
-.*/opt/intel/fw_parser\.debug
# todos:
diff --git a/dejagnu/gdb_dejagnu.py b/dejagnu/gdb_dejagnu.py
index 91fa51fd..c01d909b 100755
--- a/dejagnu/gdb_dejagnu.py
+++ b/dejagnu/gdb_dejagnu.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#! /usr/bin/python
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -196,7 +196,7 @@ class DejagnuExecuter(object):
self.PrepareGdbserverDefault()
def PrepareGdbserverDefault(self):
- cmd = ('setup_board --board {0}; '
+ cmd = ('./setup_board --board {0}; '
'{1} emerge-{0} gdb'.format(self._board, self._mount_flag))
ret = self._executer.ChrootRunCommand(self._chromeos_root,
cmd,
diff --git a/dejagnu/run_dejagnu.py b/dejagnu/run_dejagnu.py
index 7b6e6fa3..b4cbc8f4 100755
--- a/dejagnu/run_dejagnu.py
+++ b/dejagnu/run_dejagnu.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
"""Tool script for auto dejagnu."""
diff --git a/fdo_scripts/divide_and_merge_profiles.py b/fdo_scripts/divide_and_merge_profiles.py
index 5108aa6f..99155852 100755
--- a/fdo_scripts/divide_and_merge_profiles.py
+++ b/fdo_scripts/divide_and_merge_profiles.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""Script to divide and merge profiles."""
diff --git a/fdo_scripts/divide_and_merge_profiles_test.py b/fdo_scripts/divide_and_merge_profiles_test.py
index 2bfb5cf1..27141df3 100755
--- a/fdo_scripts/divide_and_merge_profiles_test.py
+++ b/fdo_scripts/divide_and_merge_profiles_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
diff --git a/fdo_scripts/profile_cycler.py b/fdo_scripts/profile_cycler.py
index 176f2d4a..7715612f 100755
--- a/fdo_scripts/profile_cycler.py
+++ b/fdo_scripts/profile_cycler.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""Script to profile a page cycler, and get it back to the host."""
diff --git a/go/chromeos/build_go b/go/chromeos/build_go
index 0dec7dfd..164ea3f7 100755
--- a/go/chromeos/build_go
+++ b/go/chromeos/build_go
@@ -6,8 +6,7 @@ set -e -o pipefail
# Usage: build_go
#
# It assumes that the "x86_64-cros-linux-gnu" toolchain is already installed.
-# It assumes that the "armv7a-cros-linux-gnueabihf" toolchain is
-# already installed.
+# It assumes that the "armv7a-cros-linux-gnueabi" toolchain is already installed.
# It assumes that the "aarch64-cros-linux-gnu" toolchain is already installed.
if [[ ! -e "make.bash" && -e "src/make.bash" ]]
@@ -27,12 +26,12 @@ GOOS="linux" GOARCH="amd64" CGO_ENABLED="1" \
# Build the Go toolchain for arm targets.
GOOS="linux" GOARCH="arm" CGO_ENABLED="1" \
- CC_FOR_TARGET="armv7a-cros-linux-gnueabihf-clang" \
- CXX_FOR_TARGET="armv7a-cros-linux-gnueabihf-clang++" \
+ CC_FOR_TARGET="armv7a-cros-linux-gnueabi-clang" \
+ CXX_FOR_TARGET="armv7a-cros-linux-gnueabi-clang++" \
./make.bash --no-clean
GOOS="linux" GOARCH="arm" CGO_ENABLED="1" \
- CC="armv7a-cros-linux-gnueabihf-clang" \
- CXX="armv7a-cros-linux-gnueabihf-clang++" \
+ CC="armv7a-cros-linux-gnueabi-clang" \
+ CXX="armv7a-cros-linux-gnueabi-clang++" \
../bin/go install -v -buildmode=pie std
# Build the Go toolchain for arm64 targets.
diff --git a/go/chromeos/go_chell b/go/chromeos/go_chell
new file mode 100755
index 00000000..ca6a7dba
--- /dev/null
+++ b/go/chromeos/go_chell
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Invoke the Go cross compiler for chell.
+# Uses ../go_target to add PIE flags.
+#
+# This is just an example for an amd64 board.
+
+GOOS="linux" GOARCH="amd64" CGO_ENABLED="1" \
+ CC="x86_64-cros-linux-gnu-clang" \
+ CXX="x86_64-cros-linux-gnu-clang++" \
+ exec go_target "$@"
diff --git a/go/chromeos/go_chell_exec b/go/chromeos/go_chell_exec
new file mode 100755
index 00000000..8fac94bd
--- /dev/null
+++ b/go/chromeos/go_chell_exec
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Copy and remotely execute a binary on the chell device.
+#
+# For this to work, the corresponding entry must exist in
+# ~/.ssh/config and the device must already be setup for
+# password-less authentication. See setup instructions at
+# http://go/chromeos-toolchain-team/go-toolchain
+
+GOOS="linux" GOARCH="amd64" \
+ GOLOADER="/tmp/glibc/ld.so" \
+ exec go_target_exec chell "$@"
diff --git a/go/chromeos/go_elm b/go/chromeos/go_elm
new file mode 100755
index 00000000..a92d9c64
--- /dev/null
+++ b/go/chromeos/go_elm
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Invoke the Go cross compiler for elm.
+# Uses ../go_target to add PIE flags.
+#
+# This is just an example for an arm64 board.
+
+GOOS="linux" GOARCH="arm64" CGO_ENABLED="1" \
+ CC="aarch64-cros-linux-gnu-clang" \
+ CXX="aarch64-cros-linux-gnu-clang++" \
+ exec go_target "$@"
diff --git a/go/chromeos/go_elm32 b/go/chromeos/go_elm32
new file mode 100755
index 00000000..2bcb3f3b
--- /dev/null
+++ b/go/chromeos/go_elm32
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Invoke the Go cross compiler for elm32.
+# Uses ../go_target to add PIE flags.
+#
+# This is just an example for an arm board.
+
+GOOS="linux" GOARCH="arm" CGO_ENABLED="1" \
+ CC="armv7a-cros-linux-gnueabi-clang" \
+ CXX="armv7a-cros-linux-gnueabi-clang++" \
+ exec go_target "$@"
diff --git a/go/chromeos/go_elm32_exec b/go/chromeos/go_elm32_exec
new file mode 100755
index 00000000..3e115a9a
--- /dev/null
+++ b/go/chromeos/go_elm32_exec
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Copy and remotely execute a binary on the elm32 device.
+#
+# For this to work, the corresponding entry must exist in
+# ~/.ssh/config and the device must already be setup for
+# password-less authentication. See setup instructions at
+# http://go/chromeos-toolchain-team/go-toolchain
+
+GOOS="linux" GOARCH="arm" \
+ GOLOADER="/tmp/glibc32/ld.so" \
+ exec go_target_exec elm32 "$@"
diff --git a/go/chromeos/go_elm_exec b/go/chromeos/go_elm_exec
new file mode 100755
index 00000000..da244c2e
--- /dev/null
+++ b/go/chromeos/go_elm_exec
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Copy and remotely execute a binary on the elm device.
+#
+# For this to work, the corresponding entry must exist in
+# ~/.ssh/config and the device must already be setup for
+# password-less authentication. See setup instructions at
+# http://go/chromeos-toolchain-team/go-toolchain
+
+GOOS="linux" GOARCH="arm64" \
+ GOLOADER="/tmp/glibc/ld.so" \
+ exec go_target_exec elm "$@"
diff --git a/go/chromeos/push_glibc b/go/chromeos/push_glibc
index 8211d9d5..7528284c 100755
--- a/go/chromeos/push_glibc
+++ b/go/chromeos/push_glibc
@@ -13,7 +13,7 @@ set -e -o pipefail
for target in "$@"
do
echo -n "pushing glibc to ${target} ... "
- case "$(ssh -i ${HOME}/.ssh/testing_rsa ${target} uname -m)" in
+ case "$(ssh ${target} uname -m)" in
x86_64)
glibc="/usr/x86_64-cros-linux-gnu/lib64"
loader="ld-linux-x86-64.so.2"
@@ -23,7 +23,7 @@ do
aarch64)
glibc="/usr/aarch64-cros-linux-gnu/lib64"
loader="ld-linux-aarch64.so.1"
- glibc32="/usr/armv7a-cros-linux-gnueabihf/lib"
+ glibc32="/usr/armv7a-cros-linux-gnueabi/lib"
loader32="ld-linux-armhf.so.3"
;;
*)
diff --git a/go/chromeos/setup_chromeos_testing.py b/go/chromeos/setup_chromeos_testing.py
deleted file mode 100755
index b679ddfc..00000000
--- a/go/chromeos/setup_chromeos_testing.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2018 Google Inc. All Rightes Reserved
-"""Generate board-specific scripts for Go compiler testing."""
-
-from __future__ import print_function
-
-import argparse
-import getpass
-import os
-import sys
-
-from cros_utils import command_executer
-
-SUCCESS = 0
-DEBUG = False
-
-ARCH_DATA = {'x86_64': 'amd64', 'arm32': 'arm', 'arm64': 'arm64'}
-
-CROS_TOOLCHAIN_DATA = {
- 'x86_64': 'x86_64-cros-linux-gnu',
- 'arm32': 'armv7a-cros-linux-gnueabihf',
- 'arm64': 'aarch64-cros-linux-gnu'
-}
-
-GLIBC_DATA = {'x86_64': 'glibc', 'arm32': 'glibc32', 'arm64': 'glibc'}
-
-CONFIG_TEMPLATE = """
-Host %s
- HostName %s
- User root
- UserKnownHostsFile /dev/null
- BatchMode yes
- CheckHostIP no
- StrictHostKeyChecking no
- IdentityFile %%d/.ssh/testing_rsa
-"""
-
-BASE_TEMPLATE = """#!/bin/bash
-
-# Invoke the Go cross compiler for %s.
-# Uses ../go_target to add PIE flags.
-#
-# This is just an example for an %s board.
-
-GOOS="linux" GOARCH="%s" CGO_ENABLED="1" \\
- GOROOT="/usr/lib/go/%s" \\
- CC="%s-clang" \\
- CXX="%s-clang++" \\
- exec go_target "$@"
-"""
-
-EXEC_TEMPLATE = """#!/bin/bash
-
-# Copy and remotely execute a binary on the %s device.
-#
-# For this to work, the corresponding entry must exist in
-# ~/.ssh/config and the device must already be setup for
-# password-less authentication. See setup instructions at
-# http://go/chromeos-toolchain-team/go-toolchain
-
-GOOS="linux" GOARCH="%s" \\
- GOLOADER="/tmp/%s/ld.so" \\
- exec go_target_exec %s "$@"
-"""
-
-
-def log(msg):
-
- if DEBUG:
- print(msg)
-
-
-def WriteFile(file_content, file_name):
- with open(file_name, 'w') as out_file:
- out_file.write(file_content)
-
-
-def GenerateGoHelperScripts(ce, arm_board, x86_board, chromeos_root):
- keys = ['x86_64', 'arm32', 'arm64']
- names = {
- 'x86_64': x86_board,
- 'arm64': arm_board,
- 'arm32': ("%s32" % arm_board)
- }
-
- toolchain_dir = os.path.join(chromeos_root, 'src', 'third_party',
- 'toolchain-utils', 'go', 'chromeos')
- for k in keys:
- name = names[k]
- arch = ARCH_DATA[k]
- toolchain = CROS_TOOLCHAIN_DATA[k]
- glibc = GLIBC_DATA[k]
-
- base_file = os.path.join(toolchain_dir, ("go_%s" % name))
- base_file_content = BASE_TEMPLATE % (name, arch, arch, toolchain, toolchain,
- toolchain)
- WriteFile(base_file_content, base_file)
- cmd = "chmod 755 %s" % base_file
- ce.RunCommand(cmd)
-
- exec_file = os.path.join(toolchain_dir, ("go_%s_exec" % name))
- exec_file_content = EXEC_TEMPLATE % (name, arch, glibc, name)
- WriteFile(exec_file_content, exec_file)
- cmd = "chmod 755 %s" % exec_file
- ce.RunCommand(cmd)
-
- return 0
-
-
-def UpdateChrootSshConfig(ce, arm_board, arm_dut, x86_board, x86_dut,
- chromeos_root):
- log("Entering UpdateChrootSshConfig")
- # Copy testing_rsa to .ssh and set file protections properly.
- user = getpass.getuser()
- ssh_dir = os.path.join(chromeos_root, 'chroot', 'home', user, '.ssh')
- dest_file = os.path.join(ssh_dir, 'testing_rsa')
- src_file = os.path.join(chromeos_root, 'src', 'scripts',
- 'mod_for_test_scripts', 'testing_rsa')
- if not os.path.exists(dest_file):
- if os.path.exists(src_file):
- cmd = 'cp %s %s' % (src_file, dest_file)
- ret = ce.RunCommand(cmd)
- if ret != SUCCESS:
- print('Error executing "%s". Exiting now...' % cmd)
- sys.exit(1)
- cmd = 'chmod 600 %s' % dest_file
- ret = ce.RunCommand(cmd)
- if ret != SUCCESS:
- print('Error executing %s; may need to re-run this manually.' % cmd)
- else:
- print('Cannot find %s; you will need to update testing_rsa by hand.' %
- src_file)
- else:
- log("testing_rsa exists already.")
-
- # Save ~/.ssh/config file, if not already done.
- config_file = os.path.expanduser("~/.ssh/config")
- saved_config_file = os.path.join(
- os.path.expanduser("~/.ssh"), "config.save.go-scripts")
- if not os.path.exists(saved_config_file):
- cmd = "cp %s %s" % (config_file, saved_config_file)
- ret = ce.RunCommand(cmd)
- if ret != SUCCESS:
- print("Error making save copy of ~/.ssh/config. Exiting...")
- sys.exit(1)
-
- # Update ~/.ssh/config file
- log("Reading ssh config file")
- with open(config_file, "r") as input_file:
- config_lines = input_file.read()
-
- x86_host_config = CONFIG_TEMPLATE % (x86_board, x86_dut)
- arm_names = '%s %s32' % (arm_board, arm_board)
- arm_host_config = CONFIG_TEMPLATE % (arm_names, arm_dut)
-
- config_lines += x86_host_config
- config_lines += arm_host_config
-
- log("Writing ~/.ssh/config")
- WriteFile(config_lines, config_file)
-
- return 0
-
-
-def CleanUp(ce, x86_board, arm_board, chromeos_root):
- # Find and remove go helper scripts
- keys = ['x86_64', 'arm32', 'arm64']
- names = {
- 'x86_64': x86_board,
- 'arm64': arm_board,
- 'arm32': ("%s32" % arm_board)
- }
-
- toolchain_dir = os.path.join(chromeos_root, 'src', 'third_party',
- 'toolchain-utils', 'go', 'chromeos')
- for k in keys:
- name = names[k]
- base_file = os.path.join(toolchain_dir, ("go_%s" % name))
- exec_file = os.path.join(toolchain_dir, ("go_%s_exec" % name))
- cmd = ('rm -f %s; rm -f %s' % (base_file, exec_file))
- ce.RunCommand(cmd)
-
- # Restore saved config_file
- config_file = os.path.expanduser("~/.ssh/config")
- saved_config_file = os.path.join(
- os.path.expanduser("~/.ssh"), "config.save.go-scripts")
- if not os.path.exists(saved_config_file):
- print("Could not find file: %s; unable to restore ~/.ssh/config ." %
- saved_config_file)
- else:
- cmd = "mv %s %s" % (saved_config_file, config_file)
- ce.RunCommand(cmd)
-
- return 0
-
-
-def Main(argv):
- # pylint: disable=global-statement
- global DEBUG
-
- parser = argparse.ArgumentParser()
- parser.add_argument('-a', '--arm64_board', dest='arm_board', required=True)
- parser.add_argument(
- '-b', '--x86_64_board', dest='x86_64_board', required=True)
- parser.add_argument(
- '-c', '--chromeos_root', dest='chromeos_root', required=True)
- parser.add_argument('-x', '--x86_64_dut', dest='x86_64_dut', required=True)
- parser.add_argument('-y', '--arm64_dut', dest='arm_dut', required=True)
- parser.add_argument(
- '-z', '--cleanup', dest='cleanup', default=False, action='store_true')
- parser.add_argument(
- '-v', '--verbose', dest='verbose', default=False, action='store_true')
-
- options = parser.parse_args(argv[1:])
-
- if options.verbose:
- DEBUG = True
-
- if not os.path.exists(options.chromeos_root):
- print("Invalid ChromeOS Root: %s" % options.chromeos_root)
-
- ce = command_executer.GetCommandExecuter()
- all_good = True
- for m in (options.x86_64_dut, options.arm_dut):
- cmd = 'ping -c 3 %s > /dev/null' % m
- ret = ce.RunCommand(cmd)
- if ret != SUCCESS:
- print('Machine %s is currently not responding to ping.' % m)
- all_good = False
-
- if not all_good:
- return 1
-
- if not options.cleanup:
- UpdateChrootSshConfig(ce, options.arm_board, options.arm_dut,
- options.x86_64_board, options.x86_64_dut,
- options.chromeos_root)
- GenerateGoHelperScripts(ce, options.arm_board, options.x86_64_board,
- options.chromeos_root)
- else:
- CleanUp(ce, options.x86_64_board, options.arm_board, options.chromeos_root)
-
- return 0
-
-
-if __name__ == "__main__":
- val = Main(sys.argv)
- sys.exit(val)
diff --git a/go/chromeos/target_cp b/go/chromeos/target_cp
index 10f4bf72..8e0c4055 100755
--- a/go/chromeos/target_cp
+++ b/go/chromeos/target_cp
@@ -22,7 +22,7 @@ fi
if [[ -d ${src} ]]
then
- tar -C $(dirname ${src}) -zcf - $(basename ${src}) | ssh -i ${HOME}/.ssh/testing_rsa ${target} "tar -C ${dest} -zxf -"
+ tar -C $(dirname ${src}) -zcf - $(basename ${src}) | ssh ${target} "tar -C ${dest} -zxf -"
else
- scp -i ${HOME}/.ssh/testing_rsa -q ${src} ${target}:${dest}
+ scp -q ${src} ${target}:${dest}
fi
diff --git a/go/chromeos/target_sh b/go/chromeos/target_sh
index 4c56252e..43c5fac6 100755
--- a/go/chromeos/target_sh
+++ b/go/chromeos/target_sh
@@ -8,4 +8,4 @@ set -e -o pipefail
target="$1"
shift
-ssh -i ${HOME}/.ssh/testing_rsa ${target} "$*"
+ssh ${target} "$*"
diff --git a/go/patch/go-1.10.3/go0.patch b/go/patch/go-1.10.3/go0.patch
deleted file mode 100644
index f80045c0..00000000
--- a/go/patch/go-1.10.3/go0.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-diff --git src/go/build/deps_test.go src/go/build/deps_test.go
-index 29dbe47d29..53e0e287bc 100644
---- src/go/build/deps_test.go
-+++ src/go/build/deps_test.go
-@@ -191,7 +191,7 @@ var pkgDeps = map[string][]string{
- "testing": {"L2", "flag", "fmt", "internal/race", "os", "runtime/debug", "runtime/pprof", "runtime/trace", "time"},
- "testing/iotest": {"L2", "log"},
- "testing/quick": {"L2", "flag", "fmt", "reflect", "time"},
-- "internal/testenv": {"L2", "OS", "flag", "testing", "syscall"},
-+ "internal/testenv": {"L2", "OS", "os.exec", "flag", "testing", "syscall"},
-
- // L4 is defined as L3+fmt+log+time, because in general once
- // you're using L3 packages, use of fmt, log, or time is not a big deal.
-diff --git src/internal/testenv/testenv.go src/internal/testenv/testenv.go
-index 8f69fe0da5..d52b85e122 100644
---- src/internal/testenv/testenv.go
-+++ src/internal/testenv/testenv.go
-@@ -48,6 +48,9 @@ func HasGoBuild() bool {
- return false
- }
- }
-+ if _, err := exec.LookPath("go"); err != nil {
-+ return false
-+ }
- return true
- }
-
diff --git a/go/patch/go-1.10.3/go1.patch b/go/patch/go-1.10.3/go1.patch
deleted file mode 100644
index e05fcce4..00000000
--- a/go/patch/go-1.10.3/go1.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-diff --git test/chanlinear.go test/chanlinear.go
-index 55fee4ab9b..89533da282 100644
---- test/chanlinear.go
-+++ test/chanlinear.go
-@@ -1,4 +1,4 @@
--// +build darwin linux
-+// +build darwin linux android
- // run
-
- // Copyright 2014 The Go Authors. All rights reserved.
-diff --git a/test/fixedbugs/bug385_64.go b/test/fixedbugs/bug385_64.go
-index 0f941ca2f4..3bcd62f3ad 100644
---- test/fixedbugs/bug385_64.go
-+++ test/fixedbugs/bug385_64.go
-@@ -1,4 +1,4 @@
--// +build amd64
-+// +build amd64 arm64
- // errorcheck
-
- // Copyright 2011 The Go Authors. All rights reserved.
-diff --git test/fixedbugs/issue10607.go test/fixedbugs/issue10607.go
-index 8831547da8..9ee6c72bc6 100644
---- test/fixedbugs/issue10607.go
-+++ test/fixedbugs/issue10607.go
-@@ -1,4 +1,4 @@
--// +build linux,!ppc64
-+// +build linux,!ppc64 android
- // run
-
- // Copyright 2015 The Go Authors. All rights reserved.
-diff --git test/maplinear.go test/maplinear.go
-index 34d0914914..afddab627d 100644
---- test/maplinear.go
-+++ test/maplinear.go
-@@ -1,4 +1,4 @@
--// +build darwin linux
-+// +build darwin linux android
- // run
-
- // Copyright 2013 The Go Authors. All rights reserved.
-diff --git test/recover4.go test/recover4.go
-index 67ed970ecb..95a89dab00 100644
---- test/recover4.go
-+++ test/recover4.go
-@@ -1,4 +1,4 @@
--// +build linux darwin
-+// +build linux android darwin
- // run
-
- // Copyright 2015 The Go Authors. All rights reserved.
diff --git a/go/patch/go-1.10.3/go2.patch b/go/patch/go-1.10.3/go2.patch
deleted file mode 100644
index bbd2b744..00000000
--- a/go/patch/go-1.10.3/go2.patch
+++ /dev/null
@@ -1,267 +0,0 @@
-diff --git test/run.go test/run.go
-index 22ec7576f8..ac5d3c3e8d 100644
---- test/run.go
-+++ test/run.go
-@@ -39,9 +39,9 @@ var (
- summary = flag.Bool("summary", false, "show summary of results")
- showSkips = flag.Bool("show_skips", false, "show skipped tests")
- runSkips = flag.Bool("run_skips", false, "run skipped tests (ignore skip and build tags)")
-- linkshared = flag.Bool("linkshared", false, "")
- updateErrors = flag.Bool("update_errors", false, "update error messages in test file based on compiler output")
- runoutputLimit = flag.Int("l", defaultRunOutputLimit(), "number of parallel runoutput tests to run")
-+ target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-
- shard = flag.Int("shard", 0, "shard index to run. Only applicable if -shards is non-zero.")
- shards = flag.Int("shards", 0, "number of shards. If 0, all tests are run. This is used by the continuous build.")
-@@ -194,21 +194,15 @@ func goFiles(dir string) []string {
- type runCmd func(...string) ([]byte, error)
-
- func compileFile(runcmd runCmd, longname string, flags []string) (out []byte, err error) {
-- cmd := []string{"go", "tool", "compile", "-e"}
-+ cmd := []string{findGoCmd(), "tool", "compile", "-e"}
- cmd = append(cmd, flags...)
-- if *linkshared {
-- cmd = append(cmd, "-dynlink", "-installsuffix=dynlink")
-- }
- cmd = append(cmd, longname)
- return runcmd(cmd...)
- }
-
- func compileInDir(runcmd runCmd, dir string, flags []string, names ...string) (out []byte, err error) {
-- cmd := []string{"go", "tool", "compile", "-e", "-D", ".", "-I", "."}
-+ cmd := []string{findGoCmd(), "tool", "compile", "-e", "-D", ".", "-I", "."}
- cmd = append(cmd, flags...)
-- if *linkshared {
-- cmd = append(cmd, "-dynlink", "-installsuffix=dynlink")
-- }
- for _, name := range names {
- cmd = append(cmd, filepath.Join(dir, name))
- }
-@@ -217,15 +211,24 @@ func compileInDir(runcmd runCmd, dir string, flags []string, names ...string) (o
-
- func linkFile(runcmd runCmd, goname string) (err error) {
- pfile := strings.Replace(goname, ".go", ".o", -1)
-- cmd := []string{"go", "tool", "link", "-w", "-o", "a.exe", "-L", "."}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared", "-installsuffix=dynlink")
-- }
-- cmd = append(cmd, pfile)
-- _, err = runcmd(cmd...)
-+ cmd := []string{findGoCmd, "tool", "link", "-w", "-o", "a.exe", "-L", "."}
-+ _, err = runcmd(findGoCmd(), "tool", "link", "-w", "-o", "a.exe", "-L", ".", pfile)
- return
- }
-
-+
-+func goRun(runcmd runCmd, flags []string, goname string, args ...string) (out []byte, err error) {
-+ cmd := []string{findGoCmd(), "run", goGcflags()}
-+ if len(findExecCmd()) > 0 {
-+ cmd = append(cmd, "-exec")
-+ cmd = append(cmd, findExecCmd()...)
-+ }
-+ cmd = append(cmd, flags...)
-+ cmd = append(cmd, goname)
-+ cmd = append(cmd, args...)
-+ return runcmd(cmd...)
-+}
-+
- // skipError describes why a test was skipped.
- type skipError string
-
-@@ -595,7 +598,7 @@ func (t *test) run() {
-
- case "errorcheck":
- // TODO(gri) remove need for -C (disable printing of columns in error messages)
-- cmdline := []string{"go", "tool", "compile", "-C", "-e", "-o", "a.o"}
-+ cmdline := []string{findGoCmd(), "tool", "compile", "-C", "-e", "-o", "a.o"}
- // No need to add -dynlink even if linkshared if we're just checking for errors...
- cmdline = append(cmdline, flags...)
- cmdline = append(cmdline, long)
-@@ -709,7 +712,7 @@ func (t *test) run() {
- }
-
- case "build":
-- _, err := runcmd("go", "build", goGcflags(), "-o", "a.exe", long)
-+ _, err := runcmd(findGoCmd(), "build", goGcflags(), "-o", "a.exe", long)
- if err != nil {
- t.err = err
- }
-@@ -735,7 +738,7 @@ func (t *test) run() {
-
- }
- var objs []string
-- cmd := []string{"go", "tool", "compile", "-e", "-D", ".", "-I", ".", "-o", "go.o"}
-+ cmd := []string{findGoCmd(), "tool", "compile", "-e", "-D", ".", "-I", ".", "-o", "go.o"}
- if len(asms) > 0 {
- cmd = append(cmd, "-asmhdr", "go_asm.h")
- }
-@@ -749,7 +752,7 @@ func (t *test) run() {
- }
- objs = append(objs, "go.o")
- if len(asms) > 0 {
-- cmd = []string{"go", "tool", "asm", "-e", "-I", ".", "-o", "asm.o"}
-+ cmd = []string{findGoCmd(), "tool", "asm", "-e", "-I", ".", "-o", "asm.o"}
- for _, file := range asms {
- cmd = append(cmd, filepath.Join(longdir, file.Name()))
- }
-@@ -760,14 +763,14 @@ func (t *test) run() {
- }
- objs = append(objs, "asm.o")
- }
-- cmd = []string{"go", "tool", "pack", "c", "all.a"}
-+ cmd = []string{findGoCmd(), "tool", "pack", "c", "all.a"}
- cmd = append(cmd, objs...)
- _, err = runcmd(cmd...)
- if err != nil {
- t.err = err
- break
- }
-- cmd = []string{"go", "tool", "link", "all.a"}
-+ cmd = []string{findGoCmd(), "tool", "link", "-o", "a.exe", "all.a"}
- _, err = runcmd(cmd...)
- if err != nil {
- t.err = err
-@@ -777,10 +780,7 @@ func (t *test) run() {
- case "buildrun": // build binary, then run binary, instead of go run. Useful for timeout tests where failure mode is infinite loop.
- // TODO: not supported on NaCl
- useTmp = true
-- cmd := []string{"go", "build", goGcflags(), "-o", "a.exe"}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-+ cmd := []string{findGoCmd(), "build", goGcflags(), "-o", "a.exe"}
- longdirgofile := filepath.Join(filepath.Join(cwd, t.dir), t.gofile)
- cmd = append(cmd, flags...)
- cmd = append(cmd, longdirgofile)
-@@ -789,7 +789,12 @@ func (t *test) run() {
- t.err = err
- return
- }
-- cmd = []string{"./a.exe"}
-+ cmd = []string{}
-+ if len(findExecCmd()) > 0 {
-+ cmd = append(cmd, findExecCmd()...)
-+ }
-+ cmd = append(cmd, "./a.exe")
-+
- out, err = runcmd(append(cmd, args...)...)
- if err != nil {
- t.err = err
-@@ -802,38 +807,7 @@ func (t *test) run() {
-
- case "run":
- useTmp = false
-- var out []byte
-- var err error
-- if len(flags)+len(args) == 0 && goGcflags() == "" && !*linkshared {
-- // If we're not using special go command flags,
-- // skip all the go command machinery.
-- // This avoids any time the go command would
-- // spend checking whether, for example, the installed
-- // package runtime is up to date.
-- // Because we run lots of trivial test programs,
-- // the time adds up.
-- pkg := filepath.Join(t.tempDir, "pkg.a")
-- if _, err := runcmd("go", "tool", "compile", "-o", pkg, t.goFileName()); err != nil {
-- t.err = err
-- return
-- }
-- exe := filepath.Join(t.tempDir, "test.exe")
-- cmd := []string{"go", "tool", "link", "-s", "-w"}
-- cmd = append(cmd, "-o", exe, pkg)
-- if _, err := runcmd(cmd...); err != nil {
-- t.err = err
-- return
-- }
-- out, err = runcmd(append([]string{exe}, args...)...)
-- } else {
-- cmd := []string{"go", "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, flags...)
-- cmd = append(cmd, t.goFileName())
-- out, err = runcmd(append(cmd, args...)...)
-- }
-+ out, err := goRun(runcmd, flags, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -848,12 +822,7 @@ func (t *test) run() {
- <-rungatec
- }()
- useTmp = false
-- cmd := []string{"go", "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, t.goFileName())
-- out, err := runcmd(append(cmd, args...)...)
-+ out, err := goRun(runcmd, nil, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -863,12 +832,7 @@ func (t *test) run() {
- t.err = fmt.Errorf("write tempfile:%s", err)
- return
- }
-- cmd = []string{"go", "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, tfile)
-- out, err = runcmd(cmd...)
-+ out, err = goRun(runcmd, nil, tfile)
- if err != nil {
- t.err = err
- return
-@@ -879,12 +843,7 @@ func (t *test) run() {
-
- case "errorcheckoutput":
- useTmp = false
-- cmd := []string{"go", "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, t.goFileName())
-- out, err := runcmd(append(cmd, args...)...)
-+ out, err := goRun(runcmd, nil, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -895,7 +854,7 @@ func (t *test) run() {
- t.err = fmt.Errorf("write tempfile:%s", err)
- return
- }
-- cmdline := []string{"go", "tool", "compile", "-e", "-o", "a.o"}
-+ cmdline := []string{findGoCmd(), "tool", "compile", "-e", "-o", "a.o"}
- cmdline = append(cmdline, flags...)
- cmdline = append(cmdline, tfile)
- out, err = runcmd(cmdline...)
-@@ -922,6 +881,11 @@ func findExecCmd() []string {
- return execCmd
- }
- execCmd = []string{} // avoid work the second time
-+ if *target != "" {
-+ execCmd = []string{"go_" + *target + "_exec"}
-+ return execCmd
-+ }
-+
- if goos == runtime.GOOS && goarch == runtime.GOARCH {
- return execCmd
- }
-@@ -932,6 +896,14 @@ func findExecCmd() []string {
- return execCmd
- }
-
-+func findGoCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+
- func (t *test) String() string {
- return filepath.Join(t.dir, t.gofile)
- }
diff --git a/go/patch/go-1.10.3/go3.patch b/go/patch/go-1.10.3/go3.patch
deleted file mode 100644
index 223ccb85..00000000
--- a/go/patch/go-1.10.3/go3.patch
+++ /dev/null
@@ -1,732 +0,0 @@
-diff --git test/fixedbugs/bug302.go test/fixedbugs/bug302.go
-index e4de25d5d0..ea566e6e44 100644
---- test/fixedbugs/bug302.go
-+++ test/fixedbugs/bug302.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2010 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -8,16 +8,27 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
-- run("go", "tool", "compile", filepath.Join("fixedbugs", "bug302.dir", "p.go"))
-- run("go", "tool", "pack", "grc", "pp.a", "p.o")
-- run("go", "tool", "compile", "-I", ".", filepath.Join("fixedbugs", "bug302.dir", "main.go"))
-+ flag.Parse()
-+ run(goCmd(), "tool", "compile", filepath.Join("fixedbugs", "bug302.dir", "p.go"))
-+ run(goCmd(), "tool", "pack", "grc", "pp.a", "p.o")
-+ run(goCmd(), "tool", "compile", "-I", ".", filepath.Join("fixedbugs", "bug302.dir", "main.go"))
- os.Remove("p.o")
- os.Remove("pp.a")
- os.Remove("main.o")
-diff --git test/fixedbugs/bug369.go test/fixedbugs/bug369.go
-index 60162ab1cb..4470d5a076 100644
---- test/fixedbugs/bug369.go
-+++ test/fixedbugs/bug369.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!windows
--// run
-+// runtarget
-
- // Copyright 2011 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,21 +10,40 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(cmd ...string) {
-+ if *target == "" {
-+ run(cmd[0], cmd[1:]...)
-+ } else {
-+ run("go_"+*target+"_exec", cmd...)
-+ }
-+}
-+
- func main() {
-+ flag.Parse()
- err := os.Chdir(filepath.Join(".", "fixedbugs", "bug369.dir"))
- check(err)
-
-- run("go", "tool", "compile", "-N", "-o", "slow.o", "pkg.go")
-- run("go", "tool", "compile", "-o", "fast.o", "pkg.go")
-- run("go", "tool", "compile", "-o", "main.o", "main.go")
-- run("go", "tool", "link", "-o", "a.exe", "main.o")
-- run("." + string(filepath.Separator) + "a.exe")
-+ run(goCmd(), "tool", "compile", "-N", "-o", "slow.o", "pkg.go")
-+ run(goCmd(), "tool", "compile", "-o", "fast.o", "pkg.go")
-+ run(goCmd(), "tool", "compile", "-o", "main.o", "main.go")
-+ run(goCmd(), "tool", "link", "-o", "a.exe", "main.o")
-+ goRun("." + string(filepath.Separator) + "a.exe")
-
- os.Remove("slow.o")
- os.Remove("fast.o")
-diff --git test/fixedbugs/bug429_run.go test/fixedbugs/bug429_run.go
-index 284033d1f7..e8d18b13e8 100644
---- test/fixedbugs/bug429_run.go
-+++ test/fixedbugs/bug429_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,6 +10,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
-@@ -17,8 +18,27 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- cmd := exec.Command("go", "run", filepath.Join("fixedbugs", "bug429.go"))
-+ flag.Parse()
-+ cmd := goRun(filepath.Join("fixedbugs", "bug429.go"))
- out, err := cmd.CombinedOutput()
- if err == nil {
- fmt.Println("expected deadlock")
-diff --git test/fixedbugs/issue10607.go test/fixedbugs/issue10607.go
-index 9ee6c72bc6..e819a3085a 100644
---- test/fixedbugs/issue10607.go
-+++ test/fixedbugs/issue10607.go
-@@ -1,5 +1,5 @@
- // +build linux,!ppc64 android
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,19 +11,39 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- test("internal")
-+ flag.Parse()
-+ //test("internal")
- test("external")
- }
-
- func test(linkmode string) {
-- out, err := exec.Command("go", "run", "-ldflags", "-B=0x12345678 -linkmode="+linkmode, filepath.Join("fixedbugs", "issue10607a.go")).CombinedOutput()
-+ out, err := goRun("-ldflags", "-B=0x12345678 -linkmode="+linkmode, filepath.Join("fixedbugs", "issue10607a.go")).CombinedOutput()
- if err != nil {
- fmt.Printf("BUG: linkmode=%s %v\n%s\n", linkmode, err, out)
- os.Exit(1)
-diff --git test/fixedbugs/issue11771.go test/fixedbugs/issue11771.go
-index d91fc5d966..4f55ce6982 100644
---- test/fixedbugs/issue11771.go
-+++ test/fixedbugs/issue11771.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,6 +11,7 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -20,7 +21,17 @@ import (
- "runtime"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
-+ flag.Parse()
- if runtime.Compiler != "gc" {
- return
- }
-@@ -52,7 +63,7 @@ func x() {
- log.Fatal(err)
- }
-
-- cmd := exec.Command("go", "tool", "compile", "x.go")
-+ cmd := exec.Command(goCmd(), "tool", "compile", "x.go")
- cmd.Dir = dir
- output, err := cmd.CombinedOutput()
- if err == nil {
-diff --git test/fixedbugs/issue9355.go test/fixedbugs/issue9355.go
-index 10f8c73069..87356c7402 100644
---- test/fixedbugs/issue9355.go
-+++ test/fixedbugs/issue9355.go
-@@ -1,4 +1,4 @@
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -7,6 +7,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
-@@ -15,7 +16,17 @@ import (
- "runtime"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
-+ flag.Parse()
- if runtime.Compiler != "gc" || runtime.GOOS == "nacl" {
- return
- }
-@@ -23,7 +34,7 @@ func main() {
- err := os.Chdir(filepath.Join("fixedbugs", "issue9355.dir"))
- check(err)
-
-- out := run("go", "tool", "compile", "-S", "a.go")
-+ out := run(goCmd(), "tool", "compile", "-S", "a.go")
- os.Remove("a.o")
-
- // 6g/8g print the offset as dec, but 5g/9g print the offset as hex.
-diff --git test/fixedbugs/issue9862_run.go test/fixedbugs/issue9862_run.go
-index be22f40580..a72a59fda2 100644
---- test/fixedbugs/issue9862_run.go
-+++ test/fixedbugs/issue9862_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,12 +10,32 @@
- package main
-
- import (
-+ "flag"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- out, err := exec.Command("go", "run", "fixedbugs/issue9862.go").CombinedOutput()
-+ flag.Parse()
-+ out, err := goRun("fixedbugs/issue9862.go").CombinedOutput()
- outstr := string(out)
- if err == nil {
- println("go run issue9862.go succeeded, should have failed\n", outstr)
-diff --git test/linkmain_run.go test/linkmain_run.go
-index 55de481a81..03666e6b29 100644
---- test/linkmain_run.go
-+++ test/linkmain_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,12 +10,22 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func cleanup() {
- os.Remove("linkmain.o")
- os.Remove("linkmain.a")
-@@ -51,16 +61,17 @@ func runFail(cmdline string) {
- }
-
- func main() {
-+ flag.Parse()
- // helloworld.go is package main
-- run("go tool compile -o linkmain.o helloworld.go")
-- run("go tool compile -pack -o linkmain.a helloworld.go")
-- run("go tool link -o linkmain.exe linkmain.o")
-- run("go tool link -o linkmain.exe linkmain.a")
-+ run(goCmd() + " tool compile -o linkmain.o helloworld.go")
-+ run(goCmd() + " tool compile -pack -o linkmain.a helloworld.go")
-+ run(goCmd() + " tool link -o linkmain.exe linkmain.o")
-+ run(goCmd() + " tool link -o linkmain.exe linkmain.a")
-
- // linkmain.go is not
-- run("go tool compile -o linkmain1.o linkmain.go")
-- run("go tool compile -pack -o linkmain1.a linkmain.go")
-- runFail("go tool link -o linkmain.exe linkmain1.o")
-- runFail("go tool link -o linkmain.exe linkmain1.a")
-+ run(goCmd() + " tool compile -o linkmain1.o linkmain.go")
-+ run(goCmd() + " tool compile -pack -o linkmain1.a linkmain.go")
-+ runFail(goCmd() + " tool link -o linkmain.exe linkmain1.o")
-+ runFail(goCmd() + " tool link -o linkmain.exe linkmain1.a")
- cleanup()
- }
-diff --git test/linkobj.go test/linkobj.go
-index 8a86aa872f..0d1964e7fb 100644
---- test/linkobj.go
-+++ test/linkobj.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2016 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,6 +10,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -18,9 +19,27 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(cmd ...string) string {
-+ if *target == "" {
-+ return run(cmd...)
-+ } else {
-+ return run(append([]string{"go_"+*target+"_exec"}, cmd...)...)
-+ }
-+}
-+
- var pwd, tmpdir string
-
- func main() {
-+ flag.Parse()
- dir, err := ioutil.TempDir("", "go-test-linkobj-")
- if err != nil {
- log.Fatal(err)
-@@ -37,28 +56,28 @@ func main() {
-
- writeFile("p1.go", `
- package p1
--
-+
- func F() {
- println("hello from p1")
- }
- `)
- writeFile("p2.go", `
- package p2
--
-+
- import "./p1"
-
- func F() {
- p1.F()
- println("hello from p2")
- }
--
-+
- func main() {}
- `)
- writeFile("p3.go", `
- package main
-
- import "./p2"
--
-+
- func main() {
- p2.F()
- println("hello from main")
-@@ -76,9 +95,9 @@ func main() {
- }
-
- // inlining is disabled to make sure that the link objects contain needed code.
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p1."+o, "-linkobj", "p1.lo", "p1.go")
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p2."+o, "-linkobj", "p2.lo", "p2.go")
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p3."+o, "-linkobj", "p3.lo", "p3.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p1."+o, "-linkobj", "p1.lo", "p1.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p2."+o, "-linkobj", "p2.lo", "p2.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p3."+o, "-linkobj", "p3.lo", "p3.go")
-
- cp("p1."+o, "p1.oo")
- cp("p2."+o, "p2.oo")
-@@ -86,13 +105,13 @@ func main() {
- cp("p1.lo", "p1."+o)
- cp("p2.lo", "p2."+o)
- cp("p3.lo", "p3."+o)
-- out := runFail("go", "tool", "link", "p2."+o)
-+ out := runFail(goCmd(), "tool", "link", "p2."+o)
- if !strings.Contains(out, "not package main") {
- fatalf("link p2.o failed but not for package main:\n%s", out)
- }
-
-- run("go", "tool", "link", "-L", ".", "-o", "a.out.exe", "p3."+o)
-- out = run("./a.out.exe")
-+ run(goCmd(), "tool", "link", "-L", ".", "-o", "a.out.exe", "p3."+o)
-+ out = goRun("./a.out.exe")
- if !strings.Contains(out, "hello from p1\nhello from p2\nhello from main\n") {
- fatalf("running main, incorrect output:\n%s", out)
- }
-diff --git test/linkx_run.go test/linkx_run.go
-index cc249c9cfc..530159ab9d 100644
---- test/linkx_run.go
-+++ test/linkx_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,20 +11,40 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-+ flag.Parse()
- // test(" ") // old deprecated & removed syntax
- test("=") // new syntax
- }
-
- func test(sep string) {
- // Successful run
-- cmd := exec.Command("go", "run", "-ldflags=-X main.tbd"+sep+"hello -X main.overwrite"+sep+"trumped -X main.nosuchsymbol"+sep+"neverseen", "linkx.go")
-+ cmd := goRun("-ldflags=-X main.tbd"+sep+"hello -X main.overwrite"+sep+"trumped -X main.nosuchsymbol"+sep+"neverseen", "linkx.go")
- var out, errbuf bytes.Buffer
- cmd.Stdout = &out
- cmd.Stderr = &errbuf
-@@ -44,7 +64,7 @@ func test(sep string) {
- }
-
- // Issue 8810
-- cmd = exec.Command("go", "run", "-ldflags=-X main.tbd", "linkx.go")
-+ cmd = goRun("-ldflags=-X main.tbd", "linkx.go")
- _, err = cmd.CombinedOutput()
- if err == nil {
- fmt.Println("-X linker flag should not accept keys without values")
-@@ -52,7 +72,7 @@ func test(sep string) {
- }
-
- // Issue 9621
-- cmd = exec.Command("go", "run", "-ldflags=-X main.b=false -X main.x=42", "linkx.go")
-+ cmd = goRun("-ldflags=-X main.b=false -X main.x=42", "linkx.go")
- outx, err := cmd.CombinedOutput()
- if err == nil {
- fmt.Println("-X linker flag should not overwrite non-strings")
-diff --git test/nosplit.go test/nosplit.go
-index e6cecebde3..fed1c0e510 100644
---- test/nosplit.go
-+++ test/nosplit.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -9,6 +9,7 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -21,6 +22,24 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goArch() string {
-+ goarch, err := exec.Command(goCmd(), "env", "GOARCH").Output()
-+ if err != nil {
-+ bug()
-+ fmt.Printf("running go env GOARCH: %v\n", err)
-+ }
-+ return strings.TrimSpace(string(goarch))
-+}
-+
- var tests = `
- # These are test cases for the linker analysis that detects chains of
- # nosplit functions that would cause a stack overflow.
-@@ -194,12 +213,13 @@ var (
- )
-
- func main() {
-- goarch := os.Getenv("GOARCH")
-+ flag.Parse()
-+ goarch := goArch()
- if goarch == "" {
-- goarch = runtime.GOARCH
-+ return
- }
-
-- version, err := exec.Command("go", "tool", "compile", "-V").Output()
-+ version, err := exec.Command(goCmd(), "tool", "compile", "-V").Output()
- if err != nil {
- bug()
- fmt.Printf("running go tool compile -V: %v\n", err)
-@@ -345,7 +365,7 @@ TestCases:
- log.Fatal(err)
- }
-
-- cmd := exec.Command("go", "build")
-+ cmd := exec.Command(goCmd(), "build")
- cmd.Dir = dir
- output, err := cmd.CombinedOutput()
- if err == nil {
-diff --git test/run.go test/run.go
-index ac5d3c3e8d..62041226b0 100644
---- test/run.go
-+++ test/run.go
-@@ -229,6 +229,16 @@ func goRun(runcmd runCmd, flags []string, goname string, args ...string) (out []
- return runcmd(cmd...)
- }
-
-+func goRunTarget(runcmd runCmd, goname string, args ...string) (out []byte, err error) {
-+ cmd := []string{"go_local", "run"}
-+ cmd = append(cmd, goname)
-+ if *target != "" {
-+ cmd = append(cmd, "-target", *target)
-+ }
-+ cmd = append(cmd, args...)
-+ return runcmd(cmd...)
-+}
-+
- // skipError describes why a test was skipped.
- type skipError string
-
-@@ -491,7 +501,7 @@ func (t *test) run() {
- action = "rundir"
- case "cmpout":
- action = "run" // the run case already looks for <dir>/<test>.out files
-- case "compile", "compiledir", "build", "builddir", "run", "buildrun", "runoutput", "rundir":
-+ case "compile", "compiledir", "build", "builddir", "run", "runtarget", "buildrun", "runoutput", "rundir":
- // nothing to do
- case "errorcheckandrundir":
- wantError = false // should be no error if also will run
-@@ -816,6 +826,17 @@ func (t *test) run() {
- t.err = fmt.Errorf("incorrect output\n%s", out)
- }
-
-+ case "runtarget":
-+ useTmp = false
-+ out, err := goRunTarget(runcmd, t.goFileName(), args...)
-+ if err != nil {
-+ t.err = err
-+ return
-+ }
-+ if strings.Replace(string(out), "\r\n", "\n", -1) != t.expectedOutput() {
-+ t.err = fmt.Errorf("incorrect output\n%s", out)
-+ }
-+
- case "runoutput":
- rungatec <- true
- defer func() {
-diff --git test/sinit_run.go test/sinit_run.go
-index c9afd3b777..dc885ecffd 100644
---- test/sinit_run.go
-+++ test/sinit_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,13 +11,24 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
-- cmd := exec.Command("go", "tool", "compile", "-S", "sinit.go")
-+ flag.Parse()
-+ cmd := exec.Command(goCmd(), "tool", "compile", "-S", "sinit.go")
- out, err := cmd.CombinedOutput()
- if err != nil {
- fmt.Println(string(out))
diff --git a/go/patch/go-1.10.3/go4.patch b/go/patch/go-1.10.3/go4.patch
deleted file mode 100644
index 290de390..00000000
--- a/go/patch/go-1.10.3/go4.patch
+++ /dev/null
@@ -1,199 +0,0 @@
-runtime, crypto/x509: add -target flag.
-
---- src/crypto/x509/x509_test.go
-+++ src/crypto/x509/x509_test.go
-@@ -13,29 +13,32 @@ import (
- "crypto/rsa"
- _ "crypto/sha256"
- _ "crypto/sha512"
- "crypto/x509/pkix"
- "encoding/asn1"
- "encoding/base64"
- "encoding/hex"
- "encoding/pem"
-+ "flag"
- "fmt"
- "internal/testenv"
- "math/big"
- "net"
- "net/url"
- "os/exec"
- "reflect"
- "runtime"
- "strings"
- "testing"
- "time"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
- func TestParsePKCS1PrivateKey(t *testing.T) {
- block, _ := pem.Decode([]byte(pemPrivateKey))
- priv, err := ParsePKCS1PrivateKey(block.Bytes)
- if err != nil {
- t.Errorf("Failed to parse private key: %s", err)
- return
- }
- if priv.PublicKey.N.Cmp(rsaPrivateKey.PublicKey.N) != 0 ||
-@@ -1089,17 +1092,23 @@ func TestParsePEMCRL(t *testing.T) {
- }
-
- // Can't check the signature here without a package cycle.
- }
-
- func TestImports(t *testing.T) {
- testenv.MustHaveGoRun(t)
-
-- if err := exec.Command(testenv.GoToolPath(t), "run", "x509_test_import.go").Run(); err != nil {
-+ var cmd *exec.Cmd
-+ if *target == "" {
-+ cmd = exec.Command(testenv.GoToolPath(t), "run", "x509_test_import.go")
-+ } else {
-+ cmd = exec.Command("go_"+*target, "run", "-exec", "go_"+*target+"_exec", "x509_test_import.go")
-+ }
-+ if err := cmd.Run(); err != nil {
- t.Errorf("failed to run x509_test_import.go: %s", err)
- }
- }
-
- const derCRLBase64 = "MIINqzCCDJMCAQEwDQYJKoZIhvcNAQEFBQAwVjEZMBcGA1UEAxMQUEtJIEZJTk1FQ0NBTklDQTEVMBMGA1UEChMMRklOTUVDQ0FOSUNBMRUwEwYDVQQLEwxGSU5NRUNDQU5JQ0ExCzAJBgNVBAYTAklUFw0xMTA1MDQxNjU3NDJaFw0xMTA1MDQyMDU3NDJaMIIMBzAhAg4Ze1od49Lt1qIXBydAzhcNMDkwNzE2MDg0MzIyWjAAMCECDl0HSL9bcZ1Ci/UHJ0DPFw0wOTA3MTYwODQzMTNaMAAwIQIOESB9tVAmX3cY7QcnQNAXDTA5MDcxNjA4NDUyMlowADAhAg4S1tGAQ3mHt8uVBydA1RcNMDkwODA0MTUyNTIyWjAAMCECDlQ249Y7vtC25ScHJ0DWFw0wOTA4MDQxNTI1MzdaMAAwIQIOISMop3NkA4PfYwcnQNkXDTA5MDgwNDExMDAzNFowADAhAg56/BMoS29KEShTBydA2hcNMDkwODA0MTEwMTAzWjAAMCECDnBp/22HPH5CSWoHJ0DbFw0wOTA4MDQxMDU0NDlaMAAwIQIOV9IP+8CD8bK+XAcnQNwXDTA5MDgwNDEwNTcxN1owADAhAg4v5aRz0IxWqYiXBydA3RcNMDkwODA0MTA1NzQ1WjAAMCECDlOU34VzvZAybQwHJ0DeFw0wOTA4MDQxMDU4MjFaMAAwIAINO4CD9lluIxcwBydBAxcNMDkwNzIyMTUzMTU5WjAAMCECDgOllfO8Y1QA7/wHJ0ExFw0wOTA3MjQxMTQxNDNaMAAwIQIOJBX7jbiCdRdyjgcnQUQXDTA5MDkxNjA5MzAwOFowADAhAg5iYSAgmDrlH/RZBydBRRcNMDkwOTE2MDkzMDE3WjAAMCECDmu6k6srP3jcMaQHJ0FRFw0wOTA4MDQxMDU2NDBaMAAwIQIOX8aHlO0V+WVH4QcnQVMXDTA5MDgwNDEwNTcyOVowADAhAg5flK2rg3NnsRgDBydBzhcNMTEwMjAxMTUzMzQ2WjAAMCECDg35yJDL1jOPTgoHJ0HPFw0xMTAyMDExNTM0MjZaMAAwIQIOMyFJ6+e9iiGVBQcnQdAXDTA5MDkxODEzMjAwNVowADAhAg5Emb/Oykucmn8fBydB1xcNMDkwOTIxMTAxMDQ3WjAAMCECDjQKCncV+MnUavMHJ0HaFw0wOTA5MjIwODE1MjZaMAAwIQIOaxiFUt3dpd+tPwcnQfQXDTEwMDYxODA4NDI1MVowADAhAg5G7P8nO0tkrMt7BydB9RcNMTAwNjE4MDg0MjMwWjAAMCECDmTCC3SXhmDRst4HJ0H2Fw0wOTA5MjgxMjA3MjBaMAAwIQIOHoGhUr/pRwzTKgcnQfcXDTA5MDkyODEyMDcyNFowADAhAg50wrcrCiw8mQmPBydCBBcNMTAwMjE2MTMwMTA2WjAAMCECDifWmkvwyhEqwEcHJ0IFFw0xMDAyMTYxMzAxMjBaMAAwIQIOfgPmlW9fg+osNgcnQhwXDTEwMDQxMzA5NTIwMFowADAhAg4YHAGuA6LgCk7tBydCHRcNMTAwNDEzMDk1MTM4WjAAMCECDi1zH1bxkNJhokAHJ0IsFw0xMDA0MTMwOTU5MzBaMAAwIQIOMipNccsb/wo2fwcnQi0XDTEwMDQxMzA5NTkwMFowADAhAg46lCmvPl4GpP6ABydCShcNMTAwMTE5MDk1MjE3WjAAMCECDjaTcaj+wBpcGAsHJ0JLFw0xMDAxMTkwOTUyMzRaMAAwIQIOOMC13EOrBuxIOQcnQloXDTEwMDIwMTA5NDcwNVowADAhAg5KmZl+krz4RsmrBydCWxcNMTAwMjAxMDk0NjQwWjAAMCECDmLG3zQJ/fzdSsUHJ0JiFw0xMDAzMDEwOTUxNDBaMAAwIQIOP39ksgHdojf4owcnQmMXDTEwMDMwMTA5NTExN1owADAhAg4L
DQzvWNRlD6v9BydCZBcNMTAwMzAxMDk0NjIyWjAAMCECDkmNfeclaFhIaaUHJ0JlFw0xMDAzMDEwOTQ2MDVaMAAwIQIOT/qWWfpH/m8NTwcnQpQXDTEwMDUxMTA5MTgyMVowADAhAg5m/ksYxvCEgJSvBydClRcNMTAwNTExMDkxODAxWjAAMCECDgvf3Ohq6JOPU9AHJ0KWFw0xMDA1MTEwOTIxMjNaMAAwIQIOKSPas10z4jNVIQcnQpcXDTEwMDUxMTA5MjEwMlowADAhAg4mCWmhoZ3lyKCDBydCohcNMTEwNDI4MTEwMjI1WjAAMCECDkeiyRsBMK0Gvr4HJ0KjFw0xMTA0MjgxMTAyMDdaMAAwIQIOa09b/nH2+55SSwcnQq4XDTExMDQwMTA4Mjk0NlowADAhAg5O7M7iq7gGplr1BydCrxcNMTEwNDAxMDgzMDE3WjAAMCECDjlT6mJxUjTvyogHJ0K1Fw0xMTAxMjcxNTQ4NTJaMAAwIQIODS/l4UUFLe21NAcnQrYXDTExMDEyNzE1NDgyOFowADAhAg5lPRA0XdOUF6lSBydDHhcNMTEwMTI4MTQzNTA1WjAAMCECDixKX4fFGGpENwgHJ0MfFw0xMTAxMjgxNDM1MzBaMAAwIQIORNBkqsPnpKTtbAcnQ08XDTEwMDkwOTA4NDg0MlowADAhAg5QL+EMM3lohedEBydDUBcNMTAwOTA5MDg0ODE5WjAAMCECDlhDnHK+HiTRAXcHJ0NUFw0xMDEwMTkxNjIxNDBaMAAwIQIOdBFqAzq/INz53gcnQ1UXDTEwMTAxOTE2MjA0NFowADAhAg4OjR7s8MgKles1BydDWhcNMTEwMTI3MTY1MzM2WjAAMCECDmfR/elHee+d0SoHJ0NbFw0xMTAxMjcxNjUzNTZaMAAwIQIOBTKv2ui+KFMI+wcnQ5YXDTEwMDkxNTEwMjE1N1owADAhAg49F3c/GSah+oRUBydDmxcNMTEwMTI3MTczMjMzWjAAMCECDggv4I61WwpKFMMHJ0OcFw0xMTAxMjcxNzMyNTVaMAAwIQIOXx/Y8sEvwS10LAcnQ6UXDTExMDEyODExMjkzN1owADAhAg5LSLbnVrSKaw/9BydDphcNMTEwMTI4MTEyOTIwWjAAMCECDmFFoCuhKUeACQQHJ0PfFw0xMTAxMTExMDE3MzdaMAAwIQIOQTDdFh2fSPF6AAcnQ+AXDTExMDExMTEwMTcxMFowADAhAg5B8AOXX61FpvbbBydD5RcNMTAxMDA2MTAxNDM2WjAAMCECDh41P2Gmi7PkwI4HJ0PmFw0xMDEwMDYxMDE2MjVaMAAwIQIOWUHGLQCd+Ale9gcnQ/0XDTExMDUwMjA3NTYxMFowADAhAg5Z2c9AYkikmgWOBydD/hcNMTEwNTAyMDc1NjM0WjAAMCECDmf/UD+/h8nf+74HJ0QVFw0xMTA0MTUwNzI4MzNaMAAwIQIOICvj4epy3MrqfwcnRBYXDTExMDQxNTA3Mjg1NlowADAhAg4bouRMfOYqgv4xBydEHxcNMTEwMzA4MTYyNDI1WjAAMCECDhebWHGoKiTp7pEHJ0QgFw0xMTAzMDgxNjI0NDhaMAAwIQIOX+qnxxAqJ8LtawcnRDcXDTExMDEzMTE1MTIyOFowADAhAg4j0fICqZ+wkOdqBydEOBcNMTEwMTMxMTUxMTQxWjAAMCECDhmXjsV4SUpWtAMHJ0RLFw0xMTAxMjgxMTI0MTJaMAAwIQIODno/w+zG43kkTwcnREwXDTExMDEyODExMjM1MlowADAhAg4b1gc88767Fr+LBydETxcNMTEwMTI4MTEwMjA4WjAAMCECDn+M3Pa1w2nyFeUHJ0RQFw0xMTAxMjgxMDU4NDVaMAAwIQIOaduoyIH61tqybAcnRJUXDTEwMTIxNTA5NDMyMlowADAhAg4nLqQPkyi3ESAKBydElhcNMTAxMjE1MDk0MzM2WjAA
MCECDi504NIMH8578gQHJ0SbFw0xMTAyMTQxNDA1NDFaMAAwIQIOGuaM8PDaC5u1egcnRJwXDTExMDIxNDE0MDYwNFowADAhAg4ehYq/BXGnB5PWBydEnxcNMTEwMjA0MDgwOTUxWjAAMCECDkSD4eS4FxW5H20HJ0SgFw0xMTAyMDQwODA5MjVaMAAwIQIOOCcb6ilYObt1egcnRKEXDTExMDEyNjEwNDEyOVowADAhAg58tISWCCwFnKGnBydEohcNMTEwMjA0MDgxMzQyWjAAMCECDn5rjtabY/L/WL0HJ0TJFw0xMTAyMDQxMTAzNDFaMAAwDQYJKoZIhvcNAQEFBQADggEBAGnF2Gs0+LNiYCW1Ipm83OXQYP/bd5tFFRzyz3iepFqNfYs4D68/QihjFoRHQoXEB0OEe1tvaVnnPGnEOpi6krwekquMxo4H88B5SlyiFIqemCOIss0SxlCFs69LmfRYvPPvPEhoXtQ3ZThe0UvKG83GOklhvGl6OaiRf4Mt+m8zOT4Wox/j6aOBK6cw6qKCdmD+Yj1rrNqFGg1CnSWMoD6S6mwNgkzwdBUJZ22BwrzAAo4RHa2Uy3ef1FjwD0XtU5N3uDSxGGBEDvOe5z82rps3E22FpAA8eYl8kaXtmWqyvYU0epp4brGuTxCuBMCAsxt/OjIjeNNQbBGkwxgfYA0="
-
- const pemCRLBase64 = "LS0tLS1CRUdJTiBYNTA5IENSTC0tLS0tDQpNSUlCOWpDQ0FWOENBUUV3RFFZSktvWklodmNOQVFFRkJRQXdiREVhTUJnR0ExVUVDaE1SVWxOQklGTmxZM1Z5DQphWFI1SUVsdVl5NHhIakFjQmdOVkJBTVRGVkpUUVNCUWRXSnNhV01nVW05dmRDQkRRU0IyTVRFdU1Dd0dDU3FHDQpTSWIzRFFFSkFSWWZjbk5oYTJWdmJuSnZiM1J6YVdkdVFISnpZWE5sWTNWeWFYUjVMbU52YlJjTk1URXdNakl6DQpNVGt5T0RNd1doY05NVEV3T0RJeU1Ua3lPRE13V2pDQmpEQktBaEVBckRxb2g5RkhKSFhUN09QZ3V1bjQrQmNODQpNRGt4TVRBeU1UUXlOekE1V2pBbU1Bb0dBMVVkRlFRRENnRUpNQmdHQTFVZEdBUVJHQTh5TURBNU1URXdNakUwDQpNalExTlZvd1BnSVJBTEd6blowOTVQQjVhQU9MUGc1N2ZNTVhEVEF5TVRBeU16RTBOVEF4TkZvd0dqQVlCZ05WDQpIUmdFRVJnUE1qQXdNakV3TWpNeE5EVXdNVFJhb0RBd0xqQWZCZ05WSFNNRUdEQVdnQlQxVERGNlVRTS9MTmVMDQpsNWx2cUhHUXEzZzltekFMQmdOVkhSUUVCQUlDQUlRd0RRWUpLb1pJaHZjTkFRRUZCUUFEZ1lFQUZVNUFzNk16DQpxNVBSc2lmYW9iUVBHaDFhSkx5QytNczVBZ2MwYld5QTNHQWR4dXI1U3BQWmVSV0NCamlQL01FSEJXSkNsQkhQDQpHUmNxNXlJZDNFakRrYUV5eFJhK2k2N0x6dmhJNmMyOUVlNks5cFNZd2ppLzdSVWhtbW5Qclh0VHhsTDBsckxyDQptUVFKNnhoRFJhNUczUUE0Q21VZHNITnZicnpnbUNZcHZWRT0NCi0tLS0tRU5EIFg1MDkgQ1JMLS0tLS0NCg0K"
-
---- src/runtime/crash_cgo_test.go
-+++ src/runtime/crash_cgo_test.go
-@@ -279,17 +279,17 @@ func testCgoPprof(t *testing.T, buildArg, runArg string) {
- }
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprogcgo", buildArg)
- if err != nil {
- t.Fatal(err)
- }
-
-- got, err := testenv.CleanCmdEnv(exec.Command(exe, runArg)).CombinedOutput()
-+ got, err := testenv.CleanCmdEnv(goExecCmd(exe, runArg)).CombinedOutput()
- if err != nil {
- if testenv.Builder() == "linux-amd64-alpine" {
- // See Issue 18243 and Issue 19938.
- t.Skipf("Skipping failing test on Alpine (golang.org/issue/18243). Ignoring error: %v", err)
- }
- t.Fatal(err)
- }
- fn := strings.TrimSpace(string(got))
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -17,16 +17,35 @@ import (
- "runtime"
- "strconv"
- "strings"
- "sync"
- "testing"
- "time"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd(t *testing.T) string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return testenv.GoToolPath(t)
-+}
-+
-+func goExecCmd(name string, arg ...string) *exec.Cmd {
-+ var cmd []string
-+ if *target != "" {
-+ cmd = append(cmd, "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, name)
-+ cmd = append(cmd, arg...)
-+ return exec.Command(cmd[0], cmd[1:]...)
-+}
-+
- var toRemove []string
-
- func TestMain(m *testing.M) {
- status := m.Run()
- for _, file := range toRemove {
- os.RemoveAll(file)
- }
- os.Exit(status)
-@@ -50,17 +69,17 @@ func runTestProg(t *testing.T, binary, name string, env ...string) string {
-
- testenv.MustHaveGoBuild(t)
-
- exe, err := buildTestProg(t, binary)
- if err != nil {
- t.Fatal(err)
- }
-
-- cmd := testenv.CleanCmdEnv(exec.Command(exe, name))
-+ cmd := testenv.CleanCmdEnv(goExecCmd(exe, name))
- cmd.Env = append(cmd.Env, env...)
- if testing.Short() {
- cmd.Env = append(cmd.Env, "RUNTIME_TEST_SHORT=1")
- }
- var b bytes.Buffer
- cmd.Stdout = &b
- cmd.Stderr = &b
- if err := cmd.Start(); err != nil {
-@@ -125,17 +144,17 @@ func buildTestProg(t *testing.T, binary string, flags ...string) (string, error)
- name += "_" + strings.Join(flags, "_")
- }
- target, ok := testprog.target[name]
- if ok {
- return target.exe, target.err
- }
-
- exe := filepath.Join(testprog.dir, name+".exe")
-- cmd := exec.Command(testenv.GoToolPath(t), append([]string{"build", "-o", exe}, flags...)...)
-+ cmd := exec.Command(goCmd(t), append([]string{"build", "-o", exe}, flags...)...)
- cmd.Dir = "testdata/" + binary
- out, err := testenv.CleanCmdEnv(cmd).CombinedOutput()
- if err != nil {
- target.err = fmt.Errorf("building %s %v: %v\n%s", binary, flags, err, out)
- testprog.target[name] = target
- return "", target.err
- }
- target.exe = exe
-@@ -456,17 +475,17 @@ func TestPanicLoop(t *testing.T) {
- func TestMemPprof(t *testing.T) {
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
- t.Fatal(err)
- }
-
-- got, err := testenv.CleanCmdEnv(exec.Command(exe, "MemProf")).CombinedOutput()
-+ got, err := testenv.CleanCmdEnv(goExecCmd(exe, "MemProf")).CombinedOutput()
- if err != nil {
- t.Fatal(err)
- }
- fn := strings.TrimSpace(string(got))
- defer os.Remove(fn)
-
- for try := 0; try < 2; try++ {
- cmd := testenv.CleanCmdEnv(exec.Command(testenv.GoToolPath(t), "tool", "pprof", "-alloc_space", "-top"))
---- src/runtime/crash_unix_test.go
-+++ src/runtime/crash_unix_test.go
-@@ -244,17 +244,17 @@ func testPanicSystemstackInternal() {
- }
-
- func TestSignalExitStatus(t *testing.T) {
- testenv.MustHaveGoBuild(t)
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
- t.Fatal(err)
- }
-- err = testenv.CleanCmdEnv(exec.Command(exe, "SignalExitStatus")).Run()
-+ err = testenv.CleanCmdEnv(goExecCmd(exe, "SignalExitStatus")).Run()
- if err == nil {
- t.Error("test program succeeded unexpectedly")
- } else if ee, ok := err.(*exec.ExitError); !ok {
- t.Errorf("error (%v) has type %T; expected exec.ExitError", err, err)
- } else if ws, ok := ee.Sys().(syscall.WaitStatus); !ok {
- t.Errorf("error.Sys (%v) has type %T; expected syscall.WaitStatus", ee.Sys(), ee.Sys())
- } else if !ws.Signaled() || ws.Signal() != syscall.SIGTERM {
- t.Errorf("got %v; expected SIGTERM", ee)
diff --git a/go/patch/go-1.10.3/go5.patch b/go/patch/go-1.10.3/go5.patch
deleted file mode 100644
index 658b0346..00000000
--- a/go/patch/go-1.10.3/go5.patch
+++ /dev/null
@@ -1,106 +0,0 @@
-diff --git src/runtime/crash_test.go src/runtime/crash_test.go
-index e34f0fa95f..ea1eb4150a 100644
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -219,22 +219,27 @@ func testDeadlock(t *testing.T, name string) {
- }
-
- func TestSimpleDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "SimpleDeadlock")
- }
-
- func TestInitDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "InitDeadlock")
- }
-
- func TestLockedDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "LockedDeadlock")
- }
-
- func TestLockedDeadlock2(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "LockedDeadlock2")
- }
-
- func TestGoexitDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "GoexitDeadlock")
- want := "no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.Contains(output, want) {
-@@ -271,6 +276,7 @@ panic: again
- }
-
- func TestGoexitCrash(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "GoexitExit")
- want := "no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.Contains(output, want) {
-@@ -329,6 +335,7 @@ func TestBreakpoint(t *testing.T) {
- }
-
- func TestGoexitInPanic(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- // see issue 8774: this code used to trigger an infinite recursion
- output := runTestProg(t, "testprog", "GoexitInPanic")
- want := "fatal error: no goroutines (main called runtime.Goexit) - deadlock!"
-@@ -393,6 +400,7 @@ func TestPanicAfterGoexit(t *testing.T) {
- }
-
- func TestRecoveredPanicAfterGoexit(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "RecoveredPanicAfterGoexit")
- want := "fatal error: no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.HasPrefix(output, want) {
-diff --git src/runtime/proc_test.go src/runtime/proc_test.go
-index 2ece829071..942d65eedb 100644
---- src/runtime/proc_test.go
-+++ src/runtime/proc_test.go
-@@ -354,9 +354,10 @@ func TestGCFairness2(t *testing.T) {
-
- func TestNumGoroutine(t *testing.T) {
- output := runTestProg(t, "testprog", "NumGoroutine")
-- want := "1\n"
-- if output != want {
-- t.Fatalf("want %q, got %q", want, output)
-+ want1 := "1\n"
-+ want2 := "2\n"
-+ if output != want1 && output != want2 {
-+ t.Fatalf("want %q, got %q", want1, output)
- }
-
- buf := make([]byte, 1<<20)
-diff --git test/fixedbugs/bug429_run.go test/fixedbugs/bug429_run.go
-index e8d18b13e8..6a555286cf 100644
---- test/fixedbugs/bug429_run.go
-+++ test/fixedbugs/bug429_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl
--// runtarget
-+// skip
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-diff --git test/goprint.go test/goprint.go
-index 0648c77e7d..147f0c91db 100644
---- test/goprint.go
-+++ test/goprint.go
-@@ -8,14 +8,9 @@
-
- package main
-
--import (
-- "runtime"
-- "time"
--)
-+import "time"
-
- func main() {
- go println(42, true, false, true, 1.5, "world", (chan int)(nil), []int(nil), (map[string]int)(nil), (func())(nil), byte(255))
-- for runtime.NumGoroutine() > 1 {
-- time.Sleep(10*time.Millisecond)
-- }
-+ time.Sleep(100*time.Millisecond)
- }
diff --git a/go/patch/go-1.10.3/go6.patch b/go/patch/go-1.10.3/go6.patch
deleted file mode 100644
index 5fb512a9..00000000
--- a/go/patch/go-1.10.3/go6.patch
+++ /dev/null
@@ -1,142 +0,0 @@
-diff --git src/encoding/gob/encoder_test.go src/encoding/gob/encoder_test.go
-index a1ca252ccd..c66e623499 100644
---- src/encoding/gob/encoder_test.go
-+++ src/encoding/gob/encoder_test.go
-@@ -1130,10 +1130,7 @@ func TestBadData(t *testing.T) {
-
- // TestHugeWriteFails tests that enormous messages trigger an error.
- func TestHugeWriteFails(t *testing.T) {
-- if testing.Short() {
-- // Requires allocating a monster, so don't do this from all.bash.
-- t.Skip("skipping huge allocation in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- huge := make([]byte, tooBig)
- huge[0] = 7 // Make sure it's not all zeros.
- buf := new(bytes.Buffer)
-diff --git src/runtime/crash_cgo_test.go src/runtime/crash_cgo_test.go
-index 998055972a..60fe2a52d2 100644
---- src/runtime/crash_cgo_test.old
-+++ src/runtime/crash_cgo_test.go
-@@ -251,10 +251,7 @@ func TestCgoCCodeSIGPROF(t *testing.T) {
- }
-
- func TestCgoCrashTraceback(t *testing.T) {
-- t.Parallel()
-- if runtime.GOOS != "linux" || (runtime.GOARCH != "amd64" && runtime.GOARCH != "ppc64le") {
-- t.Skipf("not yet supported on %s/%s", runtime.GOOS, runtime.GOARCH)
-- }
-+ t.Skipf("skip running remotely")
- got := runTestProg(t, "testprogcgo", "CrashTraceback")
- for i := 1; i <= 3; i++ {
- if !strings.Contains(got, fmt.Sprintf("cgo symbolizer:%d", i)) {
-@@ -273,10 +270,7 @@ func TestCgoTracebackContext(t *testing.T) {
- }
-
- func testCgoPprof(t *testing.T, buildArg, runArg string) {
-- t.Parallel()
-- if runtime.GOOS != "linux" || (runtime.GOARCH != "amd64" && runtime.GOARCH != "ppc64le") {
-- t.Skipf("not yet supported on %s/%s", runtime.GOOS, runtime.GOARCH)
-- }
-+ t.Skipf("skip pprof test")
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprogcgo", buildArg)
-diff --git src/runtime/crash_test.go src/runtime/crash_test.go
-index 3607992788..e53ffb6a81 100644
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -481,6 +481,7 @@ func TestPanicLoop(t *testing.T) {
- }
-
- func TestMemPprof(t *testing.T) {
-+ t.Skipf("skip pprof test")
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprog")
-diff --git src/runtime/crash_unix_test.go src/runtime/crash_unix_test.go
-index 02891ec1ad..fd2723f16e 100644
---- src/runtime/crash_unix_test.go
-+++ src/runtime/crash_unix_test.go
-@@ -174,9 +174,7 @@ func TestPanicSystemstack(t *testing.T) {
- // The GOTRACEBACK=crash handler takes 0.1 seconds even if
- // it's not writing a core file and potentially much longer if
- // it is. Skip in short mode.
-- if testing.Short() {
-- t.Skip("Skipping in short mode (GOTRACEBACK=crash is slow)")
-- }
-+ t.Skip("Skipping (GOTRACEBACK=crash hangs on arm)")
-
- if runtime.Sigisblocked(int(syscall.SIGQUIT)) {
- t.Skip("skipping; SIGQUIT is blocked, see golang.org/issue/19196")
-@@ -244,6 +242,7 @@ func testPanicSystemstackInternal() {
- }
-
- func TestSignalExitStatus(t *testing.T) {
-+ t.Skipf("skip running remotely")
- testenv.MustHaveGoBuild(t)
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
-diff --git src/runtime/fastlog2_test.go src/runtime/fastlog2_test.go
-index ae0f40b2bb..a93933d7ac 100644
---- src/runtime/fastlog2_test.go
-+++ src/runtime/fastlog2_test.go
-@@ -16,11 +16,7 @@ func TestFastLog2(t *testing.T) {
- const randomBitCount = 26
- var e float64
-
-- inc := 1
-- if testing.Short() {
-- // Check 1K total values, down from 64M.
-- inc = 1 << 16
-- }
-+ inc := 1 << 16
- for i := 1; i < 1<<randomBitCount; i += inc {
- l, fl := math.Log2(float64(i)), runtime.Fastlog2(float64(i))
- d := l - fl
-diff --git src/runtime/hash_test.go src/runtime/hash_test.go
-index 1400579cda..4c5de7fbef 100644
---- src/runtime/hash_test.go
-+++ src/runtime/hash_test.go
-@@ -161,9 +161,7 @@ func TestSmhasherZeros(t *testing.T) {
-
- // Strings with up to two nonzero bytes all have distinct hashes.
- func TestSmhasherTwoNonzero(t *testing.T) {
-- if testing.Short() {
-- t.Skip("Skipping in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- h := newHashSet()
- for n := 2; n <= 16; n++ {
- twoNonZero(h, n)
-@@ -264,9 +262,7 @@ func setbits(h *HashSet, b []byte, i int, k int) {
- // Test all possible combinations of n blocks from the set s.
- // "permutation" is a bad name here, but it is what Smhasher uses.
- func TestSmhasherPermutation(t *testing.T) {
-- if testing.Short() {
-- t.Skip("Skipping in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- permutation(t, []uint32{0, 1, 2, 3, 4, 5, 6, 7}, 8)
- permutation(t, []uint32{0, 1 << 29, 2 << 29, 3 << 29, 4 << 29, 5 << 29, 6 << 29, 7 << 29}, 8)
- permutation(t, []uint32{0, 1}, 20)
-diff --git src/runtime/pprof/pprof_test.go src/runtime/pprof/pprof_test.go
-index 44d514393e..f46f00894c 100644
---- src/runtime/pprof/pprof_test.go
-+++ src/runtime/pprof/pprof_test.go
-@@ -283,14 +283,7 @@ func profileOk(t *testing.T, need []string, prof bytes.Buffer, duration time.Dur
- func TestCPUProfileWithFork(t *testing.T) {
- testenv.MustHaveExec(t)
-
-- heap := 1 << 30
-- if runtime.GOOS == "android" {
-- // Use smaller size for Android to avoid crash.
-- heap = 100 << 20
-- }
-- if testing.Short() {
-- heap = 100 << 20
-- }
-+ heap = 100 << 20
- // This makes fork slower.
- garbage := make([]byte, heap)
- // Need to touch the slice, otherwise it won't be paged in.
diff --git a/go/patch/go-1.11.2/go0.patch b/go/patch/go-1.11.2/go0.patch
deleted file mode 100644
index f80045c0..00000000
--- a/go/patch/go-1.11.2/go0.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-diff --git src/go/build/deps_test.go src/go/build/deps_test.go
-index 29dbe47d29..53e0e287bc 100644
---- src/go/build/deps_test.go
-+++ src/go/build/deps_test.go
-@@ -191,7 +191,7 @@ var pkgDeps = map[string][]string{
- "testing": {"L2", "flag", "fmt", "internal/race", "os", "runtime/debug", "runtime/pprof", "runtime/trace", "time"},
- "testing/iotest": {"L2", "log"},
- "testing/quick": {"L2", "flag", "fmt", "reflect", "time"},
-- "internal/testenv": {"L2", "OS", "flag", "testing", "syscall"},
-+ "internal/testenv": {"L2", "OS", "os.exec", "flag", "testing", "syscall"},
-
- // L4 is defined as L3+fmt+log+time, because in general once
- // you're using L3 packages, use of fmt, log, or time is not a big deal.
-diff --git src/internal/testenv/testenv.go src/internal/testenv/testenv.go
-index 8f69fe0da5..d52b85e122 100644
---- src/internal/testenv/testenv.go
-+++ src/internal/testenv/testenv.go
-@@ -48,6 +48,9 @@ func HasGoBuild() bool {
- return false
- }
- }
-+ if _, err := exec.LookPath("go"); err != nil {
-+ return false
-+ }
- return true
- }
-
diff --git a/go/patch/go-1.11.2/go1.patch b/go/patch/go-1.11.2/go1.patch
deleted file mode 100644
index e05fcce4..00000000
--- a/go/patch/go-1.11.2/go1.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-diff --git test/chanlinear.go test/chanlinear.go
-index 55fee4ab9b..89533da282 100644
---- test/chanlinear.go
-+++ test/chanlinear.go
-@@ -1,4 +1,4 @@
--// +build darwin linux
-+// +build darwin linux android
- // run
-
- // Copyright 2014 The Go Authors. All rights reserved.
-diff --git a/test/fixedbugs/bug385_64.go b/test/fixedbugs/bug385_64.go
-index 0f941ca2f4..3bcd62f3ad 100644
---- test/fixedbugs/bug385_64.go
-+++ test/fixedbugs/bug385_64.go
-@@ -1,4 +1,4 @@
--// +build amd64
-+// +build amd64 arm64
- // errorcheck
-
- // Copyright 2011 The Go Authors. All rights reserved.
-diff --git test/fixedbugs/issue10607.go test/fixedbugs/issue10607.go
-index 8831547da8..9ee6c72bc6 100644
---- test/fixedbugs/issue10607.go
-+++ test/fixedbugs/issue10607.go
-@@ -1,4 +1,4 @@
--// +build linux,!ppc64
-+// +build linux,!ppc64 android
- // run
-
- // Copyright 2015 The Go Authors. All rights reserved.
-diff --git test/maplinear.go test/maplinear.go
-index 34d0914914..afddab627d 100644
---- test/maplinear.go
-+++ test/maplinear.go
-@@ -1,4 +1,4 @@
--// +build darwin linux
-+// +build darwin linux android
- // run
-
- // Copyright 2013 The Go Authors. All rights reserved.
-diff --git test/recover4.go test/recover4.go
-index 67ed970ecb..95a89dab00 100644
---- test/recover4.go
-+++ test/recover4.go
-@@ -1,4 +1,4 @@
--// +build linux darwin
-+// +build linux android darwin
- // run
-
- // Copyright 2015 The Go Authors. All rights reserved.
diff --git a/go/patch/go-1.11.2/go2.patch b/go/patch/go-1.11.2/go2.patch
deleted file mode 100644
index 362a53fe..00000000
--- a/go/patch/go-1.11.2/go2.patch
+++ /dev/null
@@ -1,277 +0,0 @@
-diff --git test/run.go test/run.go
-index ad38d420c9..e2b93d35da 100644
---- test/run.go
-+++ test/run.go
-@@ -36,13 +36,13 @@ var (
- summary = flag.Bool("summary", false, "show summary of results")
- showSkips = flag.Bool("show_skips", false, "show skipped tests")
- runSkips = flag.Bool("run_skips", false, "run skipped tests (ignore skip and build tags)")
-- linkshared = flag.Bool("linkshared", false, "")
- updateErrors = flag.Bool("update_errors", false, "update error messages in test file based on compiler output")
- runoutputLimit = flag.Int("l", defaultRunOutputLimit(), "number of parallel runoutput tests to run")
-
- shard = flag.Int("shard", 0, "shard index to run. Only applicable if -shards is non-zero.")
- shards = flag.Int("shards", 0, "number of shards. If 0, all tests are run. This is used by the continuous build.")
- )
-+ target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-
- var (
- goos, goarch string
-@@ -207,25 +207,19 @@ func goFiles(dir string) []string {
- type runCmd func(...string) ([]byte, error)
-
- func compileFile(runcmd runCmd, longname string, flags []string) (out []byte, err error) {
-- cmd := []string{goTool(), "tool", "compile", "-e"}
-+ cmd := []string{findGoCmd, "tool", "compile", "-e"}
- cmd = append(cmd, flags...)
-- if *linkshared {
-- cmd = append(cmd, "-dynlink", "-installsuffix=dynlink")
-- }
- cmd = append(cmd, longname)
- return runcmd(cmd...)
- }
-
- func compileInDir(runcmd runCmd, dir string, flags []string, localImports bool, names ...string) (out []byte, err error) {
-- cmd := []string{goTool(), "tool", "compile", "-e"}
-+ cmd := []string{findGoCmd(), "tool", "compile", "-e"}
- if localImports {
- // Set relative path for local imports and import search path to current dir.
- cmd = append(cmd, "-D", ".", "-I", ".")
- }
- cmd = append(cmd, flags...)
-- if *linkshared {
-- cmd = append(cmd, "-dynlink", "-installsuffix=dynlink")
-- }
- for _, name := range names {
- cmd = append(cmd, filepath.Join(dir, name))
- }
-@@ -234,15 +228,28 @@ func compileInDir(runcmd runCmd, dir string, flags []string, localImports bool,
-
- func linkFile(runcmd runCmd, goname string) (err error) {
- pfile := strings.Replace(goname, ".go", ".o", -1)
-- cmd := []string{goTool(), "tool", "link", "-w", "-o", "a.exe", "-L", "."}
-+ cmd := []string{findGoCmd(), "tool", "link", "-w", "-o", "a.exe", "-L", "."}
- if *linkshared {
- cmd = append(cmd, "-linkshared", "-installsuffix=dynlink")
- }
- cmd = append(cmd, pfile)
-- _, err = runcmd(cmd...)
-+ _, err = runcmd(findGoCmd(), "tool", "link", "-w", "-o", "a.exe", "-L", ".", pfile)
- return
- }
-
-+
-+func goRun(runcmd runCmd, flags []string, goname string, args ...string) (out []byte, err error) {
-+ cmd := []string{findGoCmd(), "run", goGcflags()}
-+ if len(findExecCmd()) > 0 {
-+ cmd = append(cmd, "-exec")
-+ cmd = append(cmd, findExecCmd()...)
-+ }
-+ cmd = append(cmd, flags...)
-+ cmd = append(cmd, goname)
-+ cmd = append(cmd, args...)
-+ return runcmd(cmd...)
-+}
-+
- // skipError describes why a test was skipped.
- type skipError string
-
-@@ -646,7 +653,7 @@ func (t *test) run() {
- // Fail if wantError is true and compilation was successful and vice versa.
- // Match errors produced by gc against errors in comments.
- // TODO(gri) remove need for -C (disable printing of columns in error messages)
-- cmdline := []string{goTool(), "tool", "compile", "-C", "-e", "-o", "a.o"}
-+ cmdline := []string{findGoCmd(), "tool", "compile", "-C", "-e", "-o", "a.o"}
- // No need to add -dynlink even if linkshared if we're just checking for errors...
- cmdline = append(cmdline, flags...)
- cmdline = append(cmdline, long)
-@@ -773,7 +780,7 @@ func (t *test) run() {
-
- case "build":
- // Build Go file.
-- _, err := runcmd(goTool(), "build", goGcflags(), "-o", "a.exe", long)
-+ _, err := runcmd(findGoCmd(), "build", goGcflags(), "-o", "a.exe", long)
- if err != nil {
- t.err = err
- }
-@@ -799,7 +806,7 @@ func (t *test) run() {
-
- }
- var objs []string
-- cmd := []string{goTool(), "tool", "compile", "-e", "-D", ".", "-I", ".", "-o", "go.o"}
-+ cmd := []string{findGoCmd(), "tool", "compile", "-e", "-D", ".", "-I", ".", "-o", "go.o"}
- if len(asms) > 0 {
- cmd = append(cmd, "-asmhdr", "go_asm.h")
- }
-@@ -813,7 +820,7 @@ func (t *test) run() {
- }
- objs = append(objs, "go.o")
- if len(asms) > 0 {
-- cmd = []string{goTool(), "tool", "asm", "-e", "-I", ".", "-o", "asm.o"}
-+ cmd = []string{findGoCmd(), "tool", "asm", "-e", "-I", ".", "-o", "asm.o"}
- for _, file := range asms {
- cmd = append(cmd, filepath.Join(longdir, file.Name()))
- }
-@@ -857,14 +864,14 @@ func (t *test) run() {
- }
- objs = append(objs, "asm.o")
- }
-- cmd = []string{goTool(), "tool", "pack", "c", "all.a"}
-+ cmd = []string{findGoCmd(), "tool", "pack", "c", "all.a"}
- cmd = append(cmd, objs...)
- _, err = runcmd(cmd...)
- if err != nil {
- t.err = err
- break
- }
-- cmd = []string{goTool(), "tool", "link", "-o", "a.exe", "all.a"}
-+ cmd = []string{findGoCmd(), "tool", "link", "-o", "a.exe", "all.a"}
- _, err = runcmd(cmd...)
- if err != nil {
- t.err = err
-@@ -886,10 +893,7 @@ func (t *test) run() {
- // Build an executable from Go file, then run it, verify its output.
- // Useful for timeout tests where failure mode is infinite loop.
- // TODO: not supported on NaCl
-- cmd := []string{goTool(), "build", goGcflags(), "-o", "a.exe"}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-+ cmd := []string{findGoCmd(), "build", goGcflags(), "-o", "a.exe"}
- longdirgofile := filepath.Join(filepath.Join(cwd, t.dir), t.gofile)
- cmd = append(cmd, flags...)
- cmd = append(cmd, longdirgofile)
-@@ -898,8 +902,13 @@ func (t *test) run() {
- t.err = err
- return
- }
-- cmd = []string{"./a.exe"}
-- out, err = runcmd(append(cmd, args...)...)
-+ cmd = []string{}
-+ if len(findExecCmd()) > 0 {
-+ cmd = append(cmd, findExecCmd()...)
-+ }
-+ cmd = append(cmd, "./a.exe")
-+
-+ out, err = runcmd(append(cmd, args...)...)
- if err != nil {
- t.err = err
- return
-@@ -914,38 +923,7 @@ func (t *test) run() {
- // otherwise build an executable and run it.
- // Verify the output.
- useTmp = false
-- var out []byte
-- var err error
-- if len(flags)+len(args) == 0 && goGcflags() == "" && !*linkshared {
-- // If we're not using special go command flags,
-- // skip all the go command machinery.
-- // This avoids any time the go command would
-- // spend checking whether, for example, the installed
-- // package runtime is up to date.
-- // Because we run lots of trivial test programs,
-- // the time adds up.
-- pkg := filepath.Join(t.tempDir, "pkg.a")
-- if _, err := runcmd(goTool(), "tool", "compile", "-o", pkg, t.goFileName()); err != nil {
-- t.err = err
-- return
-- }
-- exe := filepath.Join(t.tempDir, "test.exe")
-- cmd := []string{goTool(), "tool", "link", "-s", "-w"}
-- cmd = append(cmd, "-o", exe, pkg)
-- if _, err := runcmd(cmd...); err != nil {
-- t.err = err
-- return
-- }
-- out, err = runcmd(append([]string{exe}, args...)...)
-- } else {
-- cmd := []string{goTool(), "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, flags...)
-- cmd = append(cmd, t.goFileName())
-- out, err = runcmd(append(cmd, args...)...)
-- }
-+ out, err := goRun(runcmd, flags, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -962,12 +940,7 @@ func (t *test) run() {
- <-rungatec
- }()
- useTmp = false
-- cmd := []string{goTool(), "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, t.goFileName())
-- out, err := runcmd(append(cmd, args...)...)
-+ out, err := goRun(runcmd, nil, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -977,12 +950,7 @@ func (t *test) run() {
- t.err = fmt.Errorf("write tempfile:%s", err)
- return
- }
-- cmd = []string{goTool(), "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, tfile)
-- out, err = runcmd(cmd...)
-+ out, err = goRun(runcmd, nil, tfile)
- if err != nil {
- t.err = err
- return
-@@ -995,12 +963,7 @@ func (t *test) run() {
- // Run Go file and write its output into temporary Go file.
- // Compile and errorCheck generated Go file.
- useTmp = false
-- cmd := []string{goTool(), "run", goGcflags()}
-- if *linkshared {
-- cmd = append(cmd, "-linkshared")
-- }
-- cmd = append(cmd, t.goFileName())
-- out, err := runcmd(append(cmd, args...)...)
-+ out, err := goRun(runcmd, nil, t.goFileName(), args...)
- if err != nil {
- t.err = err
- return
-@@ -1011,7 +974,7 @@ func (t *test) run() {
- t.err = fmt.Errorf("write tempfile:%s", err)
- return
- }
-- cmdline := []string{goTool(), "tool", "compile", "-e", "-o", "a.o"}
-+ cmdline := []string{findGoCmd(), "tool", "compile", "-e", "-o", "a.o"}
- cmdline = append(cmdline, flags...)
- cmdline = append(cmdline, tfile)
- out, err = runcmd(cmdline...)
-@@ -1038,6 +1001,11 @@ func findExecCmd() []string {
- return execCmd
- }
- execCmd = []string{} // avoid work the second time
-+ if *target != "" {
-+ execCmd = []string{"go_" + *target + "_exec"}
-+ return execCmd
-+ }
-+
- if goos == runtime.GOOS && goarch == runtime.GOARCH {
- return execCmd
- }
-@@ -1048,6 +1016,14 @@ func findExecCmd() []string {
- return execCmd
- }
-
-+func findGoCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+
- func (t *test) String() string {
- return filepath.Join(t.dir, t.gofile)
- }
diff --git a/go/patch/go-1.11.2/go3.patch b/go/patch/go-1.11.2/go3.patch
deleted file mode 100644
index c97cd302..00000000
--- a/go/patch/go-1.11.2/go3.patch
+++ /dev/null
@@ -1,730 +0,0 @@
-diff --git test/fixedbugs/bug302.go test/fixedbugs/bug302.go
-index c763b87786..470841f676 100644
---- test/fixedbugs/bug302.go
-+++ test/fixedbugs/bug302.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2010 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -8,16 +8,28 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+
- func main() {
-- run("go", "tool", "compile", filepath.Join("fixedbugs", "bug302.dir", "p.go"))
-- run("go", "tool", "pack", "grc", "pp.a", "p.o")
-- run("go", "tool", "compile", "-I", ".", filepath.Join("fixedbugs", "bug302.dir", "main.go"))
-+ flag.Parse()
-+ run(goCmd(), "tool", "compile", filepath.Join("fixedbugs", "bug302.dir", "p.go"))
-+ run(goCmd(), "tool", "pack", "grc", "pp.a", "p.o")
-+ run(goCmd(), "tool", "compile", "-I", ".", filepath.Join("fixedbugs", "bug302.dir", "main.go"))
- os.Remove("p.o")
- os.Remove("pp.a")
- os.Remove("main.o")
-diff --git test/fixedbugs/bug369.go test/fixedbugs/bug369.go
-index e2a1147735..769364d503 100644
---- test/fixedbugs/bug369.go
-+++ test/fixedbugs/bug369.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js,!windows
--// run
-+// runtarget
-
- // Copyright 2011 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,21 +10,40 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(cmd ...string) {
-+ if *target == "" {
-+ run(cmd[0], cmd[1:]...)
-+ } else {
-+ run("go_"+*target+"_exec", cmd...)
-+ }
-+}
-+
- func main() {
-+ flag.Parse()
- err := os.Chdir(filepath.Join(".", "fixedbugs", "bug369.dir"))
- check(err)
-
-- run("go", "tool", "compile", "-N", "-o", "slow.o", "pkg.go")
-- run("go", "tool", "compile", "-o", "fast.o", "pkg.go")
-- run("go", "tool", "compile", "-o", "main.o", "main.go")
-- run("go", "tool", "link", "-o", "a.exe", "main.o")
-- run("." + string(filepath.Separator) + "a.exe")
-+ run(goCmd(), "tool", "compile", "-N", "-o", "slow.o", "pkg.go")
-+ run(goCmd(), "tool", "compile", "-o", "fast.o", "pkg.go")
-+ run(goCmd(), "tool", "compile", "-o", "main.o", "main.go")
-+ run(goCmd(), "tool", "link", "-o", "a.exe", "main.o")
-+ goRun("." + string(filepath.Separator) + "a.exe")
-
- os.Remove("slow.o")
- os.Remove("fast.o")
-diff --git test/fixedbugs/bug429_run.go test/fixedbugs/bug429_run.go
-index c6a02aae5e..30298de97b 100644
---- test/fixedbugs/bug429_run.go
-+++ test/fixedbugs/bug429_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,6 +10,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
-@@ -17,8 +18,27 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- cmd := exec.Command("go", "run", filepath.Join("fixedbugs", "bug429.go"))
-+ flag.Parse()
-+ cmd := goRun(filepath.Join("fixedbugs", "bug429.go"))
- out, err := cmd.CombinedOutput()
- if err == nil {
- fmt.Println("expected deadlock")
-diff --git test/fixedbugs/issue10607.go test/fixedbugs/issue10607.go
-index 9ee6c72bc6..e819a3085a 100644
---- test/fixedbugs/issue10607.go
-+++ test/fixedbugs/issue10607.go
-@@ -1,5 +1,5 @@
- // +build linux,!ppc64 android
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,19 +11,39 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- test("internal")
-+ flag.Parse()
-+ //test("internal")
- test("external")
- }
-
- func test(linkmode string) {
-- out, err := exec.Command("go", "run", "-ldflags", "-B=0x12345678 -linkmode="+linkmode, filepath.Join("fixedbugs", "issue10607a.go")).CombinedOutput()
-+ out, err := goRun("-ldflags", "-B=0x12345678 -linkmode="+linkmode, filepath.Join("fixedbugs", "issue10607a.go")).CombinedOutput()
- if err != nil {
- fmt.Printf("BUG: linkmode=%s %v\n%s\n", linkmode, err, out)
- os.Exit(1)
-diff --git test/fixedbugs/issue11771.go test/fixedbugs/issue11771.go
-index 99d7060d44..777cb7b9c4 100644
---- test/fixedbugs/issue11771.go
-+++ test/fixedbugs/issue11771.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,6 +11,7 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -19,8 +20,17 @@ import (
- "path/filepath"
- "runtime"
- )
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-
- func main() {
-+ flag.Parse()
- if runtime.Compiler != "gc" {
- return
- }
-@@ -52,7 +62,7 @@ func x() {
- log.Fatal(err)
- }
-
-- cmd := exec.Command("go", "tool", "compile", "x.go")
-+ cmd := exec.Command(goCmd(), "tool", "compile", "x.go")
- cmd.Dir = dir
- output, err := cmd.CombinedOutput()
- if err == nil {
-diff --git test/fixedbugs/issue9355.go test/fixedbugs/issue9355.go
-index 9657e64491..bad099f440 100644
---- test/fixedbugs/issue9355.go
-+++ test/fixedbugs/issue9355.go
-@@ -1,4 +1,4 @@
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -7,6 +7,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
-@@ -15,7 +16,17 @@ import (
- "runtime"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
-+ flag.Parse()
- if runtime.Compiler != "gc" || runtime.GOOS == "nacl" || runtime.GOOS == "js" {
- return
- }
-@@ -23,7 +34,7 @@ func main() {
- err := os.Chdir(filepath.Join("fixedbugs", "issue9355.dir"))
- check(err)
-
-- out := run("go", "tool", "compile", "-S", "a.go")
-+ out := run(goCmd(), "tool", "compile", "-S", "a.go")
- os.Remove("a.o")
-
- // 6g/8g print the offset as dec, but 5g/9g print the offset as hex.
-diff --git test/fixedbugs/issue9862_run.go test/fixedbugs/issue9862_run.go
-index 299e809545..02b8ea83c2 100644
---- test/fixedbugs/issue9862_run.go
-+++ test/fixedbugs/issue9862_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2015 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,12 +10,32 @@
- package main
-
- import (
-+ "flag"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-- out, err := exec.Command("go", "run", "fixedbugs/issue9862.go").CombinedOutput()
-+ flag.Parse()
-+ out, err := goRun("fixedbugs/issue9862.go").CombinedOutput()
- outstr := string(out)
- if err == nil {
- println("go run issue9862.go succeeded, should have failed\n", outstr)
-diff --git test/linkmain_run.go test/linkmain_run.go
-index 68d53e8cad..0aa5e0fe2d 100644
---- test/linkmain_run.go
-+++ test/linkmain_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,12 +10,22 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func cleanup() {
- os.Remove("linkmain.o")
- os.Remove("linkmain.a")
-@@ -51,16 +61,18 @@ func runFail(cmdline string) {
- }
-
- func main() {
-+ flag.Parse()
-+
- // helloworld.go is package main
-- run("go tool compile -o linkmain.o helloworld.go")
-- run("go tool compile -pack -o linkmain.a helloworld.go")
-- run("go tool link -o linkmain.exe linkmain.o")
-- run("go tool link -o linkmain.exe linkmain.a")
-+ run(goCmd() + " tool compile -o linkmain.o helloworld.go")
-+ run(goCmd() + " tool compile -pack -o linkmain.a helloworld.go")
-+ run(goCmd() + " tool link -o linkmain.exe linkmain.o")
-+ run(goCmd() + " tool link -o linkmain.exe linkmain.a")
-
- // linkmain.go is not
-- run("go tool compile -o linkmain1.o linkmain.go")
-- run("go tool compile -pack -o linkmain1.a linkmain.go")
-- runFail("go tool link -o linkmain.exe linkmain1.o")
-- runFail("go tool link -o linkmain.exe linkmain1.a")
-+ run(goCmd() + " tool compile -o linkmain1.o linkmain.go")
-+ run(goCmd() + " tool compile -pack -o linkmain1.a linkmain.go")
-+ runFail(goCmd() + " tool link -o linkmain.exe linkmain1.o")
-+ runFail(goCmd() + " tool link -o linkmain.exe linkmain1.a")
- cleanup()
- }
-diff --git test/linkobj.go test/linkobj.go
-index 2902d23f4b..c17dfd3da9 100644
---- test/linkobj.go
-+++ test/linkobj.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2016 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -10,6 +10,7 @@
- package main
-
- import (
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -18,9 +19,27 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(cmd ...string) string {
-+ if *target == "" {
-+ return run(cmd...)
-+ } else {
-+ return run(append([]string{"go_"+*target+"_exec"}, cmd...)...)
-+ }
-+}
-+
- var pwd, tmpdir string
-
- func main() {
-+ flag.Parse()
- dir, err := ioutil.TempDir("", "go-test-linkobj-")
- if err != nil {
- log.Fatal(err)
-@@ -37,28 +56,28 @@ func main() {
-
- writeFile("p1.go", `
- package p1
--
-+
- func F() {
- println("hello from p1")
- }
- `)
- writeFile("p2.go", `
- package p2
--
-+
- import "./p1"
-
- func F() {
- p1.F()
- println("hello from p2")
- }
--
-+
- func main() {}
- `)
- writeFile("p3.go", `
- package main
-
- import "./p2"
--
-+
- func main() {
- p2.F()
- println("hello from main")
-@@ -76,9 +95,9 @@ func main() {
- }
-
- // inlining is disabled to make sure that the link objects contain needed code.
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p1."+o, "-linkobj", "p1.lo", "p1.go")
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p2."+o, "-linkobj", "p2.lo", "p2.go")
-- run("go", "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p3."+o, "-linkobj", "p3.lo", "p3.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p1."+o, "-linkobj", "p1.lo", "p1.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p2."+o, "-linkobj", "p2.lo", "p2.go")
-+ run(goCmd(), "tool", "compile", pkg, "-D", ".", "-I", ".", "-l", "-o", "p3."+o, "-linkobj", "p3.lo", "p3.go")
-
- cp("p1."+o, "p1.oo")
- cp("p2."+o, "p2.oo")
-@@ -86,13 +105,13 @@ func main() {
- cp("p1.lo", "p1."+o)
- cp("p2.lo", "p2."+o)
- cp("p3.lo", "p3."+o)
-- out := runFail("go", "tool", "link", "p2."+o)
-+ out := runFail(goCmd(), "tool", "link", "p2."+o)
- if !strings.Contains(out, "not package main") {
- fatalf("link p2.o failed but not for package main:\n%s", out)
- }
-
-- run("go", "tool", "link", "-L", ".", "-o", "a.out.exe", "p3."+o)
-- out = run("./a.out.exe")
-+ run(goCmd(), "tool", "link", "-L", ".", "-o", "a.out.exe", "p3."+o)
-+ out = goRun("./a.out.exe")
- if !strings.Contains(out, "hello from p1\nhello from p2\nhello from main\n") {
- fatalf("running main, incorrect output:\n%s", out)
- }
-diff --git test/linkx_run.go test/linkx_run.go
-index ca9d31612a..631b95ee67 100644
---- test/linkx_run.go
-+++ test/linkx_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,20 +11,40 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goRun(args ...string) *exec.Cmd {
-+ cmd := []string{"run"}
-+ if *target != "" {
-+ cmd = append(cmd, "-exec", "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, args...)
-+ return exec.Command(goCmd(), cmd...)
-+}
-+
- func main() {
-+ flag.Parse()
- // test(" ") // old deprecated & removed syntax
- test("=") // new syntax
- }
-
- func test(sep string) {
- // Successful run
-- cmd := exec.Command("go", "run", "-ldflags=-X main.tbd"+sep+"hello -X main.overwrite"+sep+"trumped -X main.nosuchsymbol"+sep+"neverseen", "linkx.go")
-+ cmd := goRun("-ldflags=-X main.tbd"+sep+"hello -X main.overwrite"+sep+"trumped -X main.nosuchsymbol"+sep+"neverseen", "linkx.go")
- var out, errbuf bytes.Buffer
- cmd.Stdout = &out
- cmd.Stderr = &errbuf
-@@ -44,7 +64,7 @@ func test(sep string) {
- }
-
- // Issue 8810
-- cmd = exec.Command("go", "run", "-ldflags=-X main.tbd", "linkx.go")
-+ cmd = goRun("-ldflags=-X main.tbd", "linkx.go")
- _, err = cmd.CombinedOutput()
- if err == nil {
- fmt.Println("-X linker flag should not accept keys without values")
-@@ -52,7 +72,7 @@ func test(sep string) {
- }
-
- // Issue 9621
-- cmd = exec.Command("go", "run", "-ldflags=-X main.b=false -X main.x=42", "linkx.go")
-+ cmd = goRun("-ldflags=-X main.b=false -X main.x=42", "linkx.go")
- outx, err := cmd.CombinedOutput()
- if err == nil {
- fmt.Println("-X linker flag should not overwrite non-strings")
-diff --git test/nosplit.go test/nosplit.go
-index e6cd04e563..baeea80e37 100644
---- test/nosplit.go
-+++ test/nosplit.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -9,6 +9,7 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "io/ioutil"
- "log"
-@@ -21,6 +22,24 @@ import (
- "strings"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
-+func goArch() string {
-+ goarch, err := exec.Command(goCmd(), "env", "GOARCH").Output()
-+ if err != nil {
-+ bug()
-+ fmt.Printf("running go env GOARCH: %v\n", err)
-+ }
-+ return strings.TrimSpace(string(goarch))
-+}
-+
- var tests = `
- # These are test cases for the linker analysis that detects chains of
- # nosplit functions that would cause a stack overflow.
-@@ -194,12 +213,13 @@ var (
- )
-
- func main() {
-- goarch := os.Getenv("GOARCH")
-+ flag.Parse()
-+ goarch := goArch()
- if goarch == "" {
-- goarch = runtime.GOARCH
-+ return
- }
-
-- version, err := exec.Command("go", "tool", "compile", "-V").Output()
-+ version, err := exec.Command(goCmd(), "tool", "compile", "-V").Output()
- if err != nil {
- bug()
- fmt.Printf("running go tool compile -V: %v\n", err)
-@@ -345,7 +365,7 @@ TestCases:
- log.Fatal(err)
- }
-
-- cmd := exec.Command("go", "build")
-+ cmd := exec.Command(goCmd(), "build")
- cmd.Dir = dir
- output, err := cmd.CombinedOutput()
- if err == nil {
-diff --git test/run.go test/run.go
-index 2af3ee43ba..28c87c3583 100644
---- test/run.go
-+++ test/run.go
-@@ -246,6 +246,16 @@ func goRun(runcmd runCmd, flags []string, goname string, args ...string) (out []
- }
-
-
-+func goRunTarget(runcmd runCmd, goname string, args ...string) (out []byte, err error) {
-+ cmd := []string{"go_local", "run"}
-+ cmd = append(cmd, goname)
-+ if *target != "" {
-+ cmd = append(cmd, "-target", *target)
-+ }
-+ cmd = append(cmd, args...)
-+ return runcmd(cmd...)
-+}
-+
- // skipError describes why a test was skipped.
- type skipError string
-
-@@ -505,7 +515,7 @@ func (t *test) run() {
-
- // TODO: Clean up/simplify this switch statement.
- switch action {
-- case "compile", "compiledir", "build", "builddir", "buildrundir", "run", "buildrun", "runoutput", "rundir", "asmcheck":
-+ case "compile", "compiledir", "build", "builddir", "buildrundir", "run", "runtarget", "buildrun", "runoutput", "rundir", "asmcheck":
- // nothing to do
- case "errorcheckandrundir":
- wantError = false // should be no error if also will run
-@@ -894,6 +904,17 @@ func (t *test) run() {
- t.err = fmt.Errorf("incorrect output\n%s", out)
- }
-
-+ case "runtarget":
-+ useTmp = false
-+ out, err := goRunTarget(runcmd, t.goFileName(), args...)
-+ if err != nil {
-+ t.err = err
-+ return
-+ }
-+ if strings.Replace(string(out), "\r\n", "\n", -1) != t.expectedOutput() {
-+ t.err = fmt.Errorf("incorrect output\n%s", out)
-+ }
-+
- case "runoutput":
- // Run Go file and write its output into temporary Go file.
- // Run generated Go file and verify its output.
-diff --git test/sinit_run.go test/sinit_run.go
-index fdd19c492f..0b3cb76083 100644
---- test/sinit_run.go
-+++ test/sinit_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// run
-+// runtarget
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-@@ -11,11 +11,21 @@ package main
-
- import (
- "bytes"
-+ "flag"
- "fmt"
- "os"
- "os/exec"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd() string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return "go"
-+}
-+
- func main() {
- cmd := exec.Command("go", "tool", "compile", "-S", "sinit.go")
- out, err := cmd.CombinedOutput()
diff --git a/go/patch/go-1.11.2/go4.patch b/go/patch/go-1.11.2/go4.patch
deleted file mode 100644
index 290de390..00000000
--- a/go/patch/go-1.11.2/go4.patch
+++ /dev/null
@@ -1,199 +0,0 @@
-runtime, crypto/x509: add -target flag.
-
---- src/crypto/x509/x509_test.go
-+++ src/crypto/x509/x509_test.go
-@@ -13,29 +13,32 @@ import (
- "crypto/rsa"
- _ "crypto/sha256"
- _ "crypto/sha512"
- "crypto/x509/pkix"
- "encoding/asn1"
- "encoding/base64"
- "encoding/hex"
- "encoding/pem"
-+ "flag"
- "fmt"
- "internal/testenv"
- "math/big"
- "net"
- "net/url"
- "os/exec"
- "reflect"
- "runtime"
- "strings"
- "testing"
- "time"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
- func TestParsePKCS1PrivateKey(t *testing.T) {
- block, _ := pem.Decode([]byte(pemPrivateKey))
- priv, err := ParsePKCS1PrivateKey(block.Bytes)
- if err != nil {
- t.Errorf("Failed to parse private key: %s", err)
- return
- }
- if priv.PublicKey.N.Cmp(rsaPrivateKey.PublicKey.N) != 0 ||
-@@ -1089,17 +1092,23 @@ func TestParsePEMCRL(t *testing.T) {
- }
-
- // Can't check the signature here without a package cycle.
- }
-
- func TestImports(t *testing.T) {
- testenv.MustHaveGoRun(t)
-
-- if err := exec.Command(testenv.GoToolPath(t), "run", "x509_test_import.go").Run(); err != nil {
-+ var cmd *exec.Cmd
-+ if *target == "" {
-+ cmd = exec.Command(testenv.GoToolPath(t), "run", "x509_test_import.go")
-+ } else {
-+ cmd = exec.Command("go_"+*target, "run", "-exec", "go_"+*target+"_exec", "x509_test_import.go")
-+ }
-+ if err := cmd.Run(); err != nil {
- t.Errorf("failed to run x509_test_import.go: %s", err)
- }
- }
-
- const derCRLBase64 = "MIINqzCCDJMCAQEwDQYJKoZIhvcNAQEFBQAwVjEZMBcGA1UEAxMQUEtJIEZJTk1FQ0NBTklDQTEVMBMGA1UEChMMRklOTUVDQ0FOSUNBMRUwEwYDVQQLEwxGSU5NRUNDQU5JQ0ExCzAJBgNVBAYTAklUFw0xMTA1MDQxNjU3NDJaFw0xMTA1MDQyMDU3NDJaMIIMBzAhAg4Ze1od49Lt1qIXBydAzhcNMDkwNzE2MDg0MzIyWjAAMCECDl0HSL9bcZ1Ci/UHJ0DPFw0wOTA3MTYwODQzMTNaMAAwIQIOESB9tVAmX3cY7QcnQNAXDTA5MDcxNjA4NDUyMlowADAhAg4S1tGAQ3mHt8uVBydA1RcNMDkwODA0MTUyNTIyWjAAMCECDlQ249Y7vtC25ScHJ0DWFw0wOTA4MDQxNTI1MzdaMAAwIQIOISMop3NkA4PfYwcnQNkXDTA5MDgwNDExMDAzNFowADAhAg56/BMoS29KEShTBydA2hcNMDkwODA0MTEwMTAzWjAAMCECDnBp/22HPH5CSWoHJ0DbFw0wOTA4MDQxMDU0NDlaMAAwIQIOV9IP+8CD8bK+XAcnQNwXDTA5MDgwNDEwNTcxN1owADAhAg4v5aRz0IxWqYiXBydA3RcNMDkwODA0MTA1NzQ1WjAAMCECDlOU34VzvZAybQwHJ0DeFw0wOTA4MDQxMDU4MjFaMAAwIAINO4CD9lluIxcwBydBAxcNMDkwNzIyMTUzMTU5WjAAMCECDgOllfO8Y1QA7/wHJ0ExFw0wOTA3MjQxMTQxNDNaMAAwIQIOJBX7jbiCdRdyjgcnQUQXDTA5MDkxNjA5MzAwOFowADAhAg5iYSAgmDrlH/RZBydBRRcNMDkwOTE2MDkzMDE3WjAAMCECDmu6k6srP3jcMaQHJ0FRFw0wOTA4MDQxMDU2NDBaMAAwIQIOX8aHlO0V+WVH4QcnQVMXDTA5MDgwNDEwNTcyOVowADAhAg5flK2rg3NnsRgDBydBzhcNMTEwMjAxMTUzMzQ2WjAAMCECDg35yJDL1jOPTgoHJ0HPFw0xMTAyMDExNTM0MjZaMAAwIQIOMyFJ6+e9iiGVBQcnQdAXDTA5MDkxODEzMjAwNVowADAhAg5Emb/Oykucmn8fBydB1xcNMDkwOTIxMTAxMDQ3WjAAMCECDjQKCncV+MnUavMHJ0HaFw0wOTA5MjIwODE1MjZaMAAwIQIOaxiFUt3dpd+tPwcnQfQXDTEwMDYxODA4NDI1MVowADAhAg5G7P8nO0tkrMt7BydB9RcNMTAwNjE4MDg0MjMwWjAAMCECDmTCC3SXhmDRst4HJ0H2Fw0wOTA5MjgxMjA3MjBaMAAwIQIOHoGhUr/pRwzTKgcnQfcXDTA5MDkyODEyMDcyNFowADAhAg50wrcrCiw8mQmPBydCBBcNMTAwMjE2MTMwMTA2WjAAMCECDifWmkvwyhEqwEcHJ0IFFw0xMDAyMTYxMzAxMjBaMAAwIQIOfgPmlW9fg+osNgcnQhwXDTEwMDQxMzA5NTIwMFowADAhAg4YHAGuA6LgCk7tBydCHRcNMTAwNDEzMDk1MTM4WjAAMCECDi1zH1bxkNJhokAHJ0IsFw0xMDA0MTMwOTU5MzBaMAAwIQIOMipNccsb/wo2fwcnQi0XDTEwMDQxMzA5NTkwMFowADAhAg46lCmvPl4GpP6ABydCShcNMTAwMTE5MDk1MjE3WjAAMCECDjaTcaj+wBpcGAsHJ0JLFw0xMDAxMTkwOTUyMzRaMAAwIQIOOMC13EOrBuxIOQcnQloXDTEwMDIwMTA5NDcwNVowADAhAg5KmZl+krz4RsmrBydCWxcNMTAwMjAxMDk0NjQwWjAAMCECDmLG3zQJ/fzdSsUHJ0JiFw0xMDAzMDEwOTUxNDBaMAAwIQIOP39ksgHdojf4owcnQmMXDTEwMDMwMTA5NTExN1owADAhAg4L
DQzvWNRlD6v9BydCZBcNMTAwMzAxMDk0NjIyWjAAMCECDkmNfeclaFhIaaUHJ0JlFw0xMDAzMDEwOTQ2MDVaMAAwIQIOT/qWWfpH/m8NTwcnQpQXDTEwMDUxMTA5MTgyMVowADAhAg5m/ksYxvCEgJSvBydClRcNMTAwNTExMDkxODAxWjAAMCECDgvf3Ohq6JOPU9AHJ0KWFw0xMDA1MTEwOTIxMjNaMAAwIQIOKSPas10z4jNVIQcnQpcXDTEwMDUxMTA5MjEwMlowADAhAg4mCWmhoZ3lyKCDBydCohcNMTEwNDI4MTEwMjI1WjAAMCECDkeiyRsBMK0Gvr4HJ0KjFw0xMTA0MjgxMTAyMDdaMAAwIQIOa09b/nH2+55SSwcnQq4XDTExMDQwMTA4Mjk0NlowADAhAg5O7M7iq7gGplr1BydCrxcNMTEwNDAxMDgzMDE3WjAAMCECDjlT6mJxUjTvyogHJ0K1Fw0xMTAxMjcxNTQ4NTJaMAAwIQIODS/l4UUFLe21NAcnQrYXDTExMDEyNzE1NDgyOFowADAhAg5lPRA0XdOUF6lSBydDHhcNMTEwMTI4MTQzNTA1WjAAMCECDixKX4fFGGpENwgHJ0MfFw0xMTAxMjgxNDM1MzBaMAAwIQIORNBkqsPnpKTtbAcnQ08XDTEwMDkwOTA4NDg0MlowADAhAg5QL+EMM3lohedEBydDUBcNMTAwOTA5MDg0ODE5WjAAMCECDlhDnHK+HiTRAXcHJ0NUFw0xMDEwMTkxNjIxNDBaMAAwIQIOdBFqAzq/INz53gcnQ1UXDTEwMTAxOTE2MjA0NFowADAhAg4OjR7s8MgKles1BydDWhcNMTEwMTI3MTY1MzM2WjAAMCECDmfR/elHee+d0SoHJ0NbFw0xMTAxMjcxNjUzNTZaMAAwIQIOBTKv2ui+KFMI+wcnQ5YXDTEwMDkxNTEwMjE1N1owADAhAg49F3c/GSah+oRUBydDmxcNMTEwMTI3MTczMjMzWjAAMCECDggv4I61WwpKFMMHJ0OcFw0xMTAxMjcxNzMyNTVaMAAwIQIOXx/Y8sEvwS10LAcnQ6UXDTExMDEyODExMjkzN1owADAhAg5LSLbnVrSKaw/9BydDphcNMTEwMTI4MTEyOTIwWjAAMCECDmFFoCuhKUeACQQHJ0PfFw0xMTAxMTExMDE3MzdaMAAwIQIOQTDdFh2fSPF6AAcnQ+AXDTExMDExMTEwMTcxMFowADAhAg5B8AOXX61FpvbbBydD5RcNMTAxMDA2MTAxNDM2WjAAMCECDh41P2Gmi7PkwI4HJ0PmFw0xMDEwMDYxMDE2MjVaMAAwIQIOWUHGLQCd+Ale9gcnQ/0XDTExMDUwMjA3NTYxMFowADAhAg5Z2c9AYkikmgWOBydD/hcNMTEwNTAyMDc1NjM0WjAAMCECDmf/UD+/h8nf+74HJ0QVFw0xMTA0MTUwNzI4MzNaMAAwIQIOICvj4epy3MrqfwcnRBYXDTExMDQxNTA3Mjg1NlowADAhAg4bouRMfOYqgv4xBydEHxcNMTEwMzA4MTYyNDI1WjAAMCECDhebWHGoKiTp7pEHJ0QgFw0xMTAzMDgxNjI0NDhaMAAwIQIOX+qnxxAqJ8LtawcnRDcXDTExMDEzMTE1MTIyOFowADAhAg4j0fICqZ+wkOdqBydEOBcNMTEwMTMxMTUxMTQxWjAAMCECDhmXjsV4SUpWtAMHJ0RLFw0xMTAxMjgxMTI0MTJaMAAwIQIODno/w+zG43kkTwcnREwXDTExMDEyODExMjM1MlowADAhAg4b1gc88767Fr+LBydETxcNMTEwMTI4MTEwMjA4WjAAMCECDn+M3Pa1w2nyFeUHJ0RQFw0xMTAxMjgxMDU4NDVaMAAwIQIOaduoyIH61tqybAcnRJUXDTEwMTIxNTA5NDMyMlowADAhAg4nLqQPkyi3ESAKBydElhcNMTAxMjE1MDk0MzM2WjAA
MCECDi504NIMH8578gQHJ0SbFw0xMTAyMTQxNDA1NDFaMAAwIQIOGuaM8PDaC5u1egcnRJwXDTExMDIxNDE0MDYwNFowADAhAg4ehYq/BXGnB5PWBydEnxcNMTEwMjA0MDgwOTUxWjAAMCECDkSD4eS4FxW5H20HJ0SgFw0xMTAyMDQwODA5MjVaMAAwIQIOOCcb6ilYObt1egcnRKEXDTExMDEyNjEwNDEyOVowADAhAg58tISWCCwFnKGnBydEohcNMTEwMjA0MDgxMzQyWjAAMCECDn5rjtabY/L/WL0HJ0TJFw0xMTAyMDQxMTAzNDFaMAAwDQYJKoZIhvcNAQEFBQADggEBAGnF2Gs0+LNiYCW1Ipm83OXQYP/bd5tFFRzyz3iepFqNfYs4D68/QihjFoRHQoXEB0OEe1tvaVnnPGnEOpi6krwekquMxo4H88B5SlyiFIqemCOIss0SxlCFs69LmfRYvPPvPEhoXtQ3ZThe0UvKG83GOklhvGl6OaiRf4Mt+m8zOT4Wox/j6aOBK6cw6qKCdmD+Yj1rrNqFGg1CnSWMoD6S6mwNgkzwdBUJZ22BwrzAAo4RHa2Uy3ef1FjwD0XtU5N3uDSxGGBEDvOe5z82rps3E22FpAA8eYl8kaXtmWqyvYU0epp4brGuTxCuBMCAsxt/OjIjeNNQbBGkwxgfYA0="
-
- const pemCRLBase64 = "LS0tLS1CRUdJTiBYNTA5IENSTC0tLS0tDQpNSUlCOWpDQ0FWOENBUUV3RFFZSktvWklodmNOQVFFRkJRQXdiREVhTUJnR0ExVUVDaE1SVWxOQklGTmxZM1Z5DQphWFI1SUVsdVl5NHhIakFjQmdOVkJBTVRGVkpUUVNCUWRXSnNhV01nVW05dmRDQkRRU0IyTVRFdU1Dd0dDU3FHDQpTSWIzRFFFSkFSWWZjbk5oYTJWdmJuSnZiM1J6YVdkdVFISnpZWE5sWTNWeWFYUjVMbU52YlJjTk1URXdNakl6DQpNVGt5T0RNd1doY05NVEV3T0RJeU1Ua3lPRE13V2pDQmpEQktBaEVBckRxb2g5RkhKSFhUN09QZ3V1bjQrQmNODQpNRGt4TVRBeU1UUXlOekE1V2pBbU1Bb0dBMVVkRlFRRENnRUpNQmdHQTFVZEdBUVJHQTh5TURBNU1URXdNakUwDQpNalExTlZvd1BnSVJBTEd6blowOTVQQjVhQU9MUGc1N2ZNTVhEVEF5TVRBeU16RTBOVEF4TkZvd0dqQVlCZ05WDQpIUmdFRVJnUE1qQXdNakV3TWpNeE5EVXdNVFJhb0RBd0xqQWZCZ05WSFNNRUdEQVdnQlQxVERGNlVRTS9MTmVMDQpsNWx2cUhHUXEzZzltekFMQmdOVkhSUUVCQUlDQUlRd0RRWUpLb1pJaHZjTkFRRUZCUUFEZ1lFQUZVNUFzNk16DQpxNVBSc2lmYW9iUVBHaDFhSkx5QytNczVBZ2MwYld5QTNHQWR4dXI1U3BQWmVSV0NCamlQL01FSEJXSkNsQkhQDQpHUmNxNXlJZDNFakRrYUV5eFJhK2k2N0x6dmhJNmMyOUVlNks5cFNZd2ppLzdSVWhtbW5Qclh0VHhsTDBsckxyDQptUVFKNnhoRFJhNUczUUE0Q21VZHNITnZicnpnbUNZcHZWRT0NCi0tLS0tRU5EIFg1MDkgQ1JMLS0tLS0NCg0K"
-
---- src/runtime/crash_cgo_test.go
-+++ src/runtime/crash_cgo_test.go
-@@ -279,17 +279,17 @@ func testCgoPprof(t *testing.T, buildArg, runArg string) {
- }
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprogcgo", buildArg)
- if err != nil {
- t.Fatal(err)
- }
-
-- got, err := testenv.CleanCmdEnv(exec.Command(exe, runArg)).CombinedOutput()
-+ got, err := testenv.CleanCmdEnv(goExecCmd(exe, runArg)).CombinedOutput()
- if err != nil {
- if testenv.Builder() == "linux-amd64-alpine" {
- // See Issue 18243 and Issue 19938.
- t.Skipf("Skipping failing test on Alpine (golang.org/issue/18243). Ignoring error: %v", err)
- }
- t.Fatal(err)
- }
- fn := strings.TrimSpace(string(got))
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -17,16 +17,35 @@ import (
- "runtime"
- "strconv"
- "strings"
- "sync"
- "testing"
- "time"
- )
-
-+var target = flag.String("target", "", "if non empty, use 'go_target' to compile test files and 'go_target_exec' to run the binaries")
-+
-+func goCmd(t *testing.T) string {
-+ if *target != "" {
-+ return "go_" + *target
-+ }
-+ return testenv.GoToolPath(t)
-+}
-+
-+func goExecCmd(name string, arg ...string) *exec.Cmd {
-+ var cmd []string
-+ if *target != "" {
-+ cmd = append(cmd, "go_"+*target+"_exec")
-+ }
-+ cmd = append(cmd, name)
-+ cmd = append(cmd, arg...)
-+ return exec.Command(cmd[0], cmd[1:]...)
-+}
-+
- var toRemove []string
-
- func TestMain(m *testing.M) {
- status := m.Run()
- for _, file := range toRemove {
- os.RemoveAll(file)
- }
- os.Exit(status)
-@@ -50,17 +69,17 @@ func runTestProg(t *testing.T, binary, name string, env ...string) string {
-
- testenv.MustHaveGoBuild(t)
-
- exe, err := buildTestProg(t, binary)
- if err != nil {
- t.Fatal(err)
- }
-
-- cmd := testenv.CleanCmdEnv(exec.Command(exe, name))
-+ cmd := testenv.CleanCmdEnv(goExecCmd(exe, name))
- cmd.Env = append(cmd.Env, env...)
- if testing.Short() {
- cmd.Env = append(cmd.Env, "RUNTIME_TEST_SHORT=1")
- }
- var b bytes.Buffer
- cmd.Stdout = &b
- cmd.Stderr = &b
- if err := cmd.Start(); err != nil {
-@@ -125,17 +144,17 @@ func buildTestProg(t *testing.T, binary string, flags ...string) (string, error)
- name += "_" + strings.Join(flags, "_")
- }
- target, ok := testprog.target[name]
- if ok {
- return target.exe, target.err
- }
-
- exe := filepath.Join(testprog.dir, name+".exe")
-- cmd := exec.Command(testenv.GoToolPath(t), append([]string{"build", "-o", exe}, flags...)...)
-+ cmd := exec.Command(goCmd(t), append([]string{"build", "-o", exe}, flags...)...)
- cmd.Dir = "testdata/" + binary
- out, err := testenv.CleanCmdEnv(cmd).CombinedOutput()
- if err != nil {
- target.err = fmt.Errorf("building %s %v: %v\n%s", binary, flags, err, out)
- testprog.target[name] = target
- return "", target.err
- }
- target.exe = exe
-@@ -456,17 +475,17 @@ func TestPanicLoop(t *testing.T) {
- func TestMemPprof(t *testing.T) {
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
- t.Fatal(err)
- }
-
-- got, err := testenv.CleanCmdEnv(exec.Command(exe, "MemProf")).CombinedOutput()
-+ got, err := testenv.CleanCmdEnv(goExecCmd(exe, "MemProf")).CombinedOutput()
- if err != nil {
- t.Fatal(err)
- }
- fn := strings.TrimSpace(string(got))
- defer os.Remove(fn)
-
- for try := 0; try < 2; try++ {
- cmd := testenv.CleanCmdEnv(exec.Command(testenv.GoToolPath(t), "tool", "pprof", "-alloc_space", "-top"))
---- src/runtime/crash_unix_test.go
-+++ src/runtime/crash_unix_test.go
-@@ -244,17 +244,17 @@ func testPanicSystemstackInternal() {
- }
-
- func TestSignalExitStatus(t *testing.T) {
- testenv.MustHaveGoBuild(t)
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
- t.Fatal(err)
- }
-- err = testenv.CleanCmdEnv(exec.Command(exe, "SignalExitStatus")).Run()
-+ err = testenv.CleanCmdEnv(goExecCmd(exe, "SignalExitStatus")).Run()
- if err == nil {
- t.Error("test program succeeded unexpectedly")
- } else if ee, ok := err.(*exec.ExitError); !ok {
- t.Errorf("error (%v) has type %T; expected exec.ExitError", err, err)
- } else if ws, ok := ee.Sys().(syscall.WaitStatus); !ok {
- t.Errorf("error.Sys (%v) has type %T; expected syscall.WaitStatus", ee.Sys(), ee.Sys())
- } else if !ws.Signaled() || ws.Signal() != syscall.SIGTERM {
- t.Errorf("got %v; expected SIGTERM", ee)
diff --git a/go/patch/go-1.11.2/go5.patch b/go/patch/go-1.11.2/go5.patch
deleted file mode 100644
index c0807e9b..00000000
--- a/go/patch/go-1.11.2/go5.patch
+++ /dev/null
@@ -1,106 +0,0 @@
-diff --git src/runtime/crash_test.go src/runtime/crash_test.go
-index 81cf5df42d..3607992788 100644
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -219,22 +219,27 @@ func testDeadlock(t *testing.T, name string) {
- }
-
- func TestSimpleDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "SimpleDeadlock")
- }
-
- func TestInitDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "InitDeadlock")
- }
-
- func TestLockedDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "LockedDeadlock")
- }
-
- func TestLockedDeadlock2(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- testDeadlock(t, "LockedDeadlock2")
- }
-
- func TestGoexitDeadlock(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "GoexitDeadlock")
- want := "no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.Contains(output, want) {
-@@ -271,6 +276,7 @@ panic: again
- }
-
- func TestGoexitCrash(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "GoexitExit")
- want := "no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.Contains(output, want) {
-@@ -329,6 +335,7 @@ func TestBreakpoint(t *testing.T) {
- }
-
- func TestGoexitInPanic(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- // see issue 8774: this code used to trigger an infinite recursion
- output := runTestProg(t, "testprog", "GoexitInPanic")
- want := "fatal error: no goroutines (main called runtime.Goexit) - deadlock!"
-@@ -393,6 +400,7 @@ func TestPanicAfterGoexit(t *testing.T) {
- }
-
- func TestRecoveredPanicAfterGoexit(t *testing.T) {
-+ t.Skip("deadlock detection fails with external linker")
- output := runTestProg(t, "testprog", "RecoveredPanicAfterGoexit")
- want := "fatal error: no goroutines (main called runtime.Goexit) - deadlock!"
- if !strings.HasPrefix(output, want) {
-diff --git src/runtime/proc_test.go src/runtime/proc_test.go
-index ad325987ac..d9d6feb498 100644
---- src/runtime/proc_test.go
-+++ src/runtime/proc_test.go
-@@ -373,9 +373,10 @@ func TestGCFairness2(t *testing.T) {
-
- func TestNumGoroutine(t *testing.T) {
- output := runTestProg(t, "testprog", "NumGoroutine")
-- want := "1\n"
-- if output != want {
-- t.Fatalf("want %q, got %q", want, output)
-+ want1 := "1\n"
-+ want2 := "2\n"
-+ if output != want1 && out != want2 {
-+ t.Fatalf("want %q, got %q", want1, output)
- }
-
- buf := make([]byte, 1<<20)
-diff --git test/fixedbugs/bug429_run.go test/fixedbugs/bug429_run.go
-index 30298de97b..3301a11ad9 100644
---- test/fixedbugs/bug429_run.go
-+++ test/fixedbugs/bug429_run.go
-@@ -1,5 +1,5 @@
- // +build !nacl,!js
--// runtarget
-+// skip
-
- // Copyright 2014 The Go Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style
-diff --git test/goprint.go test/goprint.go
-index 57eeac53a8..5951d4694f 100644
---- test/goprint.go
-+++ test/goprint.go
-@@ -8,14 +8,9 @@
-
- package main
-
--import (
-- "runtime"
-- "time"
--)
-+import "time"
-
- func main() {
- go println(42, true, false, true, 1.5, "world", (chan int)(nil), []int(nil), (map[string]int)(nil), (func())(nil), byte(255))
-- for runtime.NumGoroutine() > 1 {
-- time.Sleep(10*time.Millisecond)
-- }
-+ time.Sleep(100*time.Millisecond)
- }
diff --git a/go/patch/go-1.11.2/go6.patch b/go/patch/go-1.11.2/go6.patch
deleted file mode 100644
index 04134c77..00000000
--- a/go/patch/go-1.11.2/go6.patch
+++ /dev/null
@@ -1,149 +0,0 @@
-diff --git src/encoding/gob/encoder_test.go src/encoding/gob/encoder_test.go
-index dc9bbcf35d..10c30a91af 100644
---- src/encoding/gob/encoder_test.go
-+++ src/encoding/gob/encoder_test.go
-@@ -1131,13 +1131,7 @@ func TestBadData(t *testing.T) {
-
- // TestHugeWriteFails tests that enormous messages trigger an error.
- func TestHugeWriteFails(t *testing.T) {
-- if runtime.GOARCH == "wasm" {
-- t.Skip("out of memory on wasm")
-- }
-- if testing.Short() {
-- // Requires allocating a monster, so don't do this from all.bash.
-- t.Skip("skipping huge allocation in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- huge := make([]byte, tooBig)
- huge[0] = 7 // Make sure it's not all zeros.
- buf := new(bytes.Buffer)
-diff --git src/runtime/crash_cgo_test.go src/runtime/crash_cgo_test.go
-index 9ff4bbe121..5fa1340cb2 100644
---- src/runtime/crash_cgo_test.go
-+++ src/runtime/crash_cgo_test.go
-@@ -238,14 +238,7 @@ func TestCgoCCodeSIGPROF(t *testing.T) {
- }
-
- func TestCgoCrashTraceback(t *testing.T) {
-- t.Parallel()
-- switch platform := runtime.GOOS + "/" + runtime.GOARCH; platform {
-- case "darwin/amd64":
-- case "linux/amd64":
-- case "linux/ppc64le":
-- default:
-- t.Skipf("not yet supported on %s", platform)
-- }
-+ t.Skipf("skip running remotely")
- got := runTestProg(t, "testprogcgo", "CrashTraceback")
- for i := 1; i <= 3; i++ {
- if !strings.Contains(got, fmt.Sprintf("cgo symbolizer:%d", i)) {
-@@ -264,10 +257,7 @@ func TestCgoTracebackContext(t *testing.T) {
- }
-
- func testCgoPprof(t *testing.T, buildArg, runArg string) {
-- t.Parallel()
-- if runtime.GOOS != "linux" || (runtime.GOARCH != "amd64" && runtime.GOARCH != "ppc64le") {
-- t.Skipf("not yet supported on %s/%s", runtime.GOOS, runtime.GOARCH)
-- }
-+ t.Skipf("skip pprof test")
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprogcgo", buildArg)
-diff --git src/runtime/crash_test.go src/runtime/crash_test.go
-index 3607992788..e53ffb6a81 100644
---- src/runtime/crash_test.go
-+++ src/runtime/crash_test.go
-@@ -481,6 +481,7 @@ func TestPanicLoop(t *testing.T) {
- }
-
- func TestMemPprof(t *testing.T) {
-+ t.Skipf("skip pprof test")
- testenv.MustHaveGoRun(t)
-
- exe, err := buildTestProg(t, "testprog")
-diff --git src/runtime/crash_unix_test.go src/runtime/crash_unix_test.go
-index 02891ec1ad..fd2723f16e 100644
---- src/runtime/crash_unix_test.go
-+++ src/runtime/crash_unix_test.go
-@@ -174,9 +174,7 @@ func TestPanicSystemstack(t *testing.T) {
- // The GOTRACEBACK=crash handler takes 0.1 seconds even if
- // it's not writing a core file and potentially much longer if
- // it is. Skip in short mode.
-- if testing.Short() {
-- t.Skip("Skipping in short mode (GOTRACEBACK=crash is slow)")
-- }
-+ t.Skip("Skipping (GOTRACEBACK=crash hangs on arm)")
-
- if runtime.Sigisblocked(int(syscall.SIGQUIT)) {
- t.Skip("skipping; SIGQUIT is blocked, see golang.org/issue/19196")
-@@ -244,6 +242,7 @@ func testPanicSystemstackInternal() {
- }
-
- func TestSignalExitStatus(t *testing.T) {
-+ t.Skipf("skip running remotely")
- testenv.MustHaveGoBuild(t)
- exe, err := buildTestProg(t, "testprog")
- if err != nil {
-diff --git src/runtime/fastlog2_test.go src/runtime/fastlog2_test.go
-index ae0f40b2bb..a93933d7ac 100644
---- src/runtime/fastlog2_test.go
-+++ src/runtime/fastlog2_test.go
-@@ -16,11 +16,7 @@ func TestFastLog2(t *testing.T) {
- const randomBitCount = 26
- var e float64
-
-- inc := 1
-- if testing.Short() {
-- // Check 1K total values, down from 64M.
-- inc = 1 << 16
-- }
-+ inc := 1 << 16
- for i := 1; i < 1<<randomBitCount; i += inc {
- l, fl := math.Log2(float64(i)), runtime.Fastlog2(float64(i))
- d := l - fl
-diff --git src/runtime/hash_test.go src/runtime/hash_test.go
-index 7b8ebc4f3c..9fc5b995fc 100644
---- src/runtime/hash_test.go
-+++ src/runtime/hash_test.go
-@@ -164,9 +164,7 @@ func TestSmhasherTwoNonzero(t *testing.T) {
- if GOARCH == "wasm" {
- t.Skip("Too slow on wasm")
- }
-- if testing.Short() {
-- t.Skip("Skipping in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- h := newHashSet()
- for n := 2; n <= 16; n++ {
- twoNonZero(h, n)
-@@ -273,9 +271,7 @@ func TestSmhasherPermutation(t *testing.T) {
- if GOARCH == "wasm" {
- t.Skip("Too slow on wasm")
- }
-- if testing.Short() {
-- t.Skip("Skipping in short mode")
-- }
-+ t.Skip("skipping test due to huge memory requirement")
- permutation(t, []uint32{0, 1, 2, 3, 4, 5, 6, 7}, 8)
- permutation(t, []uint32{0, 1 << 29, 2 << 29, 3 << 29, 4 << 29, 5 << 29, 6 << 29, 7 << 29}, 8)
- permutation(t, []uint32{0, 1}, 20)
-diff --git src/runtime/pprof/pprof_test.go src/runtime/pprof/pprof_test.go
-index 44d514393e..f46f00894c 100644
---- src/runtime/pprof/pprof_test.go
-+++ src/runtime/pprof/pprof_test.go
-@@ -283,14 +283,7 @@ func profileOk(t *testing.T, need []string, prof bytes.Buffer, duration time.Dur
- func TestCPUProfileWithFork(t *testing.T) {
- testenv.MustHaveExec(t)
-
-- heap := 1 << 30
-- if runtime.GOOS == "android" {
-- // Use smaller size for Android to avoid crash.
-- heap = 100 << 20
-- }
-- if testing.Short() {
-- heap = 100 << 20
-- }
-+ heap = 100 << 20
- // This makes fork slower.
- garbage := make([]byte, heap)
- // Need to touch the slice, otherwise it won't be paged in.
diff --git a/go/patch/go-1.10.2/go0.patch b/go/patch/go0.patch
index c539865e..c539865e 100644
--- a/go/patch/go-1.10.2/go0.patch
+++ b/go/patch/go0.patch
diff --git a/go/patch/go-1.10.2/go1.patch b/go/patch/go1.patch
index e32268ac..e32268ac 100644
--- a/go/patch/go-1.10.2/go1.patch
+++ b/go/patch/go1.patch
diff --git a/go/patch/go-1.10.2/go2.patch b/go/patch/go2.patch
index 20f04791..20f04791 100644
--- a/go/patch/go-1.10.2/go2.patch
+++ b/go/patch/go2.patch
diff --git a/go/patch/go-1.10.2/go3.patch b/go/patch/go3.patch
index 62247a03..62247a03 100644
--- a/go/patch/go-1.10.2/go3.patch
+++ b/go/patch/go3.patch
diff --git a/go/patch/go-1.10.2/go4.patch b/go/patch/go4.patch
index 290de390..290de390 100644
--- a/go/patch/go-1.10.2/go4.patch
+++ b/go/patch/go4.patch
diff --git a/go/patch/go-1.10.2/go5.patch b/go/patch/go5.patch
index 7189c89e..7189c89e 100644
--- a/go/patch/go-1.10.2/go5.patch
+++ b/go/patch/go5.patch
diff --git a/go/patch/go-1.10.2/go6.patch b/go/patch/go6.patch
index 9f32ed84..9f32ed84 100644
--- a/go/patch/go-1.10.2/go6.patch
+++ b/go/patch/go6.patch
diff --git a/heat_map.py b/heat_map.py
new file mode 100755
index 00000000..39e3f8fd
--- /dev/null
+++ b/heat_map.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper to generate heat maps for chrome."""
+
+from __future__ import print_function
+
+import argparse
+import shutil
+import os
+import sys
+import tempfile
+from sets import Set
+
+from cros_utils import command_executer
+
+
+def IsARepoRoot(directory):
+ """Returns True if directory is the root of a repo checkout."""
+ return os.path.exists(os.path.join(directory, '.repo'))
+
+
+class HeatMapProducer(object):
+ """Class to produce heat map."""
+
+ def __init__(self, chromeos_root, perf_data, page_size, binary):
+ self.chromeos_root = os.path.realpath(chromeos_root)
+ self.perf_data = os.path.realpath(perf_data)
+ self.page_size = page_size
+ self.dir = os.path.dirname(os.path.realpath(__file__))
+ self.binary = binary
+ self.tempDir = ''
+ self.ce = command_executer.GetCommandExecuter()
+ self.loading_address = None
+ self.temp_perf = ''
+ self.temp_perf_inchroot = ''
+ self.perf_report = ''
+
+ def copyFileToChroot(self):
+ self.tempDir = tempfile.mkdtemp(prefix=os.path.join(self.chromeos_root,
+ 'src/'))
+ self.temp_perf = os.path.join(self.tempDir, 'perf.data')
+ shutil.copy2(self.perf_data, self.temp_perf)
+ self.temp_perf_inchroot = os.path.join('~/trunk/src',
+ os.path.basename(self.tempDir))
+
+ def getPerfReport(self):
+ cmd = ('cd %s; perf report -D -i perf.data > perf_report.txt' %
+ self.temp_perf_inchroot)
+ retval = self.ce.ChrootRunCommand(self.chromeos_root, cmd)
+ if retval:
+ raise RuntimeError('Failed to generate perf report')
+ self.perf_report = os.path.join(self.tempDir, 'perf_report.txt')
+
+ def getBinaryBaseAddress(self):
+ cmd = 'grep PERF_RECORD_MMAP %s | grep "%s$"' % (self.perf_report,
+ self.binary)
+ retval, output, _ = self.ce.RunCommandWOutput(cmd)
+ if retval:
+ raise RuntimeError('Failed to run grep to get base address')
+ baseAddresses = Set()
+ for line in output.strip().split('\n'):
+ head = line.split('[')[2]
+ address = head.split('(')[0]
+ baseAddresses.add(address)
+ if len(baseAddresses) > 1:
+ raise RuntimeError(
+ 'Multiple base address found, please disable ASLR and collect '
+ 'profile again')
+ if not len(baseAddresses):
+ raise RuntimeError('Could not find the base address in the profile')
+ self.loading_address = baseAddresses.pop()
+
+ def RemoveFiles(self):
+ shutil.rmtree(self.tempDir)
+ if os.path.isfile(os.path.join(os.getcwd(), 'out.txt')):
+ os.remove(os.path.join(os.getcwd(), 'out.txt'))
+ if os.path.isfile(os.path.join(os.getcwd(), 'inst-histo.txt')):
+ os.remove(os.path.join(os.getcwd(), 'inst-histo.txt'))
+
+ def getHeatmap(self):
+ if not self.loading_address:
+ return
+ heatmap_script = os.path.join(self.dir, 'perf-to-inst-page.sh')
+ cmd = '{0} {1} {2} {3} {4}'.format(heatmap_script, self.binary,
+ self.perf_report, self.loading_address,
+ self.page_size)
+ retval = self.ce.RunCommand(cmd)
+ if retval:
+ raise RuntimeError('Failed to run script to generate heatmap')
+
+
+def main(argv):
+ """Parse the options.
+
+ Args:
+ argv: The options with which this script was invoked.
+
+ Returns:
+ 0 unless an exception is raised.
+ """
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ '--chromeos_root',
+ dest='chromeos_root',
+ required=True,
+ help='ChromeOS root to use for generate heatmaps.')
+ parser.add_argument(
+ '--perf_data', dest='perf_data', required=True, help='The raw perf data.')
+ parser.add_argument(
+ '--binary',
+ dest='binary',
+ required=False,
+ help='The name of the binary.',
+ default='chrome')
+ parser.add_argument(
+ '--page_size',
+ dest='page_size',
+ required=False,
+ help='The page size for heat maps.',
+ default=4096)
+ options = parser.parse_args(argv)
+
+ if not IsARepoRoot(options.chromeos_root):
+ parser.error('% does not contain .repo dir.' % options.chromeos_root)
+
+ if not os.path.isfile(options.perf_data):
+ parser.error('Cannot find perf_data: %s.' % options.perf_data)
+
+ heatmap_producer = HeatMapProducer(options.chromeos_root, options.perf_data,
+ options.page_size, options.binary)
+ try:
+ heatmap_producer.copyFileToChroot()
+ heatmap_producer.getPerfReport()
+ heatmap_producer.getBinaryBaseAddress()
+ heatmap_producer.getHeatmap()
+ print('\nheat map and time histgram genereated in the current directory '
+ 'with name heat_map.png and timeline.png accordingly.')
+ except RuntimeError, e:
+ print(e)
+ finally:
+ heatmap_producer.RemoveFiles()
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/heatmaps/heat_map.py b/heatmaps/heat_map.py
deleted file mode 100755
index 2fd742d2..00000000
--- a/heatmaps/heat_map.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Wrapper to generate heat maps for chrome."""
-
-from __future__ import print_function
-
-import argparse
-import os
-import shutil
-import sys
-import tempfile
-
-from cros_utils import command_executer
-import heatmap_generator
-
-
-def IsARepoRoot(directory):
- """Returns True if directory is the root of a repo checkout."""
- return os.path.exists(
- os.path.join(os.path.realpath(os.path.expanduser(directory)), '.repo'))
-
-
-class HeatMapProducer(object):
- """Class to produce heat map."""
-
- def __init__(self,
- chromeos_root,
- perf_data,
- hugepage,
- binary,
- title,
- logger=None):
- self.chromeos_root = os.path.realpath(os.path.expanduser(chromeos_root))
- self.perf_data = os.path.realpath(os.path.expanduser(perf_data))
- self.hugepage = hugepage
- self.dir = os.path.dirname(os.path.realpath(__file__))
- self.binary = binary
- self.ce = command_executer.GetCommandExecuter()
- self.temp_dir = ''
- self.temp_perf_inchroot = ''
- self.temp_dir_created = False
- self.perf_report = ''
- self.title = title
- self.logger = logger
-
- def _EnsureFileInChroot(self):
- chroot_prefix = os.path.join(self.chromeos_root, 'chroot')
- if self.perf_data.startswith(chroot_prefix):
- # If the path to perf_data starts with the same chromeos_root, assume
- # it's in the chromeos_root so no need for temporary directory and copy.
- self.temp_dir = self.perf_data.replace('perf.data', '')
- self.temp_perf_inchroot = self.temp_dir.replace(chroot_prefix, '')
-
- else:
- # Otherwise, create a temporary directory and copy perf.data into chroot.
- self.temp_dir = tempfile.mkdtemp(
- prefix=os.path.join(self.chromeos_root, 'src/'))
- temp_perf = os.path.join(self.temp_dir, 'perf.data')
- shutil.copy2(self.perf_data, temp_perf)
- self.temp_perf_inchroot = os.path.join('~/trunk/src',
- os.path.basename(self.temp_dir))
- self.temp_dir_created = True
-
- def _GeneratePerfReport(self):
- cmd = ('cd %s && perf report -D -i perf.data > perf_report.txt' %
- self.temp_perf_inchroot)
- retval = self.ce.ChrootRunCommand(self.chromeos_root, cmd)
- if retval:
- raise RuntimeError('Failed to generate perf report')
- self.perf_report = os.path.join(self.temp_dir, 'perf_report.txt')
-
- def _GetHeatMap(self, top_n_pages):
- generator = heatmap_generator.HeatmapGenerator(
- perf_report=self.perf_report,
- page_size=4096,
- hugepage=self.hugepage,
- title=self.title)
- generator.draw()
- # Analyze top N hottest symbols with the binary, if provided
- if self.binary:
- generator.analyze(self.binary, top_n_pages)
-
- def _RemoveFiles(self):
- files = [
- 'out.txt', 'inst-histo.txt', 'inst-histo-hp.txt', 'inst-histo-sp.txt'
- ]
- for f in files:
- if os.path.exists(f):
- os.remove(f)
-
- def Run(self, top_n_pages):
- try:
- self._EnsureFileInChroot()
- self._GeneratePerfReport()
- self._GetHeatMap(top_n_pages)
- finally:
- self._RemoveFiles()
- msg = ('heat map and time histogram genereated in the current '
- 'directory with name heat_map.png and timeline.png '
- 'accordingly.')
- if self.binary:
- msg += ('\nThe hottest %d pages inside and outside hugepage '
- 'is symbolized and saved to addr2symbol.txt' % top_n_pages)
- if self.logger:
- self.logger.LogOutput(msg)
- else:
- print(msg)
-
-
-def main(argv):
- """Parse the options.
-
- Args:
- argv: The options with which this script was invoked.
-
- Returns:
- 0 unless an exception is raised.
- """
- parser = argparse.ArgumentParser()
-
- parser.add_argument(
- '--chromeos_root',
- dest='chromeos_root',
- required=True,
- help='ChromeOS root to use for generate heatmaps.')
- parser.add_argument(
- '--perf_data',
- dest='perf_data',
- required=True,
- help='The raw perf data. Must be collected with -e instructions while '
- 'disabling ASLR.')
- parser.add_argument(
- '--binary',
- dest='binary',
- help='The path to the Chrome binary. Only useful if want to print '
- 'symbols on hottest pages',
- default=None)
- parser.add_argument(
- '--top_n',
- dest='top_n',
- type=int,
- default=10,
- help='Print out top N hottest pages within/outside huge page range. '
- 'Must be used with --hugepage and --binary. (Default: %(default)s)')
- parser.add_argument(
- '--title', dest='title', help='Title of the heatmap', default='')
- parser.add_argument(
- '--hugepage',
- dest='hugepage',
- help='A range of addresses (start,end) where huge page starts and ends'
- ' in text section, separated by a comma.'
- ' Used to differentiate regions in heatmap.'
- ' Example: --hugepage=0,4096'
- ' If not specified, no effect on the heatmap.',
- default=None)
-
- options = parser.parse_args(argv)
-
- if not IsARepoRoot(options.chromeos_root):
- parser.error('%s does not contain .repo dir.' % options.chromeos_root)
-
- if not os.path.isfile(options.perf_data):
- parser.error('Cannot find perf_data: %s.' % options.perf_data)
-
- hugepage_range = None
- if options.hugepage:
- hugepage_range = options.hugepage.split(',')
- if len(hugepage_range) != 2 or \
- int(hugepage_range[0]) > int(hugepage_range[1]):
- parser.error('Wrong format of hugepage range: %s' % options.hugepage)
- hugepage_range = [int(x) for x in hugepage_range]
-
- heatmap_producer = HeatMapProducer(options.chromeos_root, options.perf_data,
- hugepage_range, options.binary,
- options.title)
-
- heatmap_producer.Run(options.top_n)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/heatmaps/heat_map_test.py b/heatmaps/heat_map_test.py
deleted file mode 100755
index 21f90d41..00000000
--- a/heatmaps/heat_map_test.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for heat_map.py."""
-
-from __future__ import print_function
-
-import mock
-import unittest
-
-import os
-
-from cros_utils import command_executer
-
-import heat_map
-
-
-def make_heatmap(chromeos_root='/path/to/fake/chromeos_root/',
- perf_data='/any_path/perf.data'):
- return heat_map.HeatMapProducer(chromeos_root, perf_data, None, None, '')
-
-
-def fake_mkdtemp(prefix):
- """Mock tempfile.mkdtemp() by just create a pathname."""
- return prefix + 'random_dir'
-
-
-def fake_parser_error(_, msg):
- """Redirect parser.error() to exception."""
- raise Exception(msg)
-
-
-def fake_generate_perf_report_exception(_):
- raise Exception
-
-
-class HeatmapTest(unittest.TestCase):
- """All of our tests for heat_map."""
-
- #pylint: disable=protected-access
- @mock.patch('shutil.copy2')
- @mock.patch('tempfile.mkdtemp')
- def test_EnsureFileInChrootAlreadyInside(self, mock_mkdtemp, mock_copy):
- perf_data_inchroot = (
- '/path/to/fake/chromeos_root/chroot/inchroot_path/perf.data')
- heatmap = make_heatmap(perf_data=perf_data_inchroot)
- heatmap._EnsureFileInChroot()
- self.assertFalse(heatmap.temp_dir_created)
- self.assertEqual(heatmap.temp_dir,
- '/path/to/fake/chromeos_root/chroot/inchroot_path/')
- self.assertEqual(heatmap.temp_perf_inchroot, '/inchroot_path/')
- mock_mkdtemp.assert_not_called()
- mock_copy.assert_not_called()
-
- @mock.patch('shutil.copy2')
- @mock.patch('tempfile.mkdtemp', fake_mkdtemp)
- def test_EnsureFileInChrootOutsideNeedCopy(self, mock_copy):
- heatmap = make_heatmap()
- heatmap._EnsureFileInChroot()
- self.assertTrue(heatmap.temp_dir_created)
- self.assertEqual(mock_copy.call_count, 1)
- self.assertEqual(heatmap.temp_dir,
- '/path/to/fake/chromeos_root/src/random_dir')
- self.assertEqual(heatmap.temp_perf_inchroot, '~/trunk/src/random_dir')
-
- @mock.patch.object(command_executer.CommandExecuter, 'ChrootRunCommand')
- def test_GeneratePerfReport(self, mock_ChrootRunCommand):
- heatmap = make_heatmap()
- heatmap.temp_dir = '/fake/chroot/inchroot_path/'
- heatmap.temp_perf_inchroot = '/inchroot_path/'
- mock_ChrootRunCommand.return_value = 0
- heatmap._GeneratePerfReport()
- cmd = ('cd %s && perf report -D -i perf.data > perf_report.txt' %
- heatmap.temp_perf_inchroot)
- mock_ChrootRunCommand.assert_called_with(heatmap.chromeos_root, cmd)
- self.assertEqual(mock_ChrootRunCommand.call_count, 1)
- self.assertEqual(heatmap.perf_report,
- '/fake/chroot/inchroot_path/perf_report.txt')
-
- @mock.patch('heatmap_generator.HeatmapGenerator')
- def test_GetHeatMap(self, mock_heatmap_generator):
- heatmap = make_heatmap()
- heatmap._GetHeatMap(10)
- self.assertTrue(mock_heatmap_generator.called)
-
- @mock.patch.object(heat_map.HeatMapProducer, '_EnsureFileInChroot')
- @mock.patch.object(heat_map.HeatMapProducer, '_GeneratePerfReport')
- @mock.patch.object(heat_map.HeatMapProducer, '_GetHeatMap')
- @mock.patch.object(heat_map.HeatMapProducer, '_RemoveFiles')
- def test_Run(self, mock_remove_files, mock_get_heatmap,
- mock_generate_perf_report, mock_ensure_file_in_chroot):
- heatmap = make_heatmap()
- heatmap.Run(10)
- mock_ensure_file_in_chroot.assert_called_once_with()
- mock_generate_perf_report.assert_called_once_with()
- mock_get_heatmap.assert_called_once_with(10)
- mock_remove_files.assert_called_once_with()
-
- @mock.patch.object(heat_map.HeatMapProducer, '_EnsureFileInChroot')
- @mock.patch.object(
- heat_map.HeatMapProducer,
- '_GeneratePerfReport',
- new=fake_generate_perf_report_exception)
- @mock.patch.object(heat_map.HeatMapProducer, '_GetHeatMap')
- @mock.patch.object(heat_map.HeatMapProducer, '_RemoveFiles')
- @mock.patch('__builtin__.print')
- def test_Run_with_exception(self, mock_print, mock_remove_files,
- mock_get_heatmap, mock_ensure_file_in_chroot):
- heatmap = make_heatmap()
- with self.assertRaises(Exception):
- heatmap.Run(10)
- mock_ensure_file_in_chroot.assert_called_once_with()
- mock_get_heatmap.assert_not_called()
- mock_remove_files.assert_called_once_with()
- mock_print.assert_not_called()
-
- @mock.patch('argparse.ArgumentParser.error', fake_parser_error)
- @mock.patch.object(os.path, 'isfile')
- @mock.patch.object(heat_map, 'IsARepoRoot')
- def test_main_arg_format(self, mock_IsARepoRoot, mock_isfile):
- """Test wrong arg format are detected."""
- args = ['--chromeos_root=/fake/chroot/', '--perf_data=/path/to/perf.data']
-
- # Test --chromeos_root format
- mock_IsARepoRoot.return_value = False
- with self.assertRaises(Exception) as msg:
- heat_map.main(args)
- self.assertIn('does not contain .repo dir.', str(msg.exception))
-
- # Test --perf_data format
- mock_IsARepoRoot.return_value = True
- mock_isfile.return_value = False
- with self.assertRaises(Exception) as msg:
- heat_map.main(args)
- self.assertIn('Cannot find perf_data', str(msg.exception))
-
- # Test --hugepage format
- mock_isfile.return_value = True
- args.append('--hugepage=0')
- with self.assertRaises(Exception) as msg:
- heat_map.main(args)
- self.assertIn('Wrong format of hugepage range', str(msg.exception))
-
- # Test --hugepage parse
- args[-1] = '--hugepage=0,4096'
- heat_map.HeatMapProducer = mock.MagicMock()
- heat_map.main(args)
- heat_map.HeatMapProducer.assert_called_with(
- '/fake/chroot/', '/path/to/perf.data', [0, 4096], None, '')
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/heatmaps/heatmap_generator.py b/heatmaps/heatmap_generator.py
deleted file mode 100644
index 42fd6352..00000000
--- a/heatmaps/heatmap_generator.py
+++ /dev/null
@@ -1,468 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Python module to draw heat map for Chrome
-
-heat map is a histogram used to analyze the locality of function layout.
-
-This module is used by heat_map.py. HeatmapGenerator is a class to
-generate data for drawing heat maps (the actual drawing of heat maps is
-performed by another script perf-to-inst-page.sh). It can also analyze
-the symbol names in hot pages.
-"""
-
-from __future__ import print_function
-
-import bisect
-import collections
-import os
-import pipes
-import subprocess
-
-from cros_utils import command_executer
-
-HugepageRange = collections.namedtuple('HugepageRange', ['start', 'end'])
-
-
-class MMap(object):
- """Class to store mmap information in perf report.
-
- We assume ASLR is disabled, so MMap for all Chrome is assumed to be
- the same. This class deals with the case hugepage creates several
- mmaps for Chrome but should be merged together. In these case, we
- assume the first MMAP is not affected by the bug and use the MMAP.
- """
-
- def __init__(self, addr, size, offset):
- self.start_address = addr
- self.size = size
- self.offset = offset
-
- def __str__(self):
- return '(%x, %x, %x)' % (self.start_address, self.size, self.offset)
-
- def merge(self, mmap):
- # This function should not be needed, since we should only have
- # one MMAP on Chrome of each process. This function only deals with
- # images that is affected by http://crbug.com/931465.
-
- # This function is only checking a few conditions to make sure
- # the bug is within our expectation.
-
- if self.start_address == mmap.start_address:
- assert self.size >= mmap.size, \
- 'Original MMAP size(%x) is smaller than the forked process(%x).' % (
- self.size, mmap.size)
- # The case that the MMAP is forked from the previous process
- # No need to do anything, OR
- # The case where hugepage causes a small Chrome mmap.
- # In this case, we use the prior MMAP for the whole Chrome
- return
-
- assert self.start_address < mmap.start_address, \
- 'Original MMAP starting address(%x) is larger than the forked' \
- 'process(%x).' % (self.start_address, mmap.start_address)
-
- assert self.start_address + self.size >= mmap.start_address + mmap.size, \
- 'MMAP of the forked process exceeds the end of original MMAP.'
-
-
-class HeatmapGenerator(object):
- """Class to generate heat map with a perf report, containing mmaps and
-
- samples. This class contains two interfaces with other modules:
- draw() and analyze().
-
- draw() draws a heatmap with the sample information given in the perf report
- analyze() prints out the symbol names in hottest pages with the given
- chrome binary
- """
-
- def __init__(self,
- perf_report,
- page_size,
- hugepage,
- title,
- log_level='verbose'):
- self.perf_report = perf_report
- # Pick 1G as a relatively large number. All addresses less than it will
- # be recorded. The actual heatmap will show up to a boundary of the
- # largest address in text segment.
- self.max_addr = 1024 * 1024 * 1024
- self.ce = command_executer.GetCommandExecuter(log_level=log_level)
- self.dir = os.path.dirname(os.path.realpath(__file__))
- with open(perf_report) as f:
- self.perf_report_contents = f.readlines()
- # Write histogram results to a text file, in order to use gnu plot to draw
- self.hist_temp_output = open('out.txt', 'w')
- self.processes = {}
- self.deleted_processes = {}
- self.count = 0
- if hugepage:
- self.hugepage = HugepageRange(start=hugepage[0], end=hugepage[1])
- else:
- self.hugepage = None
- self.title = title
- self.symbol_addresses = []
- self.symbol_names = []
- self.page_size = page_size
-
- def _parse_perf_sample(self, line):
- # In a perf report, generated with -D, a PERF_RECORD_SAMPLE command should
- # look like this: TODO: some arguments are unknown
- #
- # cpuid cycle unknown [unknown]: PERF_RECORD_SAMPLE(IP, 0x2): pid/tid:
- # 0xaddr period: period addr: addr
- # ... thread: threadname:tid
- # ...... dso: process
- #
- # This is an example:
- # 1 136712833349 0x6a558 [0x30]: PERF_RECORD_SAMPLE(IP, 0x2): 5227/5227:
- # 0x55555683b810 period: 372151 addr: 0
- # ... thread: chrome:5227
- # ...... dso: /opt/google/chrome/chrome
- #
- # For this function, the 7th argument (args[6]) after spltting with spaces
- # is pid/tid. We use the combination of the two as the pid.
- # Also, we add an assertion here to check the tid in the 7th argument(
- # args[6]) and the 15th argument(arg[14]) are the same
- #
- # The function returns the ((pid,tid), address) pair if the sampling
- # is on Chrome. Otherwise, return (None, None) pair.
-
- if 'thread: chrome' not in line or \
- 'dso: /opt/google/chrome/chrome' not in line:
- return None, None
- args = line.split(' ')
- pid_raw = args[6].split('/')
- assert pid_raw[1][:-1] == args[14].split(':')[1][:-1], \
- 'TID in %s of sample is not the same: %s/%s' % (
- line[:-1], pid_raw[1][:-1], args[14].split(':')[1][:-1])
- key = (int(pid_raw[0]), int(pid_raw[1][:-1]))
- address = int(args[7], base=16)
- return key, address
-
- def _parse_perf_record(self, line):
- # In a perf report, generated with -D, a PERF_RECORD_MMAP2 command should
- # look like this: TODO: some arguments are unknown
- #
- # cpuid cycle unknown [unknown]: PERF_RECORD_MMAP2 pid/tid:
- # [0xaddr(0xlength) @ pageoffset maj:min ino ino_generation]:
- # permission process
- #
- # This is an example.
- # 2 136690556823 0xa6898 [0x80]: PERF_RECORD_MMAP2 5227/5227:
- # [0x555556496000(0x8d1b000) @ 0xf42000 b3:03 92844 1892514370]:
- # r-xp /opt/google/chrome/chrome
- #
- # For this function, the 6th argument (args[5]) after spltting with spaces
- # is pid/tid. We use the combination of the two as the pid.
- # The 7th argument (args[6]) is the [0xaddr(0xlength). We can peel the
- # string to get the address and size of the mmap.
- # The 9th argument (args[8]) is the page offset.
- # The function returns the ((pid,tid), mmap) pair if the mmap is for Chrome
- # is on Chrome. Otherwise, return (None, None) pair.
-
- if 'chrome/chrome' not in line:
- return None, None
- args = line.split(' ')
- pid_raw = args[5].split('/')
- assert pid_raw[0] == pid_raw[1][:-1], \
- 'PID in %s of mmap is not the same: %s/%s' % (
- line[:-1], pid_raw[0], pid_raw[1])
- pid = (int(pid_raw[0]), int(pid_raw[1][:-1]))
- address_raw = args[6].split('(')
- start_address = int(address_raw[0][1:], base=16)
- size = int(address_raw[1][:-1], base=16)
- offset = int(args[8], base=16)
- # Return an mmap object instead of only starting address,
- # in case there are many mmaps for the sample PID
- return pid, MMap(start_address, size, offset)
-
- def _parse_pair_event(self, arg):
- # This function is called by the _parse_* functions that has a pattern of
- # pids like: (pid:tid):(pid:tid), i.e.
- # PERF_RECORD_FORK and PERF_RECORD_COMM
- _, remain = arg.split('(', 1)
- pid1, remain = remain.split(':', 1)
- pid2, remain = remain.split(')', 1)
- _, remain = remain.split('(', 1)
- pid3, remain = remain.split(':', 1)
- pid4, remain = remain.split(')', 1)
- return (int(pid1), int(pid2)), (int(pid3), int(pid4))
-
- def _process_perf_record(self, line):
- # This function calls _parse_perf_record() to get information from
- # PERF_RECORD_MMAP2. It records the mmap object for each pid (a pair of
- # pid,tid), into a dictionary.
- pid, mmap = self._parse_perf_record(line)
- if pid is None:
- # PID = None meaning the mmap is not for chrome
- return
- if pid in self.processes:
- # This should never happen for a correct profiling result, as we
- # should only have one MMAP for Chrome for each process.
- # If it happens, see http://crbug.com/931465
- self.processes[pid].merge(mmap)
- else:
- self.processes[pid] = mmap
-
- def _process_perf_fork(self, line):
- # In a perf report, generated with -D, a PERF_RECORD_FORK command should
- # look like this:
- #
- # cpuid cycle unknown [unknown]:
- # PERF_RECORD_FORK(pid_to:tid_to):(pid_from:tid_from)
- #
- # This is an example.
- # 0 0 0x22a8 [0x38]: PERF_RECORD_FORK(1:1):(0:0)
- #
- # In this function, we need to peel the information of pid:tid pairs
- # So we get the last argument and send it to function _parse_pair_event()
- # for analysis.
- # We use (pid, tid) as the pid.
- args = line.split(' ')
- pid_to, pid_from = self._parse_pair_event(args[-1])
- if pid_from in self.processes:
- assert pid_to not in self.processes
- self.processes[pid_to] = MMap(self.processes[pid_from].start_address,
- self.processes[pid_from].size,
- self.processes[pid_from].offset)
-
- def _process_perf_exit(self, line):
- # In a perf report, generated with -D, a PERF_RECORD_EXIT command should
- # look like this:
- #
- # cpuid cycle unknown [unknown]:
- # PERF_RECORD_EXIT(pid1:tid1):(pid2:tid2)
- #
- # This is an example.
- # 1 136082505621 0x30810 [0x38]: PERF_RECORD_EXIT(3851:3851):(3851:3851)
- #
- # In this function, we need to peel the information of pid:tid pairs
- # So we get the last argument and send it to function _parse_pair_event()
- # for analysis.
- # We use (pid, tid) as the pid.
- args = line.split(' ')
- pid_to, pid_from = self._parse_pair_event(args[-1])
- assert pid_to == pid_from, '(%d, %d) (%d, %d)' % (pid_to[0], pid_to[1],
- pid_from[0], pid_from[1])
- if pid_to in self.processes:
- # Don't delete the process yet
- self.deleted_processes[pid_from] = self.processes[pid_from]
-
- def _process_perf_sample(self, line):
- # This function calls _parse_perf_sample() to get information from
- # the perf report.
- # It needs to check the starting address of allocated mmap from
- # the dictionary (self.processes) to calculate the offset within
- # the text section of the sampling.
- # The offset is calculated into pages (4KB or 2MB) and writes into
- # out.txt together with the total counts, which will be used to
- # calculate histogram.
- pid, addr = self._parse_perf_sample(line)
- if pid is None:
- return
-
- assert pid in self.processes and pid not in self.deleted_processes, \
- 'PID %d not found mmap and not forked from another process'
-
- start_address = self.processes[pid].start_address
- address = addr - start_address
- assert address >= 0 and \
- 'addresses accessed in PERF_RECORD_SAMPLE should be larger than' \
- ' the starting address of Chrome'
- if address < self.max_addr:
- self.count += 1
- line = '%d/%d: %d %d' % (pid[0], pid[1], self.count,
- address / self.page_size * self.page_size)
- if self.hugepage:
- if self.hugepage.start <= address < self.hugepage.end:
- line += ' hugepage'
- else:
- line += ' smallpage'
- print(line, file=self.hist_temp_output)
-
- def _read_perf_report(self):
- # Serve as main function to read perf report, generated by -D
- lines = iter(self.perf_report_contents)
- for line in lines:
- if 'PERF_RECORD_MMAP' in line:
- self._process_perf_record(line)
- elif 'PERF_RECORD_FORK' in line:
- self._process_perf_fork(line)
- elif 'PERF_RECORD_EXIT' in line:
- self._process_perf_exit(line)
- elif 'PERF_RECORD_SAMPLE' in line:
- # Perf sample is multi-line
- self._process_perf_sample(line + next(lines) + next(lines))
- self.hist_temp_output.close()
-
- def _draw_heat_map(self):
- # Calls a script (perf-to-inst-page.sh) to calculate histogram
- # of results written in out.txt and also generate pngs for
- # heat maps.
- heatmap_script = os.path.join(self.dir, 'perf-to-inst-page.sh')
- if self.hugepage:
- hp_arg = 'hugepage'
- else:
- hp_arg = 'none'
-
- cmd = '{0} {1} {2}'.format(heatmap_script, pipes.quote(self.title), hp_arg)
- retval = self.ce.RunCommand(cmd)
- if retval:
- raise RuntimeError('Failed to run script to generate heatmap')
-
- def _restore_histogram(self):
- # When hugepage is used, there are two files inst-histo-{hp,sp}.txt
- # So we need to read in all the files.
- names = [x for x in os.listdir('.') if 'inst-histo' in x and '.txt' in x]
- hist = {}
- for n in names:
- with open(n) as f:
- for l in f.readlines():
- num, addr = l.strip().split(' ')
- assert int(addr) not in hist
- hist[int(addr)] = int(num)
- return hist
-
- def _read_symbols_from_binary(self, binary):
- # FIXME: We are using nm to read symbol names from Chrome binary
- # for now. Can we get perf to hand us symbol names, instead of
- # using nm in the future?
- #
- # Get all the symbols (and their starting addresses) that fall into
- # the page. Will be used to print out information of hot pages
- # Each line shows the information of a symbol:
- # [symbol value (0xaddr)] [symbol type] [symbol name]
- # For some symbols, the [symbol name] field might be missing.
- # e.g.
- # 0000000001129da0 t Builtins_LdaNamedPropertyHandler
-
- # Generate a list of symbols from nm tool and check each line
- # to extract symbols names
- text_section_start = 0
- for l in subprocess.check_output(['nm', '-n', binary]).split('\n'):
- args = l.strip().split(' ')
- if len(args) < 3:
- # No name field
- continue
- addr_raw, symbol_type, name = args
- addr = int(addr_raw, base=16)
- if 't' not in symbol_type and 'T' not in symbol_type:
- # Filter out symbols not in text sections
- continue
- if len(self.symbol_addresses) == 0:
- # The first symbol in text sections
- text_section_start = addr
- self.symbol_addresses.append(0)
- self.symbol_names.append(name)
- else:
- assert text_section_start != 0, \
- 'The starting address of text section has not been found'
- if addr == self.symbol_addresses[-1]:
- # if the same address has multiple symbols, put them together
- # and separate symbol names with '/'
- self.symbol_names[-1] += '/' + name
- else:
- # The output of nm -n command is already sorted by address
- # Insert to the end will result in a sorted array for bisect
- self.symbol_addresses.append(addr - text_section_start)
- self.symbol_names.append(name)
-
- def _map_addr_to_symbol(self, addr):
- # Find out the symbol name
- assert len(self.symbol_addresses) > 0
- index = bisect.bisect(self.symbol_addresses, addr)
- assert index > 0 and index <= len(self.symbol_names), \
- 'Failed to find an index (%d) in the list (len=%d)' % (
- index, len(self.symbol_names))
- return self.symbol_names[index - 1]
-
- def _print_symbols_in_hot_pages(self, fp, pages_to_show):
- # Print symbols in all the pages of interest
- for page_num, sample_num in pages_to_show:
- print(
- '----------------------------------------------------------', file=fp)
- print(
- 'Page Offset: %d MB, Count: %d' % (page_num / 1024 / 1024,
- sample_num),
- file=fp)
-
- symbol_counts = collections.Counter()
- # Read Sample File and find out the occurance of symbols in the page
- lines = iter(self.perf_report_contents)
- for line in lines:
- if 'PERF_RECORD_SAMPLE' in line:
- pid, addr = self._parse_perf_sample(line + next(lines) + next(lines))
- if pid is None:
- # The sampling is not on Chrome
- continue
- if addr / self.page_size != (
- self.processes[pid].start_address + page_num) / self.page_size:
- # Sampling not in the current page
- continue
-
- name = self._map_addr_to_symbol(addr -
- self.processes[pid].start_address)
- assert name, 'Failed to find symbol name of addr %x' % addr
- symbol_counts[name] += 1
-
- assert sum(symbol_counts.itervalues()) == sample_num, \
- 'Symbol name matching missing for some addresses: %d vs %d' % (
- sum(symbol_counts.itervalues()), sample_num)
-
- # Print out the symbol names sorted by the number of samples in
- # the page
- for name, count in sorted(
- symbol_counts.iteritems(), key=lambda kv: kv[1], reverse=True):
- if count == 0:
- break
- print('> %s : %d' % (name, count), file=fp)
- print('\n\n', file=fp)
-
- def draw(self):
- # First read perf report to process information and save histogram
- # into a text file
- self._read_perf_report()
- # Then use gnu plot to draw heat map
- self._draw_heat_map()
-
- def analyze(self, binary, top_n):
- # Read histogram from histo.txt
- hist = self._restore_histogram()
- # Sort the pages in histogram
- sorted_hist = sorted(
- hist.iteritems(), key=lambda value: value[1], reverse=True)
-
- # Generate symbolizations
- self._read_symbols_from_binary(binary)
-
- # Write hottest pages
- with open('addr2symbol.txt', 'w') as fp:
- if self.hugepage:
- # Print hugepage region first
- print(
- 'Hugepage top %d hot pages (%d MB - %d MB):' %
- (top_n, self.hugepage.start / 1024 / 1024,
- self.hugepage.end / 1024 / 1024),
- file=fp)
- pages_to_print = [(k, v)
- for k, v in sorted_hist
- if self.hugepage.start <= k < self.hugepage.end
- ][:top_n]
- self._print_symbols_in_hot_pages(fp, pages_to_print)
- print('==========================================', file=fp)
- print('Top %d hot pages landed outside of hugepage:' % top_n, file=fp)
- # Then print outside pages
- pages_to_print = [(k, v)
- for k, v in sorted_hist
- if k < self.hugepage.start or k >= self.hugepage.end
- ][:top_n]
- self._print_symbols_in_hot_pages(fp, pages_to_print)
- else:
- # Print top_n hottest pages.
- pages_to_print = [(k, v) for k, v in sorted_hist][:top_n]
- self._print_symbols_in_hot_pages(fp, pages_to_print)
diff --git a/heatmaps/heatmap_generator_test.py b/heatmaps/heatmap_generator_test.py
deleted file mode 100755
index 0c0bbfc8..00000000
--- a/heatmaps/heatmap_generator_test.py
+++ /dev/null
@@ -1,315 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for heatmap_generator.py."""
-
-from __future__ import division, print_function
-
-import mock
-import os
-import unittest
-
-import heatmap_generator
-
-
-def _write_perf_mmap(pid, tid, addr, size, fp):
- print(
- '0 0 0 0 PERF_RECORD_MMAP2 %d/%d: '
- '[%x(%x) @ 0x0 0:0 0 0] '
- 'r-xp /opt/google/chrome/chrome\n' % (pid, tid, addr, size),
- file=fp)
-
-
-def _write_perf_fork(pid_from, tid_from, pid_to, tid_to, fp):
- print(
- '0 0 0 0 PERF_RECORD_FORK(%d:%d):(%d:%d)\n' % (pid_to, tid_to, pid_from,
- tid_from),
- file=fp)
-
-
-def _write_perf_exit(pid_from, tid_from, pid_to, tid_to, fp):
- print(
- '0 0 0 0 PERF_RECORD_EXIT(%d:%d):(%d:%d)\n' % (pid_to, tid_to, pid_from,
- tid_from),
- file=fp)
-
-
-def _write_perf_sample(pid, tid, addr, fp):
- print(
- '0 0 0 0 PERF_RECORD_SAMPLE(IP, 0x2): '
- '%d/%d: %x period: 100000 addr: 0' % (pid, tid, addr),
- file=fp)
- print(' ... thread: chrome:%d' % tid, file=fp)
- print(' ...... dso: /opt/google/chrome/chrome\n', file=fp)
-
-
-def _heatmap(file_name, page_size=4096, hugepage=None, analyze=False, top_n=10):
- generator = heatmap_generator.HeatmapGenerator(
- file_name, page_size, hugepage, '',
- log_level='none') # Don't log to stdout
- generator.draw()
- if analyze:
- generator.analyze('/path/to/chrome', top_n)
-
-
-def _cleanup(file_name):
- files = [
- file_name, 'out.txt', 'inst-histo.txt', 'inst-histo-hp.txt',
- 'inst-histo-sp.txt', 'heat_map.png', 'timeline.png', 'addr2symbol.txt'
- ]
- for f in files:
- if os.path.exists(f):
- os.remove(f)
-
-
-class HeatmapGeneratorDrawTests(unittest.TestCase):
- """All of our tests for heatmap_generator.draw() and related."""
-
- def test_with_one_mmap_one_sample(self):
- """Tests one perf record and one sample."""
- fname = 'test.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- _write_perf_sample(101, 101, 0xABCD101, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname)
- self.assertIn('out.txt', os.listdir('.'))
- with open('out.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 1)
- self.assertIn('101/101: 1 0', lines[0])
-
- def test_with_one_mmap_multiple_samples(self):
- """Tests one perf record and three samples."""
- fname = 'test.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- _write_perf_sample(101, 101, 0xABCD101, f)
- _write_perf_sample(101, 101, 0xABCD102, f)
- _write_perf_sample(101, 101, 0xABCE102, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname)
- self.assertIn('out.txt', os.listdir('.'))
- with open('out.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 3)
- self.assertIn('101/101: 1 0', lines[0])
- self.assertIn('101/101: 2 0', lines[1])
- self.assertIn('101/101: 3 4096', lines[2])
-
- def test_with_fork_and_exit(self):
- """Tests perf fork and perf exit."""
- fname = 'test_fork.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- _write_perf_fork(101, 101, 202, 202, f)
- _write_perf_sample(101, 101, 0xABCD101, f)
- _write_perf_sample(202, 202, 0xABCE101, f)
- _write_perf_exit(202, 202, 202, 202, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname)
- self.assertIn('out.txt', os.listdir('.'))
- with open('out.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 2)
- self.assertIn('101/101: 1 0', lines[0])
- self.assertIn('202/202: 2 4096', lines[1])
-
- def test_hugepage_creates_two_chrome_mmaps(self):
- """Test two chrome mmaps for the same process."""
- fname = 'test_hugepage.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x1000, f)
- _write_perf_fork(101, 101, 202, 202, f)
- _write_perf_mmap(202, 202, 0xABCD000, 0x100, f)
- _write_perf_mmap(202, 202, 0xABCD300, 0xD00, f)
- _write_perf_sample(101, 101, 0xABCD102, f)
- _write_perf_sample(202, 202, 0xABCD102, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname)
- self.assertIn('out.txt', os.listdir('.'))
- with open('out.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 2)
- self.assertIn('101/101: 1 0', lines[0])
- self.assertIn('202/202: 2 0', lines[1])
-
- def test_hugepage_creates_two_chrome_mmaps_fail(self):
- """Test two chrome mmaps for the same process."""
- fname = 'test_hugepage.txt'
- # Cases where first_mmap.size < second_mmap.size
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x1000, f)
- _write_perf_fork(101, 101, 202, 202, f)
- _write_perf_mmap(202, 202, 0xABCD000, 0x10000, f)
- self.addCleanup(_cleanup, fname)
- with self.assertRaises(AssertionError) as msg:
- _heatmap(fname)
- self.assertIn('Original MMAP size', str(msg.exception))
-
- # Cases where first_mmap.address > second_mmap.address
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x1000, f)
- _write_perf_fork(101, 101, 202, 202, f)
- _write_perf_mmap(202, 202, 0xABCC000, 0x10000, f)
- with self.assertRaises(AssertionError) as msg:
- _heatmap(fname)
- self.assertIn('Original MMAP starting address', str(msg.exception))
-
- # Cases where first_mmap.address + size <
- # second_mmap.address + second_mmap.size
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x1000, f)
- _write_perf_fork(101, 101, 202, 202, f)
- _write_perf_mmap(202, 202, 0xABCD100, 0x10000, f)
- with self.assertRaises(AssertionError) as msg:
- _heatmap(fname)
- self.assertIn('exceeds the end of original MMAP', str(msg.exception))
-
- def test_histogram(self):
- """Tests if the tool can generate correct histogram.
-
- In the tool, histogram is generated from statistics
- of perf samples (saved to out.txt). The histogram is
- generated by perf-to-inst-page.sh and saved to
- inst-histo.txt. It will be used to draw heat maps.
- """
- fname = 'test_histo.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- for i in range(100):
- _write_perf_sample(101, 101, 0xABCD000 + i, f)
- _write_perf_sample(101, 101, 0xABCE000 + i, f)
- _write_perf_sample(101, 101, 0xABFD000 + i, f)
- _write_perf_sample(101, 101, 0xAFCD000 + i, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname)
- self.assertIn('inst-histo.txt', os.listdir('.'))
- with open('inst-histo.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 4)
- self.assertIn('100 0', lines[0])
- self.assertIn('100 4096', lines[1])
- self.assertIn('100 196608', lines[2])
- self.assertIn('100 4194304', lines[3])
-
- def test_histogram_two_mb_page(self):
- """Tests handling of 2MB page."""
- fname = 'test_histo.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- for i in range(100):
- _write_perf_sample(101, 101, 0xABCD000 + i, f)
- _write_perf_sample(101, 101, 0xABCE000 + i, f)
- _write_perf_sample(101, 101, 0xABFD000 + i, f)
- _write_perf_sample(101, 101, 0xAFCD000 + i, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname, page_size=2 * 1024 * 1024)
- self.assertIn('inst-histo.txt', os.listdir('.'))
- with open('inst-histo.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 2)
- self.assertIn('300 0', lines[0])
- self.assertIn('100 4194304', lines[1])
-
- def test_histogram_in_and_out_hugepage(self):
- """Tests handling the case of separating samples in and out huge page."""
- fname = 'test_histo.txt'
- with open(fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- for i in range(100):
- _write_perf_sample(101, 101, 0xABCD000 + i, f)
- _write_perf_sample(101, 101, 0xABCE000 + i, f)
- _write_perf_sample(101, 101, 0xABFD000 + i, f)
- _write_perf_sample(101, 101, 0xAFCD000 + i, f)
- self.addCleanup(_cleanup, fname)
- _heatmap(fname, hugepage=[0, 8192])
- file_list = os.listdir('.')
- self.assertNotIn('inst-histo.txt', file_list)
- self.assertIn('inst-histo-hp.txt', file_list)
- self.assertIn('inst-histo-sp.txt', file_list)
- with open('inst-histo-hp.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 2)
- self.assertIn('100 0', lines[0])
- self.assertIn('100 4096', lines[1])
- with open('inst-histo-sp.txt') as f:
- lines = f.readlines()
- self.assertEqual(len(lines), 2)
- self.assertIn('100 196608', lines[0])
- self.assertIn('100 4194304', lines[1])
-
-
-class HeatmapGeneratorAnalyzeTests(unittest.TestCase):
- """All of our tests for heatmap_generator.analyze() and related."""
-
- def setUp(self):
- # Use the same perf report for testing
- self.fname = 'test_histo.txt'
- with open(self.fname, 'w') as f:
- _write_perf_mmap(101, 101, 0xABCD000, 0x100, f)
- for i in range(10):
- _write_perf_sample(101, 101, 0xABCD000 + i, f)
- _write_perf_sample(101, 101, 0xABCE000 + i, f)
- _write_perf_sample(101, 101, 0xABFD000 + i, f)
- self.nm = ('000000000abcd000 t Func1@Page1\n'
- '000000000abcd001 t Func2@Page1\n'
- '000000000abcd0a0 t Func3@Page1andFunc1@Page2\n'
- '000000000abce010 t Func2@Page2\n'
- '000000000abfd000 t Func1@Page3\n')
-
- def tearDown(self):
- _cleanup(self.fname)
-
- @mock.patch('subprocess.check_output')
- def test_analyze_hot_pages_with_hp_top(self, mock_nm):
- """Test if the analyze() can print the top page with hugepage."""
- mock_nm.return_value = self.nm
- _heatmap(self.fname, hugepage=[0, 8192], analyze=True, top_n=1)
- file_list = os.listdir('.')
- self.assertIn('addr2symbol.txt', file_list)
- with open('addr2symbol.txt') as f:
- contents = f.read()
- self.assertIn('Func2@Page1 : 9', contents)
- self.assertIn('Func1@Page1 : 1', contents)
- self.assertIn('Func1@Page3 : 10', contents)
- # Only displaying one page in hugepage
- self.assertNotIn('Func3@Page1andFunc1@Page2 : 10', contents)
-
- @mock.patch('subprocess.check_output')
- def test_analyze_hot_pages_without_hp_top(self, mock_nm):
- """Test if the analyze() can print the top page without hugepage."""
- mock_nm.return_value = self.nm
- _heatmap(self.fname, analyze=True, top_n=1)
- file_list = os.listdir('.')
- self.assertIn('addr2symbol.txt', file_list)
- with open('addr2symbol.txt') as f:
- contents = f.read()
- self.assertIn('Func2@Page1 : 9', contents)
- self.assertIn('Func1@Page1 : 1', contents)
- # Only displaying one page
- self.assertNotIn('Func3@Page1andFunc1@Page2 : 10', contents)
- self.assertNotIn('Func1@Page3 : 10', contents)
-
- @mock.patch('subprocess.check_output')
- def test_analyze_hot_pages_with_hp_top10(self, mock_nm):
- """Test if the analyze() can print with default top 10."""
- mock_nm.return_value = self.nm
- _heatmap(self.fname, analyze=True)
- # Make sure nm command is called correctly.
- mock_nm.assert_called_with(['nm', '-n', '/path/to/chrome'])
- file_list = os.listdir('.')
- self.assertIn('addr2symbol.txt', file_list)
- with open('addr2symbol.txt') as f:
- contents = f.read()
- self.assertIn('Func2@Page1 : 9', contents)
- self.assertIn('Func1@Page1 : 1', contents)
- self.assertIn('Func3@Page1andFunc1@Page2 : 10', contents)
- self.assertIn('Func1@Page3 : 10', contents)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/heatmaps/perf-to-inst-page.sh b/heatmaps/perf-to-inst-page.sh
deleted file mode 100755
index d6acd5ed..00000000
--- a/heatmaps/perf-to-inst-page.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#! /bin/bash -u
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script takes the out.txt, generated by heatmap_generator.py
-# and sorted into a heatmap data file (inst-histo.txt) and then
-# call gnu plot to draw the heat map and the time map.
-# A heat map shows the overall hotness of instructions being executed
-# while the time map shows the hotness of instruction at different time.
-#
-# Usage:
-# ./perf-to-inst-page.sh HEATMAP_TITLE
-# HEATMAP_TITLE: the title to display on the heatmap
-
-HEAT_PNG="heat_map.png"
-TIMELINE_PNG="timeline.png"
-HEATMAP_TITLE=$1
-ENABLE_HUGEPAGE=$2
-
-heatmap_command="
-set terminal png size 600,450
-set xlabel \"Instruction Virtual Address (MB)\"
-set ylabel \"Sample Occurance\"
-set grid
-
-set output \"${HEAT_PNG}\"
-set title \"${HEATMAP_TITLE}\"
-"
-
-if [[ "${ENABLE_HUGEPAGE}" = "hugepage" ]]; then
- hugepage_hist="inst-histo-hp.txt"
- smallpage_hist="inst-histo-sp.txt"
- cat out.txt | grep hugepage | awk '{print $3}' \
- | sort -n | uniq -c > "${hugepage_hist}"
- cat out.txt | grep smallpage | awk '{print $3}' \
- | sort -n | uniq -c > "${smallpage_hist}"
- # generate inst heat map
- heatmap_in_hugepage=("'${hugepage_hist}' using \
-(\$2/1024/1024):1 with impulses notitle lt rgb 'red'")
- heatmap_outside_hugepage=("'${smallpage_hist}' using \
-(\$2/1024/1024):1 with impulses notitle lt rgb 'blue'")
- echo "
- ${heatmap_command}
- plot ${heatmap_in_hugepage}, ${heatmap_outside_hugepage}
- " | gnuplot
-else
- awk '{print $3}' out.txt | sort -n | uniq -c > inst-histo.txt
- # generate inst heat map
- echo "
- ${heatmap_command}
- plot 'inst-histo.txt' using (\$2/1024/1024):1 with impulses notitle
- " | gnuplot
-fi
-
-# generate instruction page access timeline
-num=$(awk 'END {print NR+1}' out.txt)
-
-echo "
-set terminal png size 600,450
-set xlabel \"time (sec)\"
-set ylabel \"Instruction Virtual Address (MB)\"
-
-set output \"${TIMELINE_PNG}\"
-set title \"instruction page accessd timeline\"
-
-plot 'out.txt' using (\$0/$num*10):(\$3/1024/1024) with dots notitle
-" | gnuplot
diff --git a/image_chromeos.py b/image_chromeos.py
index 726b5659..aa8824b6 100755
--- a/image_chromeos.py
+++ b/image_chromeos.py
@@ -1,10 +1,6 @@
#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2011 Google Inc. All Rights Reserved.
"""Script to image a ChromeOS device.
This script images a remote ChromeOS device with a specific image."
@@ -242,6 +238,11 @@ def DoImage(argv):
cmd_executer.CrosRunCommand(
command, chromeos_root=options.chromeos_root, machine=options.remote)
+ real_src_dir = os.path.join(
+ os.path.realpath(options.chromeos_root), 'src')
+ real_chroot_dir = os.path.join(
+ os.path.realpath(options.chromeos_root), 'chroot')
+
# Check to see if cros flash will work for the remote machine.
CheckForCrosFlash(options.chromeos_root, options.remote, log_level)
@@ -288,7 +289,7 @@ def DoImage(argv):
# If this is a non-local image, then the ret returned from
# EnsureMachineUp is the one that will be returned by this function;
# in that case, make sure the value in 'ret' is appropriate.
- if not local_image and ret:
+ if not local_image and ret == True:
ret = 0
else:
ret = 1
@@ -311,25 +312,17 @@ def DoImage(argv):
TryRemountPartitionAsRW(options.chromeos_root, options.remote,
log_level)
- if not found:
+ if found == False:
temp_dir = os.path.dirname(located_image)
l.LogOutput('Deleting temp image dir: %s' % temp_dir)
shutil.rmtree(temp_dir)
- l.LogOutput('Image updated.')
else:
- l.LogOutput('Checksums match, skip image update and reboot.')
- command = 'reboot && exit'
- _ = cmd_executer.CrosRunCommand(
- command, chromeos_root=options.chromeos_root, machine=options.remote)
- # Wait 30s after reboot.
- time.sleep(30)
-
+ l.LogOutput('Checksums match. Skipping reimage')
+ return ret
finally:
if should_unlock:
locks.ReleaseLock(list(options.remote.split()), options.chromeos_root)
- return ret
-
def LocateOrCopyImage(chromeos_root, image, board=None):
l = logger.GetLogger()
@@ -345,8 +338,8 @@ def LocateOrCopyImage(chromeos_root, image, board=None):
return [True, image]
# First search within the existing build dirs for any matching files.
- images_glob = (
- '%s/src/build/images/%s/*/*.bin' % (chromeos_root_realpath, board_glob))
+ images_glob = ('%s/src/build/images/%s/*/*.bin' % (chromeos_root_realpath,
+ board_glob))
images_list = glob.glob(images_glob)
for potential_image in images_list:
if filecmp.cmp(potential_image, image):
@@ -366,7 +359,7 @@ def LocateOrCopyImage(chromeos_root, image, board=None):
return [False, new_image]
-def GetImageMountCommand(image, rootfs_mp, stateful_mp):
+def GetImageMountCommand(chromeos_root, image, rootfs_mp, stateful_mp):
image_dir = os.path.dirname(image)
image_file = os.path.basename(image)
mount_command = ('cd ~/trunk/src/scripts &&'
@@ -386,7 +379,7 @@ def MountImage(chromeos_root,
unmount=False,
extra_commands=''):
cmd_executer = command_executer.GetCommandExecuter(log_level=log_level)
- command = GetImageMountCommand(image, rootfs_mp, stateful_mp)
+ command = GetImageMountCommand(chromeos_root, image, rootfs_mp, stateful_mp)
if unmount:
command = '%s --unmount' % command
if extra_commands:
@@ -407,7 +400,8 @@ def IsImageModdedForTest(chromeos_root, image, log_level):
rootfs_mp = rootfs_mp.strip()
stateful_mp = stateful_mp.strip()
lsb_release_file = os.path.join(rootfs_mp, 'etc/lsb-release')
- extra = ('grep CHROMEOS_RELEASE_TRACK %s | grep -i test' % lsb_release_file)
+ extra = (
+ 'grep CHROMEOS_RELEASE_DESCRIPTION %s | grep -i test' % lsb_release_file)
output = MountImage(
chromeos_root,
image,
@@ -446,7 +440,10 @@ def VerifyChromeChecksum(chromeos_root, image, remote, log_level):
[_, o, _] = cmd_executer.CrosRunCommandWOutput(
command, chromeos_root=chromeos_root, machine=remote)
device_chrome_checksum = o.split()[0]
- return image_chrome_checksum.strip() == device_chrome_checksum.strip()
+ if image_chrome_checksum.strip() == device_chrome_checksum.strip():
+ return True
+ else:
+ return False
# Remount partition as writable.
diff --git a/llvm_tools/README.md b/llvm_tools/README.md
deleted file mode 100644
index abf7a6f9..00000000
--- a/llvm_tools/README.md
+++ /dev/null
@@ -1,480 +0,0 @@
-# LLVM Tools
-
-## Overview
-
-These scripts helps automate tasks such as updating the LLVM next hash,
-determing whether a new patch applies correctly, and patch management.
-
-In addition, there are scripts that automate the process of retrieving the
-git hash of LLVM from either google3, top of trunk, or for a specific SVN
-version.
-
-**NOTE: All scripts must must be run outside the chroot**
-
-**NOTE: sudo must be permissive (i.e. **`cros_sdk`** should NOT prompt for a
-password)**
-
-## `update_packages_and_run_tryjobs.py`
-
-### Usage
-
-This script is used for updating a package's `LLVM_NEXT_HASH` (sys-devel/llvm,
-sys-libs/compiler-rt, sys-libs/libcxx, sys-libs/libcxxabi, and
-sys-libs/llvm-libunwind) and then run tryjobs after updating the git hash.
-
-An example when this script should be run is when certain boards would like
-to be tested with the updated `LLVM_NEXT_HASH`.
-
-For example:
-
-```
-$ ./update_packages_and_run_tryjobs.py \
- --llvm_version tot \
- --options nochromesdk latest-toolchain \
- --builders kevin-release-tryjob nocturne-release-tryjob
-```
-
-The above example would update the packages' `LLVM_NEXT_HASH` to the top of
-trunk's git hash and would submit tryjobs for kevin and nocturne boards, passing
-in 'nochromesdk' and 'latest-toolchain' for each tryjob.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_packages_and_run_tryjobs.py --help
-```
-
-Similarly as the previous example, but for updating `LLVM_NEXT_HASH` to
-google3:
-
-```
-$ ./update_packages_and_run_tryjobs.py \
- --llvm_version google3 \
- --options nochromesdk latest-toolchain \
- --builders kevin-release-tryjob nocturne-release-tryjob
-```
-
-Similarly as the previous example, but for updating `LLVM_NEXT_HASH` to
-the git hash of revision 367622:
-
-```
-$ ./update_packages_and_run_tryjobs.py \
- --llvm_version 367622 \
- --options nochromesdk latest-toolchain \
- --builders kevin-release-tryjob nocturne-release-tryjob
-```
-
-## `update_chromeos_llvm_next_hash.py`
-
-### Usage
-
-This script is used for updating a package's/packages' `LLVM_NEXT_HASH` and
-creating a change list of those changes which will uploaded for review. For
-example, some changes that would be included in the change list are
-the updated ebuilds, changes made to the patches of the updated packages such
-as being removed or an updated patch metadata file. These changes are determined
-by the `--failure_mode` option.
-
-An example where this script would be used is when multiple packages need to
-have their `LLVM_NEXT_HASH` updated.
-
-For example:
-
-```
-$ ./update_chromeos_llvm_next_hash.py \
- --update_packages sys-devel/llvm sys-libs/compiler-rt \
- --llvm_version google3 \
- --failure_mode disable_patches
-```
-
-The example above would update sys-devel/llvm and sys-libs/compiler-rt
-`LLVM_NEXT_HASH` to the latest google3's git hash of LLVM. And the change list
-may include patches that were disabled for either sys-devel/llvm or
-sys-libs/compiler-rt.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_chromeos_llvm_next.py --help
-```
-
-For example, to update `LLVM_NEXT_HASH` to top of trunk of LLVM:
-
-```
-$ ./update_chromeos_llvm_next_hash.py \
- --update_packages sys-devel/llvm sys-libs/compiler-rt \
- --llvm_version tot \
- --failure_mode disable_patches
-```
-
-For example, to update `LLVM_NEXT_HASH` to the git hash of revision 367622:
-
-```
-$ ./update_chromeos_llvm_next_hash.py \
- --update_packages sys-devel/llvm sys-libs/compiler-rt \
- --llvm_version 367622 \
- --failure_mode disable_patches
-```
-
-## `llvm_patch_management.py`
-
-### Usage
-
-This script is used to test whether a newly added patch in a package's patch
-metadata file would apply successfully. The script is also used to make sure
-the patches of a package applied successfully, failed, etc., depending on the
-failure mode specified.
-
-An example of using this script is when multiple packages would like to be
-tested when a new patch was added to their patch metadata file.
-
-For example:
-
-```
-$ ./llvm_patch_management.py \
- --packages sys-devel/llvm sys-libs/compiler-rt \
- --failure_mode continue
-```
-
-The above example tests sys-devel/llvm and sys-libs/compiler-rt patch metadata
-file with the failure mode `continue`.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./llvm_patch_management.py --help
-```
-
-## `patch_manager.py`
-
-### Usage
-
-This script is used when when all the command line arguments are known such as
-testing a specific metadata file or a specific source tree.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./patch_manager.py --help
-```
-
-For example, to see all the failed (if any) patches:
-
-```
-$ ./patch_manager.py \
- --svn_version 367622 \
- --patch_metadata_file /abs/path/to/patch/file \
- --filesdir_path /abs/path/to/$FILESDIR \
- --src_path /abs/path/to/src/tree \
- --failure_mode continue
-```
-
-For example, to disable all patches that failed to apply:
-
-```
-$ ./patch_manager.py \
- --svn_version 367622 \
- --patch_metadata_file /abs/path/to/patch/file \
- --filesdir_path /abs/path/to/$FILESDIR \
- --src_path /abs/path/to/src/tree \
- --failure_mode disable_patches
-```
-
-For example, to remove all patches that no longer apply:
-
-```
-$ ./patch_manager.py \
- --svn_version 367622 \
- --patch_metadata_file /abs/path/to/patch/file \
- --filesdir_path /abs/path/to/$FILESDIR \
- --src_path /abs/path/to/src/tree \
- --failure_mode remove_patches
-```
-
-For example, to bisect a failing patch and stop at the first bisected patch:
-
-```
-$ ./patch_manager.py \
- --svn_version 367622 \
- --patch_metadata_file /abs/path/to/patch/file \
- --filesdir_path /abs/path/to/$FILESDIR \
- --src_path /abs/path/to/src/tree \
- --failure_mode bisect_patches \
- --good_svn_version 365631
-```
-
-For example, to bisect a failing patch and then continue bisecting the rest of
-the failed patches:
-
-```
-$ ./patch_manager.py \
- --svn_version 367622 \
- --patch_metadata_file /abs/path/to/patch/file \
- --filesdir_path /abs/path/to/$FILESDIR \
- --src_path /abs/path/to/src/tree \
- --failure_mode bisect_patches \
- --good_svn_version 365631 \
- --continue_bisection True
-```
-
-## LLVM Bisection
-
-### `llvm_bisection.py`
-
-#### Usage
-
-This script is used to bisect a bad revision of LLVM. After the script finishes,
-the user requires to run the script again to continue the bisection. Intially,
-the script creates a JSON file that does not exist which then continues
-bisection (after invoking the script again) based off of the JSON file.
-
-For example, assuming the revision 369420 is the bad revision:
-
-```
-$ ./llvm_bisection.py \
- --parallel 3 \
- --start_rev 369410 \
- --end_rev 369420 \
- --last_tested /abs/path/to/tryjob/file/ \
- --extra_change_lists 513590 \
- --builder eve-release-tryjob \
- --options latest-toolchain
-```
-
-The above example bisects the bad revision (369420), starting from the good
-revision 369410 and launching 3 tryjobs in between if possible (`--parallel`).
-Here, the `--last_tested` path is a path to a JSON file that does not exist. The
-tryjobs are tested on the eve board. `--extra_change_lists` and `--options`
-indicate what parameters to pass into launching a tryjob.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./llvm_bisection.py --help
-```
-
-### `auto_llvm_bisection.py`
-
-#### Usage
-
-This script automates the LLVM bisection process by using `cros buildresult` to
-update the status of each tryjob.
-
-An example when this script would be used to do LLVM bisection overnight
-because tryjobs take very long to finish.
-
-For example, assuming the good revision is 369410 and the bad revision is
-369420, then:
-
-```
-$ ./auto_llvm_bisection.py --start_rev 369410 --end_rev 369420 \
- --last_tested /abs/path/to/last_tested_file.json \
- --extra_change_lists 513590 1394249 \
- --options latest-toolchain nochromesdk \
- --builder eve-release-tryjob
-```
-
-The example above bisects LLVM between revision 369410 and 369420. If the file
-exists, the script resumes bisection. Otherwise, the script creates the file
-provided by `--last_tested`. `--extra_change_lists` and `--options` are used for
-each tryjob when being submitted. Lastly, the tryjobs are launched for the board
-provided by `--builder` (in this example, for the eve board).
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./auto_llvm_bisection.py --help
-```
-
-### `update_tryjob_status.py`
-
-#### Usage
-
-This script updates a tryjob's 'status' value when bisecting LLVM. This script
-can use the file that was created by `llvm_bisection.py`.
-
-An example when this script would be used is when the result of tryjob that was
-launched was 'fail' (due to flaky infra) but it should really have been
-'success'.
-
-For example, to update a tryjob's 'status' to 'good':
-
-```
-$ ./update_tryjob_status.py \
- --set_status good \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file
-```
-
-The above example uses the file in `--status_file` to update a tryjob in that
-file that tested the revision 369412 and sets its 'status' value to 'good'.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_tryjob_status.py --help
-```
-
-For example, to update a tryjob's 'status' to 'bad':
-
-```
-$ ./update_tryjob_status.py \
- --set_status bad \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file
-```
-
-For example, to update a tryjob's 'status' to 'pending':
-
-```
-$ ./update_tryjob_status.py \
- --set_status pending \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file
-```
-
-For example, to update a tryjob's 'status' to 'skip':
-
-```
-$ ./update_tryjob_status.py \
- --set_status skip \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file
-```
-
-For example, to update a tryjob's 'status' based off a custom script exit code:
-
-```
-$ ./update_tryjob_status.py \
- --set_status custom_script \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file \
- --custom_script /abs/path/to/script.py
-```
-
-### `update_all_tryjobs_with_auto.py`
-
-#### Usage
-
-This script updates all tryjobs that are 'pending' to the result provided by
-`cros buildresult`.
-
-For example:
-
-```
-$ ./update_all_tryjobs_with_auto.py \
- --last_tested /abs/path/to/last_tested_file.json \
- --chroot_path /abs/path/to/chroot
-```
-
-The above example will update all tryjobs whose 'status' is 'pending' in the
-file provided by `--last_tested`.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_all_tryjobs_with_auto.py --help
-```
-
-### `modify_a_tryjob.py`
-
-#### Usage
-
-This script modifies a tryjob directly given an already created tryjob file when
-bisecting LLVM. The file created by `llvm_bisection.py` can be used in this
-script.
-
-An example when this script would be used is when a tryjob needs to be manually
-added.
-
-For example:
-
-```
-$ ./modify_a_tryjob.py \
- --modify_a_tryjob add \
- --revision 369416 \
- --extra_change_lists 513590 \
- --options latest-toolchain \
- --builder eve-release-tryjob \
- --status_file /abs/path/to/tryjob/file
-```
-
-The above example creates a tryjob that tests revision 369416 on the eve board,
-passing in the extra arguments (`--extra_change_lists` and `--options`). The
-tryjob is then inserted into the file passed in via `--status_file`.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./modify_a_tryjob.py --help
-```
-
-For example, to remove a tryjob that tested revision 369412:
-
-```
-$ ./modify_a_tryjob.py \
- --modify_a_tryjob remove \
- --revision 369412 \
- --status_file /abs/path/to/tryjob/file
-```
-
-For example, to relaunch a tryjob that tested revision 369418:
-
-```
-$ ./modify_a_tryjob.py \
- --modify_a_tryjob relaunch \
- --revision 369418 \
- --status_file /abs/path/to/tryjob/file
-```
-
-## Other Helpful Scripts
-
-### `get_llvm_hash.py`
-
-#### Usage
-
-The script has a class that deals with retrieving either the top of trunk git
-hash of LLVM, the git hash of google3, or a specific git hash of a SVN version.
-It also has other functions when dealing with a git hash of LLVM.
-
-In addition, it has a function to retrieve the latest google3 LLVM version.
-
-For example, to retrieve the top of trunk git hash of LLVM:
-
-```
-from get_llvm_hash import LLVMHash
-
-LLVMHash().GetTopOfTrunkGitHash()
-```
-
-For example, to retrieve the git hash of google3:
-
-```
-from get_llvm_hash import LLVMHash
-
-LLVMHash().GetGoogle3LLVMHash()
-```
-
-For example, to retrieve the git hash of a specific SVN version:
-
-```
-from get_llvm_hash import LLVMHash
-
-LLVMHash().GetLLVMHash(<svn_version>)
-```
-
-For example, to retrieve the commit message of a git hash of LLVM:
-
-```
-from get_llvm_hash import LLVMHash
-
-LLVMHash.GetCommitMessageForHash(<git_hash>)
-```
-
-For example, to retrieve the latest google3 LLVM version:
-
-```
-from get_llvm_hash import GetGoogle3LLVMVersion
-
-GetGoogle3LLVMVersion(stable=True)
-```
diff --git a/llvm_tools/assert_not_in_chroot.py b/llvm_tools/assert_not_in_chroot.py
deleted file mode 100644
index 6b78d95c..00000000
--- a/llvm_tools/assert_not_in_chroot.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper module to determine whether a script is executed inside the chroot."""
-
-from __future__ import print_function
-
-import os
-
-
-def VerifyOutsideChroot():
- """Checks whether the script invoked was executed in the chroot.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- chroot_only_path = '/mnt/host/depot_tools'
-
- in_chroot_err_message = 'Script should be run outside the chroot.'
-
- assert not os.path.isdir(chroot_only_path), in_chroot_err_message
diff --git a/llvm_tools/auto_llvm_bisection.py b/llvm_tools/auto_llvm_bisection.py
deleted file mode 100755
index cd3d70b6..00000000
--- a/llvm_tools/auto_llvm_bisection.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Performs bisection on LLVM based off a .JSON file."""
-
-from __future__ import print_function
-
-import os
-import subprocess
-import sys
-import time
-import traceback
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from update_all_tryjobs_with_auto import GetPathToUpdateAllTryjobsWithAutoScript
-from llvm_bisection import BisectionExitStatus
-import llvm_bisection
-
-# Used to re-try for 'llvm_bisection.py' to attempt to launch more tryjobs.
-BISECTION_RETRY_TIME_SECS = 10 * 60
-
-# Wait time to then poll each tryjob whose 'status' value is 'pending'.
-POLL_RETRY_TIME_SECS = 30 * 60
-
-# The number of attempts for 'llvm_bisection.py' to launch more tryjobs.
-#
-# It is reset (break out of the `for` loop/ exit the program) if successfully
-# launched more tryjobs or bisection is finished (no more revisions between
-# start and end of the bisection).
-BISECTION_ATTEMPTS = 3
-
-# The limit for updating all tryjobs whose 'status' is 'pending'.
-#
-# If the time that has passed for polling exceeds this value, then the program
-# will exit with the appropriate exit code.
-POLLING_LIMIT_SECS = 18 * 60 * 60
-
-
-def main():
- """Bisects LLVM using the result of `cros buildresult` of each tryjob.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- VerifyOutsideChroot()
-
- args_output = llvm_bisection.GetCommandLineArgs()
-
- exec_update_tryjobs = [
- GetPathToUpdateAllTryjobsWithAutoScript(), '--chroot_path',
- args_output.chroot_path, '--last_tested', args_output.last_tested
- ]
-
- if os.path.isfile(args_output.last_tested):
- print('Resuming bisection for %s' % args_output.last_tested)
- else:
- print('Starting a new bisection for %s' % args_output.last_tested)
-
- while True:
- if os.path.isfile(args_output.last_tested):
- update_start_time = time.time()
-
- # Update all tryjobs whose status is 'pending' to the result of `cros
- # buildresult`.
- while True:
- print('\nAttempting to update all tryjobs whose "status" is '
- '"pending":')
- print('-' * 40)
-
- update_ret = subprocess.call(exec_update_tryjobs)
-
- print('-' * 40)
-
- # Successfully updated all tryjobs whose 'status' was 'pending'/ no
- # updates were needed (all tryjobs already have been updated).
- if update_ret == 0:
- break
-
- delta_time = time.time() - update_start_time
-
- if delta_time > POLLING_LIMIT_SECS:
- print('Unable to update tryjobs whose status is "pending" to '
- 'the result of `cros buildresult`.')
-
- # Something is wrong with updating the tryjobs's 'status' via
- # `cros buildresult` (e.g. network issue, etc.).
- sys.exit(1)
-
- print('Sleeping for %d minutes.' % (POLL_RETRY_TIME_SECS // 60))
- time.sleep(POLL_RETRY_TIME_SECS)
-
- # Launch more tryjobs if possible to narrow down the bad commit/revision or
- # terminate the bisection because the bad commit/revision was found.
- for cur_try in range(1, BISECTION_ATTEMPTS + 1):
- try:
- print('\nAttempting to launch more tryjobs if possible:')
- print('-' * 40)
-
- bisection_ret = llvm_bisection.main(args_output)
-
- print('-' * 40)
-
- # Exit code 126 means that there are no more revisions to test between
- # 'start' and 'end', so bisection is complete.
- if bisection_ret == BisectionExitStatus.BISECTION_COMPLETE.value:
- sys.exit(0)
-
- # Successfully launched more tryjobs.
- break
- except Exception:
- traceback.print_exc()
-
- print('-' * 40)
-
- # Exceeded the number of times to launch more tryjobs.
- if cur_try == BISECTION_ATTEMPTS:
- print('Unable to continue bisection.')
-
- sys.exit(1)
-
- num_retries_left = BISECTION_ATTEMPTS - cur_try
-
- print('Retries left to continue bisection %d.' % num_retries_left)
-
- print('Sleeping for %d minutes.' % (BISECTION_RETRY_TIME_SECS // 60))
- time.sleep(BISECTION_RETRY_TIME_SECS)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/auto_llvm_bisection_unittest.py b/llvm_tools/auto_llvm_bisection_unittest.py
deleted file mode 100755
index 3e6e3a3e..00000000
--- a/llvm_tools/auto_llvm_bisection_unittest.py
+++ /dev/null
@@ -1,232 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for auto bisection of LLVM."""
-
-from __future__ import print_function
-
-import os
-import subprocess
-import time
-import traceback
-import unittest
-import unittest.mock as mock
-
-from test_helpers import ArgsOutputTest
-from test_helpers import CallCountsToMockFunctions
-import auto_llvm_bisection
-import llvm_bisection
-
-
-class AutoLLVMBisectionTest(unittest.TestCase):
- """Unittests for auto bisection of LLVM."""
-
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
- @mock.patch.object(
- auto_llvm_bisection, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
- @mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
- @mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
- @mock.patch.object(
- llvm_bisection, 'GetCommandLineArgs', return_value=ArgsOutputTest())
- def testFailedToStartBisection(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot):
-
- def MockLLVMBisectionRaisesException(args_output):
- raise ValueError('Failed to launch more tryjobs.')
-
- # Use the test function to simulate the behavior of an exception happening
- # when launching more tryjobs.
- mock_llvm_bisection.side_effect = MockLLVMBisectionRaisesException
-
- # Verify the exception is raised when the number of attempts to launched
- # more tryjobs is exceeded, so unable to continue
- # bisection.
- with self.assertRaises(SystemExit) as err:
- auto_llvm_bisection.main()
-
- self.assertEqual(err.exception.code, 1)
-
- mock_outside_chroot.assert_called_once()
- mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
- self.assertEqual(mock_llvm_bisection.call_count, 3)
- self.assertEqual(mock_traceback.call_count, 3)
- self.assertEqual(mock_sleep.call_count, 2)
-
- # Simulate the behavior of `subprocess.call()` when successfully updated all
- # tryjobs whose 'status' value is 'pending'.
- @mock.patch.object(subprocess, 'call', return_value=0)
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
- @mock.patch.object(
- auto_llvm_bisection, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
- @mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
- @mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
- @mock.patch.object(os.path, 'isfile')
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
- @mock.patch.object(
- llvm_bisection, 'GetCommandLineArgs', return_value=ArgsOutputTest())
- def testSuccessfullyBisectedLLVMRevision(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot,
- mock_update_tryjobs):
-
- # Simulate the behavior of `os.path.isfile()` when checking whether the
- # status file provided exists.
- @CallCountsToMockFunctions
- def MockStatusFileCheck(call_count, last_tested):
- # Simulate that the status file does not exist, so the LLVM bisection
- # script would create the status file and launch tryjobs.
- if call_count < 2:
- return False
-
- # Simulate when the status file exists and `subprocess.call()` executes
- # the script that updates all the 'pending' tryjobs to the result of `cros
- # buildresult`.
- if call_count == 2:
- return True
-
- assert False, 'os.path.isfile() called more times than expected.'
-
- # Simulate behavior of `llvm_bisection.main()` when successfully bisected
- # between the good and bad LLVM revision.
- @CallCountsToMockFunctions
- def MockLLVMBisectionReturnValue(call_count, args_output):
- # Simulate that successfully launched more tryjobs.
- if call_count == 0:
- return 0
-
- # Simulate that failed to launch more tryjobs.
- if call_count == 1:
- raise ValueError('Failed to launch more tryjobs.')
-
- # Simulate that the bad revision has been found.
- if call_count == 2:
- return llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value
-
- assert False, 'Called `llvm_bisection.main()` more than expected.'
-
- # Use the test function to simulate the behavior of `llvm_bisection.main()`.
- mock_llvm_bisection.side_effect = MockLLVMBisectionReturnValue
-
- # Use the test function to simulate the behavior of `os.path.isfile()`.
- mock_is_file.side_effect = MockStatusFileCheck
-
- # Verify the excpetion is raised when successfully found the bad revision.
- # Uses `sys.exit(0)` to indicate success.
- with self.assertRaises(SystemExit) as err:
- auto_llvm_bisection.main()
-
- self.assertEqual(err.exception.code, 0)
-
- mock_outside_chroot.assert_called_once()
- mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 3)
- self.assertEqual(mock_llvm_bisection.call_count, 3)
- mock_traceback.assert_called_once()
- mock_sleep.assert_called_once()
- mock_update_tryjobs.assert_called_once()
-
- # Simulate behavior of `subprocess.call()` when failed to update tryjobs to
- # `cros buildresult` (script failed).
- @mock.patch.object(subprocess, 'call', return_value=1)
- # Simulate behavior of `time.time()` when determining the time passed when
- # updating tryjobs whose 'status' is 'pending'.
- @mock.patch.object(time, 'time')
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
- @mock.patch.object(
- auto_llvm_bisection, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when resuming bisection.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
- @mock.patch.object(
- llvm_bisection, 'GetCommandLineArgs', return_value=ArgsOutputTest())
- def testFailedToUpdatePendingTryJobs(
- self, mock_get_args, mock_get_auto_script, mock_is_file, mock_sleep,
- mock_outside_chroot, mock_time, mock_update_tryjobs):
-
- # Simulate behavior of `time.time()` for time passed.
- @CallCountsToMockFunctions
- def MockTimePassed(call_count):
- if call_count < 3:
- return call_count
-
- assert False, 'Called `time.time()` more than expected.'
-
- # Use the test function to simulate the behavior of `time.time()`.
- mock_time.side_effect = MockTimePassed
-
- # Reduce the polling limit for the test case to terminate faster.
- auto_llvm_bisection.POLLING_LIMIT_SECS = 1
-
- # Verify the exception is raised when unable to update tryjobs whose
- # 'status' value is 'pending'.
- with self.assertRaises(SystemExit) as err:
- auto_llvm_bisection.main()
-
- self.assertEqual(err.exception.code, 1)
-
- mock_outside_chroot.assert_called_once()
- mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
- mock_sleep.assert_called_once()
- self.assertEqual(mock_time.call_count, 3)
- self.assertEqual(mock_update_tryjobs.call_count, 2)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/copy_helpers_to_chromiumos_overlay.py b/llvm_tools/copy_helpers_to_chromiumos_overlay.py
deleted file mode 100755
index dd6dc9d6..00000000
--- a/llvm_tools/copy_helpers_to_chromiumos_overlay.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Clones helper scripts into chromiumos-overlay.
-
-Some files in here also need to live in chromiumos-overlay (e.g., the
-patch_manager ones). This script simplifies the copying of those around.
-"""
-
-# Necessary until crbug.com/1006448 is fixed
-from __future__ import print_function
-
-import argparse
-import os
-import shutil
-import sys
-
-
-def _find_repo_root(script_root):
- repo_root = os.path.abspath(os.path.join(script_root, '../../../../'))
- if not os.path.isdir(os.path.join(repo_root, '.repo')):
- return None
- return repo_root
-
-
-def main():
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--chroot_path',
- help="Path to where CrOS' source tree lives. Will autodetect if you're "
- 'running this from inside the CrOS source tree.')
- args = parser.parse_args()
-
- my_dir = os.path.abspath(os.path.dirname(__file__))
-
- repo_root = args.chroot_path
- if repo_root is None:
- repo_root = _find_repo_root(my_dir)
- if repo_root is None:
- sys.exit("Couldn't detect the CrOS checkout root; please provide a "
- 'value for --chroot_path')
-
- chromiumos_overlay = os.path.join(repo_root,
- 'src/third_party/chromiumos-overlay')
-
- clone_files = [
- 'failure_modes.py',
- 'get_llvm_hash.py',
- 'patch_manager.py',
- 'subprocess_helpers.py',
- ]
-
- filesdir = os.path.join(chromiumos_overlay,
- 'sys-devel/llvm/files/patch_manager')
- for f in clone_files:
- source = os.path.join(my_dir, f)
- dest = os.path.join(filesdir, f)
- print('%r => %r' % (source, dest))
- shutil.copyfile(source, dest)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/custom_script_example.py b/llvm_tools/custom_script_example.py
deleted file mode 100755
index 7e107ad8..00000000
--- a/llvm_tools/custom_script_example.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A custom script example that utilizes the .JSON contents of the tryjob."""
-
-from __future__ import print_function
-
-import json
-import sys
-
-from update_tryjob_status import TryjobStatus
-
-
-def main():
- """Determines the exit code based off of the contents of the .JSON file."""
-
- # Index 1 in 'sys.argv' is the path to the .JSON file which contains
- # the contents of the tryjob.
- #
- # Format of the tryjob contents:
- # {
- # "status" : [TRYJOB_STATUS],
- # "buildbucket_id" : [BUILDBUCKET_ID],
- # "extra_cls" : [A_LIST_OF_EXTRA_CLS_PASSED_TO_TRYJOB],
- # "url" : [GERRIT_URL],
- # "builder" : [TRYJOB_BUILDER_LIST],
- # "rev" : [REVISION],
- # "link" : [LINK_TO_TRYJOB],
- # "options" : [A_LIST_OF_OPTIONS_PASSED_TO_TRYJOB]
- # }
- abs_path_json_file = sys.argv[1]
-
- with open(abs_path_json_file) as f:
- tryjob_contents = json.load(f)
-
- CUTOFF_PENDING_REVISION = 369416
-
- SKIP_REVISION_CUTOFF_START = 369420
- SKIP_REVISION_CUTOFF_END = 369428
-
- if tryjob_contents['status'] == TryjobStatus.PENDING.value:
- if tryjob_contents['rev'] <= CUTOFF_PENDING_REVISION:
- # Exit code 0 means to set the tryjob 'status' as 'good'.
- sys.exit(0)
-
- # Exit code 124 means to set the tryjob 'status' as 'bad'.
- sys.exit(124)
-
- if tryjob_contents['status'] == TryjobStatus.BAD.value:
- # Need to take a closer look at the contents of the tryjob to then decide
- # what that tryjob's 'status' value should be.
- #
- # Since the exit code is not in the mapping, an exception will occur which
- # will save the file in the directory of this custom script example.
- sys.exit(1)
-
- if tryjob_contents['status'] == TryjobStatus.SKIP.value:
- # Validate that the 'skip value is really set between the cutoffs.
- if SKIP_REVISION_CUTOFF_START < tryjob_contents['rev'] < \
- SKIP_REVISION_CUTOFF_END:
- # Exit code 125 means to set the tryjob 'status' as 'skip'.
- sys.exit(125)
-
- if tryjob_contents['rev'] >= SKIP_REVISION_CUTOFF_END:
- sys.exit(124)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/failure_modes.py b/llvm_tools/failure_modes.py
deleted file mode 100644
index 1e05dfcf..00000000
--- a/llvm_tools/failure_modes.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Failure mode constants avaiable to the patch manager."""
-
-from __future__ import print_function
-
-import enum
-
-
-class FailureModes(enum.Enum):
- """Different modes for the patch manager when handling a failed patch."""
-
- FAIL = 'fail'
- CONTINUE = 'continue'
- DISABLE_PATCHES = 'disable_patches'
- BISECT_PATCHES = 'bisect_patches'
- REMOVE_PATCHES = 'remove_patches'
-
- # Only used by 'bisect_patches'.
- INTERNAL_BISECTION = 'internal_bisection'
diff --git a/llvm_tools/get_llvm_hash.py b/llvm_tools/get_llvm_hash.py
deleted file mode 100755
index 914bbfbf..00000000
--- a/llvm_tools/get_llvm_hash.py
+++ /dev/null
@@ -1,437 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Returns the latest LLVM version's hash."""
-
-from __future__ import print_function
-
-import argparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-
-from contextlib import contextmanager
-from subprocess_helpers import CheckCommand
-from subprocess_helpers import check_output
-
-import requests
-
-_LLVM_GIT_URL = ('https://chromium.googlesource.com/external/github.com/llvm'
- '/llvm-project')
-
-KNOWN_HASH_SOURCES = {'google3', 'google3-unstable', 'tot'}
-
-
-@contextmanager
-def CreateTempLLVMRepo(temp_dir):
- """Adds a LLVM worktree to 'temp_dir'.
-
- Creating a worktree because the LLVM source tree in
- '../toolchain-utils/llvm_tools/llvm-project-copy' should not be modified.
-
- This is useful for applying patches to a source tree but do not want to modify
- the actual LLVM source tree in 'llvm-project-copy'.
-
- Args:
- temp_dir: An absolute path to the temporary directory to put the worktree in
- (obtained via 'tempfile.mkdtemp()').
-
- Returns:
- The absolute path to 'temp_dir'.
-
- Raises:
- subprocess.CalledProcessError: Failed to remove the worktree.
- ValueError: Failed to add a worktree.
- """
-
- abs_path_to_llvm_project_dir = GetAndUpdateLLVMProjectInLLVMTools()
-
- add_worktree_cmd = [
- 'git', '-C', abs_path_to_llvm_project_dir, 'worktree', 'add', '--detach',
- temp_dir, 'master'
- ]
-
- CheckCommand(add_worktree_cmd)
-
- try:
- yield temp_dir
- finally:
- if os.path.isdir(temp_dir):
- check_output([
- 'git', '-C', abs_path_to_llvm_project_dir, 'worktree', 'remove', '-f',
- temp_dir
- ])
-
-
-def GetAndUpdateLLVMProjectInLLVMTools():
- """Gets the absolute path to 'llvm-project-copy' directory in 'llvm_tools'.
-
- The intent of this function is to avoid cloning the LLVM repo and then
- discarding the contents of the repo. The function will create a directory
- in '../toolchain-utils/llvm_tools' called 'llvm-project-copy' if this
- directory does not exist yet. If it does not exist, then it will use the
- LLVMHash() class to clone the LLVM repo into 'llvm-project-copy'. Otherwise,
- it will clean the contents of that directory and then fetch from the chromium
- LLVM mirror. In either case, this function will return the absolute path to
- 'llvm-project-copy' directory.
-
- Raises:
- ValueError: LLVM repo (in 'llvm-project-copy' dir.) has changes or failed to
- checkout to master or failed to fetch from chromium mirror of LLVM.
- """
-
- abs_path_to_llvm_tools_dir = os.path.dirname(os.path.abspath(__file__))
-
- abs_path_to_llvm_project_dir = os.path.join(abs_path_to_llvm_tools_dir,
- 'llvm-project-copy')
-
- if not os.path.isdir(abs_path_to_llvm_project_dir):
- print(
- 'Checking out LLVM from scratch. This could take a while...',
- file=sys.stderr)
- os.mkdir(abs_path_to_llvm_project_dir)
-
- LLVMHash().CloneLLVMRepo(abs_path_to_llvm_project_dir)
- else:
- # `git status` has a '-s'/'--short' option that shortens the output.
- # With the '-s' option, if no changes were made to the LLVM repo, then the
- # output (assigned to 'repo_status') would be empty.
- repo_status = check_output(
- ['git', '-C', abs_path_to_llvm_project_dir, 'status', '-s'])
-
- if repo_status.rstrip():
- raise ValueError('LLVM repo in %s has changes, please remove.' %
- abs_path_to_llvm_project_dir)
-
- checkout_to_master_cmd = [
- 'git', '-C', abs_path_to_llvm_project_dir, 'checkout', 'master'
- ]
-
- CheckCommand(checkout_to_master_cmd)
-
- update_master_cmd = ['git', '-C', abs_path_to_llvm_project_dir, 'pull']
-
- CheckCommand(update_master_cmd)
-
- return abs_path_to_llvm_project_dir
-
-
-def GetGoogle3LLVMVersion(stable):
- """Gets the latest google3 LLVM version.
-
- Returns:
- The latest LLVM SVN version as an integer.
-
- Raises:
- subprocess.CalledProcessError: An invalid path has been provided to the
- `cat` command.
- """
-
- subdir = 'stable' if stable else 'llvm_unstable'
- path_to_google3_llvm_version = os.path.join(
- '/google/src/head/depot/google3/third_party/crosstool/v18', subdir,
- 'installs/llvm/revision')
-
- # Cmd to get latest google3 LLVM version.
- cat_cmd = ['cat', path_to_google3_llvm_version]
-
- # Get latest version.
- g3_version = check_output(cat_cmd)
-
- # Change type to an integer
- return int(g3_version.rstrip())
-
-
-def is_svn_option(svn_option):
- """Validates whether the argument (string) is a git hash option.
-
- The argument is used to find the git hash of LLVM.
-
- Args:
- svn_option: The option passed in as a command line argument.
-
- Raises:
- ValueError: Invalid svn option provided.
- """
-
- if svn_option.lower() in KNOWN_HASH_SOURCES:
- return svn_option.lower()
-
- try:
- svn_version = int(svn_option)
-
- return svn_version
-
- # Unable to convert argument to an int, so the option is invalid.
- #
- # Ex: 'one'.
- except ValueError:
- pass
-
- raise ValueError('Invalid LLVM git hash option provided: %s' % svn_option)
-
-
-def GetLLVMHashAndVersionFromSVNOption(svn_option):
- """Gets the LLVM hash and LLVM version based off of the svn option.
-
- Args:
- svn_option: A valid svn option obtained from the command line.
- Ex: 'google3', 'tot', or <svn_version> such as 365123.
-
- Returns:
- A tuple that is the LLVM git hash and LLVM version.
- """
-
- new_llvm_hash = LLVMHash()
-
- # Determine which LLVM git hash to retrieve.
- if svn_option == 'tot':
- llvm_hash = new_llvm_hash.GetTopOfTrunkGitHash()
-
- tot_commit_message = new_llvm_hash.GetCommitMessageForHash(llvm_hash)
-
- llvm_version = new_llvm_hash.GetSVNVersionFromCommitMessage(
- tot_commit_message)
- elif isinstance(svn_option, int):
- llvm_version = svn_option
- llvm_hash = new_llvm_hash.GetGitHashForVersion(
- GetAndUpdateLLVMProjectInLLVMTools(), llvm_version)
- else:
- assert svn_option in ('google3', 'google3-unstable')
- llvm_version = GetGoogle3LLVMVersion(stable=svn_option == 'google3')
-
- llvm_hash = new_llvm_hash.GetGitHashForVersion(
- GetAndUpdateLLVMProjectInLLVMTools(), llvm_version)
-
- return llvm_hash, llvm_version
-
-
-class LLVMHash(object):
- """Provides methods to retrieve a LLVM hash."""
-
- @staticmethod
- @contextmanager
- def CreateTempDirectory():
- temp_dir = tempfile.mkdtemp()
-
- try:
- yield temp_dir
- finally:
- if os.path.isdir(temp_dir):
- shutil.rmtree(temp_dir, ignore_errors=True)
-
- def CloneLLVMRepo(self, temp_dir):
- """Clones the LLVM repo.
-
- Args:
- temp_dir: The temporary directory to clone the repo to.
-
- Raises:
- ValueError: Failed to clone the LLVM repo.
- """
-
- clone_cmd = ['git', 'clone', _LLVM_GIT_URL, temp_dir]
-
- clone_cmd_obj = subprocess.Popen(clone_cmd, stderr=subprocess.PIPE)
- _, stderr = clone_cmd_obj.communicate()
-
- if clone_cmd_obj.returncode:
- raise ValueError('Failed to clone the LLVM repo: %s' % stderr)
-
- def GetCommitMessageForHash(self, git_hash):
- """Gets the commit message from the git hash.
-
- Args:
- git_hash: A git hash of LLVM.
-
- Returns:
- The commit message of the git hash.
-
- Raises:
- ValueError: Unable to retrieve json contents from the LLVM commit URL.
- """
-
- llvm_commit_url = ('https://api.github.com/repos/llvm/llvm-project/git/'
- 'commits/')
-
- commit_url = os.path.join(llvm_commit_url, git_hash)
-
- url_response = requests.get(commit_url)
-
- if not url_response:
- raise ValueError('Failed to get response from url %s: Status Code %d' %
- (commit_url, url_response.status_code))
-
- unicode_json_contents = url_response.json()
-
- return str(unicode_json_contents['message'])
-
- def GetSVNVersionFromCommitMessage(self, commit_message):
- """Gets the 'llvm-svn' from the commit message.
-
- A commit message may contain multiple 'llvm-svn' (reverting commits), so
- the last 'llvm-svn' is the real 'llvm-svn' for that commit message.
-
- Args:
- commit_message: A commit message that contains a 'llvm-svn:'.
-
- Returns:
- The last LLVM version as an integer or 'None' if there is no 'llvm-svn'.
- """
-
- # Find all "llvm-svn:" instances.
- llvm_versions = re.findall(r'llvm-svn: ([0-9]+)', commit_message)
-
- if llvm_versions:
- return int(llvm_versions[-1])
-
- return None
-
- def _ParseCommitMessages(self, subdir, hash_vals, llvm_version):
- """Parses the hashes that match the LLVM version.
-
- Args:
- subdir: The directory where the git history resides.
- hash_vals: All the hashes that match the LLVM version.
- llvm_version: The version to compare to in the commit message.
-
- Returns:
- The hash that matches the LLVM version.
-
- Raises:
- subprocess.CalledProcessError: Failed to retrieve the commit message body.
- ValueError: Failed to parse a commit message or did not find a commit
- hash.
- """
-
- # For each hash, grab the last "llvm-svn:" line
- # and compare the llvm version of that line against
- # the llvm version we are looking for and return
- # that hash only if they match.
- for cur_commit in hash_vals.splitlines():
- cur_hash = cur_commit.split()[0] # Get hash.
-
- # Cmd to output the commit body.
- find_llvm_cmd = [
- 'git', '-C', subdir, 'log', '--format=%B', '-n', '1', cur_hash
- ]
-
- out = check_output(find_llvm_cmd)
-
- commit_svn_version = self.GetSVNVersionFromCommitMessage(out.rstrip())
-
- # Check the svn version from the commit message against the llvm version
- # we are looking for.
- if commit_svn_version and commit_svn_version == llvm_version:
- return cur_hash
-
- # Failed to find the commit hash.
- raise ValueError('Could not find commit hash.')
-
- def GetGitHashForVersion(self, llvm_git_dir, llvm_version):
- """Finds the commit hash(es) of the LLVM version in the git log history.
-
- Args:
- llvm_git_dir: The LLVM git directory.
- llvm_version: The version to search for in the git log history.
-
- Returns:
- A string of the hash corresponding to the LLVM version.
-
- Raises:
- subprocess.CalledProcessError: Failed to retrieve git hashes that match
- 'llvm_version'.
- """
-
- # Get all the git hashes that match 'llvm_version'.
- hash_cmd = [
- 'git', '-C', llvm_git_dir, 'log', '--oneline', '--no-abbrev', '--grep',
- 'llvm-svn: %d' % llvm_version
- ]
-
- hash_vals = check_output(hash_cmd)
-
- return self._ParseCommitMessages(llvm_git_dir, hash_vals.rstrip(),
- llvm_version)
-
- def GetLLVMHash(self, llvm_version):
- """Retrieves the LLVM hash corresponding to the LLVM version passed in.
-
- Args:
- llvm_version: The LLVM version to use as a delimiter.
-
- Returns:
- The hash as a string that corresponds to the LLVM version.
- """
-
- hash_value = self.GetGitHashForVersion(GetAndUpdateLLVMProjectInLLVMTools(),
- llvm_version)
-
- return hash_value
-
- def GetGoogle3LLVMHash(self):
- """Retrieves the google3 LLVM hash."""
-
- return self.GetLLVMHash(GetGoogle3LLVMVersion(stable=True))
-
- def GetGoogle3UnstableLLVMHash(self):
- """Retrieves the LLVM hash of google3's unstable compiler."""
- return self.GetLLVMHash(GetGoogle3LLVMVersion(stable=False))
-
- def GetTopOfTrunkGitHash(self):
- """Gets the latest git hash from top of trunk of LLVM."""
-
- path_to_master_branch = 'refs/heads/master'
-
- llvm_tot_git_hash_cmd = [
- 'git', 'ls-remote', _LLVM_GIT_URL, path_to_master_branch
- ]
-
- llvm_tot_git_hash = check_output(llvm_tot_git_hash_cmd)
-
- return llvm_tot_git_hash.rstrip().split()[0]
-
-
-def main():
- """Prints the git hash of LLVM.
-
- Parses the command line for the optional command line
- arguments.
- """
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description='Finds the LLVM hash.')
- parser.add_argument(
- '--llvm_version',
- type=is_svn_option,
- required=True,
- help='which git hash of LLVM to find. Either a svn revision, or one '
- 'of %s' % sorted(KNOWN_HASH_SOURCES))
-
- # Parse command-line arguments.
- args_output = parser.parse_args()
-
- cur_llvm_version = args_output.llvm_version
-
- new_llvm_hash = LLVMHash()
-
- if isinstance(cur_llvm_version, int):
- # Find the git hash of the specific LLVM version.
- print(new_llvm_hash.GetLLVMHash(cur_llvm_version))
- elif cur_llvm_version == 'google3':
- print(new_llvm_hash.GetGoogle3LLVMHash())
- elif cur_llvm_version == 'google3-unstable':
- print(new_llvm_hash.GetGoogle3UnstableLLVMHash())
- else:
- assert cur_llvm_version == 'tot'
- print(new_llvm_hash.GetTopOfTrunkGitHash())
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/get_llvm_hash_unittest.py b/llvm_tools/get_llvm_hash_unittest.py
deleted file mode 100755
index 895ee2b6..00000000
--- a/llvm_tools/get_llvm_hash_unittest.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for retrieving the LLVM hash."""
-
-from __future__ import print_function
-
-import subprocess
-import unittest
-
-import get_llvm_hash
-from get_llvm_hash import LLVMHash
-import mock
-import test_helpers
-
-# We grab protected stuff from get_llvm_hash. That's OK.
-# pylint: disable=protected-access
-
-
-def MakeMockPopen(return_code):
-
- def MockPopen(*_args, **_kwargs):
- result = mock.MagicMock()
- result.returncode = return_code
-
- communicate_result = result.communicate.return_value
- # Communicate returns stdout, stderr.
- communicate_result.__iter__.return_value = (None, 'some stderr')
- return result
-
- return MockPopen
-
-
-class TestGetLLVMHash(unittest.TestCase):
- """The LLVMHash test class."""
-
- @mock.patch.object(subprocess, 'Popen')
- def testCloneRepoSucceedsWhenGitSucceeds(self, popen_mock):
- popen_mock.side_effect = MakeMockPopen(return_code=0)
- llvm_hash = LLVMHash()
-
- into_tempdir = '/tmp/tmpTest'
- llvm_hash.CloneLLVMRepo(into_tempdir)
- popen_mock.assert_called_with(
- ['git', 'clone', get_llvm_hash._LLVM_GIT_URL, into_tempdir],
- stderr=subprocess.PIPE)
-
- @mock.patch.object(subprocess, 'Popen')
- def testCloneRepoFailsWhenGitFails(self, popen_mock):
- popen_mock.side_effect = MakeMockPopen(return_code=1)
-
- with self.assertRaises(ValueError) as err:
- LLVMHash().CloneLLVMRepo('/tmp/tmp1')
-
- self.assertIn('Failed to clone', err.exception.message)
- self.assertIn('some stderr', err.exception.message)
-
- @mock.patch.object(subprocess, 'check_output')
- def testParseCommitMessageWithoutAHashFails(self, check_output_mock):
- check_output_mock.return_value = ('[Test] Test sentence.\n\n'
- 'A change was made.\n\n'
- 'llvm-svn: 1000')
-
- # Verify the exception is raised when failed to find the commit hash.
- with self.assertRaises(ValueError) as err:
- LLVMHash()._ParseCommitMessages('/tmp/tmpTest',
- 'a13testhash2 This is a test', 100)
-
- self.assertEqual(err.exception.message, 'Could not find commit hash.')
- check_output_mock.assert_called_once()
-
- @mock.patch.object(subprocess, 'check_output')
- def testParseCommitMessageIgnoresSVNMarkersInReverts(self, check_output_mock):
- output_messages = [
- '[Test] Test sentence.\n\n'
- 'A change was made.\n\n'
- 'llvm-svn: 1001',
- '[Revert] Reverted commit.\n\n'
- 'This reverts r1000:\n\n'
- ' [Test2] Update.\n\n'
- ' This updates stuff.\n\n'
- ' llvm-svn: 1000\n\n'
- 'llvm-svn: 58',
- '[Revert] Reverted commit.\n\n'
- 'This reverts r958:\n\n'
- ' [Test2] Update.\n\n'
- ' This updates stuff.\n\n'
- ' llvm-svn: 958\n\n'
- 'llvm-svn: 1000',
- ]
-
- @test_helpers.CallCountsToMockFunctions
- def MultipleCommitMessages(call_count, *_args, **_kwargs):
- return output_messages[call_count]
-
- check_output_mock.side_effect = MultipleCommitMessages
-
- hash_vals = ('a13testhash2 [Test] Test sentence.\n'
- 'a13testhash3 [Revert] Reverted commit.\n'
- 'a13testhash4 [Revert] Reverted commit.')
-
- self.assertEqual(
- LLVMHash()._ParseCommitMessages('/tmp/tmpTest', hash_vals, 1000),
- 'a13testhash4')
-
- self.assertEqual(check_output_mock.call_count, 3)
-
- @mock.patch.object(subprocess, 'check_output')
- @mock.patch.object(LLVMHash, '_ParseCommitMessages')
- def testGetGitHashWorks(self, mock_return_hash_val, mock_check_output):
- mock_check_output.return_value = 'a13testhash2 [Test] Test sentence.'
- mock_return_hash_val.return_value = 'a13testhash2'
-
- self.assertEqual(LLVMHash().GetGitHashForVersion('/tmp/tmpTest', 100),
- 'a13testhash2')
-
- mock_return_hash_val.assert_called_once_with(
- '/tmp/tmpTest', 'a13testhash2 [Test] Test sentence.', 100)
- mock_check_output.assert_called_once()
-
- @mock.patch.object(LLVMHash, 'GetLLVMHash')
- @mock.patch.object(get_llvm_hash, 'GetGoogle3LLVMVersion')
- def testReturnGoogle3LLVMHash(self, mock_google3_llvm_version,
- mock_get_llvm_hash):
- mock_get_llvm_hash.return_value = 'a13testhash3'
- mock_google3_llvm_version.return_value = 1000
- self.assertEqual(LLVMHash().GetGoogle3LLVMHash(), 'a13testhash3')
- mock_get_llvm_hash.assert_called_once_with(1000)
-
- @mock.patch.object(LLVMHash, 'GetLLVMHash')
- @mock.patch.object(get_llvm_hash, 'GetGoogle3LLVMVersion')
- def testReturnGoogle3UnstableLLVMHash(self, mock_google3_llvm_version,
- mock_get_llvm_hash):
- mock_get_llvm_hash.return_value = 'a13testhash3'
- mock_google3_llvm_version.return_value = 1000
- self.assertEqual(LLVMHash().GetGoogle3UnstableLLVMHash(), 'a13testhash3')
- mock_get_llvm_hash.assert_called_once_with(1000)
-
- @mock.patch.object(subprocess, 'check_output')
- def testSuccessfullyGetGitHashFromToTOfLLVM(self, mock_check_output):
- mock_check_output.return_value = 'a123testhash1 path/to/master\n'
- self.assertEqual(LLVMHash().GetTopOfTrunkGitHash(), 'a123testhash1')
- mock_check_output.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/llvm_bisection.py b/llvm_tools/llvm_bisection.py
deleted file mode 100755
index b45be0aa..00000000
--- a/llvm_tools/llvm_bisection.py
+++ /dev/null
@@ -1,463 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Performs bisection on LLVM based off a .JSON file."""
-
-from __future__ import print_function
-
-import argparse
-import enum
-import errno
-import json
-import os
-import sys
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from get_llvm_hash import CreateTempLLVMRepo
-from get_llvm_hash import LLVMHash
-from modify_a_tryjob import AddTryjob
-from update_tryjob_status import FindTryjobIndex
-from update_tryjob_status import TryjobStatus
-
-
-class BisectionExitStatus(enum.Enum):
- """Exit code when performing bisection."""
-
- # Means that there are no more revisions available to bisect.
- BISECTION_COMPLETE = 126
-
-
-def is_file_and_json(json_file):
- """Validates that the file exists and is a JSON file."""
- return os.path.isfile(json_file) and json_file.endswith('.json')
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(
- description='Bisects LLVM via tracking a JSON file.')
-
- # Add argument for other change lists that want to run alongside the tryjob
- # which has a change list of updating a package's git hash.
- parser.add_argument(
- '--parallel',
- type=int,
- default=3,
- help='How many tryjobs to create between the last good version and '
- 'the first bad version (default: %(default)s)')
-
- # Add argument for the good LLVM revision for bisection.
- parser.add_argument(
- '--start_rev',
- required=True,
- type=int,
- help='The good revision for the bisection.')
-
- # Add argument for the bad LLVM revision for bisection.
- parser.add_argument(
- '--end_rev',
- required=True,
- type=int,
- help='The bad revision for the bisection.')
-
- # Add argument for the absolute path to the file that contains information on
- # the previous tested svn version.
- parser.add_argument(
- '--last_tested',
- required=True,
- help='the absolute path to the file that contains the tryjobs')
-
- # Add argument for the absolute path to the LLVM source tree.
- parser.add_argument(
- '--src_path',
- help='the path to the LLVM source tree to use (used for retrieving the '
- 'git hash of each version between the last good version and first bad '
- 'version)')
-
- # Add argument for other change lists that want to run alongside the tryjob
- # which has a change list of updating a package's git hash.
- parser.add_argument(
- '--extra_change_lists',
- type=int,
- nargs='+',
- help='change lists that would like to be run alongside the change list '
- 'of updating the packages')
-
- # Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
-
- # Add argument for the builder to use for the tryjob.
- parser.add_argument(
- '--builder', required=True, help='builder to use for the tryjob testing')
-
- # Add argument for the description of the tryjob.
- parser.add_argument(
- '--description',
- required=False,
- nargs='+',
- help='the description of the tryjob')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- # Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
-
- args_output = parser.parse_args()
-
- assert args_output.start_rev < args_output.end_rev, (
- 'Start revision %d is >= end revision %d' % (args_output.start_rev,
- args_output.end_rev))
-
- if args_output.last_tested and not args_output.last_tested.endswith('.json'):
- raise ValueError(
- 'Filed provided %s does not end in ".json"' % args_output.last_tested)
-
- return args_output
-
-
-def _ValidateStartAndEndAgainstJSONStartAndEnd(start, end, json_start,
- json_end):
- """Valides that the command line arguments are the same as the JSON."""
-
- if start != json_start or end != json_end:
- raise ValueError('The start %d or the end %d version provided is '
- 'different than "start" %d or "end" %d in the .JSON '
- 'file' % (start, end, json_start, json_end))
-
-
-def GetStartAndEndRevision(start, end, tryjobs):
- """Gets the start and end intervals in 'json_file'.
-
- Args:
- start: The start version of the bisection provided via the command line.
- end: The end version of the bisection provided via the command line.
- tryjobs: A list of tryjobs where each element is in the following format:
- [
- {[TRYJOB_INFORMATION]},
- {[TRYJOB_INFORMATION]},
- ...,
- {[TRYJOB_INFORMATION]}
- ]
-
- Returns:
- The new start version and end version for bisection, a set of revisions
- that are 'pending' and a set of revisions that are to be skipped.
-
- Raises:
- ValueError: The value for 'status' is missing or there is a mismatch
- between 'start' and 'end' compared to the 'start' and 'end' in the JSON
- file.
- AssertionError: The new start version is >= than the new end version.
- """
-
- if not tryjobs:
- return start, end, {}, {}
-
- # Verify that each tryjob has a value for the 'status' key.
- for cur_tryjob_dict in tryjobs:
- if not cur_tryjob_dict.get('status', None):
- raise ValueError('"status" is missing or has no value, please '
- 'go to %s and update it' % cur_tryjob_dict['link'])
-
- all_bad_revisions = [end]
- all_bad_revisions.extend(cur_tryjob['rev']
- for cur_tryjob in tryjobs
- if cur_tryjob['status'] == TryjobStatus.BAD.value)
-
- # The minimum value for the 'bad' field in the tryjobs is the new end
- # version.
- bad_rev = min(all_bad_revisions)
-
- all_good_revisions = [start]
- all_good_revisions.extend(cur_tryjob['rev']
- for cur_tryjob in tryjobs
- if cur_tryjob['status'] == TryjobStatus.GOOD.value)
-
- # The maximum value for the 'good' field in the tryjobs is the new start
- # version.
- good_rev = max(all_good_revisions)
-
- # The good version should always be strictly less than the bad version;
- # otherwise, bisection is broken.
- assert good_rev < bad_rev, ('Bisection is broken because %d (good) is >= '
- '%d (bad)' % (good_rev, bad_rev))
-
- # Find all revisions that are 'pending' within 'good_rev' and 'bad_rev'.
- #
- # NOTE: The intent is to not launch tryjobs between 'good_rev' and 'bad_rev'
- # that have already been launched (this set is used when constructing the
- # list of revisions to launch tryjobs for).
- pending_revisions = {
- tryjob['rev']
- for tryjob in tryjobs
- if tryjob['status'] == TryjobStatus.PENDING.value and
- good_rev < tryjob['rev'] < bad_rev
- }
-
- # Find all revisions that are to be skipped within 'good_rev' and 'bad_rev'.
- #
- # NOTE: The intent is to not launch tryjobs between 'good_rev' and 'bad_rev'
- # that have already been marked as 'skip' (this set is used when constructing
- # the list of revisions to launch tryjobs for).
- skip_revisions = {
- tryjob['rev']
- for tryjob in tryjobs
- if tryjob['status'] == TryjobStatus.SKIP.value and
- good_rev < tryjob['rev'] < bad_rev
- }
-
- return good_rev, bad_rev, pending_revisions, skip_revisions
-
-
-def GetRevisionsBetweenBisection(start, end, parallel, src_path,
- pending_revisions, skip_revisions):
- """Gets the revisions between 'start' and 'end'.
-
- Sometimes, the LLVM source tree's revisions do not increment by 1 (there is
- a jump), so need to construct a list of all revisions that are NOT missing
- between 'start' and 'end'. Then, the step amount (i.e. length of the list
- divided by ('parallel' + 1)) will be used for indexing into the list.
-
- Args:
- start: The start revision.
- end: The end revision.
- parallel: The number of tryjobs to create between 'start' and 'end'.
- src_path: The absolute path to the LLVM source tree to use.
- pending_revisions: A set containing 'pending' revisions that are between
- 'start' and 'end'.
- skip_revisions: A set containing revisions between 'start' and 'end' that
- are to be skipped.
-
- Returns:
- A list of revisions between 'start' and 'end'.
- """
-
- new_llvm = LLVMHash()
-
- valid_revisions = []
-
- # Start at ('start' + 1) because 'start' is the good revision.
- #
- # FIXME: Searching for each revision from ('start' + 1) up to 'end' in the
- # LLVM source tree is a quadratic algorithm. It's a good idea to optimize
- # this.
- for cur_revision in range(start + 1, end):
- try:
- if cur_revision not in pending_revisions and \
- cur_revision not in skip_revisions:
- # Verify that the current revision exists by finding its corresponding
- # git hash in the LLVM source tree.
- new_llvm.GetGitHashForVersion(src_path, cur_revision)
- valid_revisions.append(cur_revision)
- except ValueError:
- # Could not find the git hash for the current revision.
- continue
-
- # ('parallel' + 1) so that the last revision in the list is not close to
- # 'end' (have a bit more coverage).
- index_step = len(valid_revisions) // (parallel + 1)
-
- if not index_step:
- index_step = 1
-
- result = [valid_revisions[index] \
- for index in range(0, len(valid_revisions), index_step)]
-
- return result
-
-
-def GetRevisionsListAndHashList(start, end, parallel, src_path,
- pending_revisions, skip_revisions):
- """Determines the revisions between start and end."""
-
- new_llvm = LLVMHash()
-
- with new_llvm.CreateTempDirectory() as temp_dir:
- with CreateTempLLVMRepo(temp_dir) as new_repo:
- if not src_path:
- src_path = new_repo
-
- # Get a list of revisions between start and end.
- revisions = GetRevisionsBetweenBisection(
- start, end, parallel, src_path, pending_revisions, skip_revisions)
-
- git_hashes = [
- new_llvm.GetGitHashForVersion(src_path, rev) for rev in revisions
- ]
-
- return revisions, git_hashes
-
-
-def DieWithNoRevisionsError(start, end, skip_revisions, pending_revisions):
- """Raises a ValueError exception with useful information."""
-
- no_revisions_message = ('No revisions between start %d and end '
- '%d to create tryjobs' % (start, end))
-
- if pending_revisions:
- no_revisions_message += '\nThe following tryjobs are pending:\n' \
- + '\n'.join(str(rev) for rev in pending_revisions)
-
- if skip_revisions:
- no_revisions_message += '\nThe following tryjobs were skipped:\n' \
- + '\n'.join(str(rev) for rev in skip_revisions)
-
- raise ValueError(no_revisions_message)
-
-
-def CheckForExistingTryjobsInRevisionsToLaunch(revisions, jobs):
- """Checks if a revision in 'revisions' exists in 'jobs' list."""
-
- for rev in revisions:
- if FindTryjobIndex(rev, jobs) is not None:
- raise ValueError('Revision %d exists already in "jobs"' % rev)
-
-
-def UpdateBisection(revisions, git_hashes, bisect_contents, last_tested,
- update_packages, chroot_path, patch_metadata_file,
- extra_change_lists, options, builder, verbose):
- """Adds tryjobs and updates the status file with the new tryjobs."""
-
- try:
- for svn_revision, git_hash in zip(revisions, git_hashes):
- tryjob_dict = AddTryjob(update_packages, git_hash, svn_revision,
- chroot_path, patch_metadata_file,
- extra_change_lists, options, builder, verbose,
- svn_revision)
-
- bisect_contents['jobs'].append(tryjob_dict)
- finally:
- # Do not want to lose progress if there is an exception.
- if last_tested:
- new_file = '%s.new' % last_tested
- with open(new_file, 'w') as json_file:
- json.dump(bisect_contents, json_file, indent=4, separators=(',', ': '))
-
- os.rename(new_file, last_tested)
-
-
-def _NoteCompletedBisection(last_tested, src_path, end):
- """Prints that bisection is complete."""
-
- print('Finished bisecting for %s' % last_tested)
-
- if src_path:
- bad_llvm_hash = LLVMHash().GetGitHashForVersion(src_path, end)
- else:
- bad_llvm_hash = LLVMHash().GetLLVMHash(end)
-
- print(
- 'The bad revision is %d and its commit hash is %s' % (end, bad_llvm_hash))
-
-
-def LoadStatusFile(last_tested, start, end):
- """Loads the status file for bisection."""
-
- try:
- with open(last_tested) as f:
- return json.load(f)
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
-
- return {'start': start, 'end': end, 'jobs': []}
-
-
-def main(args_output):
- """Bisects LLVM based off of a .JSON file.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- VerifyOutsideChroot()
-
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
-
- patch_metadata_file = 'PATCHES.json'
-
- start = args_output.start_rev
- end = args_output.end_rev
-
- bisect_contents = LoadStatusFile(args_output.last_tested, start, end)
-
- _ValidateStartAndEndAgainstJSONStartAndEnd(
- start, end, bisect_contents['start'], bisect_contents['end'])
-
- # Pending and skipped revisions are between 'start_revision' and
- # 'end_revision'.
- start_revision, end_revision, pending_revisions, skip_revisions = \
- GetStartAndEndRevision(start, end, bisect_contents['jobs'])
-
- revisions, git_hashes = GetRevisionsListAndHashList(
- start_revision, end_revision, args_output.parallel, args_output.src_path,
- pending_revisions, skip_revisions)
-
- # No more revisions between 'start_revision' and 'end_revision', so
- # bisection is complete.
- #
- # This is determined by finding all valid revisions between 'start_revision'
- # and 'end_revision' and that are NOT in the 'pending' and 'skipped' set.
- if not revisions:
- # Successfully completed bisection where there are 2 cases:
- # 1) 'start_revision' and 'end_revision' are back-to-back (example:
- # 'start_revision' is 369410 and 'end_revision' is 369411).
- #
- # 2) 'start_revision' and 'end_revision' are NOT back-to-back, so there must
- # be tryjobs in between which are labeled as 'skip' for their 'status'
- # value.
- #
- # In either case, there are no 'pending' jobs.
- if not pending_revisions:
- _NoteCompletedBisection(args_output.last_tested, args_output.src_path,
- end_revision)
-
- if skip_revisions:
- skip_revisions_message = ('\nThe following revisions were skipped:\n' +
- '\n'.join(str(rev) for rev in skip_revisions))
-
- print(skip_revisions_message)
-
- return BisectionExitStatus.BISECTION_COMPLETE.value
-
- # Some tryjobs are not finished which may change the actual bad
- # commit/revision when those tryjobs are finished.
- DieWithNoRevisionsError(start_revision, end_revision, skip_revisions,
- pending_revisions)
-
- CheckForExistingTryjobsInRevisionsToLaunch(revisions, bisect_contents['jobs'])
-
- UpdateBisection(revisions, git_hashes, bisect_contents,
- args_output.last_tested, update_packages,
- args_output.chroot_path, patch_metadata_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builder, args_output.verbose)
-
-
-if __name__ == '__main__':
- args_output = GetCommandLineArgs()
- sys.exit(main(args_output))
diff --git a/llvm_tools/llvm_bisection_unittest.py b/llvm_tools/llvm_bisection_unittest.py
deleted file mode 100755
index 946a56ff..00000000
--- a/llvm_tools/llvm_bisection_unittest.py
+++ /dev/null
@@ -1,595 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for LLVM bisection."""
-
-from __future__ import print_function
-
-import json
-import unittest
-import unittest.mock as mock
-
-from get_llvm_hash import LLVMHash
-from test_helpers import ArgsOutputTest
-from test_helpers import CallCountsToMockFunctions
-from test_helpers import CreateTemporaryJsonFile
-from test_helpers import WritePrettyJsonFile
-import llvm_bisection
-
-
-class LLVMBisectionTest(unittest.TestCase):
- """Unittests for LLVM bisection."""
-
- def testStartAndEndDoNotMatchJsonStartAndEnd(self):
- start = 100
- end = 150
-
- json_start = 110
- json_end = 150
-
- # Verify the exception is raised when the start and end revision for LLVM
- # bisection do not match the .JSON's 'start' and 'end' values.
- with self.assertRaises(ValueError) as err:
- llvm_bisection._ValidateStartAndEndAgainstJSONStartAndEnd(
- start, end, json_start, json_end)
-
- expected_error_message = ('The start %d or the end %d version provided is '
- 'different than "start" %d or "end" %d in the '
- '.JSON file' % (start, end, json_start, json_end))
-
- self.assertEqual(str(err.exception), expected_error_message)
-
- def testStartAndEndMatchJsonStartAndEnd(self):
- start = 100
- end = 150
-
- json_start = 100
- json_end = 150
-
- llvm_bisection._ValidateStartAndEndAgainstJSONStartAndEnd(
- start, end, json_start, json_end)
-
- def testTryjobStatusIsMissing(self):
- start = 100
- end = 150
-
- test_tryjobs = [{
- 'rev': 105,
- 'status': 'good',
- 'link': 'https://some_tryjob_1_url.com'
- }, {
- 'rev': 120,
- 'status': None,
- 'link': 'https://some_tryjob_2_url.com'
- }, {
- 'rev': 140,
- 'status': 'bad',
- 'link': 'https://some_tryjob_3_url.com'
- }]
-
- # Verify the exception is raised when a tryjob does not have a value for
- # the 'status' key or the 'status' key is missing.
- with self.assertRaises(ValueError) as err:
- llvm_bisection.GetStartAndEndRevision(start, end, test_tryjobs)
-
- expected_error_message = (
- '"status" is missing or has no value, please '
- 'go to %s and update it' % test_tryjobs[1]['link'])
-
- self.assertEqual(str(err.exception), expected_error_message)
-
- def testGoodRevisionGreaterThanBadRevision(self):
- start = 100
- end = 150
-
- test_tryjobs = [{
- 'rev': 110,
- 'status': 'bad',
- 'link': 'https://some_tryjob_1_url.com'
- }, {
- 'rev': 125,
- 'status': 'skip',
- 'link': 'https://some_tryjob_2_url.com'
- }, {
- 'rev': 140,
- 'status': 'good',
- 'link': 'https://some_tryjob_3_url.com'
- }]
-
- # Verify the exception is raised when the new 'start' revision is greater
- # than the new 'bad' revision for bisection (i.e. bisection is broken).
- with self.assertRaises(AssertionError) as err:
- llvm_bisection.GetStartAndEndRevision(start, end, test_tryjobs)
-
- expected_error_message = (
- 'Bisection is broken because %d (good) is >= '
- '%d (bad)' % (test_tryjobs[2]['rev'], test_tryjobs[0]['rev']))
-
- self.assertEqual(str(err.exception), expected_error_message)
-
- def testSuccessfullyGetNewStartAndNewEndRevision(self):
- start = 100
- end = 150
-
- test_tryjobs = [{
- 'rev': 110,
- 'status': 'good',
- 'link': 'https://some_tryjob_1_url.com'
- }, {
- 'rev': 120,
- 'status': 'good',
- 'link': 'https://some_tryjob_2_url.com'
- }, {
- 'rev': 130,
- 'status': 'pending',
- 'link': 'https://some_tryjob_3_url.com'
- }, {
- 'rev': 135,
- 'status': 'skip',
- 'link': 'https://some_tryjob_4_url.com'
- }, {
- 'rev': 140,
- 'status': 'bad',
- 'link': 'https://some_tryjob_5_url.com'
- }]
-
- # Tuple consists of the new good revision, the new bad revision, a set of
- # 'pending' revisions, and a set of 'skip' revisions.
- expected_revisions_tuple = 120, 140, {130}, {135}
-
- self.assertTupleEqual(
- llvm_bisection.GetStartAndEndRevision(start, end, test_tryjobs),
- expected_revisions_tuple)
-
- @mock.patch.object(LLVMHash, 'GetGitHashForVersion')
- def testNoRevisionsBetweenStartAndEnd(self, mock_get_git_hash):
- start = 100
- end = 110
-
- test_pending_revisions = {107}
- test_skip_revisions = {101, 102, 103, 104, 108, 109}
-
- # Simulate behavior of `GetGitHashForVersion()` when the revision does not
- # exist in the LLVM source tree.
- def MockGetGitHashForRevisionRaiseException(src_path, revision):
- raise ValueError('Revision does not exist')
-
- mock_get_git_hash.side_effect = MockGetGitHashForRevisionRaiseException
-
- parallel = 3
-
- abs_path_to_src = '/abs/path/to/src'
-
- self.assertListEqual(
- llvm_bisection.GetRevisionsBetweenBisection(
- start, end, parallel, abs_path_to_src, test_pending_revisions,
- test_skip_revisions), [])
-
- @mock.patch.object(LLVMHash, 'GetGitHashForVersion')
- def testSuccessfullyRetrievedRevisionsBetweenStartAndEnd(
- self, mock_get_git_hash):
-
- start = 100
- end = 110
-
- test_pending_revisions = set()
- test_skip_revisions = {101, 102, 103, 104, 106, 108, 109}
-
- parallel = 3
-
- abs_path_to_src = '/abs/path/to/src'
-
- # Valid revision that exist in the LLVM source tree between 'start' and
- # 'end' and were not in the 'pending' set or 'skip' set.
- expected_revisions_between_start_and_end = [105, 107]
-
- self.assertListEqual(
- llvm_bisection.GetRevisionsBetweenBisection(
- start, end, parallel, abs_path_to_src, test_pending_revisions,
- test_skip_revisions), expected_revisions_between_start_and_end)
-
- self.assertEqual(mock_get_git_hash.call_count, 2)
-
- # Simulate behavior of `GetGitHashForVersion()` when successfully retrieved
- # a list git hashes for each revision in the revisions list.
- @mock.patch.object(LLVMHash, 'GetGitHashForVersion')
- # Simulate behavior of `GetRevisionsBetweenBisection()` when successfully
- # retrieved a list of valid revisions between 'start' and 'end'.
- @mock.patch.object(llvm_bisection, 'GetRevisionsBetweenBisection')
- # Simulate behavior of `CreatTempLLVMRepo()` when successfully created a
- # worktree when a source path was not provided.
- @mock.patch.object(llvm_bisection, 'CreateTempLLVMRepo')
- def testSuccessfullyGetRevisionsListAndHashList(
- self, mock_create_temp_llvm_repo, mock_get_revisions_between_bisection,
- mock_get_git_hash):
-
- expected_revisions_and_hash_tuple = ([102, 105, 108], [
- 'a123testhash1', 'a123testhash2', 'a123testhash3'
- ])
-
- @CallCountsToMockFunctions
- def MockGetGitHashForRevision(call_count, src_path, rev):
- # Simulate retrieving the git hash for the revision.
- if call_count < 3:
- return expected_revisions_and_hash_tuple[1][call_count]
-
- assert False, 'Called `GetGitHashForVersion()` more than expected.'
-
- temp_worktree = '/abs/path/to/tmpDir'
-
- mock_create_temp_llvm_repo.return_value.__enter__.return_value.name = \
- temp_worktree
-
- # Simulate the valid revisions list.
- mock_get_revisions_between_bisection.return_value = \
- expected_revisions_and_hash_tuple[0]
-
- # Simulate behavior of `GetGitHashForVersion()` by using the testing
- # function.
- mock_get_git_hash.side_effect = MockGetGitHashForRevision
-
- start = 100
- end = 110
- parallel = 3
- src_path = None
- pending_revisions = {103, 104}
- skip_revisions = {101, 106, 107, 109}
-
- self.assertTupleEqual(
- llvm_bisection.GetRevisionsListAndHashList(
- start, end, parallel, src_path, pending_revisions, skip_revisions),
- expected_revisions_and_hash_tuple)
-
- mock_get_revisions_between_bisection.assert_called_once()
-
- self.assertEqual(mock_get_git_hash.call_count, 3)
-
- def testSuccessfullyDieWithNoRevisionsError(self):
- start = 100
- end = 110
-
- pending_revisions = {105, 108}
- skip_revisions = {101, 102, 103, 104, 106, 107, 109}
-
- expected_no_revisions_message = ('No revisions between start %d and end '
- '%d to create tryjobs' % (start, end))
-
- expected_no_revisions_message += '\nThe following tryjobs are pending:\n' \
- + '\n'.join(str(rev) for rev in pending_revisions)
-
- expected_no_revisions_message += '\nThe following tryjobs were skipped:\n' \
- + '\n'.join(str(rev) for rev in skip_revisions)
-
- # Verify that an exception is raised when there are no revisions to launch
- # tryjobs for between 'start' and 'end' and some tryjobs are 'pending'.
- with self.assertRaises(ValueError) as err:
- llvm_bisection.DieWithNoRevisionsError(start, end, skip_revisions,
- pending_revisions)
-
- self.assertEqual(str(err.exception), expected_no_revisions_message)
-
- # Simulate behavior of `FindTryjobIndex()` when the index of the tryjob was
- # found.
- @mock.patch.object(llvm_bisection, 'FindTryjobIndex', return_value=0)
- def testTryjobExistsInRevisionsToLaunch(self, mock_find_tryjob_index):
- test_existing_jobs = [{'rev': 102, 'status': 'good'}]
-
- revision_to_launch = [102]
-
- expected_revision_that_exists = 102
-
- with self.assertRaises(ValueError) as err:
- llvm_bisection.CheckForExistingTryjobsInRevisionsToLaunch(
- revision_to_launch, test_existing_jobs)
-
- expected_found_tryjob_index_error_message = (
- 'Revision %d exists already '
- 'in "jobs"' % expected_revision_that_exists)
-
- self.assertEqual(
- str(err.exception), expected_found_tryjob_index_error_message)
-
- mock_find_tryjob_index.assert_called_once()
-
- @mock.patch.object(llvm_bisection, 'AddTryjob')
- def testSuccessfullyUpdatedStatusFileWhenExceptionIsRaised(
- self, mock_add_tryjob):
-
- git_hash_list = ['a123testhash1', 'a123testhash2', 'a123testhash3']
- revisions_list = [102, 104, 106]
-
- # Simulate behavior of `AddTryjob()` when successfully launched a tryjob for
- # the updated packages.
- @CallCountsToMockFunctions
- def MockAddTryjob(call_count, packages, git_hash, revision, chroot_path,
- patch_file, extra_cls, options, builder, verbose,
- svn_revision):
-
- if call_count < 2:
- return {'rev': revisions_list[call_count], 'status': 'pending'}
-
- # Simulate an exception happened along the way when updating the
- # packages' `LLVM_NEXT_HASH`.
- if call_count == 2:
- raise ValueError('Unable to launch tryjob')
-
- assert False, 'Called `AddTryjob()` more than expected.'
-
- # Use the test function to simulate `AddTryjob()`.
- mock_add_tryjob.side_effect = MockAddTryjob
-
- start = 100
- end = 110
-
- bisection_contents = {'start': start, 'end': end, 'jobs': []}
-
- args_output = ArgsOutputTest()
-
- packages = ['sys-devel/llvm']
- patch_file = '/abs/path/to/PATCHES.json'
-
- # Create a temporary .JSON file to simulate a status file for bisection.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisection_contents, f)
-
- # Verify that the status file is updated when an exception happened when
- # attempting to launch a revision (i.e. progress is not lost).
- with self.assertRaises(ValueError) as err:
- llvm_bisection.UpdateBisection(
- revisions_list, git_hash_list, bisection_contents, temp_json_file,
- packages, args_output.chroot_path, patch_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.verbose)
-
- expected_bisection_contents = {
- 'start':
- start,
- 'end':
- end,
- 'jobs': [{
- 'rev': revisions_list[0],
- 'status': 'pending'
- }, {
- 'rev': revisions_list[1],
- 'status': 'pending'
- }]
- }
-
- # Verify that the launched tryjobs were added to the status file when
- # an exception happened.
- with open(temp_json_file) as f:
- json_contents = json.load(f)
-
- self.assertDictEqual(json_contents, expected_bisection_contents)
-
- self.assertEqual(str(err.exception), 'Unable to launch tryjob')
-
- self.assertEqual(mock_add_tryjob.call_count, 3)
-
- # Simulate behavior of `GetGitHashForVersion()` when successfully retrieved
- # the git hash of the bad revision.
- @mock.patch.object(
- LLVMHash, 'GetGitHashForVersion', return_value='a123testhash4')
- def testCompletedBisectionWhenProvidedSrcPath(self, mock_get_git_hash):
- last_tested = '/some/last/tested_file.json'
-
- src_path = '/abs/path/to/src/path'
-
- # The bad revision.
- end = 150
-
- llvm_bisection._NoteCompletedBisection(last_tested, src_path, end)
-
- mock_get_git_hash.assert_called_once()
-
- # Simulate behavior of `GetLLVMHash()` when successfully retrieved
- # the git hash of the bad revision.
- @mock.patch.object(LLVMHash, 'GetLLVMHash', return_value='a123testhash5')
- def testCompletedBisectionWhenNotProvidedSrcPath(self, mock_get_git_hash):
- last_tested = '/some/last/tested_file.json'
-
- src_path = None
-
- # The bad revision.
- end = 200
-
- llvm_bisection._NoteCompletedBisection(last_tested, src_path, end)
-
- mock_get_git_hash.assert_called_once()
-
- def testSuccessfullyLoadedStatusFile(self):
- start = 100
- end = 150
-
- test_bisect_contents = {'start': start, 'end': end, 'jobs': []}
-
- # Simulate that the status file exists.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(test_bisect_contents, f)
-
- self.assertDictEqual(
- llvm_bisection.LoadStatusFile(temp_json_file, start, end),
- test_bisect_contents)
-
- def testLoadedStatusFileThatDoesNotExist(self):
- start = 200
- end = 250
-
- expected_bisect_contents = {'start': start, 'end': end, 'jobs': []}
-
- last_tested = '/abs/path/to/file_that_does_not_exist.json'
-
- self.assertDictEqual(
- llvm_bisection.LoadStatusFile(last_tested, start, end),
- expected_bisect_contents)
-
- # Simulate behavior of `_NoteCompletedBisection()` when there are no more
- # tryjobs to launch between start and end, so bisection is complete.
- @mock.patch.object(llvm_bisection, '_NoteCompletedBisection')
- @mock.patch.object(llvm_bisection, 'GetRevisionsListAndHashList')
- @mock.patch.object(llvm_bisection, 'GetStartAndEndRevision')
- # Simulate behavior of `_ValidateStartAndEndAgainstJSONStartAndEnd()` when
- # both start and end revisions match.
- @mock.patch.object(llvm_bisection,
- '_ValidateStartAndEndAgainstJSONStartAndEnd')
- @mock.patch.object(llvm_bisection, 'LoadStatusFile')
- # Simulate behavior of `VerifyOutsideChroot()` when successfully invoked the
- # script outside of the chroot.
- @mock.patch.object(llvm_bisection, 'VerifyOutsideChroot', return_value=True)
- def testSuccessfullyBisectedLLVM(
- self, mock_outside_chroot, mock_load_status_file,
- mock_validate_start_and_end, mock_get_start_and_end_revision,
- mock_get_revision_and_hash_list, mock_note_completed_bisection):
-
- start = 500
- end = 502
-
- bisect_contents = {
- 'start': start,
- 'end': end,
- 'jobs': [{
- 'rev': 501,
- 'status': 'skip'
- }]
- }
-
- skip_revisions = {501}
- pending_revisions = {}
-
- # Simulate behavior of `LoadStatusFile()` when successfully loaded the
- # status file.
- mock_load_status_file.return_value = bisect_contents
-
- # Simulate behavior of `GetStartAndEndRevision()` when successfully found
- # the new start and end revision of the bisection.
- #
- # Returns new start revision, new end revision, a set of pending revisions,
- # and a set of skip revisions.
- mock_get_start_and_end_revision.return_value = (start, end,
- pending_revisions,
- skip_revisions)
-
- # Simulate behavior of `GetRevisionsListAndHashList()` when successfully
- # retrieved valid revisions (along with their git hashes) between start and
- # end (in this case, none).
- mock_get_revision_and_hash_list.return_value = [], []
-
- args_output = ArgsOutputTest()
- args_output.start_rev = start
- args_output.end_rev = end
- args_output.parallel = 3
- args_output.src_path = None
-
- self.assertEqual(
- llvm_bisection.main(args_output),
- llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value)
-
- mock_outside_chroot.assert_called_once()
-
- mock_load_status_file.assert_called_once()
-
- mock_validate_start_and_end.assert_called_once()
-
- mock_get_start_and_end_revision.assert_called_once()
-
- mock_get_revision_and_hash_list.assert_called_once()
-
- mock_note_completed_bisection.assert_called_once()
-
- @mock.patch.object(llvm_bisection, 'DieWithNoRevisionsError')
- # Simulate behavior of `_NoteCompletedBisection()` when there are no more
- # tryjobs to launch between start and end, so bisection is complete.
- @mock.patch.object(llvm_bisection, 'GetRevisionsListAndHashList')
- @mock.patch.object(llvm_bisection, 'GetStartAndEndRevision')
- # Simulate behavior of `_ValidateStartAndEndAgainstJSONStartAndEnd()` when
- # both start and end revisions match.
- @mock.patch.object(llvm_bisection,
- '_ValidateStartAndEndAgainstJSONStartAndEnd')
- @mock.patch.object(llvm_bisection, 'LoadStatusFile')
- # Simulate behavior of `VerifyOutsideChroot()` when successfully invoked the
- # script outside of the chroot.
- @mock.patch.object(llvm_bisection, 'VerifyOutsideChroot', return_value=True)
- def testNoMoreTryjobsToLaunch(
- self, mock_outside_chroot, mock_load_status_file,
- mock_validate_start_and_end, mock_get_start_and_end_revision,
- mock_get_revision_and_hash_list, mock_die_with_no_revisions_error):
-
- start = 500
- end = 502
-
- bisect_contents = {
- 'start': start,
- 'end': end,
- 'jobs': [{
- 'rev': 501,
- 'status': 'pending'
- }]
- }
-
- skip_revisions = {}
- pending_revisions = {501}
-
- no_revisions_error_message = ('No more tryjobs to launch between %d and '
- '%d' % (start, end))
-
- def MockNoRevisionsErrorException(start, end, skip, pending):
- raise ValueError(no_revisions_error_message)
-
- # Simulate behavior of `LoadStatusFile()` when successfully loaded the
- # status file.
- mock_load_status_file.return_value = bisect_contents
-
- # Simulate behavior of `GetStartAndEndRevision()` when successfully found
- # the new start and end revision of the bisection.
- #
- # Returns new start revision, new end revision, a set of pending revisions,
- # and a set of skip revisions.
- mock_get_start_and_end_revision.return_value = (start, end,
- pending_revisions,
- skip_revisions)
-
- # Simulate behavior of `GetRevisionsListAndHashList()` when successfully
- # retrieved valid revisions (along with their git hashes) between start and
- # end (in this case, none).
- mock_get_revision_and_hash_list.return_value = [], []
-
- # Use the test function to simulate `DieWithNoRevisionsWithError()`
- # behavior.
- mock_die_with_no_revisions_error.side_effect = MockNoRevisionsErrorException
-
- # Simulate behavior of arguments passed into the command line.
- args_output = ArgsOutputTest()
- args_output.start_rev = start
- args_output.end_rev = end
- args_output.parallel = 3
- args_output.src_path = None
-
- # Verify the exception is raised when there are no more tryjobs to launch
- # between start and end when there are tryjobs that are 'pending', so
- # the actual bad revision can change when those tryjobs's 'status' are
- # updated.
- with self.assertRaises(ValueError) as err:
- llvm_bisection.main(args_output)
-
- self.assertEqual(str(err.exception), no_revisions_error_message)
-
- mock_outside_chroot.assert_called_once()
-
- mock_load_status_file.assert_called_once()
-
- mock_validate_start_and_end.assert_called_once()
-
- mock_get_start_and_end_revision.assert_called_once()
-
- mock_get_revision_and_hash_list.assert_called_once()
-
- mock_die_with_no_revisions_error.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/llvm_patch_management.py b/llvm_tools/llvm_patch_management.py
deleted file mode 100755
index dff992a5..00000000
--- a/llvm_tools/llvm_patch_management.py
+++ /dev/null
@@ -1,280 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Creates the arguments for the patch manager for LLVM."""
-
-from __future__ import print_function
-
-import argparse
-import os
-from pipes import quote
-
-import patch_manager
-from assert_not_in_chroot import VerifyOutsideChroot
-from failure_modes import FailureModes
-from get_llvm_hash import CreateTempLLVMRepo
-from get_llvm_hash import GetGoogle3LLVMVersion
-from get_llvm_hash import LLVMHash
-from subprocess_helpers import ChrootRunCommand
-from subprocess_helpers import ExecCommandAndCaptureOutput
-
-# If set to `True`, then the contents of `stdout` after executing a command will
-# be displayed to the terminal.
-verbose = False
-
-
-def GetCommandLineArgs():
- """Parses the commandline for the optional commandline arguments.
-
- Returns:
- An argument parser object that contains all the commandline arguments.
- """
-
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description='Patch management for packages.')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- type=patch_manager.is_directory,
- default=cros_root,
- help='the absolute path to the chroot (default: %(default)s)')
-
- # Add argument for which packages to manage their patches.
- parser.add_argument(
- '--packages',
- required=False,
- nargs='+',
- default=['sys-devel/llvm'],
- help='the packages to manage their patches (default: %(default)s)')
-
- # Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
-
- # Add argument for the LLVM version to use for patch management.
- parser.add_argument(
- '--llvm_version',
- type=int,
- help='the LLVM version to use for patch management. Alternatively, you '
- 'can pass "google3" or "google3-unstable". (Default: "google3")')
-
- # Add argument for the mode of the patch management when handling patches.
- parser.add_argument(
- '--failure_mode',
- default=FailureModes.FAIL.value,
- choices=[FailureModes.FAIL.value, FailureModes.CONTINUE.value,
- FailureModes.DISABLE_PATCHES.value,
- FailureModes.REMOVE_PATCHES.value],
- help='the mode of the patch manager when handling failed patches ' \
- '(default: %(default)s)')
-
- # Add argument for the patch metadata file in $FILESDIR of LLVM.
- parser.add_argument(
- '--patch_metadata_file',
- default='PATCHES.json',
- help='the .json file in $FILESDIR that has all the patches and their '
- 'metadata if applicable (default: %(default)s)')
-
- # Parse the command line.
- args_output = parser.parse_args()
-
- global verbose
-
- verbose = args_output.verbose
-
- unique_packages = list(set(args_output.packages))
-
- # Duplicate packages were passed into the command line
- if len(unique_packages) != len(args_output.packages):
- raise ValueError('Duplicate packages were passed in: %s' %
- ' '.join(args_output.packages))
-
- args_output.packages = unique_packages
-
- return args_output
-
-
-def GetPathToFilesDirectory(chroot_path, package):
- """Gets the absolute path to $FILESDIR of the package.
-
- Args:
- chroot_path: The absolute path to the chroot.
- package: The package to find its absolute path to $FILESDIR.
-
- Returns:
- The absolute path to $FILESDIR.
-
- Raises:
- ValueError: An invalid chroot path has been provided.
- """
-
- if not os.path.isdir(chroot_path):
- raise ValueError('Invalid chroot provided: %s' % chroot_path)
-
- # Get the absolute chroot path to the ebuild.
- chroot_ebuild_path = ChrootRunCommand(
- chroot_path, ['equery', 'w', package], verbose=verbose)
-
- # Get the absolute chroot path to $FILESDIR's parent directory.
- filesdir_parent_path = os.path.dirname(chroot_ebuild_path.strip())
-
- # Get the relative path to $FILESDIR's parent directory.
- rel_path = _GetRelativePathOfChrootPath(filesdir_parent_path)
-
- # Construct the absolute path to the package's 'files' directory.
- return os.path.join(chroot_path, rel_path, 'files/')
-
-
-def _GetRelativePathOfChrootPath(chroot_path):
- """Gets the relative path of the chroot path passed in.
-
- Args:
- chroot_path: The chroot path to get its relative path.
-
- Returns:
- The relative path after '/mnt/host/source/'.
-
- Raises:
- ValueError: The prefix of 'chroot_path' did not match '/mnt/host/source/'.
- """
-
- chroot_prefix = '/mnt/host/source/'
-
- if not chroot_path.startswith(chroot_prefix):
- raise ValueError('Invalid prefix for the chroot path: %s' % chroot_path)
-
- return chroot_path[len(chroot_prefix):]
-
-
-def _CheckPatchMetadataPath(patch_metadata_path):
- """Checks that the patch metadata path is valid.
-
- Args:
- patch_metadata_path: The absolute path to the .json file that has the
- patches and their metadata.
-
- Raises:
- ValueError: The file does not exist or the file does not end in '.json'.
- """
-
- if not os.path.isfile(patch_metadata_path):
- raise ValueError('Invalid file provided: %s' % patch_metadata_path)
-
- if not patch_metadata_path.endswith('.json'):
- raise ValueError('File does not end in ".json": %s' % patch_metadata_path)
-
-
-def _MoveSrcTreeHEADToGitHash(src_path, git_hash):
- """Moves HEAD to 'git_hash'."""
-
- move_head_cmd = ['git', '-C', src_path, 'checkout', git_hash]
-
- ExecCommandAndCaptureOutput(move_head_cmd, verbose=verbose)
-
-
-def UpdatePackagesPatchMetadataFile(chroot_path, svn_version,
- patch_metadata_file, packages, mode):
- """Updates the packages metadata file.
-
- Args:
- chroot_path: The absolute path to the chroot.
- svn_version: The version to use for patch management.
- patch_metadata_file: The patch metadta file where all the patches and
- their metadata are.
- packages: All the packages to update their patch metadata file.
- mode: The mode for the patch manager to use when an applicable patch
- fails to apply.
- Ex: 'FailureModes.FAIL'
-
- Returns:
- A dictionary where the key is the package name and the value is a dictionary
- that has information on the patches.
- """
-
- # A dictionary where the key is the package name and the value is a dictionary
- # that has information on the patches.
- package_info = {}
-
- llvm_hash = LLVMHash()
-
- with llvm_hash.CreateTempDirectory() as temp_dir:
- with CreateTempLLVMRepo(temp_dir) as src_path:
- # Ensure that 'svn_version' exists in the chromiumum mirror of LLVM by
- # finding its corresponding git hash.
- git_hash = llvm_hash.GetGitHashForVersion(src_path, svn_version)
-
- # Git hash of 'svn_version' exists, so move the source tree's HEAD to
- # 'git_hash' via `git checkout`.
- _MoveSrcTreeHEADToGitHash(src_path, git_hash)
-
- for cur_package in packages:
- # Get the absolute path to $FILESDIR of the package.
- filesdir_path = GetPathToFilesDirectory(chroot_path, cur_package)
-
- # Construct the absolute path to the patch metadata file where all the
- # patches and their metadata are.
- patch_metadata_path = os.path.join(filesdir_path, patch_metadata_file)
-
- # Make sure the patch metadata path is valid.
- _CheckPatchMetadataPath(patch_metadata_path)
-
- patch_manager.CleanSrcTree(src_path)
-
- # Get the patch results for the current package.
- patches_info = patch_manager.HandlePatches(svn_version,
- patch_metadata_path,
- filesdir_path, src_path,
- mode)
-
- package_info[cur_package] = patches_info._asdict()
-
- return package_info
-
-
-def main():
- """Updates the patch metadata file of each package if possible.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- # Get the google3 LLVM version if a LLVM version was not provided.
- llvm_version = args_output.llvm_version
- if llvm_version in ('', 'google3', 'google3-unstable'):
- llvm_version = GetGoogle3LLVMVersion(
- stable=llvm_version != 'google3-unstable')
-
- UpdatePackagesPatchMetadataFile(args_output.chroot_path, llvm_version,
- args_output.patch_metadata_file,
- args_output.packages,
- FailureModes(args_output.failure_mode))
-
- # Only 'disable_patches' and 'remove_patches' can potentially modify the patch
- # metadata file.
- if args_output.failure_mode == FailureModes.DISABLE_PATCHES.value or \
- args_output.failure_mode == FailureModes.REMOVE_PATCHES.value:
- print('The patch file %s has been modified for the packages:' %
- args_output.patch_metadata_file)
- print('\n'.join(args_output.packages))
- else:
- print('Applicable patches in %s applied successfully.' %
- args_output.patch_metadata_file)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/llvm_patch_management_unittest.py b/llvm_tools/llvm_patch_management_unittest.py
deleted file mode 100755
index c9bc7fce..00000000
--- a/llvm_tools/llvm_patch_management_unittest.py
+++ /dev/null
@@ -1,305 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests when creating the arguments for the patch manager."""
-
-from __future__ import print_function
-
-from collections import namedtuple
-import os
-import unittest
-import unittest.mock as mock
-
-from failure_modes import FailureModes
-from get_llvm_hash import LLVMHash
-import llvm_patch_management
-import patch_manager
-
-
-class LlvmPatchManagementTest(unittest.TestCase):
- """Test class when constructing the arguments for the patch manager."""
-
- # Simulate the behavior of `os.path.isdir()` when the chroot path does not
- # exist or is not a directory.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testInvalidChrootPathWhenGetPathToFilesDir(self, mock_isdir):
- chroot_path = '/some/path/to/chroot'
- package = 'sys-devel/llvm'
-
- # Verify the exception is raised when an invalid absolute path to the chroot
- # is passed in.
- with self.assertRaises(ValueError) as err:
- llvm_patch_management.GetPathToFilesDirectory(chroot_path, package)
-
- self.assertEqual(
- str(err.exception), 'Invalid chroot provided: %s' % chroot_path)
-
- mock_isdir.assert_called_once()
-
- # Simulate the behavior of 'os.path.isdir()' when a valid chroot path is
- # passed in.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- @mock.patch.object(llvm_patch_management, 'ChrootRunCommand')
- @mock.patch.object(llvm_patch_management, '_GetRelativePathOfChrootPath')
- def testSuccessfullyGetPathToFilesDir(
- self, mock_get_relative_path_of_chroot_path, mock_chroot_cmd, mock_isdir):
-
- package_chroot_path = '/mnt/host/source/path/to/llvm/llvm.ebuild'
-
- # Simulate behavior of 'ChrootRunCommand()' when successfully
- # retrieved the absolute chroot path to the package's ebuild.
- mock_chroot_cmd.return_value = package_chroot_path
-
- # Simulate behavior of '_GetRelativePathOfChrootPath()' when successfully
- # removed '/mnt/host/source' of the absolute chroot path to the package's
- # ebuild.
- #
- # Returns relative path after '/mnt/host/source/'.
- mock_get_relative_path_of_chroot_path.return_value = 'path/to/llvm'
-
- chroot_path = '/some/path/to/chroot'
-
- package = 'sys-devel/llvm'
-
- self.assertEqual(
- llvm_patch_management.GetPathToFilesDirectory(chroot_path, package),
- '/some/path/to/chroot/path/to/llvm/files/')
-
- mock_isdir.assert_called_once()
-
- mock_chroot_cmd.assert_called_once()
-
- mock_get_relative_path_of_chroot_path.assert_called_once_with(
- '/mnt/host/source/path/to/llvm')
-
- def testInvalidPrefixForChrootPath(self):
- package_chroot_path = '/path/to/llvm'
-
- # Verify the exception is raised when the chroot path does not start with
- # '/mnt/host/source/'.
- with self.assertRaises(ValueError) as err:
- llvm_patch_management._GetRelativePathOfChrootPath(package_chroot_path)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid prefix for the chroot path: %s' % package_chroot_path)
-
- def testValidPrefixForChrootPath(self):
- package_chroot_path = '/mnt/host/source/path/to/llvm'
-
- package_rel_path = 'path/to/llvm'
-
- self.assertEqual(
- llvm_patch_management._GetRelativePathOfChrootPath(package_chroot_path),
- package_rel_path)
-
- # Simulate behavior of 'os.path.isfile()' when the patch metadata file does
- # not exist.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- def testInvalidFileForPatchMetadataPath(self, mock_isfile):
- abs_path_to_patch_file = '/abs/path/to/files/test.json'
-
- # Verify the exception is raised when the absolute path to the patch
- # metadata file does not exist.
- with self.assertRaises(ValueError) as err:
- llvm_patch_management._CheckPatchMetadataPath(abs_path_to_patch_file)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid file provided: %s' % abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the
- # patch metadata file exists.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- def testPatchMetadataFileDoesNotEndInJson(self, mock_isfile):
- abs_path_to_patch_file = '/abs/path/to/files/PATCHES'
-
- # Verify the exception is raised when the patch metadata file does not end
- # in '.json'.
- with self.assertRaises(ValueError) as err:
- llvm_patch_management._CheckPatchMetadataPath(abs_path_to_patch_file)
-
- self.assertEqual(
- str(err.exception),
- 'File does not end in ".json": %s' % abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- @mock.patch.object(os.path, 'isfile')
- def testValidPatchMetadataFile(self, mock_isfile):
- abs_path_to_patch_file = '/abs/path/to/files/PATCHES.json'
-
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the
- # patch metadata file exists.
- mock_isfile.return_value = True
-
- llvm_patch_management._CheckPatchMetadataPath(abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- # Simulate `GetGitHashForVersion()` when successfully retrieved the git hash
- # of the version passed in.
- @mock.patch.object(
- LLVMHash, 'GetGitHashForVersion', return_value='a123testhash1')
- # Simulate `CreateTempLLVMRepo()` when successfully created a work tree from
- # the LLVM repo copy in `llvm_tools` directory.
- @mock.patch.object(llvm_patch_management, 'CreateTempLLVMRepo')
- # Simulate behavior of `_MoveSrcTreeHEADToGitHash()` when successfully moved
- # the head pointer to the git hash of the revision.
- @mock.patch.object(llvm_patch_management, '_MoveSrcTreeHEADToGitHash')
- @mock.patch.object(llvm_patch_management, 'GetPathToFilesDirectory')
- @mock.patch.object(llvm_patch_management, '_CheckPatchMetadataPath')
- def testExceptionIsRaisedWhenUpdatingAPackagesMetadataFile(
- self, mock_check_patch_metadata_path, mock_get_filesdir_path,
- mock_move_head_pointer, mock_create_temp_llvm_repo, mock_get_git_hash):
-
- abs_path_to_patch_file = \
- '/some/path/to/chroot/some/path/to/filesdir/PATCHES'
-
- # Simulate the behavior of '_CheckPatchMetadataPath()' when the patch
- # metadata file in $FILESDIR does not exist or does not end in '.json'.
- def InvalidPatchMetadataFile(patch_metadata_path):
- self.assertEqual(patch_metadata_path, abs_path_to_patch_file)
-
- raise ValueError(
- 'File does not end in ".json": %s' % abs_path_to_patch_file)
-
- # Use the test function to simulate behavior of '_CheckPatchMetadataPath()'.
- mock_check_patch_metadata_path.side_effect = InvalidPatchMetadataFile
-
- abs_path_to_filesdir = '/some/path/to/chroot/some/path/to/filesdir'
-
- # Simulate the behavior of 'GetPathToFilesDirectory()' when successfully
- # constructed the absolute path to $FILESDIR of a package.
- mock_get_filesdir_path.return_value = abs_path_to_filesdir
-
- temp_work_tree = '/abs/path/to/tmpWorkTree'
-
- # Simulate the behavior of returning the absolute path to a worktree via
- # `git worktree add`.
- mock_create_temp_llvm_repo.return_value.__enter__.return_value.name = \
- temp_work_tree
-
- chroot_path = '/some/path/to/chroot'
- revision = 1000
- patch_file_name = 'PATCHES'
- package_name = 'test-package/package1'
-
- # Verify the exception is raised when a package is constructing the
- # arguments for the patch manager to update its patch metadata file and an
- # exception is raised in the process.
- with self.assertRaises(ValueError) as err:
- llvm_patch_management.UpdatePackagesPatchMetadataFile(
- chroot_path, revision, patch_file_name, [package_name],
- FailureModes.FAIL)
-
- self.assertEqual(
- str(err.exception),
- 'File does not end in ".json": %s' % abs_path_to_patch_file)
-
- mock_get_filesdir_path.assert_called_once_with(chroot_path, package_name)
-
- mock_get_git_hash.assert_called_once()
-
- mock_check_patch_metadata_path.assert_called_once()
-
- mock_move_head_pointer.assert_called_once()
-
- mock_create_temp_llvm_repo.assert_called_once()
-
- # Simulate `CleanSrcTree()` when successfully removed changes from the
- # worktree.
- @mock.patch.object(patch_manager, 'CleanSrcTree')
- # Simulate `GetGitHashForVersion()` when successfully retrieved the git hash
- # of the version passed in.
- @mock.patch.object(
- LLVMHash, 'GetGitHashForVersion', return_value='a123testhash1')
- # Simulate `CreateTempLLVMRepo()` when successfully created a work tree from
- # the LLVM repo copy in `llvm_tools` directory.
- @mock.patch.object(llvm_patch_management, 'CreateTempLLVMRepo')
- # Simulate behavior of `_MoveSrcTreeHEADToGitHash()` when successfully moved
- # the head pointer to the git hash of the revision.
- @mock.patch.object(llvm_patch_management, '_MoveSrcTreeHEADToGitHash')
- @mock.patch.object(llvm_patch_management, 'GetPathToFilesDirectory')
- @mock.patch.object(llvm_patch_management, '_CheckPatchMetadataPath')
- @mock.patch.object(patch_manager, 'HandlePatches')
- def testSuccessfullyRetrievedPatchResults(
- self, mock_handle_patches, mock_check_patch_metadata_path,
- mock_get_filesdir_path, mock_move_head_pointer,
- mock_create_temp_llvm_repo, mock_get_git_hash, mock_clean_src_tree):
-
- abs_path_to_filesdir = '/some/path/to/chroot/some/path/to/filesdir'
-
- abs_path_to_patch_file = \
- '/some/path/to/chroot/some/path/to/filesdir/PATCHES.json'
-
- # Simulate the behavior of 'GetPathToFilesDirectory()' when successfully
- # constructed the absolute path to $FILESDIR of a package.
- mock_get_filesdir_path.return_value = abs_path_to_filesdir
-
- PatchInfo = namedtuple('PatchInfo', [
- 'applied_patches', 'failed_patches', 'non_applicable_patches',
- 'disabled_patches', 'removed_patches', 'modified_metadata'
- ])
-
- # Simulate the behavior of 'HandlePatches()' when successfully iterated
- # through every patch in the patch metadata file and a dictionary is
- # returned that contains information about the patches' status.
- mock_handle_patches.return_value = PatchInfo(
- applied_patches=['fixes_something.patch'],
- failed_patches=['disables_output.patch'],
- non_applicable_patches=[],
- disabled_patches=[],
- removed_patches=[],
- modified_metadata=None)
-
- temp_work_tree = '/abs/path/to/tmpWorkTree'
-
- # Simulate the behavior of returning the absolute path to a worktree via
- # `git worktree add`.
- mock_create_temp_llvm_repo.return_value.__enter__.return_value.name = \
- temp_work_tree
-
- expected_patch_results = {
- 'applied_patches': ['fixes_something.patch'],
- 'failed_patches': ['disables_output.patch'],
- 'non_applicable_patches': [],
- 'disabled_patches': [],
- 'removed_patches': [],
- 'modified_metadata': None
- }
-
- chroot_path = '/some/path/to/chroot'
- revision = 1000
- patch_file_name = 'PATCHES.json'
- package_name = 'test-package/package2'
-
- patch_info = llvm_patch_management.UpdatePackagesPatchMetadataFile(
- chroot_path, revision, patch_file_name, [package_name],
- FailureModes.CONTINUE)
-
- self.assertDictEqual(patch_info, {package_name: expected_patch_results})
-
- mock_get_filesdir_path.assert_called_once_with(chroot_path, package_name)
-
- mock_check_patch_metadata_path.assert_called_once_with(
- abs_path_to_patch_file)
-
- mock_handle_patches.assert_called_once()
-
- mock_create_temp_llvm_repo.assert_called_once()
-
- mock_get_git_hash.assert_called_once()
-
- mock_move_head_pointer.assert_called_once()
-
- mock_clean_src_tree.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/modify_a_tryjob.py b/llvm_tools/modify_a_tryjob.py
deleted file mode 100755
index 20ba3541..00000000
--- a/llvm_tools/modify_a_tryjob.py
+++ /dev/null
@@ -1,288 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Modifies a tryjob based off of arguments."""
-
-from __future__ import print_function
-
-import argparse
-import enum
-import json
-import os
-import sys
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from failure_modes import FailureModes
-from get_llvm_hash import GetLLVMHashAndVersionFromSVNOption
-from update_packages_and_run_tryjobs import RunTryJobs
-from update_tryjob_status import FindTryjobIndex
-from update_tryjob_status import TryjobStatus
-import update_chromeos_llvm_next_hash
-
-
-class ModifyTryjob(enum.Enum):
- """Options to modify a tryjob."""
-
- REMOVE = 'remove'
- RELAUNCH = 'relaunch'
- ADD = 'add'
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(
- description='Removes, relaunches, or adds a tryjob.')
-
- # Add argument for the JSON file to use for the update of a tryjob.
- parser.add_argument(
- '--status_file',
- required=True,
- help='The absolute path to the JSON file that contains the tryjobs used '
- 'for bisecting LLVM.')
-
- # Add argument that determines what action to take on the revision specified.
- parser.add_argument(
- '--modify_tryjob',
- required=True,
- choices=[modify_tryjob.value for modify_tryjob in ModifyTryjob],
- help='What action to perform on the tryjob.')
-
- # Add argument that determines which revision to search for in the list of
- # tryjobs.
- parser.add_argument(
- '--revision',
- required=True,
- type=int,
- help='The revision to either remove or relaunch.')
-
- # Add argument for other change lists that want to run alongside the tryjob.
- parser.add_argument(
- '--extra_change_lists',
- type=int,
- nargs='+',
- help='change lists that would like to be run alongside the change list '
- 'of updating the packages')
-
- # Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
-
- # Add argument for the builder to use for the tryjob.
- parser.add_argument('--builder', help='builder to use for the tryjob testing')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- # Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
-
- args_output = parser.parse_args()
-
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
- raise ValueError('File does not exist or does not ending in ".json" '
- ': %s' % args_output.status_file)
-
- if args_output.modify_tryjob == ModifyTryjob.ADD.value and \
- not args_output.builder:
- raise ValueError('A builder is required for adding a tryjob.')
- elif args_output.modify_tryjob != ModifyTryjob.ADD.value and \
- args_output.builder:
- raise ValueError('Specifying a builder is only available when adding a '
- 'tryjob.')
-
- return args_output
-
-
-def GetCLAfterUpdatingPackages(packages, git_hash, svn_version, chroot_path,
- patch_metadata_file, svn_option):
- """Updates the packages' LLVM_NEXT."""
-
- change_list = update_chromeos_llvm_next_hash.UpdatePackages(
- packages, git_hash, svn_version, chroot_path, patch_metadata_file,
- FailureModes.DISABLE_PATCHES, svn_option)
-
- print('\nSuccessfully updated packages to %d' % svn_version)
- print('Gerrit URL: %s' % change_list.url)
- print('Change list number: %d' % change_list.cl_number)
-
- return change_list
-
-
-def CreateNewTryjobEntryForBisection(cl, extra_cls, options, builder,
- chroot_path, verbose, cl_url, revision):
- """Submits a tryjob and adds additional information."""
-
- # Get the tryjob results after submitting the tryjob.
- # Format of 'tryjob_results':
- # [
- # {
- # 'link' : [TRYJOB_LINK],
- # 'buildbucket_id' : [BUILDBUCKET_ID],
- # 'extra_cls' : [EXTRA_CLS_LIST],
- # 'options' : [EXTRA_OPTIONS_LIST],
- # 'builder' : [BUILDER_AS_A_LIST]
- # }
- # ]
- tryjob_results = RunTryJobs(cl, extra_cls, options, [builder], chroot_path,
- verbose)
- print('\nTryjob:')
- print(tryjob_results[0])
-
- # Add necessary information about the tryjob.
- tryjob_results[0]['url'] = cl_url
- tryjob_results[0]['rev'] = revision
- tryjob_results[0]['status'] = TryjobStatus.PENDING.value
- tryjob_results[0]['cl'] = cl
-
- return tryjob_results[0]
-
-
-def AddTryjob(packages, git_hash, revision, chroot_path, patch_metadata_file,
- extra_cls, options, builder, verbose, svn_option):
- """Submits a tryjob."""
-
- update_chromeos_llvm_next_hash.verbose = verbose
-
- change_list = GetCLAfterUpdatingPackages(packages, git_hash, revision,
- chroot_path, patch_metadata_file,
- svn_option)
-
- tryjob_dict = CreateNewTryjobEntryForBisection(
- change_list.cl_number, extra_cls, options, builder, chroot_path, verbose,
- change_list.url, revision)
-
- return tryjob_dict
-
-
-def PerformTryjobModification(revision, modify_tryjob, status_file, extra_cls,
- options, builder, chroot_path, verbose):
- """Removes, relaunches, or adds a tryjob.
-
- Args:
- revision: The revision associated with the tryjob.
- modify_tryjob: What action to take on the tryjob.
- Ex: ModifyTryjob.REMOVE, ModifyTryjob.RELAUNCH, ModifyTryjob.ADD
- status_file: The .JSON file that contains the tryjobs.
- extra_cls: Extra change lists to be run alongside tryjob
- options: Extra options to pass into 'cros tryjob'.
- builder: The builder to use for 'cros tryjob'.
- chroot_path: The absolute path to the chroot (used by 'cros tryjob' when
- relaunching a tryjob).
- verbose: Determines whether to print the contents of a command to `stdout`.
- """
-
- # Format of 'bisect_contents':
- # {
- # 'start': [START_REVISION_OF_BISECTION]
- # 'end': [END_REVISION_OF_BISECTION]
- # 'jobs' : [
- # {[TRYJOB_INFORMATION]},
- # {[TRYJOB_INFORMATION]},
- # ...,
- # {[TRYJOB_INFORMATION]}
- # ]
- # }
- with open(status_file) as tryjobs:
- bisect_contents = json.load(tryjobs)
-
- if not bisect_contents['jobs'] and modify_tryjob != ModifyTryjob.ADD:
- sys.exit('No tryjobs in %s' % status_file)
-
- tryjob_index = FindTryjobIndex(revision, bisect_contents['jobs'])
-
- # 'FindTryjobIndex()' returns None if the tryjob was not found.
- if tryjob_index is None and modify_tryjob != ModifyTryjob.ADD:
- raise ValueError(
- 'Unable to find tryjob for %d in %s' % (revision, status_file))
-
- # Determine the action to take based off of 'modify_tryjob'.
- if modify_tryjob == ModifyTryjob.REMOVE:
- del bisect_contents['jobs'][tryjob_index]
-
- print('Successfully deleted the tryjob of revision %d' % revision)
- elif modify_tryjob == ModifyTryjob.RELAUNCH:
- # Need to update the tryjob link and buildbucket ID.
- tryjob_results = RunTryJobs(
- bisect_contents['jobs'][tryjob_index]['cl'],
- bisect_contents['jobs'][tryjob_index]['extra_cls'],
- bisect_contents['jobs'][tryjob_index]['options'],
- bisect_contents['jobs'][tryjob_index]['builder'], chroot_path, verbose)
-
- bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.PENDING.value
- bisect_contents['jobs'][tryjob_index]['link'] = tryjob_results[0]['link']
- bisect_contents['jobs'][tryjob_index]['buildbucket_id'] = tryjob_results[0][
- 'buildbucket_id']
-
- print('Successfully relaunched the tryjob for revision %d and updated '
- 'the tryjob link to %s' % (revision, tryjob_results[0]['link']))
- elif modify_tryjob == ModifyTryjob.ADD:
- # Tryjob exists already.
- if tryjob_index is not None:
- raise ValueError('Tryjob already exists (index is %d) in %s.' %
- (tryjob_index, status_file))
-
- # Make sure the revision is within the bounds of the start and end of the
- # bisection.
- elif bisect_contents['start'] < revision < bisect_contents['end']:
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
-
- patch_metadata_file = 'PATCHES.json'
-
- git_hash, revision = GetLLVMHashAndVersionFromSVNOption(revision)
-
- tryjob_dict = AddTryjob(update_packages, git_hash, revision, chroot_path,
- patch_metadata_file, extra_cls, options, builder,
- verbose, revision)
-
- bisect_contents['jobs'].append(tryjob_dict)
-
- print('Successfully added tryjob of revision %d' % revision)
- else:
- raise ValueError('Failed to add tryjob to %s' % status_file)
- else:
- raise ValueError(
- 'Invalid "modify_tryjob" option provided: %s' % modify_tryjob)
-
- with open(status_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
-
-
-def main():
- """Removes, relaunches, or adds a tryjob."""
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- PerformTryjobModification(
- args_output.revision, ModifyTryjob(
- args_output.modify_tryjob), args_output.status_file,
- args_output.extra_change_lists, args_output.options, args_output.builder,
- args_output.chroot_path, args_output.verbose)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/modify_a_tryjob_unittest.py b/llvm_tools/modify_a_tryjob_unittest.py
deleted file mode 100755
index 2b492f1d..00000000
--- a/llvm_tools/modify_a_tryjob_unittest.py
+++ /dev/null
@@ -1,401 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for modifying a tryjob."""
-
-from __future__ import print_function
-
-import json
-import unittest
-import unittest.mock as mock
-
-from test_helpers import ArgsOutputTest
-from test_helpers import CreateTemporaryJsonFile
-from test_helpers import WritePrettyJsonFile
-import modify_a_tryjob
-
-
-class ModifyATryjobTest(unittest.TestCase):
- """Unittests for modifying a tryjob."""
-
- def testNoTryjobsInStatusFile(self):
- bisect_test_contents = {'start': 369410, 'end': 369420, 'jobs': []}
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_modify = 369411
-
- args_output = ArgsOutputTest()
- args_output.builders = None
- args_output.options = None
-
- # Verify the exception is raised there are no tryjobs in the status file
- # and the mode is not to 'add' a tryjob.
- with self.assertRaises(SystemExit) as err:
- modify_a_tryjob.PerformTryjobModification(
- revision_to_modify, modify_a_tryjob.ModifyTryjob.REMOVE,
- temp_json_file, args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- self.assertEqual(str(err.exception), 'No tryjobs in %s' % temp_json_file)
-
- # Simulate the behavior of `FindTryjobIndex()` when the index of the tryjob
- # was not found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=None)
- def testNoTryjobIndexFound(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_modify = 369412
-
- args_output = ArgsOutputTest()
- args_output.builders = None
- args_output.options = None
-
- # Verify the exception is raised when the index of the tryjob was not
- # found in the status file and the mode is not to 'add' a tryjob.
- with self.assertRaises(ValueError) as err:
- modify_a_tryjob.PerformTryjobModification(
- revision_to_modify, modify_a_tryjob.ModifyTryjob.REMOVE,
- temp_json_file, args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- self.assertEqual(
- str(err.exception), 'Unable to find tryjob for %d in %s' %
- (revision_to_modify, temp_json_file))
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the index of the tryjob
- # was found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyRemovedTryjobInStatusFile(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369414,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_modify = 369414
-
- args_output = ArgsOutputTest()
- args_output.builders = None
- args_output.options = None
-
- modify_a_tryjob.PerformTryjobModification(
- revision_to_modify, modify_a_tryjob.ModifyTryjob.REMOVE,
- temp_json_file, args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- # Verify that the tryjob was removed from the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- expected_file_contents = {'start': 369410, 'end': 369420, 'jobs': []}
-
- self.assertDictEqual(bisect_contents, expected_file_contents)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `RunTryJobs()` when successfully submitted a
- # tryjob.
- @mock.patch.object(modify_a_tryjob, 'RunTryJobs')
- # Simulate the behavior of `FindTryjobIndex()` when the index of the tryjob
- # was found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyRelaunchedTryjob(self, mock_find_tryjob_index,
- mock_run_tryjob):
-
- bisect_test_contents = {
- 'start':
- 369410,
- 'end':
- 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'bad',
- 'link': 'https://some_tryjob_link.com',
- 'buildbucket_id': 1200,
- 'cl': 123,
- 'extra_cls': None,
- 'options': None,
- 'builder': ['some-builder-tryjob']
- }]
- }
-
- tryjob_result = [{
- 'link': 'https://some_new_tryjob_link.com',
- 'buildbucket_id': 20
- }]
-
- mock_run_tryjob.return_value = tryjob_result
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_modify = 369411
-
- args_output = ArgsOutputTest()
- args_output.builders = None
- args_output.options = None
-
- modify_a_tryjob.PerformTryjobModification(
- revision_to_modify, modify_a_tryjob.ModifyTryjob.RELAUNCH,
- temp_json_file, args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- # Verify that the tryjob's information was updated after submtting the
- # tryjob.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- expected_file_contents = {
- 'start':
- 369410,
- 'end':
- 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'link': 'https://some_new_tryjob_link.com',
- 'buildbucket_id': 20,
- 'cl': 123,
- 'extra_cls': None,
- 'options': None,
- 'builder': ['some-builder-tryjob']
- }]
- }
-
- self.assertDictEqual(bisect_contents, expected_file_contents)
-
- mock_find_tryjob_index.assert_called_once()
-
- mock_run_tryjob.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the index of the tryjob
- # was found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=0)
- def testAddingTryjobThatAlreadyExists(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'bad',
- 'builder': ['some-builder']
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_add = 369411
-
- # Index of the tryjob in 'jobs' list.
- tryjob_index = 0
-
- args_output = ArgsOutputTest()
- args_output.options = None
-
- # Verify the exception is raised when the tryjob that is going to added
- # already exists in the status file (found its index).
- with self.assertRaises(ValueError) as err:
- modify_a_tryjob.PerformTryjobModification(
- revision_to_add, modify_a_tryjob.ModifyTryjob.ADD, temp_json_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- self.assertEqual(
- str(err.exception), 'Tryjob already exists (index is %d) in %s.' %
- (tryjob_index, temp_json_file))
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob was not found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=None)
- def testSuccessfullyDidNotAddTryjobOutsideOfBisectionBounds(
- self, mock_find_tryjob_index):
-
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'bad'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- # Add a revision that is outside of 'start' and 'end'.
- revision_to_add = 369450
-
- args_output = ArgsOutputTest()
- args_output.options = None
-
- # Verify the exception is raised when adding a tryjob that does not exist
- # and is not within 'start' and 'end'.
- with self.assertRaises(ValueError) as err:
- modify_a_tryjob.PerformTryjobModification(
- revision_to_add, modify_a_tryjob.ModifyTryjob.ADD, temp_json_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- self.assertEqual(
- str(err.exception), 'Failed to add tryjob to %s' % temp_json_file)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `AddTryjob()` when successfully submitted the
- # tryjob and constructed the tryjob information (a dictionary).
- @mock.patch.object(modify_a_tryjob, 'AddTryjob')
- # Simulate the behavior of `GetLLVMHashAndVersionFromSVNOption()` when
- # successfully retrieved the git hash of the revision to launch a tryjob for.
- @mock.patch.object(
- modify_a_tryjob,
- 'GetLLVMHashAndVersionFromSVNOption',
- return_value=('a123testhash1', 369418))
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob was not found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=None)
- def testSuccessfullyAddedTryjob(self, mock_find_tryjob_index,
- mock_get_llvm_hash, mock_add_tryjob):
-
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'bad'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- # Add a revision that is outside of 'start' and 'end'.
- revision_to_add = 369418
-
- args_output = ArgsOutputTest()
- args_output.options = None
-
- new_tryjob_info = {
- 'rev': revision_to_add,
- 'status': 'pending',
- 'options': args_output.options,
- 'extra_cls': args_output.extra_change_lists,
- 'builder': args_output.builders
- }
-
- mock_add_tryjob.return_value = new_tryjob_info
-
- modify_a_tryjob.PerformTryjobModification(
- revision_to_add, modify_a_tryjob.ModifyTryjob.ADD, temp_json_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- # Verify that the tryjob was added to the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- expected_file_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'bad'
- }, new_tryjob_info]
- }
-
- self.assertDictEqual(bisect_contents, expected_file_contents)
-
- mock_find_tryjob_index.assert_called_once()
-
- mock_get_llvm_hash.assert_called_once_with(revision_to_add)
-
- mock_add_tryjob.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob was found.
- @mock.patch.object(modify_a_tryjob, 'FindTryjobIndex', return_value=0)
- def testModifyATryjobOptionDoesNotExist(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369414,
- 'status': 'bad'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- # Add a revision that is outside of 'start' and 'end'.
- revision_to_modify = 369414
-
- args_output = ArgsOutputTest()
- args_output.builders = None
- args_output.options = None
-
- # Verify the exception is raised when the modify a tryjob option does not
- # exist.
- with self.assertRaises(ValueError) as err:
- modify_a_tryjob.PerformTryjobModification(
- revision_to_modify, 'remove_link', temp_json_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builders, args_output.chroot_path, args_output.verbose)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid "modify_tryjob" option provided: remove_link')
-
- mock_find_tryjob_index.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/patch_manager.py b/llvm_tools/patch_manager.py
deleted file mode 100755
index 806b944f..00000000
--- a/llvm_tools/patch_manager.py
+++ /dev/null
@@ -1,748 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A manager for patches."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import os
-import subprocess
-import sys
-
-from collections import namedtuple
-from failure_modes import FailureModes
-from get_llvm_hash import LLVMHash
-from subprocess_helpers import check_call
-from subprocess_helpers import check_output
-
-
-def is_directory(dir_path):
- """Validates that the argument passed into 'argparse' is a directory."""
-
- if not os.path.isdir(dir_path):
- raise ValueError('Path is not a directory: %s' % dir_path)
-
- return dir_path
-
-
-def is_patch_metadata_file(patch_metadata_file):
- """Valides the argument into 'argparse' is a patch file."""
-
- if not os.path.isfile(patch_metadata_file):
- raise ValueError(
- 'Invalid patch metadata file provided: %s' % patch_metadata_file)
-
- if not patch_metadata_file.endswith('.json'):
- raise ValueError(
- 'Patch metadata file does not end in ".json": %s' % patch_metadata_file)
-
- return patch_metadata_file
-
-
-def is_valid_failure_mode(failure_mode):
- """Validates that the failure mode passed in is correct."""
-
- cur_failure_modes = [mode.value for mode in FailureModes]
-
- if failure_mode not in cur_failure_modes:
- raise ValueError('Invalid failure mode provided: %s' % failure_mode)
-
- return failure_mode
-
-
-def EnsureBisectModeAndSvnVersionAreSpecifiedTogether(failure_mode,
- good_svn_version):
- """Validates that 'good_svn_version' is passed in only for bisection."""
-
- if failure_mode != FailureModes.BISECT_PATCHES.value and good_svn_version:
- raise ValueError('"good_svn_version" is only available for bisection.')
- elif failure_mode == FailureModes.BISECT_PATCHES.value and \
- not good_svn_version:
- raise ValueError('A good SVN version is required for bisection (used by'
- '"git bisect start".')
-
-
-def GetCommandLineArgs():
- """Get the required arguments from the command line."""
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description='A manager for patches.')
-
- # Add argument for the last good SVN version which is required by
- # `git bisect start` (only valid for bisection mode).
- parser.add_argument(
- '--good_svn_version',
- type=int,
- help='INTERNAL USE ONLY... (used for bisection.)')
-
- # Add argument for the number of patches it iterate. Only used when performing
- # `git bisect run`.
- parser.add_argument(
- '--num_patches_to_iterate', type=int, help=argparse.SUPPRESS)
-
- # Add argument for whether bisection should continue. Only used for
- # 'bisect_patches.'
- parser.add_argument(
- '--continue_bisection',
- type=bool,
- default=False,
- help='Determines whether bisection should continue after successfully '
- 'bisecting a patch (default: %(default)s) - only used for '
- '"bisect_patches"')
-
- # Add argument for the LLVM version to use for patch management.
- parser.add_argument(
- '--svn_version',
- type=int,
- required=True,
- help='the LLVM svn version to use for patch management (determines '
- 'whether a patch is applicable)')
-
- # Add argument for the patch metadata file that is in $FILESDIR.
- parser.add_argument(
- '--patch_metadata_file',
- required=True,
- type=is_patch_metadata_file,
- help='the absolute path to the .json file in "$FILESDIR/" of the '
- 'package which has all the patches and their metadata if applicable')
-
- # Add argument for the absolute path to the ebuild's $FILESDIR path.
- # Example: '.../sys-devel/llvm/files/'.
- parser.add_argument(
- '--filesdir_path',
- required=True,
- type=is_directory,
- help='the absolute path to the ebuild "files/" directory')
-
- # Add argument for the absolute path to the unpacked sources.
- parser.add_argument(
- '--src_path',
- required=True,
- type=is_directory,
- help='the absolute path to the unpacked LLVM sources')
-
- # Add argument for the mode of the patch manager when handling failing
- # applicable patches.
- parser.add_argument(
- '--failure_mode',
- default=FailureModes.FAIL.value,
- type=is_valid_failure_mode,
- help='the mode of the patch manager when handling failed patches ' \
- '(default: %(default)s)')
-
- # Parse the command line.
- args_output = parser.parse_args()
-
- EnsureBisectModeAndSvnVersionAreSpecifiedTogether(
- args_output.failure_mode, args_output.good_svn_version)
-
- return args_output
-
-
-def GetHEADSVNVersion(src_path):
- """Gets the SVN version of HEAD in the src tree."""
-
- get_head_cmd = ['git', '-C', src_path, 'log', '-1', '--pretty=%B']
-
- head_commit_message = check_output(get_head_cmd)
-
- head_svn_version = LLVMHash().GetSVNVersionFromCommitMessage(
- head_commit_message)
-
- return head_svn_version
-
-
-def VerifyHEADIsTheSameAsSVNVersion(src_path, svn_version):
- """Verifies that HEAD's SVN version matches 'svn_version'."""
-
- head_svn_version = GetHEADSVNVersion(src_path)
-
- if head_svn_version != svn_version:
- raise ValueError('HEAD\'s SVN version %d does not match "svn_version"'
- ' %d, please move HEAD to "svn_version"s\' git hash.' %
- (head_svn_version, svn_version))
-
-
-def GetPathToPatch(filesdir_path, rel_patch_path):
- """Gets the absolute path to a patch in $FILESDIR.
-
- Args:
- filesdir_path: The absolute path to $FILESDIR.
- rel_patch_path: The relative path to the patch in '$FILESDIR/'.
-
- Returns:
- The absolute path to the patch in $FILESDIR.
-
- Raises:
- ValueError: Unable to find the path to the patch in $FILESDIR.
- """
-
- if not os.path.isdir(filesdir_path):
- raise ValueError('Invalid path to $FILESDIR provided: %s' % filesdir_path)
-
- # Combine $FILESDIR + relative path of patch to $FILESDIR.
- patch_path = os.path.join(filesdir_path, rel_patch_path)
-
- if not os.path.isfile(patch_path):
- raise ValueError('The absolute path %s to the patch %s does not exist' %
- (patch_path, rel_patch_path))
-
- return patch_path
-
-
-def GetPatchMetadata(patch_dict):
- """Gets the patch's metadata.
-
- Args:
- patch_dict: A dictionary that has the patch metadata.
-
- Returns:
- A tuple that contains the metadata values.
- """
-
- # Get the metadata values of a patch if possible.
- start_version = patch_dict.get('start_version', 0)
- end_version = patch_dict.get('end_version', None)
- is_critical = patch_dict.get('is_critical', False)
-
- return start_version, end_version, is_critical
-
-
-def ApplyPatch(src_path, patch_path):
- """Attempts to apply the patch.
-
- Args:
- src_path: The absolute path to the unpacked sources of the package.
- patch_path: The absolute path to the patch in $FILESDIR/
-
- Returns:
- A boolean where 'True' means that the patch applied fine or 'False' means
- that the patch failed to apply.
- """
-
- if not os.path.isdir(src_path):
- raise ValueError('Invalid src path provided: %s' % src_path)
-
- if not os.path.isfile(patch_path):
- raise ValueError('Invalid patch file provided: %s' % patch_path)
-
- # Test the patch with '--dry-run' before actually applying the patch.
- test_patch_cmd = [
- 'patch', '--dry-run', '-d', src_path, '-f', '-p1', '-E',
- '--no-backup-if-mismatch', '-i', patch_path
- ]
-
- # Cmd to apply a patch in the src unpack path.
- apply_patch_cmd = [
- 'patch', '-d', src_path, '-f', '-p1', '-E', '--no-backup-if-mismatch',
- '-i', patch_path
- ]
-
- try:
- check_output(test_patch_cmd)
-
- # If the mode is 'continue', then catching the exception makes sure that
- # the program does not exit on the first failed applicable patch.
- except subprocess.CalledProcessError:
- # Test run on the patch failed to apply.
- return False
-
- # Test run succeeded on the patch.
- check_output(apply_patch_cmd)
-
- return True
-
-
-def UpdatePatchMetadataFile(patch_metadata_file, patches):
- """Updates the .json file with unchanged and at least one changed patch.
-
- Args:
- patch_metadata_file: The absolute path to the .json file that has all the
- patches and its metadata.
- patches: A list of patches whose metadata were or were not updated.
-
- Raises:
- ValueError: The patch metadata file does not have the correct extension.
- """
-
- if not patch_metadata_file.endswith('.json'):
- raise ValueError('File does not end in ".json": %s' % patch_metadata_file)
-
- with open(patch_metadata_file, 'w') as patch_file:
- json.dump(patches, patch_file, indent=4, separators=(',', ': '))
-
-
-def GetCommitHashesForBisection(src_path, good_svn_version, bad_svn_version):
- """Gets the good and bad commit hashes required by `git bisect start`."""
-
- new_llvm_hash = LLVMHash()
-
- bad_commit_hash = new_llvm_hash.GetGitHashForVersion(src_path,
- bad_svn_version)
-
- good_commit_hash = new_llvm_hash.GetGitHashForVersion(src_path,
- good_svn_version)
-
- return good_commit_hash, bad_commit_hash
-
-
-def PerformBisection(src_path, good_commit, bad_commit, svn_version,
- patch_metadata_file, filesdir_path, num_patches):
- """Performs bisection to determine where a patch stops applying."""
-
- bisect_start_cmd = [
- 'git', '-C', src_path, 'bisect', 'start', bad_commit, good_commit
- ]
-
- check_output(bisect_start_cmd)
-
- bisect_run_cmd = [
- 'git', '-C', src_path, 'bisect', 'run',
- os.path.abspath(__file__), '--svn_version',
- '%d' % svn_version, '--patch_metadata_file', patch_metadata_file,
- '--filesdir_path', filesdir_path, '--src_path', src_path,
- '--failure_mode', 'internal_bisection', '--num_patches_to_iterate',
- '%d' % num_patches
- ]
-
- check_call(bisect_run_cmd)
-
- # Successfully bisected the patch, so retrieve the SVN version from the
- # commit message.
- get_bad_commit_from_bisect_run_cmd = [
- 'git', '-C', src_path, 'show', 'refs/bisect/bad'
- ]
-
- bad_commit_message = check_output(get_bad_commit_from_bisect_run_cmd)
-
- end_bisection_cmd = ['git', '-C', src_path, 'bisect', 'reset']
-
- check_output(end_bisection_cmd)
-
- # `git bisect run` returns the bad commit hash and the commit message.
- bad_version = LLVMHash().GetSVNVersionFromCommitMessage(
- bad_commit_message.rstrip())
-
- return bad_version
-
-
-def CleanSrcTree(src_path):
- """Cleans the source tree of the changes made in 'src_path'."""
-
- reset_src_tree_cmd = ['git', '-C', src_path, 'reset', 'HEAD', '--hard']
-
- check_output(reset_src_tree_cmd)
-
- clean_src_tree_cmd = ['git', '-C', src_path, 'clean', '-fd']
-
- check_output(clean_src_tree_cmd)
-
-
-def SaveSrcTreeState(src_path):
- """Stashes the changes made so far to the source tree."""
-
- save_src_tree_cmd = ['git', '-C', src_path, 'stash', '-a']
-
- check_output(save_src_tree_cmd)
-
-
-def RestoreSrcTreeState(src_path, bad_commit_hash):
- """Restores the changes made to the source tree."""
-
- checkout_cmd = ['git', '-C', src_path, 'checkout', bad_commit_hash]
-
- check_output(checkout_cmd)
-
- get_changes_cmd = ['git', '-C', src_path, 'stash', 'pop']
-
- check_output(get_changes_cmd)
-
-
-def HandlePatches(svn_version,
- patch_metadata_file,
- filesdir_path,
- src_path,
- mode,
- good_svn_version=None,
- num_patches_to_iterate=None,
- continue_bisection=False):
- """Handles the patches in the .json file for the package.
-
- Args:
- svn_version: The LLVM version to use for patch management.
- patch_metadata_file: The absolute path to the .json file in '$FILESDIR/'
- that has all the patches and their metadata.
- filesdir_path: The absolute path to $FILESDIR.
- src_path: The absolute path to the unpacked destination of the package.
- mode: The action to take when an applicable patch failed to apply.
- Ex: 'FailureModes.FAIL'
- good_svn_version: Only used by 'bisect_patches' which tells
- `git bisect start` the good version.
- num_patches_to_iterate: The number of patches to iterate in the .JSON file
- (internal use). Only used by `git bisect run`.
- continue_bisection: Only used for 'bisect_patches' mode. If flag is set,
- then bisection will continue to the next patch when successfully bisected a
- patch.
-
- Returns:
- Depending on the mode, 'None' would be returned if everything went well or
- the .json file was not updated. Otherwise, a list or multiple lists would
- be returned that indicates what has changed.
-
- Raises:
- ValueError: The patch metadata file does not exist or does not end with
- '.json' or the absolute path to $FILESDIR does not exist or the unpacked
- path does not exist or if the mode is 'fail', then an applicable patch
- failed to apply.
- """
-
- # A flag for whether the mode specified would possible modify the patches.
- can_modify_patches = False
-
- # 'fail' or 'continue' mode would not modify a patch's metadata, so the .json
- # file would stay the same.
- if mode != FailureModes.FAIL and mode != FailureModes.CONTINUE:
- can_modify_patches = True
-
- # A flag that determines whether at least one patch's metadata was
- # updated due to the mode that is passed in.
- updated_patch = False
-
- # A list of patches that will be in the updated .json file.
- applicable_patches = []
-
- # A list of patches that successfully applied.
- applied_patches = []
-
- # A list of patches that were disabled.
- disabled_patches = []
-
- # A list of bisected patches.
- bisected_patches = []
-
- # A list of non applicable patches.
- non_applicable_patches = []
-
- # A list of patches that will not be included in the updated .json file
- removed_patches = []
-
- # Whether the patch metadata file was modified where 'None' means that the
- # patch metadata file was not modified otherwise the absolute path to the
- # patch metadata file is stored.
- modified_metadata = None
-
- # A list of patches that failed to apply.
- failed_patches = []
-
- with open(patch_metadata_file) as patch_file:
- patch_file_contents = json.load(patch_file)
-
- if mode == FailureModes.BISECT_PATCHES:
- # A good and bad commit are required by `git bisect start`.
- good_commit, bad_commit = GetCommitHashesForBisection(
- src_path, good_svn_version, svn_version)
-
- # Patch format:
- # {
- # "rel_patch_path" : "[REL_PATCH_PATH_FROM_$FILESDIR]"
- # [PATCH_METADATA] if available.
- # }
- #
- # For each patch, find the path to it in $FILESDIR and get its metadata if
- # available, then check if the patch is applicable.
- for patch_dict_index, cur_patch_dict in enumerate(patch_file_contents):
- # Used by the internal bisection. All the patches in the interval [0, N]
- # have been iterated.
- if num_patches_to_iterate and \
- (patch_dict_index + 1) > num_patches_to_iterate:
- break
-
- # Get the absolute path to the patch in $FILESDIR.
- path_to_patch = GetPathToPatch(filesdir_path,
- cur_patch_dict['rel_patch_path'])
-
- # Get the patch's metadata.
- #
- # Index information of 'patch_metadata':
- # [0]: start_version
- # [1]: end_version
- # [2]: is_critical
- patch_metadata = GetPatchMetadata(cur_patch_dict)
-
- if not patch_metadata[1]:
- # Patch does not have an 'end_version' value which implies 'end_version'
- # == 'inf' ('svn_version' will always be less than 'end_version'), so
- # the patch is applicable if 'svn_version' >= 'start_version'.
- patch_applicable = svn_version >= patch_metadata[0]
- else:
- # Patch is applicable if 'svn_version' >= 'start_version' &&
- # "svn_version" < "end_version".
- patch_applicable = (svn_version >= patch_metadata[0] and \
- svn_version < patch_metadata[1])
-
- if can_modify_patches:
- # Add to the list only if the mode can potentially modify a patch.
- #
- # If the mode is 'remove_patches', then all patches that are
- # applicable or are from the future will be added to the updated .json
- # file and all patches that are not applicable will be added to the
- # remove patches list which will not be included in the updated .json
- # file.
- if patch_applicable or svn_version < patch_metadata[0] or \
- mode != FailureModes.REMOVE_PATCHES:
- applicable_patches.append(cur_patch_dict)
- elif mode == FailureModes.REMOVE_PATCHES:
- removed_patches.append(path_to_patch)
-
- if not modified_metadata:
- # At least one patch will be removed from the .json file.
- modified_metadata = patch_metadata_file
-
- if not patch_applicable:
- non_applicable_patches.append(os.path.basename(path_to_patch))
-
- # There is no need to apply patches in 'remove_patches' mode because the
- # mode removes patches that do not apply anymore based off of
- # 'svn_version.'
- if patch_applicable and mode != FailureModes.REMOVE_PATCHES:
- patch_applied = ApplyPatch(src_path, path_to_patch)
-
- if not patch_applied: # Failed to apply patch.
- failed_patches.append(os.path.basename(path_to_patch))
-
- # Check the mode to determine what action to take on the failing
- # patch.
- if mode == FailureModes.DISABLE_PATCHES:
- # Set the patch's 'end_version' to 'svn_version' so the patch
- # would not be applicable anymore (i.e. the patch's 'end_version'
- # would not be greater than 'svn_version').
-
- # Last element in 'applicable_patches' is the current patch.
- applicable_patches[-1]['end_version'] = svn_version
-
- disabled_patches.append(os.path.basename(path_to_patch))
-
- if not updated_patch:
- # At least one patch has been modified, so the .json file
- # will be updated with the new patch metadata.
- updated_patch = True
-
- modified_metadata = patch_metadata_file
- elif mode == FailureModes.BISECT_PATCHES:
- # Figure out where the patch's stops applying and set the patch's
- # 'end_version' to that version.
-
- # Do not want to overwrite the changes to the current progress of
- # 'bisect_patches' on the source tree.
- SaveSrcTreeState(src_path)
-
- # Need a clean source tree for `git bisect run` to avoid unnecessary
- # fails for patches.
- CleanSrcTree(src_path)
-
- print('\nStarting to bisect patch %s for SVN version %d:\n' %
- (os.path.basename(cur_patch_dict['rel_patch_path']),
- svn_version))
-
- # Performs the bisection: calls `git bisect start` and
- # `git bisect run`, where `git bisect run` is going to call this
- # script as many times as needed with specific arguments.
- bad_svn_version = PerformBisection(
- src_path, good_commit, bad_commit, svn_version,
- patch_metadata_file, filesdir_path, patch_dict_index + 1)
-
- print('\nSuccessfully bisected patch %s, starts to fail to apply '
- 'at %d\n' % (os.path.basename(
- cur_patch_dict['rel_patch_path']), bad_svn_version))
-
- # Overwrite the .JSON file with the new 'end_version' for the
- # current failed patch so that if there are other patches that
- # fail to apply, then the 'end_version' for the current patch could
- # be applicable when `git bisect run` is performed on the next
- # failed patch because the same .JSON file is used for `git bisect
- # run`.
- patch_file_contents[patch_dict_index][
- 'end_version'] = bad_svn_version
- UpdatePatchMetadataFile(patch_metadata_file, patch_file_contents)
-
- # Clear the changes made to the source tree by `git bisect run`.
- CleanSrcTree(src_path)
-
- if not continue_bisection:
- # Exiting program early because 'continue_bisection' is not set.
- sys.exit(0)
-
- bisected_patches.append(
- '%s starts to fail to apply at %d' % (os.path.basename(
- cur_patch_dict['rel_patch_path']), bad_svn_version))
-
- # Continue where 'bisect_patches' left off.
- RestoreSrcTreeState(src_path, bad_commit)
-
- if not modified_metadata:
- # At least one patch's 'end_version' has been updated.
- modified_metadata = patch_metadata_file
-
- elif mode == FailureModes.FAIL:
- if applied_patches:
- print('The following patches applied successfully up to the '
- 'failed patch:')
- print('\n'.join(applied_patches))
-
- # Throw an exception on the first patch that failed to apply.
- raise ValueError(
- 'Failed to apply patch: %s' % os.path.basename(path_to_patch))
- elif mode == FailureModes.INTERNAL_BISECTION:
- # Determine the exit status for `git bisect run` because of the
- # failed patch in the interval [0, N].
- #
- # NOTE: `git bisect run` exit codes are as follows:
- # 130: Terminates the bisection.
- # 1: Similar as `git bisect bad`.
-
- # Some patch in the interval [0, N) failed, so terminate bisection
- # (the patch stack is broken).
- if (patch_dict_index + 1) != num_patches_to_iterate:
- print('\nTerminating bisection due to patch %s failed to apply '
- 'on SVN version %d.\n' % (os.path.basename(
- cur_patch_dict['rel_patch_path']), svn_version))
-
- # Man page for `git bisect run` states that any value over 127
- # terminates it.
- sys.exit(130)
-
- # Changes to the source tree need to be removed, otherwise some
- # patches may fail when applying the patch to the source tree when
- # `git bisect run` calls this script again.
- CleanSrcTree(src_path)
-
- # The last patch in the interval [0, N] failed to apply, so let
- # `git bisect run` know that the last patch (the patch that failed
- # originally which led to `git bisect run` to be invoked) is bad
- # with exit code 1.
- sys.exit(1)
- else: # Successfully applied patch
- applied_patches.append(os.path.basename(path_to_patch))
-
- # All patches in the interval [0, N] applied successfully, so let
- # `git bisect run` know that the program exited with exit code 0 (good).
- if mode == FailureModes.INTERNAL_BISECTION:
- # Changes to the source tree need to be removed, otherwise some
- # patches may fail when applying the patch to the source tree when
- # `git bisect run` calls this script again.
- #
- # Also, if `git bisect run` will NOT call this script again (terminated) and
- # if the source tree changes are not removed, `git bisect reset` will
- # complain that the changes would need to be 'stashed' or 'removed' in
- # order to reset HEAD back to the bad commit's git hash, so HEAD will remain
- # on the last git hash used by `git bisect run`.
- CleanSrcTree(src_path)
-
- # NOTE: Exit code 0 is similar to `git bisect good`.
- sys.exit(0)
-
- # Create a namedtuple of the patch results.
- PatchInfo = namedtuple('PatchInfo', [
- 'applied_patches', 'failed_patches', 'non_applicable_patches',
- 'disabled_patches', 'removed_patches', 'modified_metadata'
- ])
-
- patch_info = PatchInfo(
- applied_patches=applied_patches,
- failed_patches=failed_patches,
- non_applicable_patches=non_applicable_patches,
- disabled_patches=disabled_patches,
- removed_patches=removed_patches,
- modified_metadata=modified_metadata)
-
- # Determine post actions after iterating through the patches.
- if mode == FailureModes.REMOVE_PATCHES:
- if removed_patches:
- UpdatePatchMetadataFile(patch_metadata_file, applicable_patches)
- elif mode == FailureModes.DISABLE_PATCHES:
- if updated_patch:
- UpdatePatchMetadataFile(patch_metadata_file, applicable_patches)
- elif mode == FailureModes.BISECT_PATCHES:
- PrintPatchResults(patch_info)
- if modified_metadata:
- print('\nThe following patches have been bisected:')
- print('\n'.join(bisected_patches))
-
- # Exiting early because 'bisect_patches' will not be called from other
- # scripts, only this script uses 'bisect_patches'. The intent is to provide
- # bisection information on the patches and aid in the bisection process.
- sys.exit(0)
-
- return patch_info
-
-
-def PrintPatchResults(patch_info):
- """Prints the results of handling the patches of a package.
-
- Args:
- patch_info: A namedtuple that has information on the patches.
- """
-
- if patch_info.applied_patches:
- print('\nThe following patches applied successfully:')
- print('\n'.join(patch_info.applied_patches))
-
- if patch_info.failed_patches:
- print('\nThe following patches failed to apply:')
- print('\n'.join(patch_info.failed_patches))
-
- if patch_info.non_applicable_patches:
- print('\nThe following patches were not applicable:')
- print('\n'.join(patch_info.non_applicable_patches))
-
- if patch_info.modified_metadata:
- print('\nThe patch metadata file %s has been modified' % os.path.basename(
- patch_info.modified_metadata))
-
- if patch_info.disabled_patches:
- print('\nThe following patches were disabled:')
- print('\n'.join(patch_info.disabled_patches))
-
- if patch_info.removed_patches:
- print('\nThe following patches were removed from the patch metadata file:')
- for cur_patch_path in patch_info.removed_patches:
- print('%s' % os.path.basename(cur_patch_path))
-
-
-def main():
- """Applies patches to the source tree and takes action on a failed patch."""
-
- args_output = GetCommandLineArgs()
-
- if args_output.failure_mode != FailureModes.INTERNAL_BISECTION.value:
- # If the SVN version of HEAD is not the same as 'svn_version', then some
- # patches that fail to apply could successfully apply if HEAD's SVN version
- # was the same as 'svn_version'. In other words, HEAD's git hash should be
- # what is being updated to (e.g. LLVM_NEXT_HASH).
- VerifyHEADIsTheSameAsSVNVersion(args_output.src_path,
- args_output.svn_version)
- else:
- # `git bisect run` called this script.
- #
- # `git bisect run` moves HEAD each time it invokes this script, so set the
- # 'svn_version' to be current HEAD's SVN version so that the previous
- # SVN version is not used in determining whether a patch is applicable.
- args_output.svn_version = GetHEADSVNVersion(args_output.src_path)
-
- # Get the results of handling the patches of the package.
- patch_info = HandlePatches(
- args_output.svn_version, args_output.patch_metadata_file,
- args_output.filesdir_path, args_output.src_path,
- FailureModes(args_output.failure_mode), args_output.good_svn_version,
- args_output.num_patches_to_iterate, args_output.continue_bisection)
-
- PrintPatchResults(patch_info)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/patch_manager_unittest.py b/llvm_tools/patch_manager_unittest.py
deleted file mode 100755
index 90200ab8..00000000
--- a/llvm_tools/patch_manager_unittest.py
+++ /dev/null
@@ -1,911 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests when handling patches."""
-
-from __future__ import print_function
-
-import json
-import os
-import subprocess
-import unittest
-import unittest.mock as mock
-
-from failure_modes import FailureModes
-from test_helpers import CallCountsToMockFunctions
-from test_helpers import CreateTemporaryJsonFile
-from test_helpers import WritePrettyJsonFile
-import patch_manager
-
-
-class PatchManagerTest(unittest.TestCase):
- """Test class when handling patches of packages."""
-
- # Simulate behavior of 'os.path.isdir()' when the path is not a directory.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testInvalidDirectoryPassedAsCommandLineArgument(self, mock_isdir):
- test_dir = '/some/path/that/is/not/a/directory'
-
- # Verify the exception is raised when the command line argument for
- # '--filesdir_path' or '--src_path' is not a directory.
- with self.assertRaises(ValueError) as err:
- patch_manager.is_directory(test_dir)
-
- self.assertEqual(
- str(err.exception), 'Path is not a directory: '
- '%s' % test_dir)
-
- mock_isdir.assert_called_once()
-
- # Simulate the behavior of 'os.path.isdir()' when a path to a directory is
- # passed as the command line argument for '--filesdir_path' or '--src_path'.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- def testValidDirectoryPassedAsCommandLineArgument(self, mock_isdir):
- test_dir = '/some/path/that/is/a/directory'
-
- self.assertEqual(patch_manager.is_directory(test_dir), test_dir)
-
- mock_isdir.assert_called_once()
-
- # Simulate behavior of 'os.path.isfile()' when the patch metadata file is does
- # not exist.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- def testInvalidPathToPatchMetadataFilePassedAsCommandLineArgument(
- self, mock_isfile):
-
- abs_path_to_patch_file = '/abs/path/to/PATCHES.json'
-
- # Verify the exception is raised when the command line argument for
- # '--patch_metadata_file' does not exist or is not a file.
- with self.assertRaises(ValueError) as err:
- patch_manager.is_patch_metadata_file(abs_path_to_patch_file)
-
- self.assertEqual(
- str(err.exception), 'Invalid patch metadata file provided: '
- '%s' % abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- # Simulate the behavior of 'os.path.isfile()' when the path to the patch
- # metadata file exists and is a file.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- def testPatchMetadataFileDoesNotEndInJson(self, mock_isfile):
- abs_path_to_patch_file = '/abs/path/to/PATCHES'
-
- # Verify the exception is raises when the command line argument for
- # '--patch_metadata_file' exists and is a file but does not end in
- # '.json'.
- with self.assertRaises(ValueError) as err:
- patch_manager.is_patch_metadata_file(abs_path_to_patch_file)
-
- self.assertEqual(
- str(err.exception), 'Patch metadata file does not end in ".json": '
- '%s' % abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- # Simulate the behavior of 'os.path.isfile()' when the command line argument
- # for '--patch_metadata_file' exists and is a file.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- def testValidPatchMetadataFilePassedAsCommandLineArgument(self, mock_isfile):
- abs_path_to_patch_file = '/abs/path/to/PATCHES.json'
-
- self.assertEqual(
- patch_manager.is_patch_metadata_file(abs_path_to_patch_file),
- '%s' % abs_path_to_patch_file)
-
- mock_isfile.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the path to $FILESDIR
- # does not exist.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testInvalidPathToFilesDirWhenConstructingPathToPatch(self, mock_isdir):
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- rel_patch_path = 'cherry/fixes_stdout.patch'
-
- # Verify the exception is raised when the the absolute path to $FILESDIR of
- # a package is not a directory.
- with self.assertRaises(ValueError) as err:
- patch_manager.GetPathToPatch(abs_path_to_filesdir, rel_patch_path)
-
- self.assertEqual(
- str(err.exception), 'Invalid path to $FILESDIR provided: '
- '%s' % abs_path_to_filesdir)
-
- mock_isdir.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the absolute path to the
- # $FILESDIR of a package exists and is a directory.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate the behavior of 'os.path.isfile()' when the absolute path to the
- # patch does not exist.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- def testConstructedPathToPatchDoesNotExist(self, mock_isfile, mock_isdir):
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- rel_patch_path = 'cherry/fixes_stdout.patch'
-
- abs_patch_path = os.path.join(abs_path_to_filesdir, rel_patch_path)
-
- # Verify the exception is raised when the absolute path to the patch does
- # not exist.
- with self.assertRaises(ValueError) as err:
- patch_manager.GetPathToPatch(abs_path_to_filesdir, rel_patch_path)
-
- self.assertEqual(
- str(err.exception), 'The absolute path %s to the patch %s does not '
- 'exist' % (abs_patch_path, rel_patch_path))
-
- mock_isdir.assert_called_once()
-
- mock_isfile.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the absolute path to the
- # $FILESDIR of a package exists and is a directory.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the
- # patch exists and is a file.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- def testConstructedPathToPatchSuccessfully(self, mock_isfile, mock_isdir):
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- rel_patch_path = 'cherry/fixes_stdout.patch'
-
- abs_patch_path = os.path.join(abs_path_to_filesdir, rel_patch_path)
-
- self.assertEqual(
- patch_manager.GetPathToPatch(abs_path_to_filesdir, rel_patch_path),
- abs_patch_path)
-
- mock_isdir.assert_called_once()
-
- mock_isfile.assert_called_once()
-
- def testSuccessfullyGetPatchMetadataForPatchWithNoMetadata(self):
- expected_patch_metadata = 0, None, False
-
- test_patch = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_stdout.patch'
- }
-
- self.assertEqual(
- patch_manager.GetPatchMetadata(test_patch), expected_patch_metadata)
-
- def testSuccessfullyGetPatchMetdataForPatchWithSomeMetadata(self):
- expected_patch_metadata = 0, 1000, False
-
- test_patch = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_stdout.patch',
- 'end_version': 1000
- }
-
- self.assertEqual(
- patch_manager.GetPatchMetadata(test_patch), expected_patch_metadata)
-
- def testFailedToApplyPatchWhenInvalidSrcPathIsPassedIn(self):
- src_path = '/abs/path/to/src'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_stdout.patch'
-
- # Verify the exception is raised when the absolute path to the unpacked
- # sources of a package is not a directory.
- with self.assertRaises(ValueError) as err:
- patch_manager.ApplyPatch(src_path, abs_patch_path)
-
- self.assertEqual(
- str(err.exception), 'Invalid src path provided: %s' % src_path)
-
- # Simulate behavior of 'os.path.isdir()' when the absolute path to the
- # unpacked sources of the package is valid and exists.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- def testFailedToApplyPatchWhenPatchPathIsInvalid(self, mock_isdir):
- src_path = '/abs/path/to/src'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_stdout.patch'
-
- # Verify the exception is raised when the absolute path to the patch does
- # not exist or is not a file.
- with self.assertRaises(ValueError) as err:
- patch_manager.ApplyPatch(src_path, abs_patch_path)
-
- self.assertEqual(
- str(err.exception), 'Invalid patch file provided: '
- '%s' % abs_patch_path)
-
- mock_isdir.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the absolute path to the
- # unpacked sources of the package is valid and exists.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the
- # patch exists and is a file.
- @mock.patch.object(patch_manager, 'check_output')
- def testFailedToApplyPatchInDryRun(self, mock_dry_run, mock_isfile,
- mock_isdir):
-
- # Simulate behavior of 'subprocess.check_output()' when '--dry-run'
- # fails on the applying patch.
- def FailedToApplyPatch(test_patch_cmd):
- # First argument is the return error code, the second argument is the
- # command that was run, and the third argument is the output.
- raise subprocess.CalledProcessError(1, test_patch_cmd, None)
-
- mock_dry_run.side_effect = FailedToApplyPatch
-
- src_path = '/abs/path/to/src'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_stdout.patch'
-
- self.assertEqual(patch_manager.ApplyPatch(src_path, abs_patch_path), False)
-
- mock_isdir.assert_called_once()
-
- mock_isfile.assert_called_once()
-
- mock_dry_run.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the absolute path to the
- # unpacked sources of the package is valid and exists.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the
- # patch exists and is a file.
- @mock.patch.object(patch_manager, 'check_output')
- def testSuccessfullyAppliedPatch(self, mock_dry_run, mock_isfile, mock_isdir):
- src_path = '/abs/path/to/src'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_stdout.patch'
-
- self.assertEqual(patch_manager.ApplyPatch(src_path, abs_patch_path), True)
-
- mock_isdir.assert_called_once()
-
- mock_isfile.assert_called_once()
-
- self.assertEqual(mock_dry_run.call_count, 2)
-
- def testFailedToUpdatePatchMetadataFileWhenPatchFileNotEndInJson(self):
- patch = [{
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
- }]
-
- abs_patch_path = '/abs/path/to/filesdir/PATCHES'
-
- # Verify the exception is raised when the absolute path to the patch
- # metadata file does not end in '.json'.
- with self.assertRaises(ValueError) as err:
- patch_manager.UpdatePatchMetadataFile(abs_patch_path, patch)
-
- self.assertEqual(
- str(err.exception), 'File does not end in ".json": '
- '%s' % abs_patch_path)
-
- def testSuccessfullyUpdatedPatchMetadataFile(self):
- test_updated_patch_metadata = [{
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
- }]
-
- expected_patch_metadata = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
- }
-
- with CreateTemporaryJsonFile() as json_test_file:
- patch_manager.UpdatePatchMetadataFile(json_test_file,
- test_updated_patch_metadata)
-
- # Make sure the updated patch metadata was written into the temporary
- # .json file.
- with open(json_test_file) as patch_file:
- patch_contents = json.load(patch_file)
-
- self.assertEqual(len(patch_contents), 1)
-
- self.assertDictEqual(patch_contents[0], expected_patch_metadata)
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- def testExceptionThrownWhenHandlingPatches(self, mock_get_path_to_patch):
- filesdir_path = '/abs/path/to/filesdir'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_output.patch'
-
- rel_patch_path = 'cherry/fixes_output.patch'
-
- # Simulate behavior of 'GetPathToPatch()' when the absolute path to the
- # patch does not exist.
- def PathToPatchDoesNotExist(filesdir_path, rel_patch_path):
- raise ValueError(
- 'The absolute path to %s does not exist' % abs_patch_path)
-
- # Use the test function to simulate the behavior of 'GetPathToPatch()'.
- mock_get_path_to_patch.side_effect = PathToPatchDoesNotExist
-
- test_patch_metadata = [{
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': rel_patch_path,
- 'start_version': 10
- }]
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- src_path = '/some/path/to/src'
-
- revision = 1000
-
- # Verify the exception is raised when the absolute path to a patch does
- # not exist.
- with self.assertRaises(ValueError) as err:
- patch_manager.HandlePatches(revision, json_test_file, filesdir_path,
- src_path, FailureModes.FAIL)
-
- self.assertEqual(
- str(err.exception),
- 'The absolute path to %s does not exist' % abs_patch_path)
-
- mock_get_path_to_patch.assert_called_once_with(filesdir_path,
- rel_patch_path)
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- # Simulate behavior for 'ApplyPatch()' when an applicable patch failed to
- # apply.
- @mock.patch.object(patch_manager, 'ApplyPatch', return_value=False)
- def testExceptionThrownOnAFailedPatchInFailMode(self, mock_apply_patch,
- mock_get_path_to_patch):
- filesdir_path = '/abs/path/to/filesdir'
-
- abs_patch_path = '/abs/path/to/filesdir/cherry/fixes_output.patch'
-
- rel_patch_path = 'cherry/fixes_output.patch'
-
- # Simulate behavior for 'GetPathToPatch()' when successfully constructed the
- # absolute path to the patch and the patch exists.
- mock_get_path_to_patch.return_value = abs_patch_path
-
- test_patch_metadata = [{
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': rel_patch_path,
- 'start_version': 1000
- }]
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- src_path = '/some/path/to/src'
-
- revision = 1000
-
- patch_name = 'fixes_output.patch'
-
- # Verify the exception is raised when the mode is 'fail' and an applicable
- # patch fails to apply.
- with self.assertRaises(ValueError) as err:
- patch_manager.HandlePatches(revision, json_test_file, filesdir_path,
- src_path, FailureModes.FAIL)
-
- self.assertEqual(
- str(err.exception), 'Failed to apply patch: %s' % patch_name)
-
- mock_get_path_to_patch.assert_called_once_with(filesdir_path,
- rel_patch_path)
-
- mock_apply_patch.assert_called_once_with(src_path, abs_patch_path)
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- @mock.patch.object(patch_manager, 'ApplyPatch')
- def testSomePatchesFailedToApplyInContinueMode(self, mock_apply_patch,
- mock_get_path_to_patch):
-
- test_patch_1 = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1250
- }
-
- test_patch_2 = {
- 'comment': 'Fixes input',
- 'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
- }
-
- test_patch_3 = {
- 'comment': 'Adds a warning',
- 'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
- }
-
- test_patch_4 = {
- 'comment': 'Adds a helper function',
- 'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 900
- }
-
- test_patch_metadata = [
- test_patch_1, test_patch_2, test_patch_3, test_patch_4
- ]
-
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- # Simulate behavior for 'GetPathToPatch()' when successfully constructed the
- # absolute path to the patch and the patch exists.
- @CallCountsToMockFunctions
- def MultipleCallsToGetPatchPath(call_count, filesdir_path, rel_patch_path):
- self.assertEqual(filesdir_path, abs_path_to_filesdir)
-
- if call_count < 4:
- self.assertEqual(rel_patch_path,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- return os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- assert False, 'Unexpectedly called more than 4 times.'
-
- # Simulate behavior for 'ApplyPatch()' when applying multiple applicable
- # patches.
- @CallCountsToMockFunctions
- def MultipleCallsToApplyPatches(call_count, src_path, path_to_patch):
- if call_count < 3:
- self.assertEqual(
- path_to_patch,
- os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count]['rel_patch_path']))
-
- # Simulate that the first patch successfully applied.
- return call_count == 0
-
- # 'ApplyPatch()' was called more times than expected (3 times).
- assert False, 'Unexpectedly called more than 3 times.'
-
- # Use test functions to simulate behavior.
- mock_get_path_to_patch.side_effect = MultipleCallsToGetPatchPath
- mock_apply_patch.side_effect = MultipleCallsToApplyPatches
-
- expected_applied_patches = ['fixes_output.patch']
- expected_failed_patches = ['fixes_input.patch', 'add_warning.patch']
- expected_non_applicable_patches = ['add_helper.patch']
-
- expected_patch_info_dict = {
- 'applied_patches': expected_applied_patches,
- 'failed_patches': expected_failed_patches,
- 'non_applicable_patches': expected_non_applicable_patches,
- 'disabled_patches': [],
- 'removed_patches': [],
- 'modified_metadata': None
- }
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- src_path = '/some/path/to/src/'
-
- revision = 1000
-
- patch_info = patch_manager.HandlePatches(revision, json_test_file,
- abs_path_to_filesdir, src_path,
- FailureModes.CONTINUE)
-
- self.assertDictEqual(patch_info._asdict(), expected_patch_info_dict)
-
- self.assertEqual(mock_get_path_to_patch.call_count, 4)
-
- self.assertEqual(mock_apply_patch.call_count, 3)
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- @mock.patch.object(patch_manager, 'ApplyPatch')
- def testSomePatchesAreDisabled(self, mock_apply_patch,
- mock_get_path_to_patch):
-
- test_patch_1 = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
- }
-
- test_patch_2 = {
- 'comment': 'Fixes input',
- 'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
- }
-
- test_patch_3 = {
- 'comment': 'Adds a warning',
- 'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
- }
-
- test_patch_4 = {
- 'comment': 'Adds a helper function',
- 'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 2000
- }
-
- test_patch_metadata = [
- test_patch_1, test_patch_2, test_patch_3, test_patch_4
- ]
-
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- # Simulate behavior for 'GetPathToPatch()' when successfully constructed the
- # absolute path to the patch and the patch exists.
- @CallCountsToMockFunctions
- def MultipleCallsToGetPatchPath(call_count, filesdir_path, rel_patch_path):
- self.assertEqual(filesdir_path, abs_path_to_filesdir)
-
- if call_count < 4:
- self.assertEqual(rel_patch_path,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- return os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- # 'GetPathToPatch()' was called more times than expected (4 times).
- assert False, 'Unexpectedly called more than 4 times.'
-
- # Simulate behavior for 'ApplyPatch()' when applying multiple applicable
- # patches.
- @CallCountsToMockFunctions
- def MultipleCallsToApplyPatches(call_count, src_path, path_to_patch):
- if call_count < 3:
- self.assertEqual(
- path_to_patch,
- os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count + 1]['rel_patch_path']))
-
- # Simulate that the second patch applied successfully.
- return call_count == 1
-
- # 'ApplyPatch()' was called more times than expected (3 times).
- assert False, 'Unexpectedly called more than 3 times.'
-
- # Use test functions to simulate behavior.
- mock_get_path_to_patch.side_effect = MultipleCallsToGetPatchPath
- mock_apply_patch.side_effect = MultipleCallsToApplyPatches
-
- expected_applied_patches = ['add_warning.patch']
- expected_failed_patches = ['fixes_input.patch', 'add_helper.patch']
- expected_disabled_patches = ['fixes_input.patch', 'add_helper.patch']
- expected_non_applicable_patches = ['fixes_output.patch']
-
- # Assigned 'None' for now, but it is expected that the patch metadata file
- # will be modified, so the 'expected_patch_info_dict's' value for the
- # key 'modified_metadata' will get updated to the temporary .json file once
- # the file is created.
- expected_modified_metadata_file = None
-
- expected_patch_info_dict = {
- 'applied_patches': expected_applied_patches,
- 'failed_patches': expected_failed_patches,
- 'non_applicable_patches': expected_non_applicable_patches,
- 'disabled_patches': expected_disabled_patches,
- 'removed_patches': [],
- 'modified_metadata': expected_modified_metadata_file
- }
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- expected_patch_info_dict['modified_metadata'] = json_test_file
-
- src_path = '/some/path/to/src/'
-
- revision = 1200
-
- patch_info = patch_manager.HandlePatches(revision, json_test_file,
- abs_path_to_filesdir, src_path,
- FailureModes.DISABLE_PATCHES)
-
- self.assertDictEqual(patch_info._asdict(), expected_patch_info_dict)
-
- # 'test_patch_1' and 'test_patch_3' were not modified/disabled, so their
- # dictionary is the same, but 'test_patch_2' and 'test_patch_4' were
- # disabled, so their 'end_version' would be set to 1200, which was the
- # value passed into 'HandlePatches()' for the 'svn_version'.
- test_patch_2['end_version'] = 1200
- test_patch_4['end_version'] = 1200
-
- expected_json_file = [
- test_patch_1, test_patch_2, test_patch_3, test_patch_4
- ]
-
- # Make sure the updated patch metadata was written into the temporary
- # .json file.
- with open(json_test_file) as patch_file:
- new_json_file_contents = json.load(patch_file)
-
- self.assertListEqual(new_json_file_contents, expected_json_file)
-
- self.assertEqual(mock_get_path_to_patch.call_count, 4)
-
- self.assertEqual(mock_apply_patch.call_count, 3)
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- @mock.patch.object(patch_manager, 'ApplyPatch')
- def testSomePatchesAreRemoved(self, mock_apply_patch, mock_get_path_to_patch):
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'non_applicable_patches' list and 'removed_patches' list because
- # the 'svn_version' (1500) >= 'end_version' (1190).
- test_patch_1 = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'applicable_patches' list (which is the list that the .json file will be
- # updated with) because the 'svn_version' < 'inf' (this patch does not have
- # an 'end_version' value which implies 'end_version' == 'inf').
- test_patch_2 = {
- 'comment': 'Fixes input',
- 'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'non_applicable_patches' list and 'removed_patches' list because
- # the 'svn_version' (1500) >= 'end_version' (1500).
- test_patch_3 = {
- 'comment': 'Adds a warning',
- 'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'non_applicable_patches' list and 'removed_patches' list because
- # the 'svn_version' (1500) >= 'end_version' (1400).
- test_patch_4 = {
- 'comment': 'Adds a helper function',
- 'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 1400
- }
-
- test_patch_metadata = [
- test_patch_1, test_patch_2, test_patch_3, test_patch_4
- ]
-
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- # Simulate behavior for 'GetPathToPatch()' when successfully constructed the
- # absolute path to the patch and the patch exists.
- @CallCountsToMockFunctions
- def MultipleCallsToGetPatchPath(call_count, filesdir_path, rel_patch_path):
- self.assertEqual(filesdir_path, abs_path_to_filesdir)
-
- if call_count < 4:
- self.assertEqual(rel_patch_path,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- return os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- assert False, 'Unexpectedly called more than 4 times.'
-
- # Use the test function to simulate behavior of 'GetPathToPatch()'.
- mock_get_path_to_patch.side_effect = MultipleCallsToGetPatchPath
-
- expected_applied_patches = []
- expected_failed_patches = []
- expected_disabled_patches = []
- expected_non_applicable_patches = [
- 'fixes_output.patch', 'add_warning.patch', 'add_helper.patch'
- ]
- expected_removed_patches = [
- '/abs/path/to/filesdir/cherry/fixes_output.patch',
- '/abs/path/to/filesdir/add_warning.patch',
- '/abs/path/to/filesdir/add_helper.patch'
- ]
-
- # Assigned 'None' for now, but it is expected that the patch metadata file
- # will be modified, so the 'expected_patch_info_dict's' value for the
- # key 'modified_metadata' will get updated to the temporary .json file once
- # the file is created.
- expected_modified_metadata_file = None
-
- expected_patch_info_dict = {
- 'applied_patches': expected_applied_patches,
- 'failed_patches': expected_failed_patches,
- 'non_applicable_patches': expected_non_applicable_patches,
- 'disabled_patches': expected_disabled_patches,
- 'removed_patches': expected_removed_patches,
- 'modified_metadata': expected_modified_metadata_file
- }
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- expected_patch_info_dict['modified_metadata'] = json_test_file
-
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- src_path = '/some/path/to/src/'
-
- revision = 1500
-
- patch_info = patch_manager.HandlePatches(revision, json_test_file,
- abs_path_to_filesdir, src_path,
- FailureModes.REMOVE_PATCHES)
-
- self.assertDictEqual(patch_info._asdict(), expected_patch_info_dict)
-
- # 'test_patch_2' was an applicable patch, so this patch will be the only
- # patch that is in temporary .json file. The other patches were not
- # applicable (they failed the applicable check), so they will not be in
- # the .json file.
- expected_json_file = [test_patch_2]
-
- # Make sure the updated patch metadata was written into the temporary
- # .json file.
- with open(json_test_file) as patch_file:
- new_json_file_contents = json.load(patch_file)
-
- self.assertListEqual(new_json_file_contents, expected_json_file)
-
- self.assertEqual(mock_get_path_to_patch.call_count, 4)
-
- mock_apply_patch.assert_not_called()
-
- @mock.patch.object(patch_manager, 'GetPathToPatch')
- @mock.patch.object(patch_manager, 'ApplyPatch')
- def testSuccessfullyDidNotRemoveAFuturePatch(self, mock_apply_patch,
- mock_get_path_to_patch):
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'non_applicable_patches' list and 'removed_patches' list because
- # the 'svn_version' (1200) >= 'end_version' (1190).
- test_patch_1 = {
- 'comment': 'Redirects output to stdout',
- 'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'applicable_patches' list (which is the list that the .json file will be
- # updated with) because the 'svn_version' < 'inf' (this patch does not have
- # an 'end_version' value which implies 'end_version' == 'inf').
- test_patch_2 = {
- 'comment': 'Fixes input',
- 'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'applicable_patches' list because 'svn_version' >= 'start_version' and
- # 'svn_version' < 'end_version'.
- test_patch_3 = {
- 'comment': 'Adds a warning',
- 'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
- }
-
- # For the 'remove_patches' mode, this patch is expected to be in the
- # 'applicable_patches' list because the patch is from the future (e.g.
- # 'start_version' > 'svn_version' (1200), so it should NOT be removed.
- test_patch_4 = {
- 'comment': 'Adds a helper function',
- 'rel_patch_path': 'add_helper.patch',
- 'start_version': 1600,
- 'end_version': 2000
- }
-
- test_patch_metadata = [
- test_patch_1, test_patch_2, test_patch_3, test_patch_4
- ]
-
- abs_path_to_filesdir = '/abs/path/to/filesdir'
-
- # Simulate behavior for 'GetPathToPatch()' when successfully constructed the
- # absolute path to the patch and the patch exists.
- @CallCountsToMockFunctions
- def MultipleCallsToGetPatchPath(call_count, filesdir_path, rel_patch_path):
- self.assertEqual(filesdir_path, abs_path_to_filesdir)
-
- if call_count < 4:
- self.assertEqual(rel_patch_path,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- return os.path.join(abs_path_to_filesdir,
- test_patch_metadata[call_count]['rel_patch_path'])
-
- # 'GetPathToPatch()' was called more times than expected (4 times).
- assert False, 'Unexpectedly called more than 4 times.'
-
- # Use the test function to simulate behavior of 'GetPathToPatch()'.
- mock_get_path_to_patch.side_effect = MultipleCallsToGetPatchPath
-
- expected_applied_patches = []
- expected_failed_patches = []
- expected_disabled_patches = []
-
- # 'add_helper.patch' is still a 'non applicable' patch meaning it does not
- # apply in revision 1200 but it will NOT be removed because it is a future
- # patch.
- expected_non_applicable_patches = ['fixes_output.patch', 'add_helper.patch']
- expected_removed_patches = [
- '/abs/path/to/filesdir/cherry/fixes_output.patch'
- ]
-
- # Assigned 'None' for now, but it is expected that the patch metadata file
- # will be modified, so the 'expected_patch_info_dict's' value for the
- # key 'modified_metadata' will get updated to the temporary .json file once
- # the file is created.
- expected_modified_metadata_file = None
-
- expected_patch_info_dict = {
- 'applied_patches': expected_applied_patches,
- 'failed_patches': expected_failed_patches,
- 'non_applicable_patches': expected_non_applicable_patches,
- 'disabled_patches': expected_disabled_patches,
- 'removed_patches': expected_removed_patches,
- 'modified_metadata': expected_modified_metadata_file
- }
-
- with CreateTemporaryJsonFile() as json_test_file:
- # Write the test patch metadata to the temporary .json file.
- with open(json_test_file, 'w') as json_file:
- WritePrettyJsonFile(test_patch_metadata, json_file)
-
- expected_patch_info_dict['modified_metadata'] = json_test_file
-
- src_path = '/some/path/to/src/'
-
- revision = 1200
-
- patch_info = patch_manager.HandlePatches(revision, json_test_file,
- abs_path_to_filesdir, src_path,
- FailureModes.REMOVE_PATCHES)
-
- self.assertDictEqual(patch_info._asdict(), expected_patch_info_dict)
-
- # 'test_patch_2' was an applicable patch, so this patch will be the only
- # patch that is in temporary .json file. The other patches were not
- # applicable (they failed the applicable check), so they will not be in
- # the .json file.
- expected_json_file = [test_patch_2, test_patch_3, test_patch_4]
-
- # Make sure the updated patch metadata was written into the temporary
- # .json file.
- with open(json_test_file) as patch_file:
- new_json_file_contents = json.load(patch_file)
-
- self.assertListEqual(new_json_file_contents, expected_json_file)
-
- self.assertEqual(mock_get_path_to_patch.call_count, 4)
-
- mock_apply_patch.assert_not_called()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/subprocess_helpers.py b/llvm_tools/subprocess_helpers.py
deleted file mode 100644
index 8845112c..00000000
--- a/llvm_tools/subprocess_helpers.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helpers/wrappers for the subprocess module for migration to python3."""
-
-from __future__ import print_function
-
-import subprocess
-
-
-def CheckCommand(cmd):
- """Executes the command using Popen()."""
-
- cmd_obj = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='UTF-8')
-
- stdout, _ = cmd_obj.communicate()
-
- if cmd_obj.returncode:
- print(stdout)
- raise subprocess.CalledProcessError(cmd_obj.returncode, cmd)
-
-
-def check_output(cmd, cwd=None):
- """Wrapper for pre-python3 subprocess.check_output()."""
-
- return subprocess.check_output(cmd, encoding='UTF-8', cwd=cwd)
-
-
-def check_call(cmd, cwd=None):
- """Wrapper for pre-python3 subprocess.check_call()."""
-
- subprocess.check_call(cmd, encoding='UTF-8', cwd=cwd)
-
-
-# FIXME: CTRL+C does not work when executing a command inside the chroot via
-# `cros_sdk`.
-def ChrootRunCommand(chroot_path, cmd, verbose=False):
- """Runs the command inside the chroot."""
-
- exec_chroot_cmd = ['cros_sdk', '--']
- exec_chroot_cmd.extend(cmd)
-
- return ExecCommandAndCaptureOutput(
- exec_chroot_cmd, cwd=chroot_path, verbose=verbose)
-
-
-def ExecCommandAndCaptureOutput(cmd, cwd=None, verbose=False):
- """Executes the command and prints to stdout if possible."""
-
- out = check_output(cmd, cwd=cwd).rstrip()
-
- if verbose and out:
- print(out)
-
- return out
diff --git a/llvm_tools/test_helpers.py b/llvm_tools/test_helpers.py
deleted file mode 100644
index 99448181..00000000
--- a/llvm_tools/test_helpers.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper functions for unit testing."""
-
-from __future__ import print_function
-
-from contextlib import contextmanager
-from tempfile import mkstemp
-import json
-import os
-
-
-class ArgsOutputTest(object):
- """Testing class to simulate a argument parser object."""
-
- def __init__(self, svn_option='google3'):
- self.chroot_path = '/abs/path/to/chroot'
- self.last_tested = '/abs/path/to/last_tested_file.json'
- self.llvm_version = svn_option
- self.verbose = False
- self.extra_change_lists = None
- self.options = ['latest-toolchain']
- self.builders = ['some-builder']
-
-
-# FIXME: Migrate modules with similar helper to use this module.
-def CallCountsToMockFunctions(mock_function):
- """A decorator that passes a call count to the function it decorates.
-
- Examples:
- @CallCountsToMockFunctions
- def foo(call_count):
- return call_count
- ...
- ...
- [foo(), foo(), foo()]
- [0, 1, 2]
- """
-
- counter = [0]
-
- def Result(*args, **kwargs):
- # For some values of `counter`, the mock function would simulate raising
- # an exception, so let the test case catch the exception via
- # `unittest.TestCase.assertRaises()` and to also handle recursive functions.
- prev_counter = counter[0]
- counter[0] += 1
-
- ret_value = mock_function(prev_counter, *args, **kwargs)
-
- return ret_value
-
- return Result
-
-
-def WritePrettyJsonFile(file_name, json_object):
- """Writes the contents of the file to the json object.
-
- Args:
- file_name: The file that has contents to be used for the json object.
- json_object: The json object to write to.
- """
-
- json.dump(file_name, json_object, indent=4, separators=(',', ': '))
-
-
-def CreateTemporaryJsonFile():
- """Makes a temporary .json file."""
-
- return CreateTemporaryFile(suffix='.json')
-
-
-@contextmanager
-def CreateTemporaryFile(suffix=''):
- """Makes a temporary file."""
-
- fd, temp_file_path = mkstemp(suffix=suffix)
-
- os.close(fd)
-
- try:
- yield temp_file_path
-
- finally:
- if os.path.isfile(temp_file_path):
- os.remove(temp_file_path)
diff --git a/llvm_tools/update_all_tryjobs_with_auto.py b/llvm_tools/update_all_tryjobs_with_auto.py
deleted file mode 100755
index 511bfffa..00000000
--- a/llvm_tools/update_all_tryjobs_with_auto.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates the status of all tryjobs to the result of `cros buildresult`."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import os
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from update_tryjob_status import GetAutoResult
-from update_tryjob_status import TryjobStatus
-
-
-def GetPathToUpdateAllTryjobsWithAutoScript():
- """Returns the absolute path to this script."""
-
- return os.path.abspath(__file__)
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default absoute path to the chroot if not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description=__doc__)
-
- # Add argument for the JSON file to use for the update of a tryjob.
- parser.add_argument(
- '--last_tested',
- required=True,
- help='The absolute path to the JSON file that contains the tryjobs used '
- 'for bisecting LLVM.')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- args_output = parser.parse_args()
-
- if not os.path.isfile(args_output.last_tested) or \
- not args_output.last_tested.endswith('.json'):
- raise ValueError('File does not exist or does not ending in ".json" '
- ': %s' % args_output.last_tested)
-
- return args_output
-
-
-def main():
- """Updates the status of a tryjob."""
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- with open(args_output.last_tested) as tryjobs:
- bisect_contents = json.load(tryjobs)
-
- for tryjob in bisect_contents['jobs']:
- if tryjob['status'] == TryjobStatus.PENDING.value:
- tryjob['status'] = GetAutoResult(args_output.chroot_path,
- tryjob['buildbucket_id'])
-
- new_file = '%s.new' % args_output.last_tested
- with open(new_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
- os.rename(new_file, args_output.last_tested)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_chromeos_llvm_next_hash.py b/llvm_tools/update_chromeos_llvm_next_hash.py
deleted file mode 100755
index 30660de3..00000000
--- a/llvm_tools/update_chromeos_llvm_next_hash.py
+++ /dev/null
@@ -1,715 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates LLVM_NEXT_HASH and uprevs the build of a package or packages.
-
-For each package, a temporary repo is created and the changes are uploaded
-for review.
-"""
-
-from __future__ import print_function
-
-import argparse
-import os
-import re
-import subprocess
-from collections import namedtuple
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from failure_modes import FailureModes
-from get_llvm_hash import GetLLVMHashAndVersionFromSVNOption, is_svn_option
-import get_llvm_hash
-import llvm_patch_management
-from subprocess_helpers import ChrootRunCommand, ExecCommandAndCaptureOutput
-
-# If set to `True`, then the contents of `stdout` after executing a command will
-# be displayed to the terminal.
-verbose = False
-
-CommitContents = namedtuple('CommitContents', ['url', 'cl_number'])
-
-
-def GetCommandLineArgs():
- """Parses the command line for the optional command line arguments.
-
- Returns:
- The log level to use when retrieving the LLVM hash or google3 LLVM version,
- the chroot path to use for executing chroot commands,
- a list of a package or packages to update their LLVM next hash,
- and the LLVM version to use when retrieving the LLVM hash.
- """
-
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(
- description="Updates the build's hash for llvm-next.")
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- # Add argument for specific builds to uprev and update their llvm-next hash.
- parser.add_argument(
- '--update_packages',
- default=['sys-devel/llvm'],
- required=False,
- nargs='+',
- help='the ebuilds to update their hash for llvm-next ' \
- '(default: %(default)s)')
-
- # Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
-
- # Add argument for the LLVM version to use.
- parser.add_argument(
- '--llvm_version',
- type=is_svn_option,
- required=True,
- help='which git hash of LLVM to find. Either a svn revision, or one '
- 'of %s' % sorted(get_llvm_hash.KNOWN_HASH_SOURCES))
-
- # Add argument for the mode of the patch management when handling patches.
- parser.add_argument(
- '--failure_mode',
- default=FailureModes.FAIL.value,
- choices=[FailureModes.FAIL.value, FailureModes.CONTINUE.value,
- FailureModes.DISABLE_PATCHES.value,
- FailureModes.REMOVE_PATCHES.value],
- help='the mode of the patch manager when handling failed patches ' \
- '(default: %(default)s)')
-
- # Add argument for the patch metadata file.
- parser.add_argument(
- '--patch_metadata_file',
- default='PATCHES.json',
- help='the .json file that has all the patches and their '
- 'metadata if applicable (default: PATCHES.json inside $FILESDIR)')
-
- # Parse the command line.
- args_output = parser.parse_args()
-
- # FIXME: We shouldn't be using globals here, but until we fix it, make pylint
- # stop complaining about it.
- # pylint: disable=global-statement
- global verbose
-
- verbose = args_output.verbose
-
- return args_output
-
-
-def GetChrootBuildPaths(chromeos_root, package_list):
- """Gets the chroot path(s) of the package(s).
-
- Args:
- chromeos_root: The absolute path to the chroot to
- use for executing chroot commands.
- package_list: A list of a package/packages to
- be used to find their chroot path.
-
- Returns:
- A list of a chroot path/chroot paths of the package's ebuild file.
-
- Raises:
- ValueError: Failed to get the chroot path of a package.
- """
-
- chroot_paths = []
-
- # Find the chroot path for each package's ebuild.
- for cur_package in sorted(set(package_list)):
- # Cmd to find the chroot path for the package.
- equery_cmd = ['equery', 'w', cur_package]
-
- chroot_path = ChrootRunCommand(chromeos_root, equery_cmd, verbose=verbose)
-
- chroot_paths.append(chroot_path.strip())
-
- return chroot_paths
-
-
-def _ConvertChrootPathsToSymLinkPaths(chromeos_root, chroot_file_paths):
- """Converts the chroot path(s) to absolute symlink path(s).
-
- Args:
- chromeos_root: The absolute path to the chroot.
- chroot_file_paths: A list of a chroot path/chroot paths to convert to
- a absolute symlink path/symlink paths.
-
- Returns:
- A list of absolute path(s) which are symlinks that point to
- the ebuild of the package(s).
-
- Raises:
- ValueError: Invalid prefix for the chroot path or
- invalid chroot path(s) were provided.
- """
-
- symlink_file_paths = []
-
- chroot_prefix = '/mnt/host/source/'
-
- # Iterate through the chroot paths.
- #
- # For each chroot file path, remove '/mnt/host/source/' prefix
- # and combine the chroot path with the result and add it to the list.
- for cur_chroot_file_path in chroot_file_paths:
- if not cur_chroot_file_path.startswith(chroot_prefix):
- raise ValueError('Invalid prefix for the chroot path: %s' %
- cur_chroot_file_path)
-
- rel_path = cur_chroot_file_path[len(chroot_prefix):]
-
- # combine the chromeos root path + '/src/...'
- absolute_symlink_path = os.path.join(chromeos_root, rel_path)
-
- symlink_file_paths.append(absolute_symlink_path)
-
- return symlink_file_paths
-
-
-def GetEbuildPathsFromSymLinkPaths(symlinks):
- """Reads the symlink(s) to get the ebuild path(s) to the package(s).
-
- Args:
- symlinks: A list of absolute path symlink/symlinks that point
- to the package's ebuild.
-
- Returns:
- A dictionary where the key is the absolute path of the symlink and the value
- is the absolute path to the ebuild that was read from the symlink.
-
- Raises:
- ValueError: Invalid symlink(s) were provided.
- """
-
- # A dictionary that holds:
- # key: absolute symlink path
- # value: absolute ebuild path
- resolved_paths = {}
-
- # Iterate through each symlink.
- #
- # For each symlink, check that it is a valid symlink,
- # and then construct the ebuild path, and
- # then add the ebuild path to the dict.
- for cur_symlink in symlinks:
- if not os.path.islink(cur_symlink):
- raise ValueError('Invalid symlink provided: %s' % cur_symlink)
-
- # Construct the absolute path to the ebuild.
- ebuild_path = os.path.realpath(cur_symlink)
-
- if cur_symlink not in resolved_paths:
- resolved_paths[cur_symlink] = ebuild_path
-
- return resolved_paths
-
-
-def UpdateBuildLLVMNextHash(ebuild_path, llvm_hash, llvm_version):
- """Updates the build's LLVM_NEXT_HASH.
-
- The build changes are staged for commit in the temporary repo.
-
- Args:
- ebuild_path: The absolute path to the ebuild.
- llvm_hash: The new LLVM hash to use for LLVM_NEXT_HASH.
- llvm_version: The revision number of 'llvm_hash'.
-
- Raises:
- ValueError: Invalid ebuild path provided or failed to stage the commit
- of the changes or failed to update the LLVM hash.
- """
-
- # Iterate through each ebuild.
- #
- # For each ebuild, read the file in
- # advance and then create a temporary file
- # that gets updated with the new LLVM hash
- # and revision number and then the ebuild file
- # gets updated to the temporary file.
-
- if not os.path.isfile(ebuild_path):
- raise ValueError('Invalid ebuild path provided: %s' % ebuild_path)
-
- # Create regex that finds 'LLVM_NEXT_HASH'.
- llvm_regex = re.compile('^LLVM_NEXT_HASH=\"[a-z0-9]+\"')
-
- temp_ebuild_file = '%s.temp' % ebuild_path
-
- # A flag for whether 'LLVM_NEXT_HASH=...' was updated.
- is_updated = False
-
- with open(ebuild_path) as ebuild_file:
- # write updates to a temporary file in case of interrupts
- with open(temp_ebuild_file, 'w') as temp_file:
- for cur_line in ReplaceLLVMNextHash(ebuild_file, is_updated, llvm_regex,
- llvm_hash, llvm_version):
- temp_file.write(cur_line)
-
- os.rename(temp_ebuild_file, ebuild_path)
-
- # Get the path to the parent directory.
- parent_dir = os.path.dirname(ebuild_path)
-
- # Stage the changes.
- stage_changes_cmd = ['git', '-C', parent_dir, 'add', ebuild_path]
-
- ExecCommandAndCaptureOutput(stage_changes_cmd, verbose=verbose)
-
-
-def ReplaceLLVMNextHash(ebuild_lines, is_updated, llvm_regex, llvm_hash,
- llvm_version):
- """Iterates through the ebuild file and updates the 'LLVM_NEXT_HASH'.
-
- Args:
- ebuild_lines: The contents of the ebuild file.
- is_updated: A flag for whether 'LLVM_NEXT_HASH' was updated.
- llvm_regex: The regex object for finding 'LLVM_NEXT_HASH=...' when
- iterating through the contents of the file.
- llvm_hash: The new LLVM hash to use for LLVM_NEXT_HASH.
- llvm_version: The revision number of 'llvm_hash'.
- """
-
- for cur_line in ebuild_lines:
- if not is_updated and llvm_regex.search(cur_line):
- # Update the LLVM next hash and revision number.
- cur_line = 'LLVM_NEXT_HASH=\"%s\" # r%d\n' % (llvm_hash, llvm_version)
-
- is_updated = True
-
- yield cur_line
-
- if not is_updated: # failed to update 'LLVM_NEXT_HASH'
- raise ValueError('Failed to update the LLVM hash.')
-
-
-def UprevEbuild(symlink):
- """Uprevs the ebuild's revision number.
-
- Increases the revision number by 1 and stages the change in
- the temporary repo.
-
- Args:
- symlink: The absolute path of the symlink that points to
- the ebuild of the package.
-
- Raises:
- ValueError: Failed to uprev the symlink or failed to stage the changes.
- """
-
- if not os.path.islink(symlink):
- raise ValueError('Invalid symlink provided: %s' % symlink)
-
- # Find the revision number and increment it by 1.
- new_symlink, is_changed = re.subn(
- r'r([0-9]+).ebuild',
- lambda match: 'r%s.ebuild' % str(int(match.group(1)) + 1),
- symlink,
- count=1)
-
- if not is_changed: # failed to increment the revision number
- raise ValueError('Failed to uprev the ebuild.')
-
- path_to_symlink_dir = os.path.dirname(symlink)
-
- # Stage the new symlink for commit.
- stage_symlink_cmd = [
- 'git', '-C', path_to_symlink_dir, 'mv', symlink, new_symlink
- ]
-
- ExecCommandAndCaptureOutput(stage_symlink_cmd, verbose=verbose)
-
-
-def _CreateRepo(path_to_repo_dir, llvm_hash):
- """Creates a temporary repo for the changes.
-
- Args:
- path_to_repo_dir: The absolute path to the repo.
- llvm_hash: The LLVM hash to use for the name of the repo.
-
- Raises:
- ValueError: Failed to create a repo in that directory.
- """
-
- if not os.path.isdir(path_to_repo_dir):
- raise ValueError('Invalid directory path provided: %s' % path_to_repo_dir)
-
- reset_changes_cmd = [
- 'git',
- '-C',
- path_to_repo_dir,
- 'reset',
- 'HEAD',
- '--hard',
- ]
-
- ExecCommandAndCaptureOutput(reset_changes_cmd, verbose=verbose)
-
- create_repo_cmd = ['repo', 'start', 'llvm-next-update-%s' % llvm_hash]
-
- ExecCommandAndCaptureOutput(
- create_repo_cmd, cwd=path_to_repo_dir, verbose=verbose)
-
-
-def _DeleteRepo(path_to_repo_dir, llvm_hash):
- """Deletes the temporary repo.
-
- Args:
- path_to_repo_dir: The absolute path of the repo.
- llvm_hash: The LLVM hash used for the name of the repo.
-
- Raises:
- ValueError: Failed to delete the repo in that directory.
- """
-
- if not os.path.isdir(path_to_repo_dir):
- raise ValueError('Invalid directory path provided: %s' % path_to_repo_dir)
-
- checkout_to_master_cmd = [
- 'git', '-C', path_to_repo_dir, 'checkout', 'cros/master'
- ]
-
- ExecCommandAndCaptureOutput(checkout_to_master_cmd, verbose=verbose)
-
- reset_head_cmd = ['git', '-C', path_to_repo_dir, 'reset', 'HEAD', '--hard']
-
- ExecCommandAndCaptureOutput(reset_head_cmd, verbose=verbose)
-
- delete_repo_cmd = [
- 'git', '-C', path_to_repo_dir, 'branch', '-D',
- 'llvm-next-update-%s' % llvm_hash
- ]
-
- ExecCommandAndCaptureOutput(delete_repo_cmd, verbose=verbose)
-
-
-def GetGerritRepoUploadContents(repo_upload_contents):
- """Parses 'repo upload' to get the Gerrit commit URL and CL number.
-
- Args:
- repo_upload_contents: The contents of the 'repo upload' command.
-
- Returns:
- A nametuple that has two (key, value) pairs, where the first pair is the
- Gerrit commit URL and the second pair is the change list number.
-
- Raises:
- ValueError: The contents of the 'repo upload' command did not contain a
- Gerrit commit URL.
- """
-
- found_url = re.search(
- r'https://chromium-review.googlesource.com/c/'
- r'chromiumos/overlays/chromiumos-overlay/\+/([0-9]+)',
- repo_upload_contents)
-
- if not found_url:
- raise ValueError('Failed to find change list URL.')
-
- cl_number = int(found_url.group(1))
-
- return CommitContents(url=found_url.group(0), cl_number=cl_number)
-
-
-def UploadChanges(path_to_repo_dir, llvm_hash, commit_messages):
- """Uploads the changes (updating LLVM next hash and uprev symlink) for review.
-
- Args:
- path_to_repo_dir: The absolute path to the repo where changes were made.
- llvm_hash: The LLVM hash used for the name of the repo.
- commit_messages: A string of commit message(s) (i.e. '-m [message]'
- of the changes made.
-
- Returns:
- A nametuple that has two (key, value) pairs, where the first pair is the
- Gerrit commit URL and the second pair is the change list number.
-
- Raises:
- ValueError: Failed to create a commit or failed to upload the
- changes for review.
- """
-
- if not os.path.isdir(path_to_repo_dir):
- raise ValueError('Invalid directory path provided: %s' % path_to_repo_dir)
-
- commit_cmd = [
- 'git',
- 'commit',
- ]
- commit_cmd.extend(commit_messages)
-
- ExecCommandAndCaptureOutput(commit_cmd, cwd=path_to_repo_dir, verbose=verbose)
-
- # Upload the changes for review.
- upload_change_cmd = (
- 'yes | repo upload --br=llvm-next-update-%s --no-verify' % llvm_hash)
-
- # Pylint currently doesn't lint things in py3 mode, and py2 didn't allow
- # users to specify `encoding`s for Popen. Hence, pylint is "wrong" here.
- # pylint: disable=unexpected-keyword-arg
-
- # NOTE: Need `shell=True` in order to pipe `yes` into `repo upload ...`.
- #
- # The CL URL is sent to 'stderr', so need to redirect 'stderr' to 'stdout'.
- upload_changes_obj = subprocess.Popen(
- upload_change_cmd,
- cwd=path_to_repo_dir,
- shell=True,
- encoding='UTF-8',
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
-
- out, _ = upload_changes_obj.communicate()
-
- if upload_changes_obj.returncode: # Failed to upload changes.
- print(out)
- raise ValueError('Failed to upload changes for review')
-
- return GetGerritRepoUploadContents(out.rstrip())
-
-
-def CreatePathDictionaryFromPackages(chroot_path, update_packages):
- """Creates a symlink and ebuild path pair dictionary from the packages.
-
- Args:
- chroot_path: The absolute path to the chroot.
- update_packages: The filtered packages to be updated.
-
- Returns:
- A dictionary where the key is the absolute path to the symlink
- of the package and the value is the absolute path to the ebuild of
- the package.
- """
-
- # Construct a list containing the chroot file paths of the package(s).
- chroot_file_paths = GetChrootBuildPaths(chroot_path, update_packages)
-
- # Construct a list containing the symlink(s) of the package(s).
- symlink_file_paths = _ConvertChrootPathsToSymLinkPaths(
- chroot_path, chroot_file_paths)
-
- # Create a dictionary where the key is the absolute path of the symlink to
- # the package and the value is the absolute path to the ebuild of the package.
- return GetEbuildPathsFromSymLinkPaths(symlink_file_paths)
-
-
-def RemovePatchesFromFilesDir(patches_to_remove):
- """Removes the patches from $FILESDIR of a package.
-
- Args:
- patches_to_remove: A list where each entry is the absolute path to a patch.
-
- Raises:
- ValueError: Failed to remove a patch in $FILESDIR.
- """
-
- for cur_patch in patches_to_remove:
- remove_patch_cmd = [
- 'git', '-C',
- os.path.dirname(cur_patch), 'rm', '-f', cur_patch
- ]
-
- ExecCommandAndCaptureOutput(remove_patch_cmd, verbose=verbose)
-
-
-def StagePatchMetadataFileForCommit(patch_metadata_file_path):
- """Stages the updated patch metadata file for commit.
-
- Args:
- patch_metadata_file_path: The absolute path to the patch metadata file.
-
- Raises:
- ValueError: Failed to stage the patch metadata file for commit or invalid
- patch metadata file.
- """
-
- if not os.path.isfile(patch_metadata_file_path):
- raise ValueError('Invalid patch metadata file provided: %s' %
- patch_metadata_file_path)
-
- # Cmd to stage the patch metadata file for commit.
- stage_patch_file = [
- 'git', '-C',
- os.path.dirname(patch_metadata_file_path), 'add', patch_metadata_file_path
- ]
-
- ExecCommandAndCaptureOutput(stage_patch_file, verbose=verbose)
-
-
-def StagePackagesPatchResultsForCommit(package_info_dict, commit_messages):
- """Stages the patch results of the packages to the commit message.
-
- Args:
- package_info_dict: A dictionary where the key is the package name and the
- value is a dictionary that contains information about the patches of the
- package (key).
- commit_messages: The commit message that has the updated ebuilds and
- upreving information.
- """
-
- # For each package, check if any patches for that package have
- # changed, if so, add which patches have changed to the commit
- # message.
- for package_name, patch_info_dict in package_info_dict.items():
- if patch_info_dict['disabled_patches'] or \
- patch_info_dict['removed_patches'] or \
- patch_info_dict['modified_metadata']:
- cur_package_header = 'For the package %s:' % package_name
- commit_messages.append('-m %s' % cur_package_header)
-
- # Add to the commit message that the patch metadata file was modified.
- if patch_info_dict['modified_metadata']:
- patch_metadata_path = patch_info_dict['modified_metadata']
- commit_messages.append('-m %s' % 'The patch metadata file %s was '
- 'modified' % os.path.basename(patch_metadata_path))
-
- StagePatchMetadataFileForCommit(patch_metadata_path)
-
- # Add each disabled patch to the commit message.
- if patch_info_dict['disabled_patches']:
- commit_messages.append('-m %s' % 'The following patches were disabled:')
-
- for patch_path in patch_info_dict['disabled_patches']:
- commit_messages.append('-m %s' % os.path.basename(patch_path))
-
- # Add each removed patch to the commit message.
- if patch_info_dict['removed_patches']:
- commit_messages.append('-m %s' % 'The following patches were removed:')
-
- for patch_path in patch_info_dict['removed_patches']:
- commit_messages.append('-m %s' % os.path.basename(patch_path))
-
- RemovePatchesFromFilesDir(patch_info_dict['removed_patches'])
-
- return commit_messages
-
-
-def UpdatePackages(packages, llvm_hash, llvm_version, chroot_path,
- patch_metadata_file, mode, svn_option):
- """Updates the package's LLVM_NEXT_HASH and uprevs the ebuild.
-
- A temporary repo is created for the changes. The changes are
- then uploaded for review.
-
- Args:
- packages: A list of all the packages that are going to be updated.
- llvm_hash: The LLVM hash to use for 'LLVM_NEXT_HASH'.
- llvm_version: The LLVM version of the 'llvm_hash'.
- chroot_path: The absolute path to the chroot.
- patch_metadata_file: The name of the .json file in '$FILESDIR/' that has
- the patches and its metadata.
- mode: The mode of the patch manager when handling an applicable patch
- that failed to apply.
- Ex: 'FailureModes.FAIL'
- svn_option: The git hash to use based off of the svn option.
- Ex: 'google3', 'tot', or <svn_version> such as 365123
-
- Returns:
- A nametuple that has two (key, value) pairs, where the first pair is the
- Gerrit commit URL and the second pair is the change list number.
- """
-
- # Determines whether to print the result of each executed command.
- llvm_patch_management.verbose = verbose
-
- # Construct a dictionary where the key is the absolute path of the symlink to
- # the package and the value is the absolute path to the ebuild of the package.
- paths_dict = CreatePathDictionaryFromPackages(chroot_path, packages)
-
- repo_path = os.path.dirname(next(iter(paths_dict.values())))
-
- _CreateRepo(repo_path, llvm_hash)
-
- try:
- if svn_option in get_llvm_hash.KNOWN_HASH_SOURCES:
- commit_message_header = ('llvm-next/%s: Update packages to r%d' %
- (svn_option, llvm_version))
- else:
- commit_message_header = 'llvm-next: Update packages to r%d' % llvm_version
-
- commit_messages = ['-m %s' % commit_message_header]
-
- commit_messages.append('-m %s' % 'Following packages have been updated:')
-
- # Holds the list of packages that are updating.
- packages = []
-
- # Iterate through the dictionary.
- #
- # For each iteration:
- # 1) Update the ebuild's LLVM_NEXT_HASH.
- # 2) Uprev the ebuild (symlink).
- # 3) Add the modified package to the commit message.
- for symlink_path, ebuild_path in paths_dict.items():
- path_to_ebuild_dir = os.path.dirname(ebuild_path)
-
- UpdateBuildLLVMNextHash(ebuild_path, llvm_hash, llvm_version)
-
- UprevEbuild(symlink_path)
-
- cur_dir_name = os.path.basename(path_to_ebuild_dir)
- parent_dir_name = os.path.basename(os.path.dirname(path_to_ebuild_dir))
-
- packages.append('%s/%s' % (parent_dir_name, cur_dir_name))
-
- new_commit_message = '%s/%s' % (parent_dir_name, cur_dir_name)
-
- commit_messages.append('-m %s' % new_commit_message)
-
- # Handle the patches for each package.
- package_info_dict = llvm_patch_management.UpdatePackagesPatchMetadataFile(
- chroot_path, llvm_version, patch_metadata_file, packages, mode)
-
- # Update the commit message if changes were made to a package's patches.
- commit_messages = StagePackagesPatchResultsForCommit(
- package_info_dict, commit_messages)
-
- change_list = UploadChanges(repo_path, llvm_hash, commit_messages)
-
- finally:
- _DeleteRepo(repo_path, llvm_hash)
-
- return change_list
-
-
-def main():
- """Updates the LLVM next hash for each package.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- svn_option = args_output.llvm_version
-
- llvm_hash, llvm_version = GetLLVMHashAndVersionFromSVNOption(svn_option)
-
- change_list = UpdatePackages(args_output.update_packages, llvm_hash,
- llvm_version, args_output.chroot_path,
- args_output.patch_metadata_file,
- FailureModes(args_output.failure_mode),
- svn_option)
-
- print('Successfully updated packages to %d' % llvm_version)
- print('Gerrit URL: %s' % change_list.url)
- print('Change list number: %d' % change_list.cl_number)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_chromeos_llvm_next_hash_unittest.py b/llvm_tools/update_chromeos_llvm_next_hash_unittest.py
deleted file mode 100755
index 756ee9c9..00000000
--- a/llvm_tools/update_chromeos_llvm_next_hash_unittest.py
+++ /dev/null
@@ -1,941 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for updating the LLVM next hash."""
-
-from __future__ import print_function
-
-from collections import namedtuple
-import os
-import subprocess
-import unittest
-import unittest.mock as mock
-
-from failure_modes import FailureModes
-from test_helpers import CreateTemporaryJsonFile
-import llvm_patch_management
-import update_chromeos_llvm_next_hash
-
-# These are unittests; protected access is OK to a point.
-# pylint: disable=protected-access
-
-
-class UpdateLLVMNextHashTest(unittest.TestCase):
- """Test class for updating 'LLVM_NEXT_HASH' of packages."""
-
- @mock.patch.object(update_chromeos_llvm_next_hash, 'ChrootRunCommand')
- def testSucceedsToGetChrootPathForPackage(self, mock_chroot_command):
- package_chroot_path = '/chroot/path/to/package.ebuild'
-
- # Emulate ChrootRunCommandWOutput behavior when a chroot path is found for
- # a valid package.
- mock_chroot_command.return_value = package_chroot_path
-
- chroot_path = '/test/chroot/path'
- package_list = ['new-test/package']
-
- self.assertEqual(
- update_chromeos_llvm_next_hash.GetChrootBuildPaths(
- chroot_path, package_list), [package_chroot_path])
-
- mock_chroot_command.assert_called_once()
-
- def testFailedToConvertChrootPathWithInvalidPrefixToSymlinkPath(self):
- chroot_path = '/path/to/chroot'
- chroot_file_path = '/src/package.ebuild'
-
- # Verify the exception is raised when a symlink does not have the prefix
- # '/mnt/host/source/'.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash._ConvertChrootPathsToSymLinkPaths(
- chroot_path, [chroot_file_path])
-
- self.assertEqual(
- str(err.exception), 'Invalid prefix for the chroot path: '
- '%s' % chroot_file_path)
-
- def testSucceedsToConvertChrootPathToSymlinkPath(self):
- chroot_path = '/path/to/chroot'
- chroot_file_paths = ['/mnt/host/source/src/package.ebuild']
-
- expected_symlink_path = '/path/to/chroot/src/package.ebuild'
-
- self.assertEqual(
- update_chromeos_llvm_next_hash._ConvertChrootPathsToSymLinkPaths(
- chroot_path, chroot_file_paths), [expected_symlink_path])
-
- # Simulate 'os.path.islink' when a path is not a symbolic link.
- @mock.patch.object(os.path, 'islink', return_value=False)
- def testFailedToGetEbuildPathFromInvalidSymlink(self, mock_islink):
- symlink_path = '/symlink/path/src/to/package-r1.ebuild'
-
- # Verify the exception is raised when the argument is not a symbolic link.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.GetEbuildPathsFromSymLinkPaths(
- [symlink_path])
-
- self.assertEqual(
- str(err.exception), 'Invalid symlink provided: %s' % symlink_path)
-
- mock_islink.assert_called_once_with(symlink_path)
-
- # Simulate 'os.path.islink' when a path is a symbolic link.
- @mock.patch.object(os.path, 'islink', return_value=True)
- @mock.patch.object(os.path, 'realpath')
- def testSucceedsToGetEbuildPathFromValidSymlink(self, mock_realpath,
- mock_islink):
-
- symlink_path = '/path/to/chroot/src/package-r1.ebuild'
-
- abs_path_to_package = '/abs/path/to/src/package.ebuild'
-
- # Simulate 'os.path.realpath' when a valid path is passed in.
- mock_realpath.return_value = abs_path_to_package
-
- expected_resolved_paths = {symlink_path: abs_path_to_package}
-
- self.assertEqual(
- update_chromeos_llvm_next_hash.GetEbuildPathsFromSymLinkPaths(
- [symlink_path]), expected_resolved_paths)
-
- mock_realpath.assert_called_once_with(symlink_path)
-
- mock_islink.assert_called_once_with(symlink_path)
-
- # Simulate behavior of 'os.path.isfile()' when the ebuild path to a package
- # does not exist.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- def testFailedToUpdateLLVMNextHashForInvalidEbuildPath(self, mock_isfile):
- ebuild_path = '/some/path/to/package.ebuild'
-
- llvm_hash = 'a123testhash1'
- llvm_revision = 1000
-
- # Verify the exception is raised when the ebuild path does not exist.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UpdateBuildLLVMNextHash(
- ebuild_path, llvm_hash, llvm_revision)
-
- self.assertEqual(
- str(err.exception), 'Invalid ebuild path provided: %s' % ebuild_path)
-
- mock_isfile.assert_called_once()
-
- # Simulate 'os.path.isfile' behavior on a valid ebuild path.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- def testFailedToUpdateLLVMNextHash(self, mock_isfile):
- # Create a temporary file to simulate an ebuild file of a package.
- with CreateTemporaryJsonFile() as ebuild_file:
- with open(ebuild_file, 'w') as f:
- f.write('\n'.join([
- 'First line in the ebuild', 'Second line in the ebuild',
- 'Last line in the ebuild'
- ]))
-
- llvm_hash = 'a123testhash1'
- llvm_revision = 1000
-
- # Verify the exception is raised when the ebuild file does not have
- # 'LLVM_NEXT_HASH'.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UpdateBuildLLVMNextHash(
- ebuild_file, llvm_hash, llvm_revision)
-
- self.assertEqual(str(err.exception), 'Failed to update the LLVM hash.')
-
- self.assertEqual(mock_isfile.call_count, 2)
-
- # Simulate 'os.path.isfile' behavior on a valid ebuild path.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate 'ExecCommandAndCaptureOutput()' when successfully staged the
- # ebuild file for commit.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyStageTheEbuildForCommitForLLVMNextHashUpdate(
- self, mock_stage_commit_command, mock_isfile):
-
- # Create a temporary file to simulate an ebuild file of a package.
- with CreateTemporaryJsonFile() as ebuild_file:
- with open(ebuild_file, 'w') as f:
- f.write('\n'.join([
- 'First line in the ebuild', 'Second line in the ebuild',
- 'LLVM_NEXT_HASH=\"a12b34c56d78e90\" # r500',
- 'Last line in the ebuild'
- ]))
-
- # Updates the ebuild's git hash to 'llvm_hash' and revision to
- # 'llvm_revision'.
- llvm_hash = 'a123testhash1'
- llvm_revision = 1000
-
- update_chromeos_llvm_next_hash.UpdateBuildLLVMNextHash(
- ebuild_file, llvm_hash, llvm_revision)
-
- expected_file_contents = [
- 'First line in the ebuild\n', 'Second line in the ebuild\n',
- 'LLVM_NEXT_HASH=\"a123testhash1\" # r1000\n',
- 'Last line in the ebuild'
- ]
-
- # Verify the new file contents of the ebuild file match the expected file
- # contents.
- with open(ebuild_file) as new_file:
- file_contents_as_a_list = [cur_line for cur_line in new_file]
- self.assertListEqual(file_contents_as_a_list, expected_file_contents)
-
- self.assertEqual(mock_isfile.call_count, 2)
-
- mock_stage_commit_command.assert_called_once()
-
- # Simulate behavior of 'os.path.islink()' when the argument passed in is not a
- # symbolic link.
- @mock.patch.object(os.path, 'islink', return_value=False)
- def testFailedToUprevEbuildForInvalidSymlink(self, mock_islink):
- symlink_to_uprev = '/symlink/to/package.ebuild'
-
- # Verify the exception is raised when a symbolic link is not passed in.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UprevEbuild(symlink_to_uprev)
-
- self.assertEqual(
- str(err.exception), 'Invalid symlink provided: %s' % symlink_to_uprev)
-
- mock_islink.assert_called_once()
-
- # Simulate 'os.path.islink' when a symbolic link is passed in.
- @mock.patch.object(os.path, 'islink', return_value=True)
- def testFailedToUprevEbuild(self, mock_islink):
- symlink_to_uprev = '/symlink/to/package.ebuild'
-
- # Verify the exception is raised when the symlink does not have a revision
- # number.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UprevEbuild(symlink_to_uprev)
-
- self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
-
- mock_islink.assert_called_once_with(symlink_to_uprev)
-
- # Simulate 'os.path.islink' when a valid symbolic link is passed in.
- @mock.patch.object(os.path, 'islink', return_value=True)
- # Simulate 'os.path.dirname' when returning a path to the directory of a
- # valid symbolic link.
- @mock.patch.object(os.path, 'dirname', return_value='/symlink/to')
- # Simulate 'RunCommandWOutput' when successfully added the upreved symlink
- # for commit.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyUprevEbuild(self, mock_command_output, mock_dirname,
- mock_islink):
-
- symlink_to_uprev = '/symlink/to/package-r1.ebuild'
-
- update_chromeos_llvm_next_hash.UprevEbuild(symlink_to_uprev)
-
- mock_islink.assert_called_once_with(symlink_to_uprev)
-
- mock_dirname.assert_called_once_with(symlink_to_uprev)
-
- mock_command_output.assert_called_once()
-
- # Simulate behavior of 'os.path.isdir()' when the path to the repo is not a
- # directory.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testFailedToCreateRepoForInvalidDirectoryPath(self, mock_isdir):
- path_to_repo = '/path/to/repo'
-
- # The name to use for the repo name.
- llvm_hash = 'a123testhash1'
-
- # Verify the exception is raised when provided an invalid directory path.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash._CreateRepo(path_to_repo, llvm_hash)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid directory path provided: %s' % path_to_repo)
-
- mock_isdir.assert_called_once()
-
- # Simulate 'os.path.isdir' when a valid repo path is provided.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate behavior of 'ExecCommandAndCaptureOutput()' when successfully reset
- # changes and created a repo.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyCreatedRepo(self, mock_command_output, mock_isdir):
- path_to_repo = '/path/to/repo'
-
- # The name to use for the repo name.
- llvm_hash = 'a123testhash1'
-
- update_chromeos_llvm_next_hash._CreateRepo(path_to_repo, llvm_hash)
-
- mock_isdir.assert_called_once_with(path_to_repo)
-
- self.assertEqual(mock_command_output.call_count, 2)
-
- # Simulate behavior of 'os.path.isdir()' when the path to the repo is not a
- # directory.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testFailedToDeleteRepoForInvalidDirectoryPath(self, mock_isdir):
- path_to_repo = '/some/path/to/repo'
-
- # The name to use for the repo name.
- llvm_hash = 'a123testhash2'
-
- # Verify the exception is raised on an invalid repo path.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash._DeleteRepo(path_to_repo, llvm_hash)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid directory path provided: %s' % path_to_repo)
-
- mock_isdir.assert_called_once()
-
- # Simulate 'os.path.isdir' on valid directory path.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate 'ExecCommandAndCaptureOutput()' when successfully checkout to
- # cros/master, reset changes, and deleted the repo.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyDeletedRepo(self, mock_command_output, mock_isdir):
- path_to_repo = '/some/path/to/repo'
-
- # The name of the repo to be deleted.
- llvm_hash = 'a123testhash2'
-
- update_chromeos_llvm_next_hash._DeleteRepo(path_to_repo, llvm_hash)
-
- mock_isdir.assert_called_once_with(path_to_repo)
-
- self.assertEqual(mock_command_output.call_count, 3)
-
- def testFailedToFindChangeListURL(self):
- repo_upload_contents = 'remote: https://some_url'
-
- # Verify the exception is raised when failed to find the Gerrit URL when
- # parsing the 'repo upload' contents.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.GetGerritRepoUploadContents(
- repo_upload_contents)
-
- self.assertEqual(str(err.exception), 'Failed to find change list URL.')
-
- def testSuccessfullyGetGerritRepoUploadContents(self):
- repo_upload_contents = ('remote: https://chromium-review.googlesource.com'
- '/c/chromiumos/overlays/chromiumos-overlay/+/'
- '193147 Some commit header')
-
- change_list = update_chromeos_llvm_next_hash.GetGerritRepoUploadContents(
- repo_upload_contents)
-
- self.assertEqual(
- change_list.url,
- 'https://chromium-review.googlesource.com/c/chromiumos/overlays/'
- 'chromiumos-overlay/+/193147')
-
- self.assertEqual(change_list.cl_number, 193147)
-
- # Simulate behavior of 'os.path.isdir()' when the path to the repo is not a
- # directory.
- @mock.patch.object(os.path, 'isdir', return_value=False)
- def testFailedToUploadChangesForInvalidPathDirectory(self, mock_isdir):
- path_to_repo = '/some/path/to/repo'
-
- # The name of repo to upload for review.
- llvm_hash = 'a123testhash3'
-
- # Commit messages to add to the CL.
- commit_messages = ['-m Test message']
-
- # Verify exception is raised when on an invalid repo path.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UploadChanges(path_to_repo, llvm_hash,
- commit_messages)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid directory path provided: %s' % path_to_repo)
-
- mock_isdir.assert_called_once()
-
- # Simulate 'os.path.isdir' on a valid repo path.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate behavior of 'ExecCommandAndCaptureOutput()' when successfully
- # committed the changes.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- @mock.patch.object(subprocess, 'Popen')
- def testFailedToUploadChangesForReview(self, mock_repo_upload,
- mock_command_output, mock_isdir):
-
- # Simulate the behavior of 'subprocess.Popen()' when uploading the changes
- # for review
- #
- # `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
- mock_repo_upload.return_value.communicate.return_value = (
- None, 'Branch does not exist.')
-
- # Exit code of 1 means failed to upload changes for review.
- mock_repo_upload.return_value.returncode = 1
-
- path_to_repo = '/some/path/to/repo'
-
- # The name of repo to upload for review.
- llvm_hash = 'a123testhash3'
-
- # Commit messages to add to the CL.
- commit_messages = ['-m Test message']
-
- # Verify exception is raised when failed to upload the changes for review.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UploadChanges(path_to_repo, llvm_hash,
- commit_messages)
-
- self.assertEqual(str(err.exception), 'Failed to upload changes for review')
-
- mock_isdir.assert_called_once_with(path_to_repo)
-
- mock_command_output.assert_called_once()
-
- mock_repo_upload.assert_called_once()
-
- # Simulate 'os.path.isdir' when a valid repo path is passed in.
- @mock.patch.object(os.path, 'isdir', return_value=True)
- # Simulate behavior of 'ExecCommandAndCaptureOutput()' when successfully
- # committed the changes.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- @mock.patch.object(subprocess, 'Popen')
- def testSuccessfullyUploadedChangesForReview(self, mock_repo_upload,
- mock_command_output, mock_isdir):
-
- # A test CL generated by `repo upload`.
- repo_upload_contents = ('remote: https://chromium-review.googlesource.'
- 'com/c/chromiumos/overlays/chromiumos-overlay/'
- '+/193147 Fix stdout')
-
- # Simulate the behavior of 'subprocess.Popen()' when uploading the changes
- # for review
- #
- # `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
- mock_repo_upload.return_value.communicate.return_value = (
- repo_upload_contents, None)
-
- # Exit code of 0 means successfully uploaded changes for review.
- mock_repo_upload.return_value.returncode = 0
-
- path_to_repo = '/some/path/to/repo'
-
- # The name of the hash to upload for review.
- llvm_hash = 'a123testhash3'
-
- # Commit messages to add to the CL.
- commit_messages = ['-m Test message']
-
- change_list = update_chromeos_llvm_next_hash.UploadChanges(
- path_to_repo, llvm_hash, commit_messages)
-
- self.assertEqual(
- change_list.url,
- 'https://chromium-review.googlesource.com/c/chromiumos/overlays/'
- 'chromiumos-overlay/+/193147')
-
- self.assertEqual(change_list.cl_number, 193147)
-
- mock_isdir.assert_called_once_with(path_to_repo)
-
- mock_command_output.assert_called_once()
-
- mock_repo_upload.assert_called_once()
-
- @mock.patch.object(update_chromeos_llvm_next_hash, 'GetChrootBuildPaths')
- @mock.patch.object(update_chromeos_llvm_next_hash,
- '_ConvertChrootPathsToSymLinkPaths')
- def testExceptionRaisedWhenCreatingPathDictionaryFromPackages(
- self, mock_chroot_paths_to_symlinks, mock_get_chroot_paths):
-
- chroot_path = '/some/path/to/chroot'
-
- package_name = 'test-pckg/package'
- package_chroot_path = '/some/chroot/path/to/package-r1.ebuild'
-
- # Test function to simulate '_ConvertChrootPathsToSymLinkPaths' when a
- # symlink does not start with the prefix '/mnt/host/source'.
- def BadPrefixChrootPath(_chroot_path, _chroot_file_paths):
- raise ValueError('Invalid prefix for the chroot path: '
- '%s' % package_chroot_path)
-
- # Simulate 'GetChrootBuildPaths' when valid packages are passed in.
- #
- # Returns a list of chroot paths.
- mock_get_chroot_paths.return_value = [package_chroot_path]
-
- # Use test function to simulate '_ConvertChrootPathsToSymLinkPaths'
- # behavior.
- mock_chroot_paths_to_symlinks.side_effect = BadPrefixChrootPath
-
- # Verify exception is raised when for an invalid prefix in the symlink.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.CreatePathDictionaryFromPackages(
- chroot_path, [package_name])
-
- self.assertEqual(
- str(err.exception), 'Invalid prefix for the chroot path: '
- '%s' % package_chroot_path)
-
- mock_get_chroot_paths.assert_called_once_with(chroot_path, [package_name])
-
- mock_chroot_paths_to_symlinks.assert_called_once_with(
- chroot_path, [package_chroot_path])
-
- @mock.patch.object(update_chromeos_llvm_next_hash, 'GetChrootBuildPaths')
- @mock.patch.object(update_chromeos_llvm_next_hash,
- '_ConvertChrootPathsToSymLinkPaths')
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'GetEbuildPathsFromSymLinkPaths')
- def testSuccessfullyCreatedPathDictionaryFromPackages(
- self, mock_ebuild_paths_from_symlink_paths, mock_chroot_paths_to_symlinks,
- mock_get_chroot_paths):
-
- package_chroot_path = '/mnt/host/source/src/path/to/package-r1.ebuild'
-
- # Simulate 'GetChrootBuildPaths' when returning a chroot path for a valid
- # package.
- #
- # Returns a list of chroot paths.
- mock_get_chroot_paths.return_value = [package_chroot_path]
-
- package_symlink_path = '/some/path/to/chroot/src/path/to/package-r1.ebuild'
-
- # Simulate '_ConvertChrootPathsToSymLinkPaths' when returning a symlink to
- # a chroot path that points to a package.
- #
- # Returns a list of symlink file paths.
- mock_chroot_paths_to_symlinks.return_value = [package_symlink_path]
-
- chroot_package_path = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- # Simulate 'GetEbuildPathsFromSymlinkPaths' when returning a dictionary of
- # a symlink that points to an ebuild.
- #
- # Returns a dictionary of a symlink and ebuild file path pair
- # where the key is the absolute path to the symlink of the ebuild file
- # and the value is the absolute path to the ebuild file of the package.
- mock_ebuild_paths_from_symlink_paths.return_value = {
- package_symlink_path: chroot_package_path
- }
-
- chroot_path = '/some/path/to/chroot'
- package_name = 'test-pckg/package'
-
- self.assertEqual(
- update_chromeos_llvm_next_hash.CreatePathDictionaryFromPackages(
- chroot_path, [package_name]),
- {package_symlink_path: chroot_package_path})
-
- mock_get_chroot_paths.assert_called_once_with(chroot_path, [package_name])
-
- mock_chroot_paths_to_symlinks.assert_called_once_with(
- chroot_path, [package_chroot_path])
-
- mock_ebuild_paths_from_symlink_paths.assert_called_once_with(
- [package_symlink_path])
-
- # Simulate behavior of 'ExecCommandAndCaptureOutput()' when successfully
- # removed patches.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyRemovedPatchesFromFilesDir(self, mock_run_cmd):
- patches_to_remove_list = [
- '/abs/path/to/filesdir/cherry/fix_output.patch',
- '/abs/path/to/filesdir/display_results.patch'
- ]
-
- update_chromeos_llvm_next_hash.RemovePatchesFromFilesDir(
- patches_to_remove_list)
-
- self.assertEqual(mock_run_cmd.call_count, 2)
-
- # Simulate behavior of 'os.path.isfile()' when the absolute path to the patch
- # metadata file does not exist.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- def testInvalidPatchMetadataFileStagedForCommit(self, mock_isfile):
- patch_metadata_path = '/abs/path/to/filesdir/PATCHES'
-
- # Verify the exception is raised when the absolute path to the patch
- # metadata file does not exist or is not a file.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.StagePatchMetadataFileForCommit(
- patch_metadata_path)
-
- self.assertEqual(
- str(err.exception), 'Invalid patch metadata file provided: '
- '%s' % patch_metadata_path)
-
- mock_isfile.assert_called_once()
-
- # Simulate the behavior of 'os.path.isfile()' when the absolute path to the
- # patch metadata file exists.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate the behavior of 'ExecCommandAndCaptureOutput()' when successfully
- # staged the patch metadata file for commit.
- @mock.patch.object(
- update_chromeos_llvm_next_hash,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd,
- _mock_isfile):
-
- patch_metadata_path = '/abs/path/to/filesdir/PATCHES.json'
-
- update_chromeos_llvm_next_hash.StagePatchMetadataFileForCommit(
- patch_metadata_path)
-
- mock_run_cmd.assert_called_once()
-
- def testNoPatchResultsForCommit(self):
- package_1_patch_info_dict = {
- 'applied_patches': ['display_results.patch'],
- 'failed_patches': ['fixes_output.patch'],
- 'non_applicable_patches': [],
- 'disabled_patches': [],
- 'removed_patches': [],
- 'modified_metadata': None
- }
-
- package_2_patch_info_dict = {
- 'applied_patches': ['redirects_stdout.patch', 'fix_display.patch'],
- 'failed_patches': [],
- 'non_applicable_patches': [],
- 'disabled_patches': [],
- 'removed_patches': [],
- 'modified_metadata': None
- }
-
- test_package_info_dict = {
- 'test-packages/package1': package_1_patch_info_dict,
- 'test-packages/package2': package_2_patch_info_dict
- }
-
- test_commit_message = ['-m %s' % 'Updated packages']
-
- self.assertListEqual(
- update_chromeos_llvm_next_hash.StagePackagesPatchResultsForCommit(
- test_package_info_dict, test_commit_message), test_commit_message)
-
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'StagePatchMetadataFileForCommit')
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'RemovePatchesFromFilesDir')
- def testAddedPatchResultsForCommit(self, mock_remove_patches,
- mock_stage_patches_for_commit):
-
- package_1_patch_info_dict = {
- 'applied_patches': [],
- 'failed_patches': [],
- 'non_applicable_patches': [],
- 'disabled_patches': ['fixes_output.patch'],
- 'removed_patches': [],
- 'modified_metadata': '/abs/path/to/filesdir/PATCHES.json'
- }
-
- package_2_patch_info_dict = {
- 'applied_patches': ['fix_display.patch'],
- 'failed_patches': [],
- 'non_applicable_patches': [],
- 'disabled_patches': [],
- 'removed_patches': ['/abs/path/to/filesdir/redirect_stdout.patch'],
- 'modified_metadata': '/abs/path/to/filesdir/PATCHES.json'
- }
-
- test_package_info_dict = {
- 'test-packages/package1': package_1_patch_info_dict,
- 'test-packages/package2': package_2_patch_info_dict
- }
-
- test_commit_message = ['-m %s' % 'Updated packages']
-
- expected_commit_messages = [
- '-m %s' % 'Updated packages',
- '-m %s' % 'For the package test-packages/package1:',
- '-m %s' % 'The patch metadata file PATCHES.json was modified',
- '-m %s' % 'The following patches were disabled:',
- '-m %s' % 'fixes_output.patch',
- '-m %s' % 'For the package test-packages/package2:',
- '-m %s' % 'The patch metadata file PATCHES.json was modified',
- '-m %s' % 'The following patches were removed:',
- '-m %s' % 'redirect_stdout.patch'
- ]
-
- self.assertListEqual(
- update_chromeos_llvm_next_hash.StagePackagesPatchResultsForCommit(
- test_package_info_dict, test_commit_message),
- expected_commit_messages)
-
- path_to_removed_patch = '/abs/path/to/filesdir/redirect_stdout.patch'
-
- mock_remove_patches.assert_called_once_with([path_to_removed_patch])
-
- self.assertEqual(mock_stage_patches_for_commit.call_count, 2)
-
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'CreatePathDictionaryFromPackages')
- @mock.patch.object(update_chromeos_llvm_next_hash, '_CreateRepo')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UpdateBuildLLVMNextHash')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UprevEbuild')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UploadChanges')
- @mock.patch.object(update_chromeos_llvm_next_hash, '_DeleteRepo')
- def testExceptionRaisedWhenUpdatingPackages(
- self, mock_delete_repo, mock_upload_changes, mock_uprev_ebuild,
- mock_update_llvm_next, mock_create_repo, mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
-
- path_to_package_dir = '/some/path/to/chroot/src/path/to'
-
- # Test function to simulate '_CreateRepo' when successfully created the
- # repo on a valid repo path.
- def SuccessfullyCreateRepoForChanges(_repo_path, llvm_hash):
- self.assertEqual(llvm_hash, 'a123testhash4')
- return
-
- # Test function to simulate 'UpdateBuildLLVMNextHash' when successfully
- # updated the ebuild's 'LLVM_NEXT_HASH'.
- def SuccessfullyUpdatedLLVMNextHash(ebuild_path, llvm_hash, llvm_version):
- self.assertEqual(ebuild_path, abs_path_to_package)
- self.assertEqual(llvm_hash, 'a123testhash4')
- self.assertEqual(llvm_version, 1000)
- return
-
- # Test function to simulate 'UprevEbuild' when the symlink to the ebuild
- # does not have a revision number.
- def FailedToUprevEbuild(_symlink_path):
- # Raises a 'ValueError' exception because the symlink did not have have a
- # revision number.
- raise ValueError('Failed to uprev the ebuild.')
-
- # Test function to fail on 'UploadChanges' if the function gets called
- # when an exception is raised.
- def ShouldNotExecuteUploadChanges(_repo_path, _llvm_hash, _commit_messages):
- # Test function should not be called (i.e. execution should resume in the
- # 'finally' block) because 'UprevEbuild()' raised an
- # exception.
- assert False, 'Failed to go to "finally" block ' \
- 'after the exception was raised.'
-
- test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
-
- # Simulate behavior of 'CreatePathDictionaryFromPackages()' when
- # successfully created a dictionary where the key is the absolute path to
- # the symlink of the package and value is the absolute path to the ebuild of
- # the package.
- mock_create_path_dict.return_value = test_package_path_dict
-
- # Use test function to simulate behavior.
- mock_create_repo.side_effect = SuccessfullyCreateRepoForChanges
- mock_update_llvm_next.side_effect = SuccessfullyUpdatedLLVMNextHash
- mock_uprev_ebuild.side_effect = FailedToUprevEbuild
- mock_upload_changes.side_effect = ShouldNotExecuteUploadChanges
-
- packages_to_update = ['test-packages/package1']
- patch_metadata_file = 'PATCHES.json'
- llvm_hash = 'a123testhash4'
- llvm_version = 1000
- chroot_path = '/some/path/to/chroot'
- svn_option = 'google3'
-
- # Verify exception is raised when an exception is thrown within
- # the 'try' block by UprevEbuild function.
- with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_next_hash.UpdatePackages(
- packages_to_update, llvm_hash, llvm_version, chroot_path,
- patch_metadata_file, FailureModes.FAIL, svn_option)
-
- self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
-
- mock_create_path_dict.assert_called_once_with(chroot_path,
- packages_to_update)
-
- mock_create_repo.assert_called_once_with(path_to_package_dir, llvm_hash)
-
- mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
- llvm_hash, llvm_version)
-
- mock_uprev_ebuild.assert_called_once_with(symlink_path_to_package)
-
- mock_upload_changes.assert_not_called()
-
- mock_delete_repo.assert_called_once_with(path_to_package_dir, llvm_hash)
-
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'CreatePathDictionaryFromPackages')
- @mock.patch.object(update_chromeos_llvm_next_hash, '_CreateRepo')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UpdateBuildLLVMNextHash')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UprevEbuild')
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UploadChanges')
- @mock.patch.object(update_chromeos_llvm_next_hash, '_DeleteRepo')
- @mock.patch.object(llvm_patch_management, 'UpdatePackagesPatchMetadataFile')
- @mock.patch.object(update_chromeos_llvm_next_hash,
- 'StagePatchMetadataFileForCommit')
- def testSuccessfullyUpdatedPackages(
- self, mock_stage_patch_file, mock_update_package_metadata_file,
- mock_delete_repo, mock_upload_changes, mock_uprev_ebuild,
- mock_update_llvm_next, mock_create_repo, mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
-
- path_to_package_dir = '/some/path/to/chroot/src/path/to'
-
- # Test function to simulate '_CreateRepo' when successfully created the repo
- # for the changes to be made to the ebuild files.
- def SuccessfullyCreateRepoForChanges(_repo_path, llvm_hash):
- self.assertEqual(llvm_hash, 'a123testhash5')
- return
-
- # Test function to simulate 'UploadChanges' after a successfull update of
- # 'LLVM_NEXT_HASH" of the ebuild file.
- def SuccessfullyUpdatedLLVMNextHash(ebuild_path, llvm_hash, llvm_version):
- self.assertEqual(ebuild_path,
- '/some/path/to/chroot/src/path/to/package.ebuild')
- self.assertEqual(llvm_hash, 'a123testhash5')
- self.assertEqual(llvm_version, 1000)
- return
-
- # Test function to simulate 'UprevEbuild' when successfully incremented
- # the revision number by 1.
- def SuccessfullyUprevedEbuild(symlink_path):
- self.assertEqual(symlink_path,
- '/some/path/to/chroot/src/path/to/package-r1.ebuild')
-
- return
-
- # Test function to simulate 'UpdatePackagesPatchMetadataFile()' when the
- # patch results contains a disabled patch in 'disable_patches' mode.
- def RetrievedPatchResults(chroot_path, llvm_version, patch_metadata_file,
- packages, mode):
-
- self.assertEqual(chroot_path, '/some/path/to/chroot')
- self.assertEqual(llvm_version, 1000)
- self.assertEqual(patch_metadata_file, 'PATCHES.json')
- self.assertListEqual(packages, ['path/to'])
- self.assertEqual(mode, FailureModes.DISABLE_PATCHES)
-
- PatchInfo = namedtuple('PatchInfo', [
- 'applied_patches', 'failed_patches', 'non_applicable_patches',
- 'disabled_patches', 'removed_patches', 'modified_metadata'
- ])
-
- package_patch_info = PatchInfo(
- applied_patches=['fix_display.patch'],
- failed_patches=['fix_stdout.patch'],
- non_applicable_patches=[],
- disabled_patches=['fix_stdout.patch'],
- removed_patches=[],
- modified_metadata='/abs/path/to/filesdir/%s' % patch_metadata_file)
-
- package_info_dict = {'path/to': package_patch_info._asdict()}
-
- # Returns a dictionary where the key is the package and the value is a
- # dictionary that contains information about the package's patch results
- # produced by the patch manager.
- return package_info_dict
-
- # Test function to simulate 'UploadChanges()' when successfully created a
- # commit for the changes made to the packages and their patches and
- # retrieved the change list of the commit.
- def SuccessfullyUploadedChanges(_repo_path, _llvm_hash, _commit_messages):
- commit_url = 'https://some_name/path/to/commit/+/12345'
-
- return update_chromeos_llvm_next_hash.CommitContents(
- url=commit_url, cl_number=12345)
-
- test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
-
- # Simulate behavior of 'CreatePathDictionaryFromPackages()' when
- # successfully created a dictionary where the key is the absolute path to
- # the symlink of the package and value is the absolute path to the ebuild of
- # the package.
- mock_create_path_dict.return_value = test_package_path_dict
-
- # Use test function to simulate behavior.
- mock_create_repo.side_effect = SuccessfullyCreateRepoForChanges
- mock_update_llvm_next.side_effect = SuccessfullyUpdatedLLVMNextHash
- mock_uprev_ebuild.side_effect = SuccessfullyUprevedEbuild
- mock_update_package_metadata_file.side_effect = RetrievedPatchResults
- mock_upload_changes.side_effect = SuccessfullyUploadedChanges
-
- packages_to_update = ['test-packages/package1']
- patch_metadata_file = 'PATCHES.json'
- llvm_hash = 'a123testhash5'
- llvm_version = 1000
- chroot_path = '/some/path/to/chroot'
- svn_option = 'tot'
-
- change_list = update_chromeos_llvm_next_hash.UpdatePackages(
- packages_to_update, llvm_hash, llvm_version, chroot_path,
- patch_metadata_file, FailureModes.DISABLE_PATCHES, svn_option)
-
- self.assertEqual(change_list.url,
- 'https://some_name/path/to/commit/+/12345')
-
- self.assertEqual(change_list.cl_number, 12345)
-
- mock_create_path_dict.assert_called_once_with(chroot_path,
- packages_to_update)
-
- mock_create_repo.assert_called_once_with(path_to_package_dir, llvm_hash)
-
- mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
- llvm_hash, llvm_version)
-
- mock_uprev_ebuild.assert_called_once_with(symlink_path_to_package)
-
- expected_commit_messages = [
- '-m %s' % 'llvm-next/tot: Update packages to r1000',
- '-m %s' % 'Following packages have been updated:',
- '-m %s' % 'path/to',
- '-m %s' % 'For the package path/to:',
- '-m %s' % 'The patch metadata file PATCHES.json was modified',
- '-m %s' % 'The following patches were disabled:',
- '-m %s' % 'fix_stdout.patch'
- ]
-
- mock_update_package_metadata_file.assert_called_once()
-
- mock_stage_patch_file.assert_called_once_with(
- '/abs/path/to/filesdir/PATCHES.json')
-
- mock_upload_changes.assert_called_once_with(path_to_package_dir, llvm_hash,
- expected_commit_messages)
-
- mock_delete_repo.assert_called_once_with(path_to_package_dir, llvm_hash)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/update_packages_and_run_tryjobs.py b/llvm_tools/update_packages_and_run_tryjobs.py
deleted file mode 100755
index fac93db3..00000000
--- a/llvm_tools/update_packages_and_run_tryjobs.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs a tryjob/tryjobs after updating the packages."""
-
-from __future__ import print_function
-
-import argparse
-import datetime
-import json
-import os
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from failure_modes import FailureModes
-from get_llvm_hash import GetLLVMHashAndVersionFromSVNOption
-from get_llvm_hash import is_svn_option
-from subprocess_helpers import ChrootRunCommand
-from subprocess_helpers import ExecCommandAndCaptureOutput
-import update_chromeos_llvm_next_hash
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments.
-
- Returns:
- The log level to use when retrieving the LLVM hash or google3 LLVM version,
- the chroot path to use for executing chroot commands,
- a list of a package or packages to update their LLVM next hash,
- and the LLVM version to use when retrieving the LLVM hash.
- """
-
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(
- description='Runs a tryjob if successfully updated packages\''
- '"LLVM_NEXT_HASH".')
-
- # Add argument for the absolute path to the file that contains information on
- # the previous tested svn version.
- parser.add_argument(
- '--last_tested',
- help='the absolute path to the file that contains the last tested '
- 'svn version')
-
- # Add argument for other change lists that want to run alongside the tryjob
- # which has a change list of updating a package's git hash.
- parser.add_argument(
- '--extra_change_lists',
- type=int,
- nargs='+',
- help='change lists that would like to be run alongside the change list '
- 'of updating the packages')
-
- # Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
-
- # Add argument for builders for the tryjob.
- parser.add_argument(
- '--builders',
- required=True,
- nargs='+',
- help='builders to use for the tryjob testing')
-
- # Add argument for the description of the tryjob.
- parser.add_argument(
- '--description',
- required=False,
- nargs='+',
- help='the description of the tryjob')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- # Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
-
- # Add argument for the LLVM version to use.
- parser.add_argument(
- '--llvm_version',
- type=is_svn_option,
- required=True,
- help='which git hash of LLVM to find '
- '{google3, ToT, <svn_version>} '
- '(default: finds the git hash of the google3 LLVM '
- 'version)')
-
- args_output = parser.parse_args()
-
- return args_output
-
-
-def GetLastTestedSVNVersion(last_tested_file):
- """Gets the lasted tested svn version from the file.
-
- Args:
- last_tested_file: The absolute path to the file that contains the last
- tested svn version.
-
- Returns:
- The last tested svn version or 'None' if the file did not have a last tested
- svn version (the file exists, but failed to convert the contents to an
- integer) or the file does not exist.
- """
-
- if not last_tested_file:
- return None
-
- last_svn_version = None
-
- # Get the last tested svn version if the file exists.
- try:
- with open(last_tested_file) as file_obj:
- # For now, the first line contains the last tested svn version.
- return int(file_obj.read().rstrip())
-
- except IOError:
- pass
- except ValueError:
- pass
-
- return last_svn_version
-
-
-def GetTryJobCommand(change_list, extra_change_lists, options, builder):
- """Constructs the 'tryjob' command.
-
- Args:
- change_list: The CL obtained from updating the packages.
- extra_change_lists: Extra change lists that would like to be run alongside
- the change list of updating the packages.
- options: Options to be passed into the tryjob command.
- builder: The builder to be passed into the tryjob command.
-
- Returns:
- The 'tryjob' command with the change list of updating the packages and
- any extra information that was passed into the command line.
- """
-
- tryjob_cmd = ['cros', 'tryjob', '--yes', '--json', '-g', '%d' % change_list]
-
- if extra_change_lists:
- for extra_cl in extra_change_lists:
- tryjob_cmd.extend(['-g', '%d' % extra_cl])
-
- tryjob_cmd.append(builder)
-
- if options:
- tryjob_cmd.extend('--%s' % option for option in options)
-
- return tryjob_cmd
-
-
-def GetCurrentTimeInUTC():
- """Returns the current time via `datetime.datetime.utcnow()`."""
- return datetime.datetime.utcnow()
-
-
-def RunTryJobs(cl_number, extra_change_lists, options, builders, chroot_path,
- verbose):
- """Runs a tryjob/tryjobs.
-
- Args:
- cl_number: The CL created by updating the packages.
- extra_change_lists: Any extra change lists that would run alongside the CL
- that was created by updating the packages ('cl_number').
- options: Any options to be passed into the 'tryjob' command.
- builders: All the builders to run the 'tryjob' with.
- chroot_path: The absolute path to the chroot.
- verbose: Print command contents to `stdout`.
-
- Returns:
- A list that contains stdout contents of each tryjob, where stdout is
- information (a hashmap) about the tryjob. The hashmap also contains stderr
- if there was an error when running a tryjob.
-
- Raises:
- ValueError: Failed to submit a tryjob.
- """
-
- # Contains the results of each tryjob. The results are retrieved from 'out'
- # which is stdout of the command executer.
- tryjob_results = []
-
- # For each builder passed into the command line:
- #
- # Run a tryjob with the change list number obtained from updating the
- # packages and append additional changes lists and options obtained from the
- # command line.
- for cur_builder in builders:
- tryjob_cmd = GetTryJobCommand(cl_number, extra_change_lists, options,
- cur_builder)
-
- out = ChrootRunCommand(chroot_path, tryjob_cmd, verbose=verbose)
-
- tryjob_launch_time = GetCurrentTimeInUTC()
-
- tryjob_contents = json.loads(out)
-
- buildbucket_id = int(tryjob_contents[0]['buildbucket_id'])
-
- new_tryjob = {
- 'launch_time': str(tryjob_launch_time),
- 'link': str(tryjob_contents[0]['url']),
- 'buildbucket_id': buildbucket_id,
- 'extra_cls': extra_change_lists,
- 'options': options,
- 'builder': [cur_builder]
- }
-
- tryjob_results.append(new_tryjob)
-
- AddTryjobLinkToCL(tryjob_results, cl_number, chroot_path)
-
- return tryjob_results
-
-
-def AddTryjobLinkToCL(tryjobs, cl, chroot_path):
- """Adds the tryjob link(s) to the CL via `gerrit message <CL> <message>`."""
-
- # NOTE: Invoking `cros_sdk` does not make each tryjob link appear on its own
- # line, so invoking the `gerrit` command directly instead of using `cros_sdk`
- # to do it for us.
- #
- # FIXME: Need to figure out why `cros_sdk` does not add each tryjob link as a
- # newline.
- gerrit_abs_path = os.path.join(chroot_path, 'chromite/bin/gerrit')
-
- tryjob_links = ['Started the following tryjobs:']
- tryjob_links.extend(tryjob['link'] for tryjob in tryjobs)
-
- add_message_cmd = [
- gerrit_abs_path, 'message',
- str(cl), '\n'.join(tryjob_links)
- ]
-
- ExecCommandAndCaptureOutput(add_message_cmd)
-
-
-def main():
- """Updates the packages' 'LLVM_NEXT_HASH' and submits tryjobs.
-
- Raises:
- AssertionError: The script was run inside the chroot.
- """
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- last_svn_version = GetLastTestedSVNVersion(args_output.last_tested)
-
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
-
- patch_metadata_file = 'PATCHES.json'
-
- svn_option = args_output.llvm_version
-
- git_hash, svn_version = GetLLVMHashAndVersionFromSVNOption(svn_option)
-
- # There is no need to run tryjobs when the SVN version matches the last tested
- # SVN version.
- if last_svn_version == svn_version:
- print('svn version (%d) matches the last tested svn version (%d) in %s' %
- (svn_version, last_svn_version, args_output.last_tested))
- return
-
- update_chromeos_llvm_next_hash.verbose = args_output.verbose
-
- change_list = update_chromeos_llvm_next_hash.UpdatePackages(
- update_packages, git_hash, svn_version, args_output.chroot_path,
- patch_metadata_file, FailureModes.DISABLE_PATCHES, svn_option)
-
- print('Successfully updated packages to %d' % svn_version)
- print('Gerrit URL: %s' % change_list.url)
- print('Change list number: %d' % change_list.cl_number)
-
- tryjob_results = RunTryJobs(change_list.cl_number,
- args_output.extra_change_lists,
- args_output.options, args_output.builders,
- args_output.chroot_path, args_output.verbose)
-
- print('Tryjobs:')
- for tryjob in tryjob_results:
- print(tryjob)
-
- # Updated the packages and submitted tryjobs successfully, so the file will
- # contain 'svn_version' which will now become the last tested svn version.
- if args_output.last_tested:
- with open(args_output.last_tested, 'w') as file_obj:
- file_obj.write(str(svn_version))
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_packages_and_run_tryjobs_unittest.py b/llvm_tools/update_packages_and_run_tryjobs_unittest.py
deleted file mode 100755
index 55e344bb..00000000
--- a/llvm_tools/update_packages_and_run_tryjobs_unittest.py
+++ /dev/null
@@ -1,292 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unittests for running tryjobs after updating packages."""
-
-from __future__ import print_function
-
-import json
-import unittest
-import unittest.mock as mock
-
-from test_helpers import ArgsOutputTest
-from test_helpers import CreateTemporaryFile
-from update_chromeos_llvm_next_hash import CommitContents
-import update_chromeos_llvm_next_hash
-import update_packages_and_run_tryjobs
-
-
-class UpdatePackagesAndRunTryjobsTest(unittest.TestCase):
- """Unittests when running tryjobs after updating packages."""
-
- def testNoLastTestedFile(self):
- self.assertEqual(
- update_packages_and_run_tryjobs.GetLastTestedSVNVersion(None), None)
-
- def testFailedToGetIntegerFromLastTestedFile(self):
- # Create a temporary file to simulate the behavior of the last tested file
- # when the file does not have a SVN version (i.e. int() failed).
- with CreateTemporaryFile() as temp_file:
- self.assertEqual(
- update_packages_and_run_tryjobs.GetLastTestedSVNVersion(temp_file),
- None)
-
- def testLastTestFileDoesNotExist(self):
- # Simulate 'open()' on a lasted tested file that does not exist.
- mock_open = mock.mock_open(read_data='')
-
- self.assertEqual(
- update_packages_and_run_tryjobs.GetLastTestedSVNVersion(
- '/some/file/that/does/not/exist.txt'), None)
-
- def testSuccessfullyRetrievedLastTestedSVNVersion(self):
- with CreateTemporaryFile() as temp_file:
- # Simulate behavior when the last tested file contains a SVN version.
- with open(temp_file, 'w') as svn_file:
- svn_file.write('1234')
-
- self.assertEqual(
- update_packages_and_run_tryjobs.GetLastTestedSVNVersion(temp_file),
- 1234)
-
- def testGetTryJobCommandWithNoExtraInformation(self):
- test_change_list = 1234
-
- test_builder = 'nocturne'
-
- expected_tryjob_cmd_list = [
- 'cros', 'tryjob', '--yes', '--json', '-g',
- '%d' % test_change_list, test_builder
- ]
-
- self.assertEqual(
- update_packages_and_run_tryjobs.GetTryJobCommand(
- test_change_list, None, None, test_builder),
- expected_tryjob_cmd_list)
-
- def testGetTryJobCommandWithExtraInformation(self):
- test_change_list = 4321
- test_extra_cls = [1000, 10]
- test_options = ['report_error', 'delete_tryjob']
- test_builder = 'kevin'
-
- expected_tryjob_cmd_list = [
- 'cros',
- 'tryjob',
- '--yes',
- '--json',
- '-g',
- '%d' % test_change_list,
- '-g',
- '%d' % test_extra_cls[0],
- '-g',
- '%d' % test_extra_cls[1],
- test_builder,
- '--%s' % test_options[0],
- '--%s' % test_options[1],
- ]
-
- self.assertEqual(
- update_packages_and_run_tryjobs.GetTryJobCommand(
- test_change_list, test_extra_cls, test_options, test_builder),
- expected_tryjob_cmd_list)
-
- # Simulate `datetime.datetime.utcnow()` when retrieving the current time when
- # submitted a tryjob.
- @mock.patch.object(
- update_packages_and_run_tryjobs,
- 'GetCurrentTimeInUTC',
- return_value='2019-09-09')
- # Simulate the behavior of `AddTryjobLinkToCL()` when successfully added the
- # tryjob url to the CL that was uploaded to Gerrit for review.
- @mock.patch.object(update_packages_and_run_tryjobs, 'AddTryjobLinkToCL')
- # Simulate behavior of `ChrootRunCommand()` when successfully submitted a
- # tryjob via `cros tryjob`.
- @mock.patch.object(update_packages_and_run_tryjobs, 'ChrootRunCommand')
- def testSuccessfullySubmittedTryJob(
- self, mock_chroot_cmd, mock_add_tryjob_link_to_cl, mock_launch_time):
-
- expected_tryjob_cmd_list = [
- 'cros', 'tryjob', '--yes', '--json', '-g',
- '%d' % 900, '-g',
- '%d' % 1200, 'builder1', '--some_option'
- ]
-
- buildbucket_id = '1234'
- url = 'https://some_tryjob_url.com'
-
- tryjob_launch_contents = [{'buildbucket_id': buildbucket_id, 'url': url}]
-
- mock_chroot_cmd.return_value = json.dumps(tryjob_launch_contents)
-
- extra_cls = [1200]
- tryjob_options = ['some_option']
- builder_list = ['builder1']
- chroot_path = '/some/path/to/chroot'
- cl_to_launch_tryjob = 900
- verbose = False
-
- tryjob_results_list = update_packages_and_run_tryjobs.RunTryJobs(
- cl_to_launch_tryjob, extra_cls, tryjob_options, builder_list,
- chroot_path, verbose)
-
- expected_tryjob_dict = {
- 'launch_time': '2019-09-09',
- 'link': url,
- 'buildbucket_id': int(buildbucket_id),
- 'extra_cls': extra_cls,
- 'options': tryjob_options,
- 'builder': builder_list
- }
-
- self.assertEqual(tryjob_results_list, [expected_tryjob_dict])
-
- mock_chroot_cmd.assert_called_once_with(
- chroot_path, expected_tryjob_cmd_list, verbose=False)
-
- mock_add_tryjob_link_to_cl.assert_called_once()
-
- mock_launch_time.assert_called_once()
-
- # Simulate behavior of `ExecCommandAndCaptureOutput()` when successfully added
- # the tryjob link to the CL via `gerrit message <CL> <message>`.
- @mock.patch.object(
- update_packages_and_run_tryjobs,
- 'ExecCommandAndCaptureOutput',
- return_value=None)
- def testSuccessfullyAddedTryjobLinkToCL(self, mock_exec_cmd):
- chroot_path = '/abs/path/to/chroot'
-
- test_cl_number = 1000
-
- tryjob_result = [{'link': 'https://some_tryjob_link.com'}]
-
- update_packages_and_run_tryjobs.AddTryjobLinkToCL(
- tryjob_result, test_cl_number, chroot_path)
-
- expected_gerrit_message = [
- '%s/chromite/bin/gerrit' % chroot_path, 'message',
- str(test_cl_number),
- 'Started the following tryjobs:\n%s' % tryjob_result[0]['link']
- ]
-
- mock_exec_cmd.assert_called_once_with(expected_gerrit_message)
-
- # Simulate behavior of `GetCommandLineArgs()` when successfully parsed the
- # command line for the optional/required arguments for the script.
- @mock.patch.object(update_packages_and_run_tryjobs, 'GetCommandLineArgs')
- # Simulate behavior of `GetLLVMHashAndVersionFromSVNOption()` when
- # successfully retrieved the LLVM hash and version for google3.
- @mock.patch.object(update_packages_and_run_tryjobs,
- 'GetLLVMHashAndVersionFromSVNOption')
- # Simulate behavior of `GetLastTestedSVNVersion()` when successfully retrieved
- # the last tested revision from the last tested file.
- @mock.patch.object(
- update_packages_and_run_tryjobs,
- 'GetLastTestedSVNVersion',
- return_value=100)
- # Simulate behavior of `VerifyOutsideChroot()` when successfully invoked the
- # script outside of the chroot.
- @mock.patch.object(
- update_packages_and_run_tryjobs, 'VerifyOutsideChroot', return_value=True)
- def testLastTestSVNVersionMatchesSVNVersion(
- self, mock_outside_chroot, mock_get_last_tested_version,
- mock_get_hash_and_version, mock_get_commandline_args):
-
- args_output_obj = ArgsOutputTest()
-
- mock_get_commandline_args.return_value = args_output_obj
-
- mock_get_hash_and_version.return_value = ('a123testhash1', 100)
-
- update_packages_and_run_tryjobs.main()
-
- mock_outside_chroot.assert_called_once()
-
- mock_get_commandline_args.assert_called_once()
-
- mock_get_last_tested_version.assert_called_once_with(
- args_output_obj.last_tested)
-
- mock_get_hash_and_version.assert_called_once_with(
- args_output_obj.llvm_version)
-
- # Simulate the behavior of `RunTryJobs()` when successfully submitted a
- # tryjob.
- @mock.patch.object(update_packages_and_run_tryjobs, 'RunTryJobs')
- # Simulate behavior of `UpdatePackages()` when successfully updated the
- # packages and uploaded a CL for review.
- @mock.patch.object(update_chromeos_llvm_next_hash, 'UpdatePackages')
- # Simulate behavior of `GetCommandLineArgs()` when successfully parsed the
- # command line for the optional/required arguments for the script.
- @mock.patch.object(update_packages_and_run_tryjobs, 'GetCommandLineArgs')
- # Simulate behavior of `GetLLVMHashAndVersionFromSVNOption()` when
- # successfully retrieved the LLVM hash and version for google3.
- @mock.patch.object(update_packages_and_run_tryjobs,
- 'GetLLVMHashAndVersionFromSVNOption')
- # Simulate behavior of `GetLastTestedSVNVersion()` when successfully retrieved
- # the last tested revision from the last tested file.
- @mock.patch.object(
- update_packages_and_run_tryjobs,
- 'GetLastTestedSVNVersion',
- return_value=100)
- # Simulate behavior of `VerifyOutsideChroot()` when successfully invoked the
- # script outside of the chroot.
- @mock.patch.object(
- update_packages_and_run_tryjobs, 'VerifyOutsideChroot', return_value=True)
- def testUpdatedLastTestedFileWithNewTestedRevision(
- self, mock_outside_chroot, mock_get_last_tested_version,
- mock_get_hash_and_version, mock_get_commandline_args,
- mock_update_packages, mock_run_tryjobs):
-
- mock_get_hash_and_version.return_value = ('a123testhash2', 200)
-
- test_cl_url = 'https://some_cl_url.com'
-
- test_cl_number = 12345
-
- mock_update_packages.return_value = CommitContents(
- url=test_cl_url, cl_number=test_cl_number)
-
- tryjob_test_results = [{
- 'link': 'https://some_tryjob_url.com',
- 'buildbucket_id': 1234
- }]
-
- mock_run_tryjobs.return_value = tryjob_test_results
-
- # Create a temporary file to simulate the last tested file that contains a
- # revision.
- with CreateTemporaryFile() as last_tested_file:
- args_output_obj = ArgsOutputTest(svn_option=200)
- args_output_obj.last_tested = last_tested_file
-
- mock_get_commandline_args.return_value = args_output_obj
-
- update_packages_and_run_tryjobs.main()
-
- # Verify that the lasted tested file has been updated to the new LLVM
- # revision.
- with open(last_tested_file) as update_revision:
- new_revision = update_revision.readline()
-
- self.assertEqual(int(new_revision.rstrip()), 200)
-
- mock_outside_chroot.assert_called_once()
-
- mock_get_commandline_args.assert_called_once()
-
- mock_get_last_tested_version.assert_called_once()
-
- mock_get_hash_and_version.assert_called_once()
-
- mock_run_tryjobs.assert_called_once()
-
- mock_update_packages.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/llvm_tools/update_tryjob_status.py b/llvm_tools/update_tryjob_status.py
deleted file mode 100755
index 38eab8e5..00000000
--- a/llvm_tools/update_tryjob_status.py
+++ /dev/null
@@ -1,323 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates the status of a tryjob."""
-
-from __future__ import print_function
-
-import argparse
-import enum
-import json
-import os
-import subprocess
-import sys
-
-from assert_not_in_chroot import VerifyOutsideChroot
-from subprocess_helpers import ChrootRunCommand
-from test_helpers import CreateTemporaryJsonFile
-
-
-class TryjobStatus(enum.Enum):
- """Values for the 'status' field of a tryjob."""
-
- GOOD = 'good'
- BAD = 'bad'
- PENDING = 'pending'
- SKIP = 'skip'
-
- # Executes the script passed into the command line (this script's exit code
- # determines the 'status' value of the tryjob).
- CUSTOM_SCRIPT = 'custom_script'
-
- # Uses the result returned by 'cros buildresult'.
- AUTO = 'auto'
-
-
-class BuilderStatus(enum.Enum):
- """Actual values given via 'cros buildresult'."""
-
- PASS = 'pass'
- FAIL = 'fail'
- RUNNING = 'running'
-
-
-class CustomScriptStatus(enum.Enum):
- """Exit code values of a custom script."""
-
- # NOTE: Not using 1 for 'bad' because the custom script can raise an
- # exception which would cause the exit code of the script to be 1, so the
- # tryjob's 'status' would be updated when there is an exception.
- #
- # Exit codes are as follows:
- # 0: 'good'
- # 124: 'bad'
- # 125: 'skip'
- GOOD = 0
- BAD = 124
- SKIP = 125
-
-
-custom_script_exit_value_mapping = {
- CustomScriptStatus.GOOD.value: TryjobStatus.GOOD.value,
- CustomScriptStatus.BAD.value: TryjobStatus.BAD.value,
- CustomScriptStatus.SKIP.value: TryjobStatus.SKIP.value
-}
-
-builder_status_mapping = {
- BuilderStatus.PASS.value: TryjobStatus.GOOD.value,
- BuilderStatus.FAIL.value: TryjobStatus.BAD.value,
- BuilderStatus.RUNNING.value: TryjobStatus.PENDING.value
-}
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default absoute path to the chroot if not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(
- description='Updates the status of a tryjob.')
-
- # Add argument for the JSON file to use for the update of a tryjob.
- parser.add_argument(
- '--status_file',
- required=True,
- help='The absolute path to the JSON file that contains the tryjobs used '
- 'for bisecting LLVM.')
-
- # Add argument that sets the 'status' field to that value.
- parser.add_argument(
- '--set_status',
- required=True,
- choices=[tryjob_status.value for tryjob_status in TryjobStatus],
- help='Sets the "status" field of the tryjob.')
-
- # Add argument that determines which revision to search for in the list of
- # tryjobs.
- parser.add_argument(
- '--revision',
- required=True,
- type=int,
- help='The revision to set its status.')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- # Add argument for the custom script to execute for the 'custom_script'
- # option in '--set_status'.
- parser.add_argument(
- '--custom_script',
- help='The absolute path to the custom script to execute (its exit code '
- 'should be %d for "good", %d for "bad", or %d for "skip")' %
- (CustomScriptStatus.GOOD.value, CustomScriptStatus.BAD.value,
- CustomScriptStatus.SKIP.value))
-
- args_output = parser.parse_args()
-
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
- raise ValueError('File does not exist or does not ending in ".json" '
- ': %s' % args_output.status_file)
-
- if args_output.set_status == TryjobStatus.CUSTOM_SCRIPT.value and \
- not args_output.custom_script:
- raise ValueError('Please provide the absolute path to the script to '
- 'execute.')
-
- return args_output
-
-
-def FindTryjobIndex(revision, tryjobs_list):
- """Searches the list of tryjob dictionaries to find 'revision'.
-
- Uses the key 'rev' for each dictionary and compares the value against
- 'revision.'
-
- Args:
- revision: The revision to search for in the tryjobs.
- tryjobs_list: A list of tryjob dictionaries of the format:
- {
- 'rev' : [REVISION],
- 'url' : [URL_OF_CL],
- 'cl' : [CL_NUMBER],
- 'link' : [TRYJOB_LINK],
- 'status' : [TRYJOB_STATUS],
- 'buildbucket_id': [BUILDBUCKET_ID]
- }
-
- Returns:
- The index within the list or None to indicate it was not found.
- """
-
- for cur_index, cur_tryjob_dict in enumerate(tryjobs_list):
- if cur_tryjob_dict['rev'] == revision:
- return cur_index
-
- return None
-
-
-def GetStatusFromCrosBuildResult(chroot_path, buildbucket_id):
- """Retrieves the 'status' using 'cros buildresult'."""
-
- get_buildbucket_id_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- tryjob_json = ChrootRunCommand(chroot_path, get_buildbucket_id_cmd)
-
- tryjob_contents = json.loads(tryjob_json)
-
- return str(tryjob_contents['%d' % buildbucket_id]['status'])
-
-
-def GetAutoResult(chroot_path, buildbucket_id):
- """Returns the conversion of the result of 'cros buildresult'."""
-
- # Calls 'cros buildresult' to get the status of the tryjob.
- build_result = GetStatusFromCrosBuildResult(chroot_path, buildbucket_id)
-
- # The string returned by 'cros buildresult' might not be in the mapping.
- if build_result not in builder_status_mapping:
- raise ValueError(
- '"cros buildresult" return value is invalid: %s' % build_result)
-
- return builder_status_mapping[build_result]
-
-
-def GetCustomScriptResult(custom_script, status_file, tryjob_contents):
- """Returns the conversion of the exit code of the custom script.
-
- Args:
- custom_script: Absolute path to the script to be executed.
- status_file: Absolute path to the file that contains information about the
- bisection of LLVM.
- tryjob_contents: A dictionary of the contents of the tryjob (e.g. 'status',
- 'url', 'link', 'buildbucket_id', etc.).
-
- Returns:
- The exit code conversion to either return 'good', 'bad', or 'skip'.
-
- Raises:
- ValueError: The custom script failed to provide the correct exit code.
- """
-
- # Create a temporary file to write the contents of the tryjob at index
- # 'tryjob_index' (the temporary file path will be passed into the custom
- # script as a command line argument).
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as tryjob_file:
- json.dump(tryjob_contents, tryjob_file, indent=4, separators=(',', ': '))
-
- exec_script_cmd = [custom_script, temp_json_file]
-
- # Execute the custom script to get the exit code.
- exec_script_cmd_obj = subprocess.Popen(
- exec_script_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- _, stderr = exec_script_cmd_obj.communicate()
-
- # Invalid exit code by the custom script.
- if exec_script_cmd_obj.returncode not in custom_script_exit_value_mapping:
- # Save the .JSON file to the directory of 'status_file'.
- name_of_json_file = os.path.join(
- os.path.dirname(status_file), os.path.basename(temp_json_file))
-
- os.rename(temp_json_file, name_of_json_file)
-
- raise ValueError(
- 'Custom script %s exit code %d did not match '
- 'any of the expected exit codes: %d for "good", %d '
- 'for "bad", or %d for "skip".\nPlease check %s for information '
- 'about the tryjob: %s' %
- (custom_script, exec_script_cmd_obj.returncode,
- CustomScriptStatus.GOOD.value, CustomScriptStatus.BAD.value,
- CustomScriptStatus.SKIP.value, name_of_json_file, stderr))
-
- return custom_script_exit_value_mapping[exec_script_cmd_obj.returncode]
-
-
-def UpdateTryjobStatus(revision, set_status, status_file, chroot_path,
- custom_script):
- """Updates a tryjob's 'status' field based off of 'set_status'.
-
- Args:
- revision: The revision associated with the tryjob.
- set_status: What to update the 'status' field to.
- Ex: TryjobStatus.Good, TryjobStatus.BAD, TryjobStatus.PENDING, or
- TryjobStatus.AUTO where TryjobStatus.AUTO uses the result of
- 'cros buildresult'.
- status_file: The .JSON file that contains the tryjobs.
- chroot_path: The absolute path to the chroot (used by 'cros buildresult').
- custom_script: The absolute path to a script that will be executed which
- will determine the 'status' value of the tryjob.
- """
-
- # Format of 'bisect_contents':
- # {
- # 'start': [START_REVISION_OF_BISECTION]
- # 'end': [END_REVISION_OF_BISECTION]
- # 'jobs' : [
- # {[TRYJOB_INFORMATION]},
- # {[TRYJOB_INFORMATION]},
- # ...,
- # {[TRYJOB_INFORMATION]}
- # ]
- # }
- with open(status_file) as tryjobs:
- bisect_contents = json.load(tryjobs)
-
- if not bisect_contents['jobs']:
- sys.exit('No tryjobs in %s' % status_file)
-
- tryjob_index = FindTryjobIndex(revision, bisect_contents['jobs'])
-
- # 'FindTryjobIndex()' returns None if the revision was not found.
- if tryjob_index is None:
- raise ValueError(
- 'Unable to find tryjob for %d in %s' % (revision, status_file))
-
- # Set 'status' depending on 'set_status' for the tryjob.
- if set_status == TryjobStatus.GOOD:
- bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.GOOD.value
- elif set_status == TryjobStatus.BAD:
- bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.BAD.value
- elif set_status == TryjobStatus.PENDING:
- bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.PENDING.value
- elif set_status == TryjobStatus.AUTO:
- bisect_contents['jobs'][tryjob_index]['status'] = GetAutoResult(
- chroot_path, bisect_contents['jobs'][tryjob_index]['buildbucket_id'])
- elif set_status == TryjobStatus.SKIP:
- bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.SKIP.value
- elif set_status == TryjobStatus.CUSTOM_SCRIPT:
- bisect_contents['jobs'][tryjob_index]['status'] = GetCustomScriptResult(
- custom_script, status_file, bisect_contents['jobs'][tryjob_index])
- else:
- raise ValueError('Invalid "set_status" option provided: %s' % set_status)
-
- with open(status_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
-
-
-def main():
- """Updates the status of a tryjob."""
-
- VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- UpdateTryjobStatus(args_output.revision, TryjobStatus(args_output.set_status),
- args_output.status_file, args_output.chroot_path,
- args_output.custom_script)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_tryjob_status_unittest.py b/llvm_tools/update_tryjob_status_unittest.py
deleted file mode 100755
index b5e6556c..00000000
--- a/llvm_tools/update_tryjob_status_unittest.py
+++ /dev/null
@@ -1,617 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests when updating a tryjob's status."""
-
-from __future__ import print_function
-
-import json
-import os
-import subprocess
-import unittest
-import unittest.mock as mock
-
-from test_helpers import CreateTemporaryJsonFile
-from test_helpers import WritePrettyJsonFile
-from update_tryjob_status import TryjobStatus
-from update_tryjob_status import CustomScriptStatus
-import update_tryjob_status
-
-
-class UpdateTryjobStatusTest(unittest.TestCase):
- """Unittests for updating a tryjob's 'status'."""
-
- def testFoundTryjobIndex(self):
- test_tryjobs = [{
- 'rev': 123,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'good',
- 'buildbucket_id': 91835
- },
- {
- 'rev': 1000,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'pending',
- 'buildbucket_id': 10931
- }]
-
- expected_index = 0
-
- revision_to_find = 123
-
- self.assertEqual(
- update_tryjob_status.FindTryjobIndex(revision_to_find, test_tryjobs),
- expected_index)
-
- def testNotFindTryjobIndex(self):
- test_tryjobs = [{
- 'rev': 500,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'bad',
- 'buildbucket_id': 390
- },
- {
- 'rev': 10,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'skip',
- 'buildbucket_id': 10
- }]
-
- revision_to_find = 250
-
- self.assertIsNone(
- update_tryjob_status.FindTryjobIndex(revision_to_find, test_tryjobs))
-
- # Simulate the behavior of `ChrootRunCommand()` when executing a command
- # inside the chroot.
- @mock.patch.object(update_tryjob_status, 'ChrootRunCommand')
- def testGetStatusFromCrosBuildResult(self, mock_chroot_command):
- tryjob_contents = {
- '192': {
- 'status': 'good',
- 'CleanUpChroot': 'pass',
- 'artifacts_url': None
- }
- }
-
- # Use the test function to simulate 'ChrootRunCommand()' behavior.
- mock_chroot_command.return_value = json.dumps(tryjob_contents)
-
- buildbucket_id = 192
-
- chroot_path = '/some/path/to/chroot'
-
- self.assertEqual(
- update_tryjob_status.GetStatusFromCrosBuildResult(
- chroot_path, buildbucket_id), 'good')
-
- expected_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- mock_chroot_command.assert_called_once_with(chroot_path, expected_cmd)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is not in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value='querying')
- def testInvalidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 50
-
- # Verify the exception is raised when the return value of `cros buildresult`
- # is not in the `builder_status_mapping`.
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id)
-
- self.assertEqual(
- str(err.exception),
- '"cros buildresult" return value is invalid: querying')
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value=update_tryjob_status.BuilderStatus.PASS.value)
- def testValidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 100
-
- self.assertEqual(
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id),
- TryjobStatus.GOOD.value)
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
- @mock.patch.object(subprocess, 'Popen')
- # Simulate the behavior of `os.rename()` when successfully renamed a file.
- @mock.patch.object(os, 'rename', return_value=None)
- # Simulate the behavior of `os.path.basename()` when successfully retrieved
- # the basename of the temp .JSON file.
- @mock.patch.object(os.path, 'basename', return_value='tmpFile.json')
- def testInvalidExitCodeByCustomScript(self, mock_basename, mock_rename_file,
- mock_exec_custom_script):
-
- error_message_by_custom_script = 'Failed to parse .JSON file'
-
- # Simulate the behavior of 'subprocess.Popen()' when executing the custom
- # script.
- #
- # `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
- mock_exec_custom_script.return_value.communicate.return_value = (
- None, error_message_by_custom_script)
-
- # Exit code of 1 is not in the mapping, so an exception will be raised.
- custom_script_exit_code = 1
-
- mock_exec_custom_script.return_value.returncode = custom_script_exit_code
-
- tryjob_contents = {
- 'status': 'good',
- 'rev': 1234,
- 'url': 'https://some_url_to_CL.com',
- 'link': 'https://some_url_to_tryjob.com'
- }
-
- custom_script_path = '/abs/path/to/script.py'
- status_file_path = '/abs/path/to/status_file.json'
-
- name_json_file = os.path.join(
- os.path.dirname(status_file_path), 'tmpFile.json')
-
- expected_error_message = (
- 'Custom script %s exit code %d did not match '
- 'any of the expected exit codes: %s for "good", '
- '%d for "bad", or %d for "skip".\nPlease check '
- '%s for information about the tryjob: %s' %
- (custom_script_path, custom_script_exit_code,
- CustomScriptStatus.GOOD.value, CustomScriptStatus.BAD.value,
- CustomScriptStatus.SKIP.value, name_json_file,
- error_message_by_custom_script))
-
- # Verify the exception is raised when the exit code by the custom script
- # does not match any of the exit codes in the mapping of
- # `custom_script_exit_value_mapping`.
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents)
-
- self.assertEqual(str(err.exception), expected_error_message)
-
- mock_exec_custom_script.assert_called_once()
-
- mock_rename_file.assert_called_once()
-
- mock_basename.assert_called_once()
-
- @mock.patch.object(subprocess, 'Popen')
- # Simulate the behavior of `os.rename()` when successfully renamed a file.
- @mock.patch.object(os, 'rename', return_value=None)
- # Simulate the behavior of `os.path.basename()` when successfully retrieved
- # the basename of the temp .JSON file.
- @mock.patch.object(os.path, 'basename', return_value='tmpFile.json')
- def testValidExitCodeByCustomScript(self, mock_basename, mock_rename_file,
- mock_exec_custom_script):
-
- # Simulate the behavior of 'subprocess.Popen()' when executing the custom
- # script.
- #
- # `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
- mock_exec_custom_script.return_value.communicate.return_value = (None, None)
-
- mock_exec_custom_script.return_value.returncode = \
- CustomScriptStatus.GOOD.value
-
- tryjob_contents = {
- 'status': 'good',
- 'rev': 1234,
- 'url': 'https://some_url_to_CL.com',
- 'link': 'https://some_url_to_tryjob.com'
- }
-
- custom_script_path = '/abs/path/to/script.py'
- status_file_path = '/abs/path/to/status_file.json'
-
- self.assertEqual(
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents),
- TryjobStatus.GOOD.value)
-
- mock_exec_custom_script.assert_called_once()
-
- mock_rename_file.assert_not_called()
-
- mock_basename.assert_not_called()
-
- def testNoTryjobsInStatusFileWhenUpdatingTryjobStatus(self):
- bisect_test_contents = {'start': 369410, 'end': 369420, 'jobs': []}
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369412
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- # Verify the exception is raised when the `status_file` does not have any
- # `jobs` (empty).
- with self.assertRaises(SystemExit) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.GOOD, temp_json_file, chroot_path,
- custom_script)
-
- self.assertEqual(str(err.exception), 'No tryjobs in %s' % temp_json_file)
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob does not exist
- # in the status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=None)
- def testNotFindTryjobIndexWhenUpdatingTryjobStatus(self,
- mock_find_tryjob_index):
-
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369416
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- # Verify the exception is raised when the `status_file` does not have any
- # `jobs` (empty).
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.SKIP, temp_json_file, chroot_path,
- custom_script)
-
- self.assertEqual(
- str(err.exception), 'Unable to find tryjob for %d in %s' %
- (revision_to_update, temp_json_file))
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyUpdatedTryjobStatusToGood(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(revision_to_update,
- TryjobStatus.GOOD, temp_json_file,
- chroot_path, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- TryjobStatus.GOOD.value)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyUpdatedTryjobStatusToBad(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(revision_to_update,
- TryjobStatus.BAD, temp_json_file,
- chroot_path, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- TryjobStatus.BAD.value)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyUpdatedTryjobStatusToPending(self,
- mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'skip'
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.SKIP,
- temp_json_file, chroot_path, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.SKIP.value)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- def testSuccessfullyUpdatedTryjobStatusToSkip(self, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- chroot_path = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.PENDING,
- temp_json_file, chroot_path, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.PENDING.value)
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetAutoResult()` when `cros buildresult` returns
- # a value that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetAutoResult',
- return_value=TryjobStatus.GOOD.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(self, mock_get_auto_result,
- mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- path_to_chroot = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.AUTO,
- temp_json_file, path_to_chroot, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.GOOD.value)
-
- mock_get_auto_result.assert_called_once_with(
- path_to_chroot,
- bisect_test_contents['jobs'][tryjob_index]['buildbucket_id'])
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetCustomScriptResult()` when the custom script
- # exit code is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetCustomScriptResult',
- return_value=TryjobStatus.SKIP.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(
- self, mock_get_custom_script_result, mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- path_to_chroot = '/abs/path/to/chroot'
-
- custom_script_path = '/abs/path/to/custom_script.py'
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.CUSTOM_SCRIPT,
- temp_json_file, path_to_chroot, custom_script_path)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.SKIP.value)
-
- mock_get_custom_script_result.assert_called_once()
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- def testSetStatusDoesNotExistWhenUpdatingTryjobStatus(self,
- mock_find_tryjob_index):
-
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- path_to_chroot = '/abs/path/to/chroot'
-
- nonexistent_update_status = 'revert_status'
-
- custom_script = None
-
- # Verify the exception is raised when the `set_status` command line
- # argument does not exist in the mapping.
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, nonexistent_update_status, temp_json_file,
- path_to_chroot, custom_script)
-
- self.assertEqual(
- str(err.exception),
- 'Invalid "set_status" option provided: revert_status')
-
- mock_find_tryjob_index.assert_called_once()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lock_machine.py b/lock_machine.py
deleted file mode 100755
index 40c7d8fd..00000000
--- a/lock_machine.py
+++ /dev/null
@@ -1,618 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module controls locking and unlocking of test machines."""
-
-from __future__ import print_function
-
-import argparse
-import enum
-import getpass
-import os
-import sys
-
-import file_lock_machine
-
-from cros_utils import command_executer
-from cros_utils import logger
-from cros_utils import machines
-
-
-class LockException(Exception):
- """Base class for exceptions in this module."""
-
-
-class MachineNotPingable(LockException):
- """Raised when machine does not respond to ping."""
-
-
-class LockingError(LockException):
- """Raised when server fails to lock/unlock machine as requested."""
-
-
-class DontOwnLock(LockException):
- """Raised when user attmepts to unlock machine locked by someone else."""
- # This should not be raised if the user specified '--force'
-
-
-class NoAFEServer(LockException):
- """Raised when cannot find/access the autotest server."""
-
-
-class AFEAccessError(LockException):
- """Raised when cannot get information about lab machine from lab server."""
-
-
-class MachineType(enum.Enum):
- """Enum class to hold machine type."""
- AFE = 'afe'
- LOCAL = 'local'
- SKYLAB = 'skylab'
-
-
-class LockManager(object):
- """Class for locking/unlocking machines vie three different modes.
-
- This class contains methods for checking the locked status of machines,
- and for changing the locked status. It handles HW lab machines (both AFE
- and Skylab), and local machines, using appropriate locking mechanisms for
- each.
-
- !!!IMPORTANT NOTE!!! The AFE server can only be called from the main
- thread/process of a program. If you launch threads and try to call it
- from a thread, you will get an error. This has to do with restrictions
- in the Python virtual machine (and signal handling) and cannot be changed.
- """
-
- SKYLAB_PATH = '/usr/local/bin/skylab'
- LEASE_MINS = 600
- SKYLAB_CREDENTIAL = '/usr/local/google/home/mobiletc-prebuild' \
- '/sheriff_utils/skylab_credential' \
- '/chromeos-swarming-credential.json'
- SWARMING = 'chromite/third_party/swarming.client/swarming.py'
- SUCCESS = 0
-
- def __init__(self,
- remotes,
- force_option,
- chromeos_root,
- locks_dir='',
- log=None):
- """Initializes an LockManager object.
-
- Args:
- remotes: A list of machine names or ip addresses to be managed. Names
- and ip addresses should be represented as strings. If the list is
- empty, the lock manager will get all known machines.
- force_option: A Boolean indicating whether or not to force an unlock of
- a machine that was locked by someone else.
- chromeos_root: The ChromeOS chroot to use for the autotest scripts.
- locks_dir: A directory used for file locking local devices.
- log: If not None, this is the logger object to be used for writing out
- informational output messages. It is expected to be an instance of
- Logger class from cros_utils/logger.py.
- """
- self.chromeos_root = chromeos_root
- self.user = getpass.getuser()
- self.logger = log or logger.GetLogger()
- self.ce = command_executer.GetCommandExecuter(self.logger)
- autotest_path = os.path.join(chromeos_root,
- 'src/third_party/autotest/files')
-
- sys.path.append(chromeos_root)
- sys.path.append(autotest_path)
- sys.path.append(os.path.join(autotest_path, 'server', 'cros'))
-
- self.locks_dir = locks_dir
-
- # We have to wait to do these imports until the paths above have
- # been fixed.
- # pylint: disable=import-error
- from client import setup_modules
- setup_modules.setup(
- base_path=autotest_path, root_module_name='autotest_lib')
-
- from dynamic_suite import frontend_wrappers
-
- self.afe = frontend_wrappers.RetryingAFE(
- timeout_min=30, delay_sec=10, debug=False, server='cautotest')
-
- self.machines = list(set(remotes)) or []
- self.toolchain_lab_machines = self.GetAllToolchainLabMachines()
-
- if not self.machines:
- self.machines = self.toolchain_lab_machines
- self.force = force_option
-
- self.local_machines = []
- self.skylab_machines = []
-
- def CheckMachine(self, machine, error_msg):
- """Verifies that machine is responding to ping.
-
- Args:
- machine: String containing the name or ip address of machine to check.
- error_msg: Message to print if ping fails.
-
- Raises:
- MachineNotPingable: If machine is not responding to 'ping'
- """
- if not machines.MachineIsPingable(machine, logging_level='none'):
- cros_machine = machine + '.cros'
- if not machines.MachineIsPingable(cros_machine, logging_level='none'):
- raise MachineNotPingable(error_msg)
-
- def GetAllToolchainLabMachines(self):
- """Gets a list of all the toolchain machines in the ChromeOS HW lab.
-
- Returns:
- A list of names of the toolchain machines in the ChromeOS HW lab.
- """
- machines_file = os.path.join(
- os.path.dirname(__file__), 'crosperf', 'default_remotes')
- machine_list = []
- with open(machines_file, 'r') as input_file:
- lines = input_file.readlines()
- for line in lines:
- _, remotes = line.split(':')
- remotes = remotes.strip()
- for r in remotes.split():
- machine_list.append(r.strip())
- return machine_list
-
- def GetMachineType(self, m):
- """Get where the machine is located.
-
- Args:
- m: String containing the name or ip address of machine.
-
- Returns:
- Value of the type in MachineType Enum.
- """
- if m in self.local_machines:
- return MachineType.LOCAL
- if m in self.skylab_machines:
- return MachineType.SKYLAB
- return MachineType.AFE
-
- def PrintStatusHeader(self):
- """Prints the status header lines for machines."""
- print('\nMachine (Board)\t\t\t\t\tStatus')
- print('---------------\t\t\t\t\t------')
-
- def PrintStatus(self, m, state, machine_type):
- """Prints status for a single machine.
-
- Args:
- m: String containing the name or ip address of machine.
- state: A dictionary of the current state of the machine.
- machine_type: MachineType to determine where the machine is located.
- """
- if machine_type == MachineType.AFE and not m.endswith('.cros'):
- m += '.cros'
- if state['locked']:
- print('%s (%s)\t\t%slocked by %s since %s' %
- (m, state['board'], '\t\t' if machine_type == MachineType.LOCAL else
- '', state['locked_by'], state['lock_time']))
- else:
- print(
- '%s (%s)\t\t%sunlocked' % (m, state['board'], '\t\t' if
- machine_type == MachineType.LOCAL else ''))
-
- def AddMachineToLocal(self, machine):
- """Adds a machine to local machine list.
-
- Args:
- machine: The machine to be added.
- """
- if machine not in self.local_machines:
- self.local_machines.append(machine)
-
- def AddMachineToSkylab(self, machine):
- """Adds a machine to skylab machine list.
-
- Args:
- machine: The machine to be added.
- """
- if machine not in self.skylab_machines:
- self.skylab_machines.append(machine)
-
- def ListMachineStates(self, machine_states):
- """Gets and prints the current status for a list of machines.
-
- Prints out the current status for all of the machines in the current
- LockManager's list of machines (set when the object is initialized).
-
- Args:
- machine_states: A dictionary of the current state of every machine in
- the current LockManager's list of machines. Normally obtained by
- calling LockManager::GetMachineStates.
- """
- self.PrintStatusHeader()
- for m in machine_states:
- machine_type = self.GetMachineType(m)
- state = machine_states[m]
- self.PrintStatus(m, state, machine_type)
-
- def UpdateLockInAFE(self, should_lock_machine, machine):
- """Calls an AFE server to lock/unlock a machine.
-
- Args:
- should_lock_machine: Boolean indicating whether to lock the machine (True)
- or unlock the machine (False).
- machine: The machine to update.
-
- Returns:
- True if requested action succeeded, else False.
- """
- kwargs = {'locked': should_lock_machine}
- if should_lock_machine:
- kwargs['lock_reason'] = 'toolchain user request (%s)' % self.user
-
- m = machine.split('.')[0]
- afe_server = self.afe
-
- try:
- afe_server.run(
- 'modify_hosts',
- host_filter_data={'hostname__in': [m]},
- update_data=kwargs)
- except Exception:
- return False
- return True
-
- def UpdateLockInSkylab(self, should_lock_machine, machine):
- """Ask skylab to lease/release a machine.
-
- Args:
- should_lock_machine: Boolean indicating whether to lock the machine (True)
- or unlock the machine (False).
- machine: The machine to update.
-
- Returns:
- True if requested action succeeded, else False.
- """
- try:
- if should_lock_machine:
- ret = self.LeaseSkylabMachine(machine)
- else:
- ret = self.ReleaseSkylabMachine(machine)
- except Exception:
- return False
- return ret
-
- def UpdateFileLock(self, should_lock_machine, machine):
- """Use file lock for local machines,
-
- Args:
- should_lock_machine: Boolean indicating whether to lock the machine (True)
- or unlock the machine (False).
- machine: The machine to update.
-
- Returns:
- True if requested action succeeded, else False.
- """
- try:
- if should_lock_machine:
- ret = file_lock_machine.Machine(machine, self.locks_dir).Lock(
- True, sys.argv[0])
- else:
- ret = file_lock_machine.Machine(machine, self.locks_dir).Unlock(True)
- except Exception:
- return False
- return ret
-
- def UpdateMachines(self, lock_machines):
- """Sets the locked state of the machines to the requested value.
-
- The machines updated are the ones in self.machines (specified when the
- class object was intialized).
-
- Args:
- lock_machines: Boolean indicating whether to lock the machines (True) or
- unlock the machines (False).
-
- Returns:
- A list of the machines whose state was successfully updated.
- """
- updated_machines = []
- action = 'Locking' if lock_machines else 'Unlocking'
- for m in self.machines:
- # TODO(zhizhouy): Handling exceptions with more details when locking
- # doesn't succeed.
- machine_type = self.GetMachineType(m)
- if machine_type == MachineType.SKYLAB:
- ret = self.UpdateLockInSkylab(lock_machines, m)
- elif machine_type == MachineType.LOCAL:
- ret = self.UpdateFileLock(lock_machines, m)
- else:
- ret = self.UpdateLockInAFE(lock_machines, m)
-
- if ret:
- self.logger.LogOutput(
- '%s %s machine succeeded: %s.' % (action, machine_type.value, m))
- updated_machines.append(m)
- else:
- self.logger.LogOutput(
- '%s %s machine failed: %s.' % (action, machine_type.value, m))
-
- self.machines = updated_machines
- return updated_machines
-
- def _InternalRemoveMachine(self, machine):
- """Remove machine from internal list of machines.
-
- Args:
- machine: Name of machine to be removed from internal list.
- """
- # Check to see if machine is lab machine and if so, make sure it has
- # ".cros" on the end.
- cros_machine = machine
- if machine.find('rack') > 0 and machine.find('row') > 0:
- if machine.find('.cros') == -1:
- cros_machine = cros_machine + '.cros'
-
- self.machines = [
- m for m in self.machines if m != cros_machine and m != machine
- ]
-
- def CheckMachineLocks(self, machine_states, cmd):
- """Check that every machine in requested list is in the proper state.
-
- If the cmd is 'unlock' verify that every machine is locked by requestor.
- If the cmd is 'lock' verify that every machine is currently unlocked.
-
- Args:
- machine_states: A dictionary of the current state of every machine in
- the current LockManager's list of machines. Normally obtained by
- calling LockManager::GetMachineStates.
- cmd: The user-requested action for the machines: 'lock' or 'unlock'.
-
- Raises:
- DontOwnLock: The lock on a requested machine is owned by someone else.
- """
- for k, state in machine_states.iteritems():
- if cmd == 'unlock':
- if not state['locked']:
- self.logger.LogWarning('Attempt to unlock already unlocked machine '
- '(%s).' % k)
- self._InternalRemoveMachine(k)
-
- # TODO(zhizhouy): Skylab doesn't support host info such as locked_by.
- # Need to update this when skylab supports it.
- if (state['locked'] and state['locked_by'] and
- state['locked_by'] != self.user):
- raise DontOwnLock('Attempt to unlock machine (%s) locked by someone '
- 'else (%s).' % (k, state['locked_by']))
- elif cmd == 'lock':
- if state['locked']:
- self.logger.LogWarning(
- 'Attempt to lock already locked machine (%s)' % k)
- self._InternalRemoveMachine(k)
-
- def GetMachineStates(self, cmd=''):
- """Gets the current state of all the requested machines.
-
- Gets the current state of all the requested machines. Stores the data in a
- dictionary keyed by machine name.
-
- Args:
- cmd: The command for which we are getting the machine states. This is
- important because if one of the requested machines is missing we raise
- an exception, unless the requested command is 'add'.
-
- Returns:
- A dictionary of machine states for all the machines in the LockManager
- object.
-
- Raises:
- NoAFEServer: Cannot find the HW Lab AFE server.
- AFEAccessError: An error occurred when querying the server about a
- machine.
- """
- if not self.afe:
- raise NoAFEServer('Error: Cannot connect to main AFE server.')
-
- machine_list = {}
- for m in self.machines:
- # For local or skylab machines, we simply set {'locked': status} for them
- # TODO(zhizhouy): This is a quick fix since skylab cannot return host info
- # as afe does. We need to get more info such as locked_by when skylab
- # supports that.
- if m in self.local_machines or m in self.skylab_machines:
- values = {
- 'locked': 0 if cmd == 'lock' else 1,
- 'board': '??',
- 'locked_by': '',
- 'lock_time': ''
- }
- machine_list[m] = values
- else:
- # For autotest machines, we use afe APIs to get locking info.
- mod_host = m.split('.')[0]
- host_info = self.afe.get_hosts(hostname=mod_host)
- if not host_info:
- raise AFEAccessError('Unable to get information about %s from main'
- ' autotest server.' % m)
- host_info = host_info[0]
- name = host_info.hostname
- values = {}
- values['board'] = host_info.platform if host_info.platform else '??'
- values['locked'] = host_info.locked
- if host_info.locked:
- values['locked_by'] = host_info.locked_by
- values['lock_time'] = host_info.lock_time
- else:
- values['locked_by'] = ''
- values['lock_time'] = ''
- machine_list[name] = values
-
- self.ListMachineStates(machine_list)
-
- return machine_list
-
- def CheckMachineInSkylab(self, machine):
- """Run command to check if machine is in Skylab or not.
-
- Returns:
- True if machine in skylab, else False
- """
- credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '--auth-service-account-json %s' % self.SKYLAB_CREDENTIAL
- swarming = os.path.join(self.chromeos_root, self.SWARMING)
- cmd = (('%s query --swarming https://chromeos-swarming.appspot.com ' \
- "%s 'bots/list?is_dead=FALSE&dimensions=dut_name:%s'") % \
- (swarming,
- credential,
- machine.rstrip('.cros')))
- ret_tup = self.ce.RunCommandWOutput(cmd)
- # The command will return a json output as stdout. If machine not in skylab
- # stdout will look like this:
- # {
- # "death_timeout": "600",
- # "now": "TIMESTAMP"
- # }
- # Otherwise there will be a tuple starting with 'items', we simply detect
- # this keyword for result.
- if 'items' not in ret_tup[1]:
- return False
- else:
- return True
-
- def LeaseSkylabMachine(self, machine):
- """Run command to lease dut from skylab.
-
- Returns:
- True if succeeded, False if failed.
- """
- credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '-service-account-json %s' % self.SKYLAB_CREDENTIAL
- cmd = (('%s lease-dut -minutes %s %s %s') % \
- (self.SKYLAB_PATH,
- self.LEASE_MINS,
- credential,
- machine.rstrip('.cros')))
- # Wait 120 seconds for server to start the lease task, if not started,
- # we will treat it as unavailable.
- check_interval_time = 120
- retval = self.ce.RunCommand(cmd, command_timeout=check_interval_time)
- return retval == self.SUCCESS
-
- def ReleaseSkylabMachine(self, machine):
- """Run command to release dut from skylab.
-
- Returns:
- True if succeeded, False if failed.
- """
- credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '-service-account-json %s' % self.SKYLAB_CREDENTIAL
- cmd = (('%s release-dut %s %s') % \
- (self.SKYLAB_PATH,
- credential,
- machine.rstrip('.cros')))
- retval = self.ce.RunCommand(cmd)
- return retval == self.SUCCESS
-
-
-def Main(argv):
- """Parse the options, initialize lock manager and dispatch proper method.
-
- Args:
- argv: The options with which this script was invoked.
-
- Returns:
- 0 unless an exception is raised.
- """
- parser = argparse.ArgumentParser()
-
- parser.add_argument(
- '--list',
- dest='cmd',
- action='store_const',
- const='status',
- help='List current status of all known machines.')
- parser.add_argument(
- '--lock',
- dest='cmd',
- action='store_const',
- const='lock',
- help='Lock given machine(s).')
- parser.add_argument(
- '--unlock',
- dest='cmd',
- action='store_const',
- const='unlock',
- help='Unlock given machine(s).')
- parser.add_argument(
- '--status',
- dest='cmd',
- action='store_const',
- const='status',
- help='List current status of given machine(s).')
- parser.add_argument(
- '--remote', dest='remote', help='machines on which to operate')
- parser.add_argument(
- '--chromeos_root',
- dest='chromeos_root',
- required=True,
- help='ChromeOS root to use for autotest scripts.')
- parser.add_argument(
- '--force',
- dest='force',
- action='store_true',
- default=False,
- help='Force lock/unlock of machines, even if not'
- ' current lock owner.')
-
- options = parser.parse_args(argv)
-
- if not options.remote and options.cmd != 'status':
- parser.error('No machines specified for operation.')
-
- if not os.path.isdir(options.chromeos_root):
- parser.error('Cannot find chromeos_root: %s.' % options.chromeos_root)
-
- if not options.cmd:
- parser.error('No operation selected (--list, --status, --lock, --unlock,'
- ' --add_machine, --remove_machine).')
-
- machine_list = []
- if options.remote:
- machine_list = options.remote.split()
-
- lock_manager = LockManager(machine_list, options.force, options.chromeos_root)
-
- machine_states = lock_manager.GetMachineStates(cmd=options.cmd)
- cmd = options.cmd
-
- if cmd == 'status':
- lock_manager.ListMachineStates(machine_states)
-
- elif cmd == 'lock':
- if not lock_manager.force:
- lock_manager.CheckMachineLocks(machine_states, cmd)
- lock_manager.UpdateMachines(True)
-
- elif cmd == 'unlock':
- if not lock_manager.force:
- lock_manager.CheckMachineLocks(machine_states, cmd)
- lock_manager.UpdateMachines(False)
-
- elif cmd == 'add':
- lock_manager.AddMachinesToLocalServer()
-
- elif cmd == 'remove':
- lock_manager.RemoveMachinesFromLocalServer()
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(Main(sys.argv[1:]))
diff --git a/file_lock_machine_test.py b/lock_machine_test.py
index 340f9149..0ffe094d 100755..100644
--- a/file_lock_machine_test.py
+++ b/lock_machine_test.py
@@ -1,10 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
+# Copyright 2010 Google Inc. All Rights Reserved.
"""lock_machine.py related unit-tests.
MachineManagerTest tests MachineManager.
@@ -22,7 +16,7 @@ import file_lock_machine
def LockAndSleep(machine):
- file_lock_machine.Machine(machine, '/tmp', auto=True).Lock(exclusive=True)
+ file_lock_machine.Machine(machine, auto=True).Lock(exclusive=True)
time.sleep(1)
@@ -33,12 +27,12 @@ class MachineTest(unittest.TestCase):
pass
def testRepeatedUnlock(self):
- mach = file_lock_machine.Machine('qqqraymes.mtv', '/tmp')
+ mach = file_lock_machine.Machine('qqqraymes.mtv')
for _ in range(10):
- self.assertTrue(mach.Unlock())
- mach = file_lock_machine.Machine('qqqraymes.mtv', '/tmp', auto=True)
+ self.assertFalse(mach.Unlock())
+ mach = file_lock_machine.Machine('qqqraymes.mtv', auto=True)
for _ in range(10):
- self.assertTrue(mach.Unlock())
+ self.assertFalse(mach.Unlock())
def testLockUnlock(self):
mach = file_lock_machine.Machine('otter.mtv', '/tmp')
@@ -52,7 +46,7 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Unlock(exclusive=True))
def testSharedLock(self):
- mach = file_lock_machine.Machine('chrotomation.mtv', '/tmp')
+ mach = file_lock_machine.Machine('chrotomation.mtv')
for _ in range(10):
self.assertTrue(mach.Lock(exclusive=False))
for _ in range(10):
@@ -60,7 +54,7 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Lock(exclusive=True))
self.assertTrue(mach.Unlock(exclusive=True))
- mach = file_lock_machine.Machine('chrotomation.mtv', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('chrotomation.mtv', auto=True)
for _ in range(10):
self.assertTrue(mach.Lock(exclusive=False))
for _ in range(10):
@@ -69,14 +63,14 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Unlock(exclusive=True))
def testExclusiveLock(self):
- mach = file_lock_machine.Machine('atree.mtv', '/tmp')
+ mach = file_lock_machine.Machine('atree.mtv')
self.assertTrue(mach.Lock(exclusive=True))
for _ in range(10):
self.assertFalse(mach.Lock(exclusive=True))
self.assertFalse(mach.Lock(exclusive=False))
self.assertTrue(mach.Unlock(exclusive=True))
- mach = file_lock_machine.Machine('atree.mtv', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('atree.mtv', auto=True)
self.assertTrue(mach.Lock(exclusive=True))
for _ in range(10):
self.assertFalse(mach.Lock(exclusive=True))
@@ -84,20 +78,20 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Unlock(exclusive=True))
def testExclusiveState(self):
- mach = file_lock_machine.Machine('testExclusiveState', '/tmp')
+ mach = file_lock_machine.Machine('testExclusiveState')
self.assertTrue(mach.Lock(exclusive=True))
for _ in range(10):
self.assertFalse(mach.Lock(exclusive=False))
self.assertTrue(mach.Unlock(exclusive=True))
- mach = file_lock_machine.Machine('testExclusiveState', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('testExclusiveState', auto=True)
self.assertTrue(mach.Lock(exclusive=True))
for _ in range(10):
self.assertFalse(mach.Lock(exclusive=False))
self.assertTrue(mach.Unlock(exclusive=True))
def testAutoLockGone(self):
- mach = file_lock_machine.Machine('lockgone', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('lockgone', auto=True)
p = Process(target=LockAndSleep, args=('lockgone',))
p.start()
time.sleep(1.1)
@@ -105,7 +99,7 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Lock(exclusive=True))
def testAutoLockFromOther(self):
- mach = file_lock_machine.Machine('other_lock', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('other_lock', auto=True)
p = Process(target=LockAndSleep, args=('other_lock',))
p.start()
time.sleep(0.5)
@@ -115,7 +109,7 @@ class MachineTest(unittest.TestCase):
self.assertTrue(mach.Lock(exclusive=True))
def testUnlockByOthers(self):
- mach = file_lock_machine.Machine('other_unlock', '/tmp', auto=True)
+ mach = file_lock_machine.Machine('other_unlock', auto=True)
p = Process(target=LockAndSleep, args=('other_unlock',))
p.start()
time.sleep(0.5)
diff --git a/mem_tests/README b/mem_tests/README
new file mode 100644
index 00000000..4e35f684
--- /dev/null
+++ b/mem_tests/README
@@ -0,0 +1,46 @@
+Usage
+-----
+
+These scripts are made to parse TCMalloc output in order to extract certain
+info from them.
+
+In particular, these scripts rely on the error logging system for ChromeOS in
+order to extract information. In order to use a script (e.g. total_mem.py), you
+just have the command:
+
+./total_mem.py FILENAME
+
+where FILENAME is the name of the log file to be parsed.
+
+Codebase Changes
+----------------
+
+There are two ideas that motivate these changes:
+
+1- Turn on TCMalloc sampling.
+2- Use perf to collect the sample information.
+
+The following files have to be changed:
+
+in chrome/browser/metrics/perf_provider_chrome_os:
+
+add:
+ #include "third_party/tcmalloc/chromium/src/gperftools/malloc_extension.h"
+
+Change the perf profiling interval to something small (60*1000 milliseconds).
+
+inside DoPeriodicCollection, insert the following code:
+
+ std::string output;
+ char* chr_arr = new char[9999];
+ MallocExtension::instance() ->GetHeapSample(&output);
+ MallocExtension::instance() ->GetStats(chr_arr, 9999);
+ LOG(ERROR) << "Output Heap Data: ";
+ LOG(ERROR) << output;
+ LOG(ERROR) << "Output Heap Stats: ";
+ output = "";
+ for (unsigned int i = 0; i < strlen(chr_arr); i++) {
+ output += chr_arr[i];
+ }
+ LOG(ERROR) << output;
+ delete[] chr_arr;
diff --git a/mem_tests/README.md b/mem_tests/README.md
deleted file mode 100644
index 44bf16c9..00000000
--- a/mem_tests/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Memory tests
-
-## Usage
-
-These scripts are made to parse TCMalloc output in order to extract certain
-info from them.
-
-In particular, these scripts rely on the error logging system for ChromeOS in
-order to extract information. In order to use a script (e.g. `total_mem.py`),
-you just have the command:
-
-```
-./total_mem.py FILENAME
-```
-
-where `FILENAME` is the name of the log file to be parsed.
-
-## Codebase Changes
-
-There are two ideas that motivate these changes:
-
-1. Turn on TCMalloc sampling.
-2. Use perf to collect the sample information.
-
-
-The following files have to be changed:
-
-in `chrome/browser/metrics/perf_provider_chrome_os`, add:
-
-```
-#include "third_party/tcmalloc/chromium/src/gperftools/malloc_extension.h"
-```
-
-Change the perf profiling interval to something small (60*1000 milliseconds).
-
-Inside DoPeriodicCollection, insert the following code:
-
-```
-std::string output;
-char* chr_arr = new char[9999];
-MallocExtension::instance() ->GetHeapSample(&output);
-MallocExtension::instance() ->GetStats(chr_arr, 9999);
-LOG(ERROR) << "Output Heap Data: ";
-LOG(ERROR) << output;
-LOG(ERROR) << "Output Heap Stats: ";
-output = "";
-for (unsigned int i = 0; i < strlen(chr_arr); i++) {
- output += chr_arr[i];
-}
-LOG(ERROR) << output;
-delete[] chr_arr;
-```
diff --git a/mem_tests/clean_data.py b/mem_tests/clean_data.py
index 1433ba41..f9a11e75 100755
--- a/mem_tests/clean_data.py
+++ b/mem_tests/clean_data.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#! /usr/bin/python
"""Cleans output from other scripts to eliminate duplicates.
When frequently sampling data, we see that records occasionally will contain
diff --git a/mem_tests/mem_groups.py b/mem_tests/mem_groups.py
index 5d593872..e2fbf271 100755
--- a/mem_tests/mem_groups.py
+++ b/mem_tests/mem_groups.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#! /usr/bin/python
"""Groups memory by allocation sizes.
Takes a log entry and sorts sorts everything into groups based on what size
diff --git a/mem_tests/total_mem_actual.py b/mem_tests/total_mem_actual.py
index d2a0cedf..2d53bebe 100755
--- a/mem_tests/total_mem_actual.py
+++ b/mem_tests/total_mem_actual.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#! /usr/bin/python
"""Parses the actual memory usage from TCMalloc.
This goes through logs that have the actual allocated memory (not sampled) in
diff --git a/mem_tests/total_mem_sampled.py b/mem_tests/total_mem_sampled.py
index 32aa527c..77dd6817 100755
--- a/mem_tests/total_mem_sampled.py
+++ b/mem_tests/total_mem_sampled.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#! /usr/bin/python
"""Parses the total amount of sampled memory from log files.
This file outputs the total amount of memory that has been sampled by tcmalloc.
diff --git a/orderfile/post_process_orderfile.py b/orderfile/post_process_orderfile.py
deleted file mode 100755
index e24ab1cd..00000000
--- a/orderfile/post_process_orderfile.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to do post-process on orderfile generated by C3.
-
-The goal of this script is to take in an orderfile generated by C3, and do
-the following post process:
-
-1. Take in the result of nm command on Chrome binary to find out all the
-Builtin functions and put them after the input symbols.
-
-2. Put special markers "chrome_begin_ordered_code" and "chrome_end_ordered_code"
-in begin and end of the file.
-
-The results of the file is intended to be uploaded and consumed when linking
-Chrome in Chrome OS.
-"""
-
-from __future__ import division, print_function
-
-import argparse
-import os
-import sys
-
-
-def _parse_nm_output(stream):
- for line in (line.rstrip() for line in stream):
- if not line:
- continue
-
- pieces = line.split()
- if len(pieces) != 3:
- continue
-
- _, ty, symbol = pieces
- if ty not in 'tT':
- continue
-
- # We'll sometimes see synthesized symbols that start with $. There isn't
- # much we can do about or with them, regrettably.
- if symbol.startswith('$'):
- continue
-
- yield symbol
-
-
-def _remove_duplicates(iterable):
- seen = set()
- for item in iterable:
- if item in seen:
- continue
- seen.add(item)
- yield item
-
-
-def run(c3_ordered_stream, chrome_nm_stream, output_stream):
- head_marker = 'chrome_begin_ordered_code'
- tail_marker = 'chrome_end_ordered_code'
-
- c3_ordered_syms = [x.strip() for x in c3_ordered_stream.readlines()]
- all_chrome_syms = set(_parse_nm_output(chrome_nm_stream))
- # Sort by name, so it's predictable. Otherwise, these should all land in the
- # same hugepage anyway, so order doesn't matter as much.
- builtin_syms = sorted(s for s in all_chrome_syms if s.startswith('Builtins_'))
- output = _remove_duplicates([head_marker] + c3_ordered_syms + builtin_syms +
- [tail_marker])
- output_stream.write('\n'.join(output))
-
-
-def main(argv):
- parser = argparse.ArgumentParser()
- parser.add_argument('--chrome_nm', required=True, dest='chrome_nm')
- parser.add_argument('--input', required=True, dest='input_file')
- parser.add_argument('--output', required=True, dest='output_file')
-
- options = parser.parse_args(argv)
-
- if not os.path.exists(options.input_file):
- sys.exit('Input orderfile doesn\'t exist.')
-
- with open(options.input_file) as in_stream, \
- open(options.chrome_nm) as chrome_nm_stream, \
- open(options.output_file, 'w') as out_stream:
- run(in_stream, chrome_nm_stream, out_stream)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/orderfile/post_process_orderfile_test.py b/orderfile/post_process_orderfile_test.py
deleted file mode 100755
index 2532b8b3..00000000
--- a/orderfile/post_process_orderfile_test.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for post_process_orderfile.py."""
-
-from __future__ import division, print_function
-
-import os
-import shutil
-import tempfile
-import unittest
-
-import post_process_orderfile
-
-
-def _write_nm_file(name):
- with open(name, 'w') as out:
- out.write('000001 s NotAValidSymbol1\n')
- out.write('000002 S NotAValidSymbol2\n')
- out.write('000010 t FirstValidSymbol\n')
- out.write('000012 t \n')
- out.write('000020 T Builtins_SecondValidSymbol\n')
- out.write('000030 T $SymbolToIgnore\n')
- out.write('000036 T Builtins_LastValidSymbol\n')
-
-
-def _write_orderfile(name):
- with open(name, 'w') as out:
- out.write('SymbolOrdered1\n')
- out.write('SymbolOrdered2\n')
-
-
-def _cleanup(files):
- for f in files:
- shutil.rmtree(f, ignore_errors=True)
-
-
-class Tests(unittest.TestCase):
- """All of our tests for post_process_orderfile."""
-
- #pylint: disable=protected-access
- def test__parse_nm_output(self):
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(_cleanup, [temp_dir])
- chrome_nm_file = os.path.join(temp_dir, 'chrome_nm.txt')
- _write_nm_file(chrome_nm_file)
- with open(chrome_nm_file) as f:
- results = list(post_process_orderfile._parse_nm_output(f))
- self.assertEqual(len(results), 3)
- self.assertIn('FirstValidSymbol', results)
- self.assertIn('Builtins_SecondValidSymbol', results)
- self.assertIn('Builtins_LastValidSymbol', results)
-
- def test__remove_duplicates(self):
- duplicates = ['marker1', 'marker2', 'marker3', 'marker2', 'marker1']
- results = list(post_process_orderfile._remove_duplicates(duplicates))
- self.assertEqual(results, ['marker1', 'marker2', 'marker3'])
-
- def test_run(self):
- temp_dir = tempfile.mkdtemp()
- self.addCleanup(_cleanup, [temp_dir])
- orderfile_input = os.path.join(temp_dir, 'orderfile.in.txt')
- orderfile_output = os.path.join(temp_dir, 'orderfile.out.txt')
- chrome_nm_file = os.path.join(temp_dir, 'chrome_nm.txt')
- _write_nm_file(chrome_nm_file)
- _write_orderfile(orderfile_input)
- with open(orderfile_input) as in_stream, \
- open(orderfile_output, 'w') as out_stream, \
- open(chrome_nm_file) as chrome_nm_stream:
- post_process_orderfile.run(in_stream, chrome_nm_stream, out_stream)
-
- with open(orderfile_output) as check:
- results = [x.strip() for x in check.readlines()]
- self.assertEqual(
- results,
- [
- # Start marker should be put first.
- 'chrome_begin_ordered_code',
- # Symbols in orderfile come next.
- 'SymbolOrdered1',
- 'SymbolOrdered2',
- # Builtin functions in chrome_nm come next, and sorted.
- 'Builtins_LastValidSymbol',
- 'Builtins_SecondValidSymbol',
- # Last symbol should be end marker.
- 'chrome_end_ordered_code'
- ])
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/perf-to-inst-page.sh b/perf-to-inst-page.sh
new file mode 100755
index 00000000..ba1d2582
--- /dev/null
+++ b/perf-to-inst-page.sh
@@ -0,0 +1,85 @@
+#! /bin/bash -u
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script first collects the addresses of the instructions being tracked by
+# the profile. After that, it calculates the offset of the addresses compared
+# to the base address and then gets the number of execution times for each
+# address. After that, it draws the heat map and the time map. A heap map shows
+# the overall hotness of instructions being executed while the time map shows the
+# hotness of instruction at different time.
+
+# binary : the name of the binary
+# profile : output of 'perf report -D'
+# loading_address : the loading address of the binary
+# page_size : the size to be displayed, usually 4096(byte).
+
+if [[ $# -ne 4 ]]; then
+ echo 'Illegal number of parameters' exit 1
+fi
+
+binary=$1
+profile=$2
+loading_address=$3
+page_size=$4
+
+# size of binary supported.
+binary_maximum=1000000000
+
+if ! [[ -e $profile ]] ; then
+ echo "error: profile does not exist" >&2; exit 1
+fi
+
+re='^[0-9]+$'
+if ! [[ $page_size =~ $re ]] ; then
+ echo "error: page_size is not a number" >&2; exit 1
+fi
+
+function test {
+ "$@"
+ local status=$?
+ if [ $status -ne 0 ]; then
+ echo "error with $1" >&2
+ fi
+ return $status
+}
+
+HEAT_PNG="heat_map.png"
+TIMELINE_PNG="timeline.png"
+
+test grep -A 2 PERF_RECORD_SAMPLE $profile | grep -A 1 -B 1 "thread: $binary" | \
+grep -B 2 "dso.*$binary$" | awk -v base=$loading_address \
+ "BEGIN { count=0; } /PERF_RECORD_SAMPLE/ {addr = strtonum(\$8) - strtonum(base); \
+ if (addr < $binary_maximum) count++; \
+ if (addr < $binary_maximum) print \$7,count,int(addr/$page_size)*$page_size}" > out.txt
+
+
+test awk '{print $3}' out.txt | sort -n | uniq -c > inst-histo.txt
+
+# generate inst heat map
+echo "
+set terminal png size 600,450
+set xlabel \"Instruction Virtual Address (MB)\"
+set ylabel \"Sample Occurance\"
+set grid
+
+set output \"${HEAT_PNG}\"
+set title \"Instruction Heat Map\"
+
+plot 'inst-histo.txt' using (\$2/1024/1024):1 with impulses notitle
+" | test gnuplot
+
+# generate instruction page access timeline
+num=$(awk 'END {print NR+1}' out.txt)
+
+echo "
+set terminal png size 600,450
+set xlabel \"time (sec)\"
+set ylabel \"Instruction Virtual Address (MB)\"
+
+set output \"${TIMELINE_PNG}\"
+set title \"instruction page accessd timeline\"
+
+plot 'out.txt' using (\$0/$num*10):(\$3/1024/1024) with dots notitle
+" | test gnuplot
diff --git a/pgo_tools/merge_profdata_and_upload.py b/pgo_tools/merge_profdata_and_upload.py
deleted file mode 100755
index dddc7f1e..00000000
--- a/pgo_tools/merge_profdata_and_upload.py
+++ /dev/null
@@ -1,271 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Download profdata from different arches, merge them and upload to gs
-
-The script is used for updating the PGO profiles for LLVM. The workflow
-is that the script will download profdata from different PGO builds, merge
-them and then upload it to a gs location that LLVM can access.
-
-The simplest way of using this script, is to run:
- ./merge_profdata_and_upload.py --all_latest_profiles
-which will automatically grab profdata from latest PGO generate builders
-for three different architectures and merge them. LLVM hash is also
-detected automatically from the artifacts.
-
-If you want to specify certain llvm hash, run it with:
- ./merge_profdata_and_upload.py --all_latest_profiles --llvm_hash LLVM_HASH
-Note that hash checking will fail if the llvm hash you provided is not the
-same as those in artifacts, or llvm hash in different artifacts are not the
-same.
-
-To only use profiles from PGO generate tryjob, run it with:
- ./merge_profdata_and_upload.py --nolatest -t TRYJOB1 -t TRYJOB2 ...
-Using of --nolatest will tell the script not to use any results from builders,
-and merge only the profdata from the tryjobs you specified.
-
-There is a chance that builders only succeeded partially, in this case, you
-can run this script to merge both profdata from builder and tryjob:
- ./merge_profdata_and_upload.py -l arm -l amd64 -t TRYJOB_FOR_ARM64
-In this example, the script will merge profdata from arm and amd64 builder, and
-profdata from an arm64 tryjob.
-"""
-
-from __future__ import print_function
-
-import argparse
-import collections
-import distutils.spawn
-import json
-import os
-import os.path
-import shutil
-import subprocess
-import sys
-import tempfile
-
-_LLVM_PROFDATA = '/usr/bin/llvm-profdata'
-_GS_PREFIX = 'gs://'
-
-_LLVMMetadata = collections.namedtuple('_LLVMMetadata', ['head_sha'])
-
-
-def _get_gs_latest(remote_lastest):
- assert remote_lastest.startswith(_GS_PREFIX)
- try:
- return subprocess.check_output(['gsutil', 'cat', remote_lastest])
- except subprocess.CalledProcessError:
- raise RuntimeError('Lastest artifacts not found: %s' % remote_lastest)
-
-
-def _fetch_gs_artifact(remote_name, local_name):
- assert remote_name.startswith(_GS_PREFIX)
-
- print('Fetching %r to %r' % (remote_name, local_name))
- subprocess.check_call(['gsutil', 'cp', remote_name, local_name])
-
-
-def _find_latest_artifacts(arch):
- remote_latest = (
- '%schromeos-image-archive/'
- '%s-pgo-generate-llvm-next-toolchain/LATEST-master' % (_GS_PREFIX, arch))
- version = _get_gs_latest(remote_latest)
- return '%s-pgo-generate-llvm-next-toolchain/%s' % (arch, version)
-
-
-def _get_gs_profdata(remote_base, base_dir):
- remote_profdata_basename = 'llvm_profdata.tar.xz'
-
- remote_profdata = os.path.join(remote_base, remote_profdata_basename)
- tar = 'llvm_profdata.tar.xz'
- _fetch_gs_artifact(remote_profdata, tar)
- extract_cmd = ['tar', '-xf', tar]
-
- print('Extracting profdata tarball.\nCMD: %s\n' % extract_cmd)
- subprocess.check_call(extract_cmd)
- # Return directory to the llvm.profdata extracted.
- if '-tryjob/' in base_dir:
- prefix = 'b/s/w/ir/cache/cbuild/repository/trybot_archive/'
- else:
- prefix = 'b/s/w/ir/cache/cbuild/repository/buildbot_archive/'
- return os.path.join(prefix, base_dir, 'llvm.profdata')
-
-
-def _get_gs_metadata(remote_base):
- metadata_basename = 'llvm_metadata.json'
- _fetch_gs_artifact(
- os.path.join(remote_base, metadata_basename), metadata_basename)
-
- with open(metadata_basename) as f:
- result = json.load(f)
-
- return _LLVMMetadata(head_sha=result['head_sha'])
-
-
-def _get_gs_artifacts(base_dir):
- remote_base = '%schromeos-image-archive/%s' % (_GS_PREFIX, base_dir)
- profile_path = _get_gs_profdata(remote_base, base_dir)
- metadata = _get_gs_metadata(remote_base)
- return metadata, profile_path
-
-
-def _merge_profdata(profdata_list, output_name):
- merge_cmd = [_LLVM_PROFDATA, 'merge', '-output', output_name] + profdata_list
- print('Merging PGO profiles.\nCMD: %s\n' % merge_cmd)
- subprocess.check_call(merge_cmd)
-
-
-def _tar_and_upload_profdata(profdata, name_suffix):
- tarball = 'llvm-profdata-%s.tar.xz' % name_suffix
- print('Making profdata tarball: %s' % tarball)
- subprocess.check_call(
- ['tar', '--sparse', '-I', 'xz', '-cf', tarball, profdata])
-
- upload_location = '%schromeos-localmirror/distfiles/%s' % (_GS_PREFIX,
- tarball)
-
- # TODO: it's better to create a subdir: distfiles/llvm_pgo_profile, but
- # now llvm could only recognize distfiles.
- upload_cmd = [
- 'gsutil',
- '-m',
- 'cp',
- '-n',
- '-a',
- 'public-read',
- tarball,
- upload_location,
- ]
- print('Uploading tarball to gs.\nCMD: %s\n' % upload_cmd)
-
- # gsutil prints all status to stderr, oddly enough.
- gs_output = subprocess.check_output(upload_cmd, stderr=subprocess.STDOUT)
- print(gs_output)
-
- # gsutil exits successfully even if it uploaded nothing. It prints a summary
- # of what all it did, though. Successful uploads are just a progress bar,
- # unsuccessful ones note that items were skipped.
- if 'Skipping existing item' in gs_output:
- raise ValueError('Profile upload failed: would overwrite an existing '
- 'profile at %s' % upload_location)
-
-
-def main():
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '-a',
- '--all_latest_profiles',
- action='store_true',
- help='Merge and upload profiles from the latest builders.')
- parser.add_argument(
- '-l',
- '--latest',
- default=[],
- action='append',
- help='User can specify the profdata from which builder with specific '
- 'architecture to download. By default, we merge profdata from arm, '
- 'arm64, amd64.')
- parser.add_argument(
- '-t',
- '--tryjob',
- default=[],
- action='append',
- help='Extra pgo-generate-llvm-next-toolchain/tryjob results to be used. '
- 'Format should be '
- '{arch}-pgo-generate-llvm-next-toolchain(-tryjob)/{VERSION}.')
- parser.add_argument(
- '-o',
- '--output',
- default='llvm.profdata',
- help='Where to put merged PGO profile. The default is to not save it '
- 'anywhere.')
- parser.add_argument(
- '--llvm_hash',
- help='The LLVM hash to select for the profiles. Generally autodetected.')
- args = parser.parse_args()
-
- if not args.all_latest_profiles and not (args.latest or args.tryjob):
- sys.exit('Please specify whether to use latest profiles or profiles from '
- 'tryjobs')
-
- if args.all_latest_profiles and (args.latest or args.tryjob):
- sys.exit('--all_latest_profiles cannot be specified together with '
- '--latest or --tryjob.')
-
- latest = ['arm', 'arm64', 'amd64'] \
- if args.all_latest_profiles else args.latest
-
- if not distutils.spawn.find_executable(_LLVM_PROFDATA):
- sys.exit(_LLVM_PROFDATA + ' not found; are you in the chroot?')
-
- initial_dir = os.getcwd()
- temp_dir = tempfile.mkdtemp(prefix='merge_pgo')
- success = True
- try:
- os.chdir(temp_dir)
- profdata_list = []
- heads = set()
-
- def fetch_and_append_artifacts(gs_url):
- llvm_metadata, profdata_loc = _get_gs_artifacts(gs_url)
- if os.path.getsize(profdata_loc) < 512 * 1024:
- raise RuntimeError('The PGO profile in %s (local path: %s) is '
- 'suspiciously small. Something might have gone '
- 'wrong.' % (gs_url, profdata_loc))
-
- heads.add(llvm_metadata.head_sha)
- profdata_list.append(profdata_loc)
-
- for arch in latest:
- fetch_and_append_artifacts(_find_latest_artifacts(arch))
-
- if args.tryjob:
- for tryjob in args.tryjob:
- fetch_and_append_artifacts(tryjob)
-
- assert heads, 'Didn\'t fetch anything?'
-
- def die_with_head_complaint(complaint):
- extra = ' (HEADs found: %s)' % sorted(heads)
- raise RuntimeError(complaint.rstrip() + extra)
-
- llvm_hash = args.llvm_hash
- if not llvm_hash:
- if len(heads) != 1:
- die_with_head_complaint(
- '%d LLVM HEADs were found, which is more than one. You probably '
- 'want a consistent set of HEADs for a profile. If you know you '
- 'don\'t, please specify --llvm_hash, and note that *all* profiles '
- 'will be merged into this final profile, regardless of their '
- 'reported HEAD.' % len(heads))
- llvm_hash, = heads
-
- if llvm_hash not in heads:
- assert llvm_hash == args.llvm_hash
- die_with_head_complaint(
- 'HEAD %s wasn\'t found in any fetched artifacts.' % llvm_hash)
-
- print('Using LLVM hash: %s' % llvm_hash)
-
- _merge_profdata(profdata_list, args.output)
- print('Merged profdata locates at %s\n' % os.path.abspath(args.output))
- _tar_and_upload_profdata(args.output, name_suffix=llvm_hash)
- print('Merged profdata uploaded successfully.')
- except:
- success = False
- raise
- finally:
- os.chdir(initial_dir)
- if success:
- print('Clearing temp directory.')
- shutil.rmtree(temp_dir, ignore_errors=True)
- else:
- print('Script fails, temp directory is at: %s' % temp_dir)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/run_tests.py b/run_tests.py
new file mode 100755
index 00000000..e1b8ca2f
--- /dev/null
+++ b/run_tests.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+"""Script to wrap run_remote_tests.sh script.
+
+This script calls run_remote_tests.sh with standard tests.
+"""
+
+from __future__ import print_function
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import sys
+
+
+def Main():
+ """The main function."""
+ print('This script is deprecated. Use crosperf for running tests.')
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/run_tests_for.py b/run_tests_for.py
deleted file mode 100755
index 6f77b12c..00000000
--- a/run_tests_for.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs tests for the given input files.
-
-Tries its best to autodetect all tests based on path name without being *too*
-aggressive.
-
-In short, there's a small set of directories in which, if you make any change,
-all of the tests in those directories get run. Additionally, if you change a
-python file named foo, it'll run foo_test.py or foo_unittest.py if either of
-those exist.
-
-All tests are run in parallel.
-"""
-
-# NOTE: An alternative mentioned on the initial CL for this
-# https://chromium-review.googlesource.com/c/chromiumos/third_party/toolchain-utils/+/1516414
-# is pytest. It looks like that brings some complexity (and makes use outside
-# of the chroot a bit more obnoxious?), but might be worth exploring if this
-# starts to grow quite complex on its own.
-
-from __future__ import print_function
-
-import argparse
-import collections
-import contextlib
-import multiprocessing.pool
-import os
-import pipes
-import subprocess
-import sys
-
-TestSpec = collections.namedtuple('TestSpec', ['directory', 'command'])
-
-
-def _make_relative_to_toolchain_utils(toolchain_utils, path):
- """Cleans & makes a path relative to toolchain_utils.
-
- Raises if that path isn't under toolchain_utils.
- """
- # abspath has the nice property that it removes any markers like './'.
- as_abs = os.path.abspath(path)
- result = os.path.relpath(as_abs, start=toolchain_utils)
-
- if result.startswith('../'):
- raise ValueError('Non toolchain-utils directory found: %s' % result)
- return result
-
-
-def _gather_python_tests_in(subdir):
- """Returns all files that appear to be Python tests in a given directory."""
- test_files = (
- os.path.join(subdir, file_name)
- for file_name in os.listdir(subdir)
- if file_name.endswith('_test.py') or file_name.endswith('_unittest.py'))
- return [_python_test_to_spec(test_file) for test_file in test_files]
-
-
-def _run_test(test_spec):
- """Runs a test."""
- p = subprocess.Popen(
- test_spec.command,
- cwd=test_spec.directory,
- stdin=open('/dev/null'),
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- stdout, _ = p.communicate()
- exit_code = p.wait()
- return exit_code, stdout
-
-
-def _python_test_to_spec(test_file):
- """Given a .py file, convert it to a TestSpec."""
- # Run tests in the directory they exist in, since some of them are sensitive
- # to that.
- test_directory = os.path.dirname(os.path.abspath(test_file))
- file_name = os.path.basename(test_file)
-
- if os.access(test_file, os.X_OK):
- command = ['./' + file_name]
- else:
- # Assume the user wanted py2.
- command = ['python2', file_name]
-
- return TestSpec(directory=test_directory, command=command)
-
-
-def _autodetect_python_tests_for(test_file):
- """Given a test file, detect if there may be related tests."""
- if not test_file.endswith('.py'):
- return []
-
- test_suffixes = ['_test.py', '_unittest.py']
- if any(test_file.endswith(x) for x in test_suffixes):
- test_files = [test_file]
- else:
- base = test_file[:-3]
- candidates = (base + x for x in test_suffixes)
- test_files = (x for x in candidates if os.path.exists(x))
-
- return [_python_test_to_spec(test_file) for test_file in test_files]
-
-
-def _run_test_scripts(all_tests, show_successful_output=False):
- """Runs a list of TestSpecs. Returns whether all of them succeeded."""
- with contextlib.closing(multiprocessing.pool.ThreadPool()) as pool:
- results = [pool.apply_async(_run_test, (test,)) for test in all_tests]
-
- failures = []
- for i, (test, future) in enumerate(zip(all_tests, results)):
- # Add a bit more spacing between outputs.
- if show_successful_output and i:
- print('\n')
-
- pretty_test = ' '.join(pipes.quote(test_arg) for test_arg in test.command)
- pretty_directory = os.path.relpath(test.directory)
- if pretty_directory == '.':
- test_message = pretty_test
- else:
- test_message = '%s in %s/' % (pretty_test, pretty_directory)
-
- print('## %s ... ' % test_message, end='')
- # Be sure that the users sees which test is running.
- sys.stdout.flush()
-
- exit_code, stdout = future.get()
- if not exit_code:
- print('PASS')
- else:
- print('FAIL')
- failures.append(pretty_test)
-
- if show_successful_output or exit_code:
- sys.stdout.write(stdout)
-
- if failures:
- word = 'tests' if len(failures) > 1 else 'test'
- print('%d %s failed: %s' % (len(failures), word, failures))
-
- return not failures
-
-
-def _compress_list(l):
- """Removes consecutive duplicate elements from |l|.
-
- >>> _compress_list([])
- []
- >>> _compress_list([1, 1])
- [1]
- >>> _compress_list([1, 2, 1])
- [1, 2, 1]
- """
- result = []
- for e in l:
- if result and result[-1] == e:
- continue
- result.append(e)
- return result
-
-
-def _fix_python_path(toolchain_utils):
- pypath = os.environ.get('PYTHONPATH', '')
- if pypath:
- pypath = ':' + pypath
- os.environ['PYTHONPATH'] = toolchain_utils + pypath
-
-
-def _find_forced_subdir_python_tests(test_paths, toolchain_utils):
- assert all(os.path.isabs(path) for path in test_paths)
-
- # Directories under toolchain_utils for which any change will cause all tests
- # in that directory to be rerun. Includes changes in subdirectories.
- all_dirs = {
- 'crosperf',
- 'cros_utils',
- }
-
- relative_paths = [
- _make_relative_to_toolchain_utils(toolchain_utils, path)
- for path in test_paths
- ]
-
- gather_test_dirs = set()
-
- for path in relative_paths:
- top_level_dir = path.split('/')[0]
- if top_level_dir in all_dirs:
- gather_test_dirs.add(top_level_dir)
-
- results = []
- for d in sorted(gather_test_dirs):
- results += _gather_python_tests_in(os.path.join(toolchain_utils, d))
- return results
-
-
-def _find_go_tests(test_paths):
- """Returns TestSpecs for the go folders of the given files"""
- assert all(os.path.isabs(path) for path in test_paths)
-
- dirs_with_gofiles = set(
- os.path.dirname(p) for p in test_paths if p.endswith('.go'))
- command = ['go', 'test', '-vet=all']
- # Note: We sort the directories to be deterministic.
- return [
- TestSpec(directory=d, command=command) for d in sorted(dirs_with_gofiles)
- ]
-
-
-def main(argv):
- default_toolchain_utils = os.path.abspath(os.path.dirname(__file__))
-
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--show_all_output',
- action='store_true',
- help='show stdout of successful tests')
- parser.add_argument(
- '--toolchain_utils',
- default=default_toolchain_utils,
- help='directory of toolchain-utils. Often auto-detected')
- parser.add_argument(
- 'file', nargs='*', help='a file that we should run tests for')
- args = parser.parse_args(argv)
-
- modified_files = [os.path.abspath(f) for f in args.file]
- show_all_output = args.show_all_output
- toolchain_utils = args.toolchain_utils
-
- if not modified_files:
- print('No files given. Exit.')
- return 0
-
- _fix_python_path(toolchain_utils)
-
- tests_to_run = _find_forced_subdir_python_tests(modified_files,
- toolchain_utils)
- for f in modified_files:
- tests_to_run += _autodetect_python_tests_for(f)
- tests_to_run += _find_go_tests(modified_files)
-
- # TestSpecs have lists, so we can't use a set. We'd likely want to keep them
- # sorted for determinism anyway.
- tests_to_run.sort()
- tests_to_run = _compress_list(tests_to_run)
-
- success = _run_test_scripts(tests_to_run, show_all_output)
- return 0 if success else 1
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/test_gcc_dejagnu.py b/test_gcc_dejagnu.py
index 4ea0c51e..cd2e0cde 100755
--- a/test_gcc_dejagnu.py
+++ b/test_gcc_dejagnu.py
@@ -55,7 +55,7 @@ class DejagnuAdapter(object):
raise RuntimeError('Failed to create chroot.')
def SetupBoard(self):
- cmd = 'setup_board --board=' + self._board
+ cmd = './setup_board --board=' + self._board
ret = self._cmd_exec.ChrootRunCommand(
self._chromeos_root, cmd, terminated_timeout=4000)
if ret:
diff --git a/test_gdb_dejagnu.py b/test_gdb_dejagnu.py
index 6f37a4c9..c2a4ba9a 100755
--- a/test_gdb_dejagnu.py
+++ b/test_gdb_dejagnu.py
@@ -42,7 +42,7 @@ class DejagnuAdapter(object):
raise RuntimeError('Failed to create chroot.')
def SetupBoard(self):
- cmd = 'setup_board --board=' + self._board
+ cmd = './setup_board --board=' + self._board
ret = self._cmd_exec.ChrootRunCommand(
self._chromeos_root, cmd, terminated_timeout=4000)
if ret:
diff --git a/toolchain_utils_githooks/check-format b/toolchain_utils_githooks/check-format
deleted file mode 100755
index 372cc483..00000000
--- a/toolchain_utils_githooks/check-format
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/bin/bash -e
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# This script checks the format of the given files. If any look incorrectly
-# formatted, this will complain about them and exit. At the moment, it only
-# checks the format of Python.
-#
-# FIXME: It would be nice if YAPF supported tweaking quotes:
-# https://github.com/google/yapf/issues/399
-
-if [[ $# -eq 0 ]]; then
- echo "No files were given to check the format of." >&2
- echo "Usage: $0 file1 file2 ..." >&2
- exit 1
-fi
-
-yapf=yapf
-gofmt=gofmt
-
-if ! type "${yapf}" >/dev/null 2>&1; then
- echo "${yapf} isn't on your \$PATH. Please either enter a chroot, or place" \
- "depot_tools on your \$PATH." >&2
- exit 1
-fi
-
-if ! type "${gofmt}" >/dev/null 2>&1; then
- echo "${gofmt} isn't on your \$PATH. Please either enter a chroot, or add " \
- "the go binaries to your \$PATH." >&2
- exit 1
-fi
-
-status_to_tf() {
- if "$@" >& /dev/null; then
- echo true
- else
- echo false
- fi
-}
-
-complain_about_missing=$(status_to_tf test -z "${IGNORE_MISSING}")
-
-check_python_file_header() {
- local py_file="$1"
- local needs_hashbang=$(status_to_tf test -x "${py_file}")
- local has_hashbang=$(status_to_tf grep -q '^#!' <(head -n1 "${py_file}"))
-
- if [[ "${needs_hashbang}" == "${has_hashbang}" ]]; then
- return 0
- fi
-
- if "${needs_hashbang}"; then
- echo "File ${py_file} needs a #!; please run" \
- "\`sed -i '1i#!/usr/bin/env python' ${py_file}\`"
- else
- echo "File ${py_file} has an unnecessary #!; please run" \
- "\`sed -i 1d ${py_file}\`"
- fi
- return 1
-}
-
-everything_passed=true
-python_files=()
-go_files=()
-
-for f in "$@"; do
- if [[ ! -e "${f}" ]]; then
- if "${complain_about_missing}"; then
- echo "error: no such file: ${f}" >&2
- everything_passed=false
- fi
- continue
- fi
-
- if [[ "${f}" == *.py ]]; then
- python_files+=( "${f}" )
-
- if ! check_python_file_header "${f}"; then
- everything_passed=false
- fi
- elif [[ "${f}" == *.go ]]; then
- go_files+=( "${f}" )
- fi
-done
-
-if [[ "${#python_files[@]}" -ne 0 ]]; then
- # yapf will give us a full unified (git-like) diff. We parse out the file
- # names, e.g.,
- #
- # --- foo (original)
- #
- # Sed makes it so that bad_files consists only of those lines, but with the
- # leading '--- ' and trailing ' (original)' removed.
- #
- # FIXME: Ideally, we should pass yapf the `-p` arg, so it'll format things in
- # parallel. This requires concurrent.futures in python2 (which isn't
- # available in the chroot by default), and is purely an optimization, so we
- # can handle it later.
- bad_files=(
- $("${yapf}" -d "${python_files[@]}" |
- sed -n '/^--- /{ s/^--- //; s/ *(original)$//p }')
- )
- if [[ "${#bad_files[@]}" -ne 0 ]]; then
- echo "One or more python files appear to be incorrectly formatted."
- echo "Please run \`${yapf} -i ${bad_files[@]}\` to rectify this."
- everything_passed=false
- fi
-fi
-
-if [[ "${#go_files[@]}" -ne 0 ]]; then
- bad_files=(
- $("${gofmt}" -l "${go_files[@]}")
- )
- if [[ "${#bad_files[@]}" -ne 0 ]]; then
- echo "One or more go files appear to be incorrectly formatted."
- echo "Please run \`${gofmt} -w ${bad_files[@]}\` to rectify this."
- everything_passed=false
- fi
-fi
-
-"${everything_passed}"
diff --git a/toolchain_utils_githooks/check-lint b/toolchain_utils_githooks/check-lint
deleted file mode 100755
index e4ba934b..00000000
--- a/toolchain_utils_githooks/check-lint
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/bin/bash -ue
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# This script runs `cros lint` on everything it's handed.
-
-if [[ $# -eq 0 ]]; then
- echo "No files were given to lint." >&2
- echo "Usage: $0 file1 file2 ..." >&2
- exit 1
-fi
-
-cros=cros
-golint=golint
-
-if ! type "${cros}" >&/dev/null; then
- echo "${cros} isn't on your \$PATH. Please either enter a chroot, or place" \
- "depot_tools on your \$PATH." >&2
- exit 1
-fi
-
-lint_args=( "$@" )
-
-# Trys to lint our sources. If `cros` tooling isn't properly found, returns. If
-# anything else happens, this will exit the script with the exit code of
-# `cros`.
-try_lint() {
- local output last_exit_code cros_binary
-
- cros_binary="$1"
-
- set +e
- output="$("${cros_binary}" lint -- "${lint_args[@]}" 2>&1)"
- last_exit_code="$?"
- set -e
-
- # `cros` exits with 127 specifically if it failed due to not finding a Chrome
- # OS checkout.
- if [[ "${last_exit_code}" -ne 127 ]]; then
- if [[ -n "${output}" ]]; then
- echo "${output}"
- fi
- exit "${last_exit_code}"
- fi
-}
-
-try_lint "${cros}"
-
-# If the user's given us a root directory to fall back on, try that
-if [[ -n "${CHROMEOS_ROOT_DIRECTORY:-}" ]]; then
- user_cros="${CHROMEOS_ROOT_DIRECTORY}/chromite/bin/cros"
- if [[ -x "${user_cros}" ]]; then
- try_lint "${user_cros}"
- fi
-fi
-
-# So, `cros` outside of the chroot defers to other tools inside of Chromite. If
-# `cros` couldn't find the real `cros` tool, we fall back to pylint on each
-# Python file. It appears that `cros` uses depot_tools' pylint configuration, so
-# this should get us most of the way there, and is probably the best we can
-# reasonably expect to do for users who want to develop without the source
-# tree.
-echo "WARNING: No Chrome OS checkout detected, and no viable CrOS tree " >&2
-echo "found; falling back to linting only python and go. If you have a " >&2
-echo "Chrome OS checkout, please either develop from inside of the source ">&2
-echo "tree, or set \$CHROMEOS_ROOT_DIRECTORY to the root of it." >&2
-
-python_files=()
-go_files=()
-for file in "$@"; do
- if [[ "${file}" == *.py ]]; then
- python_files+=( "${file}" )
- fi
- if [[ "${file}" == *.go ]]; then
- go_files+=( "${file}" )
- fi
-done
-
-if [[ "${#python_files[@]}" -ne 0 ]]; then
- # We saw `cros` above, so assume that `pylint` is in our PATH (depot_tools
- # packages it, and provides a reasonable default config).
- pylint "${python_files[@]}"
-fi
-
-if ! type "${golint}" >/dev/null 2>&1; then
- echo "Warning: go linting disabled. ${golint} isn't on your \$PATH. "\
- "Please either enter a chroot, or install go locally. Continuing." >&2
-elif [[ "${#go_files[@]}" -ne 0 ]]; then
- "${golint}" -set_exit_status "${go_files[@]}"
-fi
diff --git a/toolchain_utils_githooks/check-presubmit b/toolchain_utils_githooks/check-presubmit
deleted file mode 100755
index 0f770234..00000000
--- a/toolchain_utils_githooks/check-presubmit
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash -eu
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Convenient wrapper to run presubmit checks in parallel without interleaving
-# output/etc.
-
-if [[ $# -eq 0 ]]; then
- echo "No files were given to check the style of. Exiting." >&2
- exit
-fi
-
-mydir="$(dirname "$(readlink -m "$0")")"
-pydir="${mydir}/.."
-
-if [[ -z "${PYTHONPATH:-}" ]]; then
- export PYTHONPATH="${pydir}"
-else
- export PYTHONPATH="${pydir}:${PYTHONPATH}"
-fi
-
-tempfiles=()
-rm_tempfiles() {
- rm -f "${tempfiles[@]}"
-}
-
-trap rm_tempfiles EXIT
-
-child_pids=()
-spawn_child() {
- local tempfile
- tempfile="$(mktemp)"
- tempfiles+=( "${tempfile}" )
- "$@" >"${tempfile}" 2>&1 &
- child_pids+=( "$!" )
-}
-
-
-# only lint existing files
-files_exist=false
-declare -a to_lint
-for file; do
- if [[ -f "${file}" ]]; then
- files_exist=true
- to_lint+=("${file}")
- fi
-done
-
-# We have a few things to do in parallel here. To avoid interleaving their
-# output, we pipe them all to tempfiles, then cat those tempfiles.
-if "${files_exist}"; then
- spawn_child "${mydir}/check-lint" "${to_lint[@]}"
- spawn_child "${mydir}/check-format" "${to_lint[@]}"
- spawn_child "${mydir}/../run_tests_for.py" "${to_lint[@]}"
-fi
-
-success=true
-for i in "${!child_pids[@]}"; do
- wait "${child_pids[$i]}" || success=false
- cat "${tempfiles[$i]}"
-done
-
-"${success}"
diff --git a/toolchain_utils_githooks/pre-push.real b/toolchain_utils_githooks/pre-push.real
index 06aa6213..0f6856ee 100755
--- a/toolchain_utils_githooks/pre-push.real
+++ b/toolchain_utils_githooks/pre-push.real
@@ -4,10 +4,52 @@
#
# This is a pre-push hook that does the following before uploading a
# CL for review:
-# 1) check that python sources have been formatted with yapf.
+# 1) check that python sources have been formatted with pyformat.
# 2) allows the user to run the unit tests.
-mydir="$(dirname "$(readlink -m "$0")")"
+# This redirects stdin. Make sure to run after stdin has been read.
+run_UnitTests() {
+ save_dir=$(pwd)
+ status=0
+ valid=0
+
+ # Make sure we can read the stdin from terminal
+ exec < /dev/tty
+
+ while [[ $valid -eq 0 ]] ; do
+ read -p "Run unit tests? [y/n] " choice
+ case "$choice" in
+ n|N ) valid=1 ;;
+ y|Y ) valid=1; cd crosperf; ./run_tests.sh; status=$? ;
+ cd $save_dir;;
+ * ) echo "Must choose y or n."
+ esac
+ done
+ if [[ $status -ne 0 ]]; then
+ exit $status
+ fi
+}
+
+run_PyFormat() {
+ pyformat="./bin/tc_pyformat"
+ range=$1
+ files=$(git show --pretty="format:" --name-only $range)
+ for f in $files; do
+ [[ $f == *.py ]] || continue
+ # File could have been removed as part of the commit.
+ [[ -e $f ]] || continue
+ diffs=$($pyformat -d $f)
+ if [[ $? -ne 0 ]]; then
+ echo "Error: $pyformat $f returned with error code $?"
+ exit 1
+ fi
+ if [[ -n "$diffs" ]]; then
+ echo -e "Error: $f is not formatted correctly. Run $pyformat -i $f\n"
+ echo -e "diffs:\n$diffs\n"
+ exit 2
+ fi
+ done
+}
z40=0000000000000000000000000000000000000000
@@ -20,11 +62,10 @@ while IFS=' ' read local_ref local_sha remote_ref remote_sha; do
# Update to existing branch, examine new commits
range="$remote_sha..$local_sha"
fi
- all_files="$(git show --pretty="format:" --name-only "${range}")"
- # Note that ${all_files} may include files that were deleted. Hence, we
- # ignore any complaints about missing files.
- IGNORE_MISSING=1 "${mydir}/check-presubmit" ${all_files} || exit 1
+ run_PyFormat $range
fi
done
+run_UnitTests
+
exit 0