aboutsummaryrefslogtreecommitdiff
path: root/infra
diff options
context:
space:
mode:
Diffstat (limited to 'infra')
-rwxr-xr-xinfra/base-images/all.sh7
-rw-r--r--infra/base-images/base-builder-go/Dockerfile (renamed from infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py)22
-rw-r--r--infra/base-images/base-builder-go/ossfuzz_coverage_runner.go69
-rw-r--r--infra/base-images/base-builder-jvm/Dockerfile (renamed from infra/base-images/base-sanitizer-libs-builder/packages/tar.py)18
-rw-r--r--infra/base-images/base-builder-python/Dockerfile (renamed from infra/base-images/base-sanitizer-libs-builder/Dockerfile)9
-rw-r--r--infra/base-images/base-builder-rust/Dockerfile (renamed from infra/base-images/base-sanitizer-libs-builder/packages/mesa.py)19
-rw-r--r--infra/base-images/base-builder-swift/Dockerfile21
-rwxr-xr-x[-rw-r--r--]infra/base-images/base-builder-swift/precompile_swift (renamed from infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py)36
-rw-r--r--infra/base-images/base-builder/Dockerfile91
-rwxr-xr-xinfra/base-images/base-builder/bazel_build_fuzz_tests12
-rw-r--r--infra/base-images/base-builder/bisect_clang_test.py18
-rwxr-xr-xinfra/base-images/base-builder/cargo8
-rwxr-xr-xinfra/base-images/base-builder/compile50
-rw-r--r--infra/base-images/base-builder/compile_afl25
-rwxr-xr-xinfra/base-images/base-builder/compile_go_fuzzer6
-rwxr-xr-x[-rw-r--r--]infra/base-images/base-builder/install_go.sh (renamed from infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py)23
-rwxr-xr-xinfra/base-images/base-builder/install_java.sh37
-rwxr-xr-xinfra/base-images/base-builder/install_python.sh21
-rwxr-xr-xinfra/base-images/base-builder/install_rust.sh21
-rwxr-xr-xinfra/base-images/base-builder/install_swift.sh66
-rw-r--r--infra/base-images/base-builder/llvmsymbol.diff50
-rwxr-xr-xinfra/base-images/base-builder/write_labels.py2
-rw-r--r--infra/base-images/base-clang/Dockerfile2
-rwxr-xr-xinfra/base-images/base-clang/checkout_build_install_llvm.sh28
-rw-r--r--infra/base-images/base-image/Dockerfile4
-rwxr-xr-xinfra/base-images/base-runner/Dockerfile18
-rwxr-xr-xinfra/base-images/base-runner/bad_build_check12
-rwxr-xr-xinfra/base-images/base-runner/coverage120
-rwxr-xr-xinfra/base-images/base-runner/coverage_helper2
-rwxr-xr-xinfra/base-images/base-runner/jacoco_report_converter.py158
-rw-r--r--infra/base-images/base-runner/profraw_update.py123
-rwxr-xr-xinfra/base-images/base-runner/run_fuzzer31
-rwxr-xr-xinfra/base-images/base-runner/targets_list3
-rwxr-xr-xinfra/base-images/base-runner/test_all.py102
-rw-r--r--infra/base-images/base-runner/test_all_test.py8
-rwxr-xr-xinfra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py175
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py42
-rwxr-xr-xinfra/base-images/base-sanitizer-libs-builder/msan_build.py460
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/__init__.py0
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/nettle.py41
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/openssl.py42
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/package.py82
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/pixman.py40
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/pixman_blocklist.txt1
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py42
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio_fix_android.patch39
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/packages/systemd.py42
-rwxr-xr-xinfra/base-images/base-sanitizer-libs-builder/patch_build.py143
-rw-r--r--infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py47
-rw-r--r--infra/base-images/msan-libs-builder/Dockerfile45
-rw-r--r--infra/bisector.py38
-rw-r--r--infra/bisector_test.py4
-rw-r--r--infra/build/functions/base_images.py62
-rwxr-xr-x[-rw-r--r--]infra/build/functions/build_and_run_coverage.py194
-rw-r--r--infra/build/functions/build_and_run_coverage_test.py78
-rw-r--r--infra/build/functions/build_lib.py102
-rwxr-xr-x[-rw-r--r--]infra/build/functions/build_project.py725
-rw-r--r--infra/build/functions/build_project_test.py77
-rwxr-xr-xinfra/build/functions/deploy.sh12
-rw-r--r--infra/build/functions/expected_build_steps.json330
-rw-r--r--infra/build/functions/main.py5
-rw-r--r--infra/build/functions/project_sync.py12
-rw-r--r--infra/build/functions/project_sync_test.py5
-rw-r--r--infra/build/functions/request_build.py63
-rw-r--r--infra/build/functions/request_build_test.py61
-rw-r--r--infra/build/functions/request_coverage_build.py16
-rw-r--r--infra/build/functions/request_coverage_build_test.py90
-rw-r--r--infra/build/functions/test_data/expected_build_steps.json628
-rw-r--r--infra/build/functions/test_data/expected_coverage_build_steps.json (renamed from infra/build/functions/expected_coverage_build_steps.json)34
-rw-r--r--infra/build/functions/test_utils.py48
-rw-r--r--infra/build/functions/update_build_status.py24
-rw-r--r--infra/build/functions/update_build_status_test.py51
-rwxr-xr-xinfra/build_and_push_test_images.py92
-rw-r--r--infra/build_fuzzers.Dockerfile7
-rw-r--r--infra/build_specified_commit.py16
-rw-r--r--infra/build_specified_commit_test.py38
-rwxr-xr-xinfra/ci/build.py20
-rw-r--r--infra/ci/requirements.txt2
-rw-r--r--infra/cifuzz/actions/build_fuzzers/action.yml4
-rw-r--r--infra/cifuzz/actions/run_fuzzers/action.yml21
-rw-r--r--infra/cifuzz/affected_fuzz_targets.py26
-rw-r--r--infra/cifuzz/affected_fuzz_targets_test.py30
-rw-r--r--infra/cifuzz/base_runner_utils.py33
-rw-r--r--infra/cifuzz/build_fuzzers.py197
-rw-r--r--infra/cifuzz/build_fuzzers_entrypoint.py67
-rw-r--r--infra/cifuzz/build_fuzzers_test.py293
-rw-r--r--infra/cifuzz/cifuzz-base/Dockerfile23
-rw-r--r--infra/cifuzz/cifuzz_combined_entrypoint.py53
-rw-r--r--infra/cifuzz/cifuzz_end_to_end_test.py46
-rw-r--r--infra/cifuzz/cloudbuild.yaml39
-rw-r--r--infra/cifuzz/clusterfuzz_deployment.py360
-rw-r--r--infra/cifuzz/clusterfuzz_deployment_test.py266
-rw-r--r--infra/cifuzz/config_utils.py304
-rw-r--r--infra/cifuzz/config_utils_test.py200
-rw-r--r--infra/cifuzz/continuous_integration.py169
-rw-r--r--infra/cifuzz/continuous_integration_test.py87
-rw-r--r--infra/cifuzz/coverage_test.py194
-rw-r--r--infra/cifuzz/docker.py81
-rw-r--r--infra/cifuzz/docker_test.py122
-rw-r--r--infra/cifuzz/environment.py7
-rw-r--r--infra/cifuzz/external-actions/build_fuzzers/action.yml63
-rw-r--r--infra/cifuzz/external-actions/run_fuzzers/action.yml69
-rw-r--r--infra/cifuzz/filestore/__init__.py54
-rw-r--r--infra/cifuzz/filestore/git/__init__.py159
-rw-r--r--infra/cifuzz/filestore/git/git_test.py122
-rw-r--r--infra/cifuzz/filestore/github_actions/__init__.py177
-rw-r--r--infra/cifuzz/filestore/github_actions/github_actions_test.py281
-rw-r--r--infra/cifuzz/filestore/github_actions/github_api.py108
-rw-r--r--infra/cifuzz/filestore/github_actions/github_api_test.py33
-rwxr-xr-xinfra/cifuzz/filestore/github_actions/upload.js33
-rw-r--r--infra/cifuzz/filestore_utils.py31
-rw-r--r--infra/cifuzz/filestore_utils_test.py50
-rw-r--r--infra/cifuzz/fuzz_target.py278
-rw-r--r--infra/cifuzz/fuzz_target_test.py197
-rw-r--r--infra/cifuzz/generate_coverage_report.py48
-rw-r--r--infra/cifuzz/generate_coverage_report_test.py71
-rw-r--r--infra/cifuzz/get_coverage.py (renamed from infra/cifuzz/coverage.py)173
-rw-r--r--infra/cifuzz/get_coverage_test.py239
-rw-r--r--infra/cifuzz/http_utils.py117
-rw-r--r--infra/cifuzz/http_utils_test.py71
-rw-r--r--infra/cifuzz/package-lock.json316
-rw-r--r--infra/cifuzz/package.json10
-rw-r--r--infra/cifuzz/requirements.txt2
-rw-r--r--infra/cifuzz/run_cifuzz.py88
-rw-r--r--infra/cifuzz/run_fuzzers.py153
-rw-r--r--infra/cifuzz/run_fuzzers_entrypoint.py67
-rw-r--r--infra/cifuzz/run_fuzzers_test.py354
-rw-r--r--infra/cifuzz/stack_parser.py40
-rw-r--r--infra/cifuzz/stack_parser_test.py12
-rwxr-xr-xinfra/cifuzz/test_data/build-out/example_crash_fuzzer (renamed from infra/cifuzz/test_data/out/example_crash_fuzzer)bin4375872 -> 4375872 bytes
-rwxr-xr-xinfra/cifuzz/test_data/build-out/example_nocrash_fuzzer (renamed from infra/cifuzz/test_data/out/example_nocrash_fuzzer)bin4376224 -> 4376224 bytes
-rw-r--r--infra/cifuzz/test_data/example_coverage_report_summary.json1
-rw-r--r--infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile (renamed from infra/cifuzz/test_data/external-project/oss-fuzz/Dockerfile)4
-rw-r--r--infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh (renamed from infra/cifuzz/test_data/external-project/oss-fuzz/build.sh)0
-rwxr-xr-xinfra/cifuzz/test_data/memory/build-out/curl_fuzzer_memory (renamed from infra/cifuzz/test_data/memory/out/curl_fuzzer_memory)bin9768680 -> 9768680 bytes
-rwxr-xr-xinfra/cifuzz/test_data/undefined/build-out/curl_fuzzer_undefined (renamed from infra/cifuzz/test_data/undefined/out/curl_fuzzer_undefined)bin14401312 -> 14401312 bytes
-rw-r--r--infra/cifuzz/test_helpers.py116
-rw-r--r--infra/cifuzz/workspace_utils.py75
-rw-r--r--infra/constants.py (renamed from infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py)43
-rwxr-xr-xinfra/helper.py886
-rw-r--r--infra/helper_test.py211
-rwxr-xr-xinfra/presubmit.py141
-rw-r--r--infra/pytest.ini3
-rw-r--r--infra/repo_manager.py8
-rw-r--r--infra/retry.py6
-rw-r--r--infra/run_fuzzers.Dockerfile9
-rwxr-xr-xinfra/templates.py39
-rw-r--r--infra/test_helpers.py39
-rw-r--r--infra/test_repos.py14
-rw-r--r--infra/triage-party/README.md13
-rwxr-xr-xinfra/triage-party/deploy.sh42
-rw-r--r--infra/triage-party/oss-fuzz.yaml172
-rw-r--r--infra/utils.py53
-rw-r--r--infra/utils_test.py26
154 files changed, 8721 insertions, 4535 deletions
diff --git a/infra/base-images/all.sh b/infra/base-images/all.sh
index 6d012d5aa..8b3571083 100755
--- a/infra/base-images/all.sh
+++ b/infra/base-images/all.sh
@@ -17,6 +17,11 @@
docker build --pull -t gcr.io/oss-fuzz-base/base-image "$@" infra/base-images/base-image
docker build -t gcr.io/oss-fuzz-base/base-clang "$@" infra/base-images/base-clang
-docker build -t gcr.io/oss-fuzz-base/base-builder -t gcr.io/oss-fuzz/base-libfuzzer "$@" infra/base-images/base-builder
+docker build -t gcr.io/oss-fuzz-base/base-builder "$@" infra/base-images/base-builder
+docker build -t gcr.io/oss-fuzz-base/base-builder-go "$@" infra/base-images/base-builder-go
+docker build -t gcr.io/oss-fuzz-base/base-builder-jvm "$@" infra/base-images/base-builder-jvm
+docker build -t gcr.io/oss-fuzz-base/base-builder-python "$@" infra/base-images/base-builder-python
+docker build -t gcr.io/oss-fuzz-base/base-builder-rust "$@" infra/base-images/base-builder-rust
+docker build -t gcr.io/oss-fuzz-base/base-builder-swift "$@" infra/base-images/base-builder-swift
docker build -t gcr.io/oss-fuzz-base/base-runner "$@" infra/base-images/base-runner
docker build -t gcr.io/oss-fuzz-base/base-runner-debug "$@" infra/base-images/base-runner-debug
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py b/infra/base-images/base-builder-go/Dockerfile
index 3e1a1070f..9d2c61502 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py
+++ b/infra/base-images/base-builder-go/Dockerfile
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,18 +14,17 @@
#
################################################################################
-import os
+FROM gcr.io/oss-fuzz-base/base-builder
-import package
+# Set up Golang environment variables (copied from /root/.bash_profile).
+ENV GOPATH /root/go
+# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
+# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
+ENV PATH $PATH:/root/.go/bin:$GOPATH/bin
-class Package(package.Package):
- """sqlite3 package."""
+RUN install_go.sh
- def __init__(self, apt_version):
- super(Package, self).__init__('sqlite3', apt_version)
+# TODO(jonathanmetzman): Install this file using install_go.sh.
+COPY ossfuzz_coverage_runner.go $GOPATH
- def PreBuild(self, source_directory, env, custom_bin_dir):
- os.system(
- 'sed -i "s/package ifneeded sqlite3//" %s/debian/rules' %
- source_directory)
diff --git a/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go b/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go
new file mode 100644
index 000000000..d433da246
--- /dev/null
+++ b/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go
@@ -0,0 +1,69 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package mypackagebeingfuzzed
+
+import (
+ "io/ioutil"
+ "os"
+ "runtime/pprof"
+ "testing"
+)
+
+func TestFuzzCorpus(t *testing.T) {
+ dir := os.Getenv("FUZZ_CORPUS_DIR")
+ if dir == "" {
+ t.Logf("No fuzzing corpus directory set")
+ return
+ }
+ infos, err := ioutil.ReadDir(dir)
+ if err != nil {
+ t.Logf("Not fuzzing corpus directory %s", err)
+ return
+ }
+ filename := ""
+ defer func() {
+ if r := recover(); r != nil {
+ t.Error("Fuzz panicked in "+filename, r)
+ }
+ }()
+ profname := os.Getenv("FUZZ_PROFILE_NAME")
+ if profname != "" {
+ f, err := os.Create(profname + ".cpu.prof")
+ if err != nil {
+ t.Logf("error creating profile file %s\n", err)
+ } else {
+ _ = pprof.StartCPUProfile(f)
+ }
+ }
+ for i := range infos {
+ filename = dir + infos[i].Name()
+ data, err := ioutil.ReadFile(filename)
+ if err != nil {
+ t.Error("Failed to read corpus file", err)
+ }
+ FuzzFunction(data)
+ }
+ if profname != "" {
+ pprof.StopCPUProfile()
+ f, err := os.Create(profname + ".heap.prof")
+ if err != nil {
+ t.Logf("error creating heap profile file %s\n", err)
+ }
+ if err = pprof.WriteHeapProfile(f); err != nil {
+ t.Logf("error writing heap profile file %s\n", err)
+ }
+ f.Close()
+ }
+}
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/tar.py b/infra/base-images/base-builder-jvm/Dockerfile
index 74abd5c72..f27478b35 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/tar.py
+++ b/infra/base-images/base-builder-jvm/Dockerfile
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,14 +14,11 @@
#
################################################################################
-import package
+FROM gcr.io/oss-fuzz-base/base-builder
+ENV JAVA_HOME /usr/lib/jvm/java-15-openjdk-amd64
+ENV JVM_LD_LIBRARY_PATH $JAVA_HOME/lib/server
+ENV PATH $PATH:$JAVA_HOME/bin
+ENV JAZZER_API_PATH "/usr/local/lib/jazzer_api_deploy.jar"
-class Package(package.Package):
- """tar package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('tar', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- env['FORCE_UNSAFE_CONFIGURE'] = '1'
+RUN install_java.sh \ No newline at end of file
diff --git a/infra/base-images/base-sanitizer-libs-builder/Dockerfile b/infra/base-images/base-builder-python/Dockerfile
index b1a17b96c..749b4d59e 100644
--- a/infra/base-images/base-sanitizer-libs-builder/Dockerfile
+++ b/infra/base-images/base-builder-python/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2017 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,9 +14,6 @@
#
################################################################################
-FROM gcr.io/oss-fuzz-base/base-clang
-RUN sed -i -r 's/#\s*deb-src/deb-src/g' /etc/apt/sources.list
-RUN apt-get update && apt-get install -y python dpkg-dev patchelf python-apt zip
+FROM gcr.io/oss-fuzz-base/base-builder
-COPY compiler_wrapper.py msan_build.py patch_build.py wrapper_utils.py /usr/local/bin/
-COPY packages /usr/local/bin/packages
+RUN install_python.sh
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py b/infra/base-images/base-builder-rust/Dockerfile
index ec2e9d217..a4ec327b6 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py
+++ b/infra/base-images/base-builder-rust/Dockerfile
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,14 +14,12 @@
#
################################################################################
-import package
+FROM gcr.io/oss-fuzz-base/base-builder
+ENV CARGO_HOME=/rust
+ENV RUSTUP_HOME=/rust/rustup
+ENV PATH=$PATH:/rust/bin
+# Set up custom environment variable for source code copy for coverage reports
+ENV OSSFUZZ_RUSTPATH /rust
-class Package(package.Package):
- """mesa package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('mesa', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- env['DEB_CXXFLAGS_APPEND'] += ' -std=c++11'
+RUN install_rust.sh
diff --git a/infra/base-images/base-builder-swift/Dockerfile b/infra/base-images/base-builder-swift/Dockerfile
new file mode 100644
index 000000000..2b063bb2f
--- /dev/null
+++ b/infra/base-images/base-builder-swift/Dockerfile
@@ -0,0 +1,21 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+FROM gcr.io/oss-fuzz-base/base-builder
+
+RUN install_swift.sh
+
+COPY precompile_swift /usr/local/bin/
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py b/infra/base-images/base-builder-swift/precompile_swift
index f8407a668..ab855a620 100644..100755
--- a/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py
+++ b/infra/base-images/base-builder-swift/precompile_swift
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+#!/bin/bash -eu
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,23 +15,19 @@
#
################################################################################
-import os
-import shutil
+cp /usr/local/bin/llvm-symbolizer-swift $OUT/llvm-symbolizer
-import package
-import wrapper_utils
+export SWIFTFLAGS="-Xswiftc -parse-as-library -Xswiftc -static-stdlib --static-swift-stdlib"
+if [ "$SANITIZER" = "coverage" ]
+then
+ export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -profile-generate -Xswiftc -profile-coverage-mapping -Xswiftc -sanitize=fuzzer"
+else
+ export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -sanitize=fuzzer,$SANITIZER --sanitize=$SANITIZER"
+ for f in $CFLAGS; do
+ export SWIFTFLAGS="$SWIFTFLAGS -Xcc=$f"
+ done
-
-class Package(package.Package):
- """gnutls28 package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('gnutls28', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- configure_wrapper = (
- '#!/bin/bash\n'
- '/usr/bin/dh_auto_configure "$@" --disable-hardware-acceleration')
-
- wrapper_utils.InstallWrapper(
- custom_bin_dir, 'dh_auto_configure', configure_wrapper)
+ for f in $CXXFLAGS; do
+ export SWIFTFLAGS="$SWIFTFLAGS -Xcxx=$f"
+ done
+fi
diff --git a/infra/base-images/base-builder/Dockerfile b/infra/base-images/base-builder/Dockerfile
index d802f247a..256e7be56 100644
--- a/infra/base-images/base-builder/Dockerfile
+++ b/infra/base-images/base-builder/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2016 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -61,71 +61,15 @@ RUN export PYTHON_DEPS="\
rm -rf /usr/local/lib/python3.8/test && \
apt-get remove -y $PYTHON_DEPS # https://github.com/google/oss-fuzz/issues/3888
-# Install latest atheris for python fuzzing, pyinstaller for fuzzer packaging,
-# six for Bazel rules.
+# Install six for Bazel rules.
RUN unset CFLAGS CXXFLAGS && pip3 install -v --no-cache-dir \
- atheris pyinstaller==4.1 six==1.15.0 && \
- rm -rf /tmp/*
-
-# Download and install the latest stable Go.
-RUN cd /tmp && \
- curl -O https://storage.googleapis.com/golang/getgo/installer_linux && \
- chmod +x ./installer_linux && \
- SHELL="bash" ./installer_linux && \
- rm -rf ./installer_linux
-
-# Set up Golang environment variables (copied from /root/.bash_profile).
-ENV GOPATH /root/go
-
-# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
-# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
-ENV PATH $PATH:/root/.go/bin:$GOPATH/bin
-
-# Uses golang 1.14+ cmd/compile's native libfuzzer instrumentation.
-RUN go get -u github.com/mdempsky/go114-fuzz-build && \
- ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz
-
-# Install Rust and cargo-fuzz for libFuzzer instrumentation.
-ENV CARGO_HOME=/rust
-ENV RUSTUP_HOME=/rust/rustup
-ENV PATH=$PATH:/rust/bin
-RUN curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=nightly --profile=minimal
-RUN cargo install cargo-fuzz && rm -rf /rust/registry
-# Needed to recompile rust std library for MSAN
-RUN rustup component add rust-src --toolchain nightly
-# Set up custom environment variable for source code copy for coverage reports
-ENV OSSFUZZ_RUSTPATH /rust
+ six==1.15.0 && rm -rf /tmp/*
# Install Bazel through Bazelisk, which automatically fetches the latest Bazel version.
-ENV BAZELISK_VERSION 1.7.4
+ENV BAZELISK_VERSION 1.9.0
RUN curl -L https://github.com/bazelbuild/bazelisk/releases/download/v$BAZELISK_VERSION/bazelisk-linux-amd64 -o /usr/local/bin/bazel && \
chmod +x /usr/local/bin/bazel
-# Install OpenJDK 15 and trim its size by removing unused components.
-ENV JAVA_HOME=/usr/lib/jvm/java-15-openjdk-amd64
-ENV JVM_LD_LIBRARY_PATH=$JAVA_HOME/lib/server
-ENV PATH=$PATH:$JAVA_HOME/bin
-RUN cd /tmp && \
- curl -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-x64_bin.tar.gz && \
- mkdir -p $JAVA_HOME && \
- tar -xzv --strip-components=1 -f openjdk-15.0.2_linux-x64_bin.tar.gz --directory $JAVA_HOME && \
- rm -f openjdk-15.0.2_linux-x64_bin.tar.gz && \
- rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip
-
-# Install the latest Jazzer in $OUT.
-# jazzer_api_deploy.jar is required only at build-time, the agent and the
-# drivers are copied to $OUT as they need to be present on the runners.
-ENV JAZZER_API_PATH "/usr/local/lib/jazzer_api_deploy.jar"
-RUN cd $SRC/ && \
- git clone --depth=1 https://github.com/CodeIntelligenceTesting/jazzer && \
- cd jazzer && \
- bazel build --java_runtime_version=localjdk_15 -c opt --cxxopt="-stdlib=libc++" --linkopt=-lc++ \
- //agent:jazzer_agent_deploy.jar //driver:jazzer_driver //driver:jazzer_driver_asan //agent:jazzer_api_deploy.jar && \
- cp bazel-bin/agent/jazzer_agent_deploy.jar bazel-bin/driver/jazzer_driver bazel-bin/driver/jazzer_driver_asan /usr/local/bin/ && \
- cp bazel-bin/agent/jazzer_api_deploy.jar $JAZZER_API_PATH && \
- rm -rf ~/.cache/bazel ~/.cache/bazelisk && \
- rm -rf $SRC/jazzer
-
# Default build flags for various sanitizers.
ENV SANITIZER_FLAGS_address "-fsanitize=address -fsanitize-address-use-after-scope"
@@ -181,7 +125,7 @@ WORKDIR $SRC
# TODO: switch to -b stable once we can.
RUN git clone https://github.com/AFLplusplus/AFLplusplus.git aflplusplus && \
cd aflplusplus && \
- git checkout 2102264acf5c271b7560a82771b3af8136af9354
+ git checkout 4fe572b80f76ff0b0e916b639d1e04d5af48b157
RUN cd $SRC && \
curl -L -O https://github.com/google/honggfuzz/archive/oss-fuzz.tar.gz && \
@@ -190,14 +134,25 @@ RUN cd $SRC && \
tar -xzv --strip-components=1 -f $SRC/oss-fuzz.tar.gz && \
rm -rf examples $SRC/oss-fuzz.tar.gz
-COPY cargo compile compile_afl compile_dataflow compile_libfuzzer compile_honggfuzz \
- compile_go_fuzzer precompile_honggfuzz precompile_afl debug_afl srcmap \
- write_labels.py bazel_build_fuzz_tests /usr/local/bin/
-
-COPY detect_repo.py /opt/cifuzz/
-COPY ossfuzz_coverage_runner.go $GOPATH
+# Do precompiles before copying other scripts for better cache efficiency.
+COPY precompile_afl /usr/local/bin/
+RUN precompile_afl
+COPY precompile_honggfuzz /usr/local/bin/
RUN precompile_honggfuzz
-RUN precompile_afl
+
+COPY cargo compile compile_afl compile_dataflow compile_libfuzzer compile_honggfuzz \
+ compile_go_fuzzer debug_afl srcmap \
+ write_labels.py bazel_build_fuzz_tests \
+ # Go, java, and swift installation scripts.
+ install_go.sh \
+ install_java.sh \
+ install_python.sh \
+ install_rust.sh \
+ install_swift.sh \
+ /usr/local/bin/
+
+COPY llvmsymbol.diff $SRC
+COPY detect_repo.py /opt/cifuzz/
CMD ["compile"]
diff --git a/infra/base-images/base-builder/bazel_build_fuzz_tests b/infra/base-images/base-builder/bazel_build_fuzz_tests
index 86740ee01..dca79f3f2 100755
--- a/infra/base-images/base-builder/bazel_build_fuzz_tests
+++ b/infra/base-images/base-builder/bazel_build_fuzz_tests
@@ -22,10 +22,17 @@
: "${BAZEL_TOOL:=bazel}"
: "${BAZEL_EXTRA_BUILD_FLAGS:=}"
+if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
+ BAZEL_LANGUAGE=java
+else
+ BAZEL_LANGUAGE=cc
+fi
+
if [[ -z "${BAZEL_FUZZ_TEST_QUERY:-}" ]]; then
BAZEL_FUZZ_TEST_QUERY="
let all_fuzz_tests = attr(tags, \"${BAZEL_FUZZ_TEST_TAG}\", \"//...\") in
- \$all_fuzz_tests - attr(tags, \"${BAZEL_FUZZ_TEST_EXCLUDE_TAG}\", \$all_fuzz_tests)
+ let lang_fuzz_tests = attr(generator_function, \"^${BAZEL_LANGUAGE}_fuzz_test\$\", \$all_fuzz_tests) in
+ \$lang_fuzz_tests - attr(tags, \"${BAZEL_FUZZ_TEST_EXCLUDE_TAG}\", \$lang_fuzz_tests)
"
fi
@@ -42,9 +49,10 @@ done
declare -r BAZEL_BUILD_FLAGS=(
"-c" "opt"
- "--//fuzzing:cc_engine=@rules_fuzzing_oss_fuzz//:oss_fuzz_engine" \
+ "--@rules_fuzzing//fuzzing:cc_engine=@rules_fuzzing_oss_fuzz//:oss_fuzz_engine" \
"--@rules_fuzzing//fuzzing:cc_engine_instrumentation=oss-fuzz" \
"--@rules_fuzzing//fuzzing:cc_engine_sanitizer=none" \
+ "--cxxopt=-stdlib=libc++" \
"--linkopt=-lc++" \
"--action_env=CC=${CC}" "--action_env=CXX=${CXX}" \
${BAZEL_EXTRA_BUILD_FLAGS[*]}
diff --git a/infra/base-images/base-builder/bisect_clang_test.py b/infra/base-images/base-builder/bisect_clang_test.py
index edf13e759..a11bf8640 100644
--- a/infra/base-images/base-builder/bisect_clang_test.py
+++ b/infra/base-images/base-builder/bisect_clang_test.py
@@ -127,7 +127,7 @@ def create_mock_popen(
return MockPopen
-def mock_prepare_build(llvm_project_path): # pylint: disable=unused-argument
+def mock_prepare_build_impl(llvm_project_path): # pylint: disable=unused-argument
"""Mocked prepare_build function."""
return '/work/llvm-build'
@@ -138,7 +138,7 @@ class BuildClangTest(BisectClangTestMixin, unittest.TestCase):
def test_build_clang_test(self):
"""Tests that build_clang works as intended."""
with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
- with mock.patch('bisect_clang.prepare_build', mock_prepare_build):
+ with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
llvm_src_dir = '/src/llvm-project'
bisect_clang.build_clang(llvm_src_dir)
self.assertEqual([['ninja', '-C', '/work/llvm-build', 'install']],
@@ -170,13 +170,13 @@ class GitRepoTest(BisectClangTestMixin, unittest.TestCase):
"""Tests test_start_commit works as intended when the test returns an
unexpected value."""
- def mock_execute(command, *args, **kwargs): # pylint: disable=unused-argument
+ def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
if command == self.test_command:
return returncode, '', ''
return 0, '', ''
- with mock.patch('bisect_clang.execute', mock_execute):
- with mock.patch('bisect_clang.prepare_build', mock_prepare_build):
+ with mock.patch('bisect_clang.execute', mock_execute_impl):
+ with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
with self.assertRaises(bisect_clang.BisectError):
self.git.test_start_commit(commit, label, self.test_command)
@@ -202,13 +202,13 @@ class GitRepoTest(BisectClangTestMixin, unittest.TestCase):
expected value."""
command_args = []
- def mock_execute(command, *args, **kwargs): # pylint: disable=unused-argument
+ def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
command_args.append(command)
if command == self.test_command:
return returncode, '', ''
return 0, '', ''
- with mock.patch('bisect_clang.execute', mock_execute):
+ with mock.patch('bisect_clang.execute', mock_execute_impl):
self.git.test_start_commit(commit, label, self.test_command)
self.assertEqual([
get_git_command('checkout', commit), self.test_command,
@@ -247,13 +247,13 @@ class GitRepoTest(BisectClangTestMixin, unittest.TestCase):
"""Test test_commit works as intended."""
command_args = []
- def mock_execute(command, *args, **kwargs): # pylint: disable=unused-argument
+ def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
command_args.append(command)
if command == self.test_command:
return returncode, output, ''
return 0, output, ''
- with mock.patch('bisect_clang.execute', mock_execute):
+ with mock.patch('bisect_clang.execute', mock_execute_impl):
result = self.git.test_commit(self.test_command)
self.assertEqual([self.test_command,
get_git_command('bisect', label)], command_args)
diff --git a/infra/base-images/base-builder/cargo b/infra/base-images/base-builder/cargo
index bed8e7660..c60c7611b 100755
--- a/infra/base-images/base-builder/cargo
+++ b/infra/base-images/base-builder/cargo
@@ -27,7 +27,7 @@ then
export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix src=$crate_src_abspath/src"
fi
-if [ "$SANITIZER" = "coverage" ] && [ $1 = "fuzz" ]
+if [ "$SANITIZER" = "coverage" ] && [ $1 = "fuzz" ] && [ $2 = "build" ]
then
# hack to turn cargo fuzz build into cargo build so as to get coverage
# cargo fuzz adds "--target" "x86_64-unknown-linux-gnu"
@@ -35,7 +35,11 @@ then
# go into fuzz directory if not already the case
cd fuzz || true
fuzz_src_abspath=`pwd`
- export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix fuzz_targets=$fuzz_src_abspath/fuzz_targets"
+ # Default directory is fuzz_targets, but some projects like image-rs use fuzzers.
+ while read i; do
+ export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix $i=$fuzz_src_abspath/$i"
+ # Bash while syntax so that we modify RUSTFLAGS in main shell instead of a subshell.
+ done <<< "$(ls */*.rs | cut -d/ -f1 | uniq)"
# we do not want to trigger debug assertions and stops
export RUSTFLAGS="$RUSTFLAGS -C debug-assertions=no"
# do not optimize with --release, leading to Malformed instrumentation profile data
diff --git a/infra/base-images/base-builder/compile b/infra/base-images/base-builder/compile
index 78453c98c..c934d3b5b 100755
--- a/infra/base-images/base-builder/compile
+++ b/infra/base-images/base-builder/compile
@@ -27,8 +27,8 @@ if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
echo "ERROR: JVM projects can be fuzzed with libFuzzer engine only."
exit 1
fi
- if [ "$SANITIZER" != "address" ]; then
- echo "ERROR: JVM projects can be fuzzed with AddressSanitizer only."
+ if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "undefined" ]; then
+ echo "ERROR: JVM projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer only."
exit 1
fi
if [ "$ARCHITECTURE" != "x86_64" ]; then
@@ -43,7 +43,7 @@ if [ "$FUZZING_LANGUAGE" = "python" ]; then
exit 1
fi
if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "undefined" ]; then
- echo "ERROR: Python projects can be fuzzed with AddressSanitizer and UndefinedBehaviorSanitizer only."
+ echo "ERROR: Python projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer only."
exit 1
fi
if [ "$ARCHITECTURE" != "x86_64" ]; then
@@ -59,7 +59,7 @@ fi
if [[ $ARCHITECTURE == "i386" ]]; then
export CFLAGS="-m32 $CFLAGS"
- cp -R /usr/i386/lib/* /usr/lib
+ cp -R /usr/i386/lib/* /usr/local/lib
fi
# JVM projects are fuzzed with Jazzer, which has libFuzzer built in.
if [[ $FUZZING_ENGINE != "none" ]] && [[ $FUZZING_LANGUAGE != "jvm" ]]; then
@@ -71,15 +71,9 @@ if [[ $SANITIZER_FLAGS = *sanitize=memory* ]]
then
# Take all libraries from lib/msan and MSAN_LIBS_PATH
# export CXXFLAGS_EXTRA="-L/usr/msan/lib $CXXFLAGS_EXTRA"
- cp -R /usr/msan/lib/* /usr/lib/
-
- if [[ -z "${MSAN_LIBS_PATH-}" ]]; then
- echo 'WARNING: Building without MSan instrumented libraries.'
- else
- # Copy all static libraries only. Don't include .so files because they can
- # break non MSan compiled programs.
- (cd "$MSAN_LIBS_PATH" && find . -name '*.a' -exec cp --parents '{}' / ';')
- fi
+ cp -R /usr/msan/lib/* /usr/local/lib/
+
+ echo 'Building without MSan instrumented libraries.'
fi
# Coverage flag overrides.
@@ -118,7 +112,7 @@ export CFLAGS="$CFLAGS $SANITIZER_FLAGS $COVERAGE_FLAGS"
export CXXFLAGS="$CFLAGS $CXXFLAGS_EXTRA"
if [ "$FUZZING_LANGUAGE" = "python" ]; then
- sanitizer_with_fuzzer_lib_dir=`python3 -c "import atheris; import os; print(os.path.dirname(atheris.path()))"`
+ sanitizer_with_fuzzer_lib_dir=`python3 -c "import atheris; import os; print(atheris.path())"`
sanitizer_with_fuzzer_output_lib=$OUT/sanitizer_with_fuzzer.so
if [ "$SANITIZER" = "address" ]; then
cp $sanitizer_with_fuzzer_lib_dir/asan_with_fuzzer.so $sanitizer_with_fuzzer_output_lib
@@ -136,7 +130,20 @@ cp $(which llvm-symbolizer) $OUT/
# Copy Jazzer to $OUT if needed.
if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
- cp $(which jazzer_agent_deploy.jar) $(which jazzer_driver) $(which jazzer_driver_asan) $OUT/
+ cp $(which jazzer_agent_deploy.jar) $(which jazzer_driver) $OUT/
+ jazzer_driver_with_sanitizer=$OUT/jazzer_driver_with_sanitizer
+ if [ "$SANITIZER" = "address" ]; then
+ cp $(which jazzer_driver_asan) $jazzer_driver_with_sanitizer
+ elif [ "$SANITIZER" = "undefined" ]; then
+ cp $(which jazzer_driver_ubsan) $jazzer_driver_with_sanitizer
+ elif [ "$SANITIZER" = "coverage" ]; then
+ # Coverage builds require no instrumentation.
+ cp $(which jazzer_driver) $jazzer_driver_with_sanitizer
+ fi
+
+ # Disable leak checking since the JVM triggers too many false positives.
+ export CFLAGS="$CFLAGS -fno-sanitize=leak"
+ export CXXFLAGS="$CXXFLAGS -fno-sanitize=leak"
fi
echo "---------------------------------------------------------------"
@@ -144,13 +151,24 @@ echo "CC=$CC"
echo "CXX=$CXX"
echo "CFLAGS=$CFLAGS"
echo "CXXFLAGS=$CXXFLAGS"
+echo "RUSTFLAGS=$RUSTFLAGS"
echo "---------------------------------------------------------------"
BUILD_CMD="bash -eux $SRC/build.sh"
+# Set +u temporarily to continue even if GOPATH and OSSFUZZ_RUSTPATH are undefined.
+set +u
# We need to preserve source code files for generating a code coverage report.
# We need exact files that were compiled, so copy both $SRC and $WORK dirs.
-COPY_SOURCES_CMD="cp -rL --parents $SRC $WORK /usr/include /usr/local/include $GOPATH $OSSFUZZ_RUSTPATH $OUT"
+COPY_SOURCES_CMD="cp -rL --parents $SRC $WORK /usr/include /usr/local/include $GOPATH $OSSFUZZ_RUSTPATH /rustc $OUT"
+set -u
+
+if [ "$FUZZING_LANGUAGE" = "rust" ]; then
+ # Copy rust std lib to its path with a hash.
+ export rustch=`rustc --version --verbose | grep commit-hash | cut -d' ' -f2`
+ mkdir -p /rustc/$rustch/
+ cp -r /rust/rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/ /rustc/$rustch/
+fi
if [ "${BUILD_UID-0}" -ne "0" ]; then
adduser -u $BUILD_UID --disabled-password --gecos '' builder
diff --git a/infra/base-images/base-builder/compile_afl b/infra/base-images/base-builder/compile_afl
index dc6624459..d6509c74c 100644
--- a/infra/base-images/base-builder/compile_afl
+++ b/infra/base-images/base-builder/compile_afl
@@ -22,6 +22,8 @@
# AFL++ settings.
export AFL_LLVM_MODE_WORKAROUND=0
export AFL_ENABLE_DICTIONARY=0
+export AFL_ENABLE_CMPLOG=1
+export AFL_LAF_CHANCE=3
# Start compiling afl++.
echo "Copying precompiled afl++"
@@ -45,19 +47,32 @@ export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_
# AFL compile option roulette. It is OK if they all happen together.
-# 40% chance to perform CMPLOG
+# 20% chance for CTX-2 coverage instrumentation (Caller conTeXt sensitive
+# edge coverage).
+test $(($RANDOM % 100)) -lt 20 && {
+ export AFL_LLVM_INSTRUMENT=CLASSIC,CTX-2
+ export AFL_ENABLE_CMPLOG=0
+ export AFL_LAF_CHANCE=30
+}
+
+# 40% chance to create a dictionary.
+test $(($RANDOM % 100)) -lt 40 && {
+ export AFL_ENABLE_DICTIONARY=1
+}
+
+# 60% chance to perform CMPLOG/REDQUEEN.
rm -f "$OUT/afl_cmplog.txt"
-test $(($RANDOM % 10)) -lt 4 && {
+test "$AFL_ENABLE_CMPLOG" = "1" -a $(($RANDOM % 100)) -lt 60 && {
export AFL_LLVM_CMPLOG=1
touch "$OUT/afl_cmplog.txt"
}
-# 10% chance to perform LAF_INTEL
-test $(($RANDOM % 10)) -lt 1 && {
+# 3% chance to perform COMPCOV/LAF_INTEL.
+test $(($RANDOM % 100)) -lt $AFL_LAF_CHANCE && {
export AFL_LLVM_LAF_ALL=1
}
-# If the targets wants a dictionary - then create one.
+# Create a dictionary if one is wanted.
test "$AFL_ENABLE_DICTIONARY" = "1" && {
export AFL_LLVM_DICT2FILE="$OUT/afl++.dict"
}
diff --git a/infra/base-images/base-builder/compile_go_fuzzer b/infra/base-images/base-builder/compile_go_fuzzer
index 2342800fb..dd8c9f6a1 100755
--- a/infra/base-images/base-builder/compile_go_fuzzer
+++ b/infra/base-images/base-builder/compile_go_fuzzer
@@ -29,7 +29,7 @@ cd $GOPATH/src/$path || true
# in the case we are in the right directory, with go.mod but no go.sum
go mod tidy || true
# project was downloaded with go get if go list fails
-go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; }
+go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; } || cd -
# project does not have go.mod if go list fails again
go list $tags $path || { go mod init $path && go mod tidy ;}
@@ -42,7 +42,9 @@ if [[ $SANITIZER = *coverage* ]]; then
sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go
sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go
- fuzzed_repo=`echo $path | cut -d/ -f-3`
+ # The repo is the module path/name, which was already created above in case it didn't exist,
+ # but it is not always the same as the repository path. This is necessary to handle SIV properly.
+ fuzzed_repo=$(go list $tags -f {{.Module}} "$path")
abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo`
# give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir
echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py b/infra/base-images/base-builder/install_go.sh
index 8071b7ecd..21138831c 100644..100755
--- a/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py
+++ b/infra/base-images/base-builder/install_go.sh
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,15 +15,14 @@
#
################################################################################
-import package
+cd /tmp
+curl -O https://storage.googleapis.com/golang/getgo/installer_linux
+chmod +x ./installer_linux
+SHELL="bash" ./installer_linux
+rm -rf ./installer_linux
+echo 'Set "GOPATH=/root/go"'
+echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"'
-class Package(package.Package):
- """boost1.58 package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('boost1.58', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- # Otherwise py_nonblocking.cpp fails to build.
- env['DEB_CXXFLAGS_APPEND'] += ' -std=c++98'
+go get -u github.com/mdempsky/go114-fuzz-build
+ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz
diff --git a/infra/base-images/base-builder/install_java.sh b/infra/base-images/base-builder/install_java.sh
new file mode 100755
index 000000000..560c816a9
--- /dev/null
+++ b/infra/base-images/base-builder/install_java.sh
@@ -0,0 +1,37 @@
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Install OpenJDK 15 and trim its size by removing unused components.
+cd /tmp
+curl -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-x64_bin.tar.gz && \
+mkdir -p $JAVA_HOME
+tar -xzv --strip-components=1 -f openjdk-15.0.2_linux-x64_bin.tar.gz --directory $JAVA_HOME && \
+rm -f openjdk-15.0.2_linux-x64_bin.tar.gz
+rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip
+
+# Install the latest Jazzer in $OUT.
+# jazzer_api_deploy.jar is required only at build-time, the agent and the
+# drivers are copied to $OUT as they need to be present on the runners.
+cd $SRC/
+git clone --depth=1 https://github.com/CodeIntelligenceTesting/jazzer && \
+cd jazzer
+bazel build --java_runtime_version=localjdk_15 -c opt --cxxopt="-stdlib=libc++" --linkopt=-lc++ \
+ //agent:jazzer_agent_deploy.jar //driver:jazzer_driver //driver:jazzer_driver_asan //driver:jazzer_driver_ubsan //agent:jazzer_api_deploy.jar
+cp bazel-bin/agent/jazzer_agent_deploy.jar bazel-bin/driver/jazzer_driver bazel-bin/driver/jazzer_driver_asan bazel-bin/driver/jazzer_driver_ubsan /usr/local/bin/
+cp bazel-bin/agent/jazzer_api_deploy.jar $JAZZER_API_PATH
+rm -rf ~/.cache/bazel ~/.cache/bazelisk
+rm -rf $SRC/jazzer
diff --git a/infra/base-images/base-builder/install_python.sh b/infra/base-images/base-builder/install_python.sh
new file mode 100755
index 000000000..b9c9a38c3
--- /dev/null
+++ b/infra/base-images/base-builder/install_python.sh
@@ -0,0 +1,21 @@
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+echo "ATHERIS INSTALL"
+unset CFLAGS CXXFLAGS
+pip3 install -v --no-cache-dir atheris>=2.0.6 pyinstaller==4.1
+rm -rf /tmp/*
diff --git a/infra/base-images/base-builder/install_rust.sh b/infra/base-images/base-builder/install_rust.sh
new file mode 100755
index 000000000..cbb461fd6
--- /dev/null
+++ b/infra/base-images/base-builder/install_rust.sh
@@ -0,0 +1,21 @@
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=nightly --profile=minimal
+cargo install cargo-fuzz && rm -rf /rust/registry
+# Needed to recompile rust std library for MSAN
+rustup component add rust-src --toolchain nightly
diff --git a/infra/base-images/base-builder/install_swift.sh b/infra/base-images/base-builder/install_swift.sh
new file mode 100755
index 000000000..d88a7b5cd
--- /dev/null
+++ b/infra/base-images/base-builder/install_swift.sh
@@ -0,0 +1,66 @@
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+
+SWIFT_PACKAGES="wget \
+ binutils \
+ git \
+ gnupg2 \
+ libc6-dev \
+ libcurl4 \
+ libedit2 \
+ libgcc-9-dev \
+ libpython2.7 \
+ libsqlite3-0 \
+ libstdc++-9-dev \
+ libxml2 \
+ libz3-dev \
+ pkg-config \
+ tzdata \
+ zlib1g-dev"
+SWIFT_SYMBOLIZER_PACKAGES="build-essential make cmake ninja-build git python3 g++-multilib binutils-dev zlib1g-dev"
+apt-get update && apt install -y $SWIFT_PACKAGES && \
+ apt install -y $SWIFT_SYMBOLIZER_PACKAGES --no-install-recommends
+
+
+wget https://swift.org/builds/swift-5.4.2-release/ubuntu2004/swift-5.4.2-RELEASE/swift-5.4.2-RELEASE-ubuntu20.04.tar.gz
+tar xzf swift-5.4.2-RELEASE-ubuntu20.04.tar.gz
+cp -r swift-5.4.2-RELEASE-ubuntu20.04/usr/* /usr/
+rm -rf swift-5.4.2-RELEASE-ubuntu20.04.tar.gz
+# TODO: Move to a separate work dir
+git clone --depth 1 https://github.com/llvm/llvm-project.git
+cd llvm-project
+git apply ../llvmsymbol.diff --verbose
+cmake -G "Ninja" \
+ -DLIBCXX_ENABLE_SHARED=OFF \
+ -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \
+ -DLIBCXXABI_ENABLE_SHARED=OFF \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_TARGETS_TO_BUILD=X86 \
+ -DCMAKE_C_COMPILER=clang \
+ -DCMAKE_CXX_COMPILER=clang++ \
+ -DLLVM_BUILD_TESTS=OFF \
+ -DLLVM_INCLUDE_TESTS=OFF llvm
+ninja -j$(nproc) llvm-symbolizer
+cp bin/llvm-symbolizer /usr/local/bin/llvm-symbolizer-swift
+
+cd $SRC
+rm -rf llvm-project llvmsymbol.diff
+
+# TODO: Cleanup packages
+apt-get remove --purge -y wget zlib1g-dev
+apt-get autoremove -y
diff --git a/infra/base-images/base-builder/llvmsymbol.diff b/infra/base-images/base-builder/llvmsymbol.diff
new file mode 100644
index 000000000..70181bf39
--- /dev/null
+++ b/infra/base-images/base-builder/llvmsymbol.diff
@@ -0,0 +1,50 @@
+diff --git a/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt b/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt
+index acfb3bd0e..a499ee2e0 100644
+--- a/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt
++++ b/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt
+@@ -12,4 +12,11 @@ add_llvm_component_library(LLVMSymbolize
+ Object
+ Support
+ Demangle
+- )
++
++ LINK_LIBS
++ /usr/lib/swift_static/linux/libswiftCore.a
++ /usr/lib/swift_static/linux/libicui18nswift.a
++ /usr/lib/swift_static/linux/libicuucswift.a
++ /usr/lib/swift_static/linux/libicudataswift.a
++ /usr/lib/x86_64-linux-gnu/libstdc++.so.6
++)
+diff --git a/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp b/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp
+index fb4875f79..0030769ee 100644
+--- a/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp
++++ b/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp
+@@ -36,6 +36,13 @@
+ #include <cassert>
+ #include <cstring>
+
++
++extern "C" char *swift_demangle(const char *mangledName,
++ size_t mangledNameLength,
++ char *outputBuffer,
++ size_t *outputBufferSize,
++ uint32_t flags);
++
+ namespace llvm {
+ namespace symbolize {
+
+@@ -678,6 +685,14 @@ LLVMSymbolizer::DemangleName(const std::string &Name,
+ free(DemangledName);
+ return Result;
+ }
++ if (!Name.empty() && Name.front() == '$') {
++ char *DemangledName = swift_demangle(Name.c_str(), Name.length(), 0, 0, 0);
++ if (DemangledName) {
++ std::string Result = DemangledName;
++ free(DemangledName);
++ return Result;
++ }
++ }
+
+ if (DbiModuleDescriptor && DbiModuleDescriptor->isWin32Module())
+ return std::string(demanglePE32ExternCFunc(Name));
diff --git a/infra/base-images/base-builder/write_labels.py b/infra/base-images/base-builder/write_labels.py
index 6766e37fe..92a820a43 100755
--- a/infra/base-images/base-builder/write_labels.py
+++ b/infra/base-images/base-builder/write_labels.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/infra/base-images/base-clang/Dockerfile b/infra/base-images/base-clang/Dockerfile
index 3c16a8f3c..45260941c 100644
--- a/infra/base-images/base-clang/Dockerfile
+++ b/infra/base-images/base-clang/Dockerfile
@@ -19,7 +19,7 @@
FROM gcr.io/oss-fuzz-base/base-image
# Install newer cmake.
-ENV CMAKE_VERSION 3.19.2
+ENV CMAKE_VERSION 3.21.1
RUN apt-get update && apt-get install -y wget sudo && \
wget https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-Linux-x86_64.sh && \
chmod +x cmake-$CMAKE_VERSION-Linux-x86_64.sh && \
diff --git a/infra/base-images/base-clang/checkout_build_install_llvm.sh b/infra/base-images/base-clang/checkout_build_install_llvm.sh
index f6e8ca99c..a62b27cff 100755
--- a/infra/base-images/base-clang/checkout_build_install_llvm.sh
+++ b/infra/base-images/base-clang/checkout_build_install_llvm.sh
@@ -21,8 +21,8 @@
NPROC=$(expr $(nproc) / 2)
# zlib1g-dev is needed for llvm-profdata to handle coverage data from rust compiler
-LLVM_DEP_PACKAGES="build-essential make cmake ninja-build git python3 g++-multilib binutils-dev zlib1g-dev"
-apt-get install -y $LLVM_DEP_PACKAGES --no-install-recommends
+LLVM_DEP_PACKAGES="build-essential make cmake ninja-build git python3 python3-distutils g++-multilib binutils-dev zlib1g-dev"
+apt-get update && apt-get install -y $LLVM_DEP_PACKAGES --no-install-recommends
# Checkout
CHECKOUT_RETRIES=10
@@ -74,7 +74,7 @@ OUR_LLVM_REVISION=llvmorg-12-init-17251-g6de48655
# To allow for manual downgrades. Set to 0 to use Chrome's clang version (i.e.
# *not* force a manual downgrade). Set to 1 to force a manual downgrade.
-FORCE_OUR_REVISION=1
+FORCE_OUR_REVISION=0
LLVM_REVISION=$(grep -Po "CLANG_REVISION = '\K([^']+)" scripts/update.py)
clone_with_retries https://github.com/llvm/llvm-project.git $LLVM_SRC
@@ -124,10 +124,24 @@ rm -rf $WORK/llvm-stage1 $WORK/llvm-stage2
# Use the clang we just built from now on.
CMAKE_EXTRA_ARGS="-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++"
+function cmake_libcxx {
+ extra_args="$@"
+ cmake -G "Ninja" \
+ -DLIBCXX_ENABLE_SHARED=OFF \
+ -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \
+ -DLIBCXXABI_ENABLE_SHARED=OFF \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_TARGETS_TO_BUILD="$TARGET_TO_BUILD" \
+ -DLLVM_ENABLE_PROJECTS="libcxx;libcxxabi" \
+ -DLLVM_BINUTILS_INCDIR="/usr/include/" \
+ $extra_args \
+ $LLVM_SRC/llvm
+}
+
# 32-bit libraries.
mkdir -p $WORK/i386
cd $WORK/i386
-cmake_llvm $CMAKE_EXTRA_ARGS \
+cmake_libcxx $CMAKE_EXTRA_ARGS \
-DCMAKE_INSTALL_PREFIX=/usr/i386/ \
-DCMAKE_C_FLAGS="-m32" \
-DCMAKE_CXX_FLAGS="-m32"
@@ -145,7 +159,7 @@ cat <<EOF > $WORK/msan/blocklist.txt
fun:__gxx_personality_*
EOF
-cmake_llvm $CMAKE_EXTRA_ARGS \
+cmake_libcxx $CMAKE_EXTRA_ARGS \
-DLLVM_USE_SANITIZER=Memory \
-DCMAKE_INSTALL_PREFIX=/usr/msan/ \
-DCMAKE_CXX_FLAGS="-fsanitize-blacklist=$WORK/msan/blocklist.txt"
@@ -158,7 +172,7 @@ rm -rf $WORK/msan
mkdir -p $WORK/dfsan
cd $WORK/dfsan
-cmake_llvm $CMAKE_EXTRA_ARGS \
+cmake_libcxx $CMAKE_EXTRA_ARGS \
-DLLVM_USE_SANITIZER=DataFlow \
-DCMAKE_INSTALL_PREFIX=/usr/dfsan/
@@ -197,7 +211,7 @@ rm -rf /usr/local/bin/llvm-*
mv $LLVM_TOOLS_TMPDIR/* /usr/local/bin/
rm -rf $LLVM_TOOLS_TMPDIR
-# Remove binaries from LLVM buld that we don't need.
+# Remove binaries from LLVM build that we don't need.
rm -f \
/usr/local/bin/bugpoint \
/usr/local/bin/llc \
diff --git a/infra/base-images/base-image/Dockerfile b/infra/base-images/base-image/Dockerfile
index 2099ffdd2..bc6035b72 100644
--- a/infra/base-images/base-image/Dockerfile
+++ b/infra/base-images/base-image/Dockerfile
@@ -16,11 +16,11 @@
# Base image for all other images.
-FROM ubuntu:16.04
+FROM ubuntu:20.04
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && \
apt-get upgrade -y && \
- apt-get install -y libc6-dev binutils libgcc-5-dev && \
+ apt-get install -y libc6-dev binutils libgcc-9-dev && \
apt-get autoremove -y
ENV OUT=/out
diff --git a/infra/base-images/base-runner/Dockerfile b/infra/base-images/base-runner/Dockerfile
index f847de026..fadd00acc 100755
--- a/infra/base-images/base-runner/Dockerfile
+++ b/infra/base-images/base-runner/Dockerfile
@@ -45,12 +45,18 @@ RUN apt-get update && apt-get install -y \
libcap2 \
python3 \
python3-pip \
+ python3-setuptools \
unzip \
wget \
zip --no-install-recommends
-RUN git clone https://chromium.googlesource.com/chromium/src/tools/code_coverage /opt/code_coverage && \
- pip3 install -r /opt/code_coverage/requirements.txt
+ENV CODE_COVERAGE_SRC=/opt/code_coverage
+RUN git clone https://chromium.googlesource.com/chromium/src/tools/code_coverage $CODE_COVERAGE_SRC && \
+ cd /opt/code_coverage && \
+ git checkout edba4873b5e8a390e977a64c522db2df18a8b27d && \
+ pip3 install wheel && \
+ pip3 install -r requirements.txt && \
+ pip3 install MarkupSafe==0.23
# Default environment options for various sanitizers.
# Note that these match the settings used in ClusterFuzz and
@@ -91,6 +97,12 @@ RUN wget https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db
rm -f openjdk-15.0.2_linux-x64_bin.tar.gz && \
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip
+# Install JaCoCo for JVM coverage.
+RUN wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.cli/0.8.7/org.jacoco.cli-0.8.7-nodeps.jar -O /opt/jacoco-cli.jar && \
+ wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.agent/0.8.7/org.jacoco.agent-0.8.7-runtime.jar -O /opt/jacoco-agent.jar && \
+ echo "37df187b76888101ecd745282e9cd1ad4ea508d6 /opt/jacoco-agent.jar" | shasum --check && \
+ echo "c1814e7bba5fd8786224b09b43c84fd6156db690 /opt/jacoco-cli.jar" | shasum --check
+
# Do this last to make developing these files easier/faster due to caching.
COPY bad_build_check \
collect_dft \
@@ -98,10 +110,12 @@ COPY bad_build_check \
coverage_helper \
dataflow_tracer.py \
download_corpus \
+ jacoco_report_converter.py \
rcfilt \
reproduce \
run_fuzzer \
parse_options.py \
+ profraw_update.py \
targets_list \
test_all.py \
test_one.py \
diff --git a/infra/base-images/base-runner/bad_build_check b/infra/base-images/base-runner/bad_build_check
index 01f8fbbab..bb328c793 100755
--- a/infra/base-images/base-runner/bad_build_check
+++ b/infra/base-images/base-runner/bad_build_check
@@ -39,7 +39,7 @@ DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD=0
MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD=1000
# Some engines (e.g. honggfuzz) may make a very small number of calls to msan
# for memory poisoning.
-MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD=2
+MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD=3
# Usually, a non UBSan build (e.g. ASan) has 165 calls to UBSan runtime. The
# majority of targets built with UBSan have 200+ UBSan calls, but there are
@@ -90,10 +90,7 @@ function check_engine {
echo "BAD BUILD: $FUZZER seems to have only partial coverage instrumentation."
fi
elif [[ "$FUZZING_ENGINE" == afl ]]; then
- # TODO(https://github.com/google/oss-fuzz/issues/2470): Dont use
- # AFL_DRIVER_DONT_DEFER by default, support .options files in
- # bad_build_check instead.
- AFL_DRIVER_DONT_DEFER=1 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
+ AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
CHECK_PASSED=$(egrep "All set and ready to roll" -c $FUZZER_OUTPUT)
if (( $CHECK_PASSED == 0 )); then
echo "BAD BUILD: fuzzing $FUZZER with afl-fuzz failed."
@@ -136,10 +133,7 @@ function check_startup_crash {
SKIP_SEED_CORPUS=1 run_fuzzer $FUZZER_NAME -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT
CHECK_PASSED=$(egrep "Done $MIN_NUMBER_OF_RUNS runs" -c $FUZZER_OUTPUT)
elif [[ "$FUZZING_ENGINE" = afl ]]; then
- # TODO(https://github.com/google/oss-fuzz/issues/2470): Dont use
- # AFL_DRIVER_DONT_DEFER by default, support .options files in
- # bad_build_check instead.
- AFL_DRIVER_DONT_DEFER=1 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
+ AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
if [ $(egrep "target binary (crashed|terminated)" -c $FUZZER_OUTPUT) -eq 0 ]; then
CHECK_PASSED=1
fi
diff --git a/infra/base-images/base-runner/coverage b/infra/base-images/base-runner/coverage
index a86b00dec..3c7b274e4 100755
--- a/infra/base-images/base-runner/coverage
+++ b/infra/base-images/base-runner/coverage
@@ -19,14 +19,21 @@ cd $OUT
if (( $# > 0 )); then
FUZZ_TARGETS="$@"
else
- FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n')"
+ FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n' | \
+ grep -v -x -F \
+ -e 'llvm-symbolizer' \
+ -e 'jazzer_agent_deploy.jar' \
+ -e 'jazzer_driver' \
+ -e 'jazzer_driver_with_sanitizer')"
fi
-DUMPS_DIR="$OUT/dumps"
-FUZZER_STATS_DIR="$OUT/fuzzer_stats"
-LOGS_DIR="$OUT/logs"
-REPORT_ROOT_DIR="$OUT/report"
-REPORT_PLATFORM_DIR="$OUT/report/linux"
+COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT}
+
+DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps"
+FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats"
+LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs"
+REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report"
+REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/linux"
for directory in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR \
$REPORT_PLATFORM_DIR; do
@@ -54,6 +61,8 @@ objects=""
# Number of CPUs available, this is needed for running tests in parallel.
NPROC=$(nproc)
+CORPUS_DIR=${CORPUS_DIR:-"/corpus"}
+
function run_fuzz_target {
local target=$1
@@ -62,7 +71,7 @@ function run_fuzz_target {
local profraw_file="$DUMPS_DIR/$target.%1m.profraw"
local profraw_file_mask="$DUMPS_DIR/$target.*.profraw"
local profdata_file="$DUMPS_DIR/$target.profdata"
- local corpus_real="/corpus/${target}"
+ local corpus_real="$CORPUS_DIR/${target}"
# -merge=1 requires an output directory, create a new, empty dir for that.
local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
@@ -74,7 +83,7 @@ function run_fuzz_target {
# because (A) corpuses are already minimized; (B) we do not use sancov, and so
# libFuzzer always finishes merge with an empty output dir.
# Use 100s timeout instead of 25s as code coverage builds can be very slow.
- local args="-merge=1 -timeout=100 -close_fd_mask=3 $corpus_dummy $corpus_real"
+ local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
export LLVM_PROFILE_FILE=$profraw_file
timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
@@ -90,6 +99,9 @@ function run_fuzz_target {
return 0
fi
+ # If necessary translate to latest profraw version.
+ profraw_update.py $OUT/$target $profraw_file_mask tmp.profraw
+ mv tmp.profraw $profraw_file_mask
llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file
# Delete unnecessary and (potentially) large .profraw files.
@@ -115,7 +127,7 @@ function run_go_fuzz_target {
local target=$1
echo "Running go target $target"
- export FUZZ_CORPUS_DIR="/corpus/${target}/"
+ export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/"
export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf"
$OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log
# translate from golangish paths to current absolute paths
@@ -125,6 +137,47 @@ function run_go_fuzz_target {
$SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json
}
+function run_java_fuzz_target {
+ local target=$1
+
+ local exec_file="$DUMPS_DIR/$target.exec"
+ local class_dump_dir="$DUMPS_DIR/${target}_classes/"
+ mkdir "$class_dump_dir"
+ local corpus_real="$CORPUS_DIR/${target}"
+
+ # -merge=1 requires an output directory, create a new, empty dir for that.
+ local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
+ rm -rf $corpus_dummy && mkdir -p $corpus_dummy
+
+ # Use 100s timeout instead of 25s as code coverage builds can be very slow.
+ local jacoco_args="destfile=$exec_file,classdumpdir=$class_dump_dir,excludes=com.code_intelligence.jazzer.*"
+ local args="-merge=1 -timeout=100 --nohooks \
+ --additional_jvm_args=-javaagent:/opt/jacoco-agent.jar=$jacoco_args \
+ $corpus_dummy $corpus_real"
+
+ timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
+ if (( $? != 0 )); then
+ echo "Error occurred while running $target:"
+ cat $LOGS_DIR/$target.log
+ fi
+
+ if (( $(du -c $exec_file | tail -n 1 | cut -f 1) == 0 )); then
+ # Skip fuzz targets that failed to produce .exec files.
+ return 0
+ fi
+
+ # Generate XML report only as input to jacoco_report_converter.
+ # Source files are not needed for the summary.
+ local xml_report="$DUMPS_DIR/${target}.xml"
+ local summary_file="$FUZZER_STATS_DIR/$target.json"
+ java -jar /opt/jacoco-cli.jar report $exec_file \
+ --xml $xml_report \
+ --classfiles $class_dump_dir
+
+ # Write llvm-cov summary file.
+ jacoco_report_converter.py $xml_report $summary_file
+}
+
export SYSGOPATH=$GOPATH
export GOPATH=$OUT/$GOPATH
# Run each fuzz target, generate raw coverage dumps.
@@ -136,6 +189,14 @@ for fuzz_target in $FUZZ_TARGETS; do
grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || continue
fi
run_go_fuzz_target $fuzz_target &
+ elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
+ # Continue if not a fuzz target.
+ if [[ $FUZZING_ENGINE != "none" ]]; then
+ grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
+ fi
+
+ echo "Running $fuzz_target"
+ run_java_fuzz_target $fuzz_target &
else
# Continue if not a fuzz target.
if [[ $FUZZING_ENGINE != "none" ]]; then
@@ -175,6 +236,43 @@ if [[ $FUZZING_LANGUAGE == "go" ]]; then
mv merged.data $REPORT_ROOT_DIR/heap.prof
#TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof
echo "Finished generating code coverage report for Go fuzz targets."
+elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
+
+ # From this point on the script does not tolerate any errors.
+ set -e
+
+ # Merge .exec files from the individual targets.
+ jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec
+ java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \
+ --destfile $jacoco_merged_exec
+
+ # Merge .class files from the individual targets.
+ classes_dir=$DUMPS_DIR/classes
+ mkdir $classes_dir
+ for fuzz_target in $FUZZ_TARGETS; do
+ cp -r $DUMPS_DIR/${fuzz_target}_classes/* $classes_dir/
+ done
+
+ # Heuristically determine source directories based on Maven structure.
+ # Always include the $SRC root as it likely contains the fuzzer sources.
+ sourcefiles_args=(--sourcefiles $OUT/$SRC)
+ source_dirs=$(find $OUT/$SRC -type d -name 'java')
+ for source_dir in $source_dirs; do
+ sourcefiles_args+=(--sourcefiles "$source_dir")
+ done
+
+ # Generate HTML and XML reports.
+ xml_report=$REPORT_PLATFORM_DIR/index.xml
+ java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \
+ --html $REPORT_PLATFORM_DIR \
+ --xml $xml_report \
+ --classfiles $classes_dir \
+ "${sourcefiles_args[@]}"
+
+ # Write llvm-cov summary file.
+ jacoco_report_converter.py $xml_report $SUMMARY_FILE
+
+ set +e
else
# From this point on the script does not tolerate any errors.
@@ -206,6 +304,10 @@ else
fi
+# Make sure report is readable.
+chmod -R +r $REPORT_ROOT_DIR
+find $REPORT_ROOT_DIR -type d -exec chmod +x {} +
+
if [[ -n $HTTP_PORT ]]; then
# Serve the report locally.
echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html"
diff --git a/infra/base-images/base-runner/coverage_helper b/infra/base-images/base-runner/coverage_helper
index 22c9cb5d6..4d29ceac8 100755
--- a/infra/base-images/base-runner/coverage_helper
+++ b/infra/base-images/base-runner/coverage_helper
@@ -14,4 +14,4 @@
# limitations under the License.
#
################################################################################
-python3 /opt/code_coverage/coverage_utils.py $@
+python3 $CODE_COVERAGE_SRC/coverage_utils.py $@
diff --git a/infra/base-images/base-runner/jacoco_report_converter.py b/infra/base-images/base-runner/jacoco_report_converter.py
new file mode 100755
index 000000000..3c36065f1
--- /dev/null
+++ b/infra/base-images/base-runner/jacoco_report_converter.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML
+report."""
+import json
+import os
+import sys
+import xml.etree.ElementTree as ET
+
+
def convert(xml):
  """Turns a JaCoCo XML report into an llvm-cov JSON summary."""
  report = ET.fromstring(xml)
  src_files = list_src_files()

  # Since Java compilation does not track source file location, coverage is
  # matched to sources via the fully-qualified class name: for foo.bar.Baz we
  # search for a path in /out/src ending in foo/bar/Baz.java. This assumes a
  # given project only ever contains a single version of a class and that no
  # class name appears as a suffix of another class name.
  files = []
  for class_element in report.findall("./package/class"):
    if "sourcefilename" not in class_element.attrib:
      continue
    # For <class name="foo/Bar" sourcefilename="Bar.java"> this path is
    # "foo/Bar.java".
    canonical_path = os.path.join(
        os.path.dirname(class_element.attrib["name"]),
        class_element.attrib["sourcefilename"])
    files.append({
        # NOTE(review): relative_to_src_path returns a *list* of matching
        # paths; confirm downstream consumers expect a list here rather than
        # a single filename string.
        "filename": relative_to_src_path(src_files, canonical_path),
        "summary": make_element_summary(class_element),
    })

  summary = {
      "type": "oss-fuzz.java.coverage.json.export",
      "version": "1.0.0",
      "data": [{
          "totals": make_element_summary(report),
          "files": files,
      }],
  }
  return json.dumps(summary)
+
+
def list_src_files():
  """Returns a map from basename to full path for all files in $OUT/$SRC.

  Walks the copy of the source tree under the build output directory and
  records, for every file basename, all the paths it was found at (with the
  $OUT prefix stripped so the paths look like /src/...).
  """
  filename_to_paths = {}
  out_path = os.environ["OUT"] + "/"
  src_path = os.environ["SRC"]
  src_in_out = out_path + src_path
  for dirpath, _, filenames in os.walk(src_in_out):
    for filename in filenames:
      full_path = dirpath + "/" + filename
      # Map /out//src/... to /src/.... Use a distinct name here: the previous
      # version rebound src_path inside the loop, shadowing the $SRC value
      # read above, which was confusing and bug-prone.
      rel_path = full_path[len(out_path):]
      filename_to_paths.setdefault(filename, []).append(rel_path)
  return filename_to_paths
+
+
def relative_to_src_path(src_files, canonical_path):
  """Returns all paths in src_files ending in canonical_path."""
  candidates = src_files.get(os.path.basename(canonical_path))
  if not candidates:
    # Basename never seen while walking $OUT/$SRC: no match.
    return []
  suffix = "/" + canonical_path
  return [path for path in candidates if path.endswith(suffix)]
+
+
def make_element_summary(element):
  """Returns a coverage summary for an element in the XML report."""

  def find_counter(counter_type):
    # Counters are direct children like <counter type="LINE" .../>.
    return element.find("./counter[@type='%s']" % counter_type)

  summary = {
      "functions": make_counter_summary(find_counter("METHOD")),
      "lines": make_counter_summary(find_counter("LINE")),
  }

  # JaCoCo tracks branch coverage, which counts the covered control-flow edges
  # between llvm-cov's regions instead of the covered regions themselves. For
  # non-trivial code parts, the difference is usually negligible. However, if
  # all methods of a class consist of a single region only (no branches),
  # JaCoCo does not report any branch coverage even if there is instruction
  # coverage. Since this would give incorrect results for CI Fuzz purposes, we
  # increase the regions counter by 1 if there is any amount of instruction
  # coverage.
  instruction_counter = find_counter("INSTRUCTION")
  has_some_coverage = (instruction_counter is not None and
                       int(instruction_counter.attrib["covered"]) > 0)
  summary["regions"] = make_counter_summary(
      find_counter("BRANCH"),
      covered_adjustment=1 if has_some_coverage else 0)

  return summary
+
+
def make_counter_summary(counter_element, covered_adjustment=0):
  """Turns a JaCoCo <counter> element into an llvm-cov totals entry."""
  covered = covered_adjustment
  missed = 0
  if counter_element is not None:
    covered += int(counter_element.attrib["covered"])
    missed += int(counter_element.attrib["missed"])
  total = covered + missed
  return {
      "covered": covered,
      "notcovered": missed,
      "count": total,
      # Avoid a division by zero for elements without any countable items.
      "percent": (100.0 * covered) / total if total else 0,
  }
+
+
def main():
  """Produces an llvm-cov style JSON summary from a JaCoCo XML report."""
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: %s <path_to_jacoco_xml> <out_path_json>\n' %
                     sys.argv[0])
    return 1

  xml_path, json_path = sys.argv[1], sys.argv[2]
  # Read the JaCoCo XML report, convert it, and write the JSON summary.
  with open(xml_path, 'r') as xml_file:
    json_summary = convert(xml_file.read())
  with open(json_path, 'w') as json_file:
    json_file.write(json_summary)

  return 0


if __name__ == "__main__":
  sys.exit(main())
diff --git a/infra/base-images/base-runner/profraw_update.py b/infra/base-images/base-runner/profraw_update.py
new file mode 100644
index 000000000..408b5fb93
--- /dev/null
+++ b/infra/base-images/base-runner/profraw_update.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Helper script for upgrading a profraw file to latest version."""
+
+from collections import namedtuple
+import struct
+import subprocess
+import sys
+
+HeaderGeneric = namedtuple('HeaderGeneric', 'magic version')
+HeaderVersion7 = namedtuple(
+ 'HeaderVersion7',
+ 'BinaryIdsSize DataSize PaddingBytesBeforeCounters CountersSize \
+ PaddingBytesAfterCounters NamesSize CountersDelta NamesDelta ValueKindLast')
+
+PROFRAW_MAGIC = 0xff6c70726f667281
+
+
def relativize_address(data, offset, databegin, sect_prf_cnts, sect_prf_data):
  """Turns an absolute offset into a relative one."""
  (value,) = struct.unpack_from('Q', data, offset)
  if sect_prf_cnts <= value < sect_prf_data:
    # The value is an address in the right section: rebase it against
    # databegin, wrapping modulo 2^64, and write it back in place.
    rebased = (value - databegin) & 0xffffffffffffffff
    data[offset:offset + 8] = struct.pack('Q', rebased)
+
+
def upgrade(data, sect_prf_cnts, sect_prf_data):
  """Upgrades profraw data, knowing the sections addresses.

  Args:
    data: bytearray with the profraw file contents; mutated in place by
      relativize_address and possibly rebound to an upgraded copy.
    sect_prf_cnts: address of the __llvm_prf_cnts section in the binary.
    sect_prf_data: address of the __llvm_prf_data section in the binary.

  Returns:
    The upgraded profraw data.

  Raises:
    Exception: on a bad magic number or an unhandled header version.
  """
  generic_header = HeaderGeneric._make(struct.unpack('QQ', data[:16]))
  if generic_header.magic != PROFRAW_MAGIC:
    raise Exception('Bad magic.')
  if generic_header.version == 5:
    generic_header = generic_header._replace(version=7)
    # Upgrade from version 5 to 7 by adding binaryids field.
    # struct.pack('QQ', ...) requires the two header fields as separate
    # arguments, so the namedtuple must be unpacked (passing it whole raises
    # struct.error). Wrap in bytearray() so the result stays mutable:
    # relativize_address below assigns into it byte by byte.
    data = bytearray(
        struct.pack('QQ', *generic_header) + struct.pack('Q', 0)) + data[16:]
  if generic_header.version < 7:
    raise Exception('Unhandled version.')
  v7_header = HeaderVersion7._make(struct.unpack('QQQQQQQQQ', data[16:88]))

  if v7_header.BinaryIdsSize % 8 != 0:
    # Adds padding for binary ids.
    # cf commit b9f547e8e51182d32f1912f97a3e53f4899ea6be
    # cf https://reviews.llvm.org/D110365
    padlen = 8 - (v7_header.BinaryIdsSize % 8)
    v7_header = v7_header._replace(BinaryIdsSize=v7_header.BinaryIdsSize +
                                   padlen)
    data = data[:16] + struct.pack('Q', v7_header.BinaryIdsSize) + data[24:]
    # NOTE(review): padding is inserted at 88 + the *padded* size; confirm it
    # should not be at 88 + the original BinaryIdsSize instead.
    data = data[:88 + v7_header.BinaryIdsSize] + bytes(
        padlen) + data[88 + v7_header.BinaryIdsSize:]

  if v7_header.CountersDelta != sect_prf_cnts - sect_prf_data:
    # Rust linking seems to add an offset...
    sect_prf_data = v7_header.CountersDelta - sect_prf_cnts + sect_prf_data
    sect_prf_cnts = v7_header.CountersDelta

  dataref = sect_prf_data
  # 64 is the offset of the CountersDelta field in the version 7 header.
  relativize_address(data, 64, dataref, sect_prf_cnts, sect_prf_data)

  offset = 88 + v7_header.BinaryIdsSize
  # This also works for C+Rust binaries compiled with
  # clang-14/rust-nightly-clang-13.
  for _ in range(v7_header.DataSize):
    # 16 is the offset of CounterPtr in ProfrawData structure.
    relativize_address(data, offset + 16, dataref, sect_prf_cnts, sect_prf_data)
    # We need this because of CountersDelta -= sizeof(*SrcData);
    # seen in __llvm_profile_merge_from_buffer.
    dataref += 44 + 2 * (v7_header.ValueKindLast + 1)
    # This is the size of one ProfrawData structure.
    offset += 44 + 2 * (v7_header.ValueKindLast + 1)

  return data
+
+
def main():
  """Helper script for upgrading a profraw file to latest version.

  Usage: profraw_update.py <binary> <profraw> <output>. Returns 0 on success,
  1 on bad usage, 2 when the section addresses cannot be determined.
  """
  if len(sys.argv) != 4:
    sys.stderr.write('Usage: %s <binary> <profraw> <output>\n' % sys.argv[0])
    return 1

  # First find llvm profile sections addresses in the elf, quick and dirty.
  process = subprocess.Popen(['readelf', '-S', sys.argv[1]],
                             stdout=subprocess.PIPE)
  output, _ = process.communicate()
  # Only stdout is piped, so communicate() always returns None for stderr and
  # the old `if err:` check could never fire; the exit status is the reliable
  # failure signal.
  if process.returncode != 0:
    print('readelf failed')
    return 2
  sect_prf_cnts = None
  sect_prf_data = None
  for line in output.split(b'\n'):
    if b'__llvm_prf_cnts' in line:
      sect_prf_cnts = int(line.split()[4], 16)
    elif b'__llvm_prf_data' in line:
      sect_prf_data = int(line.split()[4], 16)
  if sect_prf_cnts is None or sect_prf_data is None:
    # Without both sections the upgrade below would crash with a NameError.
    print('profile sections not found')
    return 2

  # Then open and read the input profraw file.
  with open(sys.argv[2], 'rb') as input_file:
    profraw_base = bytearray(input_file.read())
  # Do the upgrade, returning a bytes object.
  profraw_latest = upgrade(profraw_base, sect_prf_cnts, sect_prf_data)
  # Write the output to the file given to the command line.
  with open(sys.argv[3], 'wb') as output_file:
    output_file.write(profraw_latest)

  return 0


if __name__ == '__main__':
  sys.exit(main())
diff --git a/infra/base-images/base-runner/run_fuzzer b/infra/base-images/base-runner/run_fuzzer
index b9bc8d9d6..426688ea3 100755
--- a/infra/base-images/base-runner/run_fuzzer
+++ b/infra/base-images/base-runner/run_fuzzer
@@ -26,7 +26,14 @@ DEBUGGER=${DEBUGGER:-}
FUZZER=$1
shift
-CORPUS_DIR=${CORPUS_DIR:-"/tmp/${FUZZER}_corpus"}
+# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus
+# from ClusterFuzz.
+CORPUS_DIR=${CORPUS_DIR:-}
+if [ -z "$CORPUS_DIR" ]
+then
+ CORPUS_DIR="/tmp/${FUZZER}_corpus"
+ rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
+fi
SANITIZER=${SANITIZER:-}
if [ -z $SANITIZER ]; then
@@ -63,14 +70,13 @@ function get_dictionary() {
fi
}
-rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT
SEED_CORPUS="${FUZZER}_seed_corpus.zip"
if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then
echo "Using seed corpus: $SEED_CORPUS"
- unzip -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
+ unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
fi
OPTIONS_FILE="${FUZZER}.options"
@@ -103,19 +109,18 @@ if [[ "$FUZZING_ENGINE" = afl ]]; then
export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0"
export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
export AFL_SKIP_CPUFREQ=1
- export AFL_NO_AFFINITY=1
+ export AFL_TRY_AFFINITY=1
export AFL_FAST_CAL=1
+ export AFL_CMPLOG_ONLY_NEW=1
+ export AFL_FORKSRV_INIT_TMOUT=30000
# If $OUT/afl_cmplog.txt is present this means the target was compiled for
- # CMPLOG. So we have to add the proper parameters to afl-fuzz. `-l 2` is
- # CMPLOG level 2, which will colorize larger files but not huge files and
- # not enable transform analysis unless there have been several cycles without
- # any finds.
- test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -l 2 -c $OUT/$FUZZER"
+ # CMPLOG. So we have to add the proper parameters to afl-fuzz.
+ test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER"
# If $OUT/afl++.dict we load it as a dictionary for afl-fuzz.
test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict"
- # Ensure timeout is a bit large than 1sec as some of the OSS-Fuzz fuzzers
- # are slower than this.
- AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+"
+ # Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers
+ # are slower than this.
+ AFL_FUZZER_ARGS="$FUZZER_ARGS $AFL_FUZZER_ARGS -t 5000+"
# AFL expects at least 1 file in the input dir.
echo input > ${CORPUS_DIR}/input
echo afl++ setup:
@@ -135,7 +140,7 @@ elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then
# -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput)
# -f: location of the initial (and destination) file corpus
# -n: number of fuzzing threads (and processes)
- CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\""
+ CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $FUZZER_ARGS $* -- \"$OUT/$FUZZER\""
else
diff --git a/infra/base-images/base-runner/targets_list b/infra/base-images/base-runner/targets_list
index d35534258..95615c811 100755
--- a/infra/base-images/base-runner/targets_list
+++ b/infra/base-images/base-runner/targets_list
@@ -2,7 +2,8 @@
for binary in $(find $OUT/ -executable -type f); do
[[ "$binary" != *.so ]] || continue
- file "$binary" | grep ELF > /dev/null 2>&1 || continue
+ [[ $(basename "$binary") != jazzer_driver* ]] || continue
+ file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue
grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue
basename "$binary"
diff --git a/infra/base-images/base-runner/test_all.py b/infra/base-images/base-runner/test_all.py
index 925ebde69..16dfcbfa9 100755
--- a/infra/base-images/base-runner/test_all.py
+++ b/infra/base-images/base-runner/test_all.py
@@ -20,12 +20,12 @@ import contextlib
import multiprocessing
import os
import re
-import shutil
import subprocess
import stat
import sys
+import tempfile
-TMP_FUZZER_DIR = '/tmp/not-out'
+BASE_TMP_FUZZER_DIR = '/tmp/not-out'
EXECUTABLE = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
@@ -37,14 +37,6 @@ IGNORED_TARGETS = [
IGNORED_TARGETS_RE = re.compile('^' + r'$|^'.join(IGNORED_TARGETS) + '$')
-def recreate_directory(directory):
- """Creates |directory|. If it already exists than deletes it first before
- creating."""
- if os.path.exists(directory):
- shutil.rmtree(directory)
- os.mkdir(directory)
-
-
def move_directory_contents(src_directory, dst_directory):
"""Moves contents of |src_directory| to |dst_directory|."""
# Use mv because mv preserves file permissions. If we don't preserve file
@@ -67,7 +59,15 @@ def is_elf(filepath):
return b'ELF' in result.stdout
-def find_fuzz_targets(directory, fuzzing_language):
def is_shell_script(filepath):
  """Returns True if |filepath| is a shell script."""
  # Ask the `file` utility for the file's type; check=False because a nonzero
  # exit simply yields empty output and a False result.
  file_output = subprocess.run(['file', filepath],
                               stdout=subprocess.PIPE,
                               check=False).stdout
  return b'shell script' in file_output
+
+
+def find_fuzz_targets(directory):
"""Returns paths to fuzz targets in |directory|."""
# TODO(https://github.com/google/oss-fuzz/issues/4585): Use libClusterFuzz for
# this.
@@ -84,10 +84,10 @@ def find_fuzz_targets(directory, fuzzing_language):
continue
if not os.stat(path).st_mode & EXECUTABLE:
continue
- # Fuzz targets are expected to be ELF binaries for languages other than
- # Python and Java.
- if (fuzzing_language != 'python' and fuzzing_language != 'jvm' and
- not is_elf(path)):
+ # Fuzz targets can either be ELF binaries or shell scripts (e.g. wrapper
+ # scripts for Python and JVM targets or rules_fuzzing builds with runfiles
+ # trees).
+ if not is_elf(path) and not is_shell_script(path):
continue
if os.getenv('FUZZING_ENGINE') != 'none':
with open(path, 'rb') as file_handle:
@@ -132,51 +132,66 @@ def has_ignored_targets(out_dir):
@contextlib.contextmanager
def use_different_out_dir():
- """Context manager that moves OUT to TMP_FUZZER_DIR. This is useful for
- catching hardcoding. Note that this sets the environment variable OUT and
- therefore must be run before multiprocessing.Pool is created. Resets OUT at
- the end."""
+ """Context manager that moves OUT to subdirectory of BASE_TMP_FUZZER_DIR. This
+ is useful for catching hardcoding. Note that this sets the environment
+ variable OUT and therefore must be run before multiprocessing.Pool is created.
+ Resets OUT at the end."""
# Use a fake OUT directory to catch path hardcoding that breaks on
# ClusterFuzz.
- out = os.getenv('OUT')
- initial_out = out
- recreate_directory(TMP_FUZZER_DIR)
- out = TMP_FUZZER_DIR
- # Set this so that run_fuzzer which is called by bad_build_check works
- # properly.
- os.environ['OUT'] = out
- # We move the contents of the directory because we can't move the
- # directory itself because it is a mount.
- move_directory_contents(initial_out, out)
- try:
- yield out
- finally:
- move_directory_contents(out, initial_out)
- shutil.rmtree(out)
- os.environ['OUT'] = initial_out
-
-
-def test_all_outside_out(fuzzing_language, allowed_broken_targets_percentage):
+ initial_out = os.getenv('OUT')
+ os.makedirs(BASE_TMP_FUZZER_DIR, exist_ok=True)
+ # Use a random subdirectory of BASE_TMP_FUZZER_DIR to allow running multiple
+ # instances of test_all in parallel (useful for integration testing).
+ with tempfile.TemporaryDirectory(dir=BASE_TMP_FUZZER_DIR) as out:
+ # Set this so that run_fuzzer which is called by bad_build_check works
+ # properly.
+ os.environ['OUT'] = out
+ # We move the contents of the directory because we can't move the
+ # directory itself because it is a mount.
+ move_directory_contents(initial_out, out)
+ try:
+ yield out
+ finally:
+ move_directory_contents(out, initial_out)
+ os.environ['OUT'] = initial_out
+
+
+def test_all_outside_out(allowed_broken_targets_percentage):
"""Wrapper around test_all that changes OUT and returns the result."""
with use_different_out_dir() as out:
- return test_all(out, fuzzing_language, allowed_broken_targets_percentage)
+ return test_all(out, allowed_broken_targets_percentage)
-def test_all(out, fuzzing_language, allowed_broken_targets_percentage):
+def test_all(out, allowed_broken_targets_percentage):
"""Do bad_build_check on all fuzz targets."""
# TODO(metzman): Refactor so that we can convert test_one to python.
- fuzz_targets = find_fuzz_targets(out, fuzzing_language)
+ fuzz_targets = find_fuzz_targets(out)
if not fuzz_targets:
print('ERROR: No fuzz targets found.')
return False
pool = multiprocessing.Pool()
bad_build_results = pool.map(do_bad_build_check, fuzz_targets)
+ pool.close()
+ pool.join()
broken_targets = get_broken_fuzz_targets(bad_build_results, fuzz_targets)
broken_targets_count = len(broken_targets)
if not broken_targets_count:
return True
+ print('Retrying failed fuzz targets sequentially', broken_targets_count)
+ pool = multiprocessing.Pool(1)
+ retry_targets = []
+ for broken_target, result in broken_targets:
+ retry_targets.append(broken_target)
+ bad_build_results = pool.map(do_bad_build_check, retry_targets)
+ pool.close()
+ pool.join()
+ broken_targets = get_broken_fuzz_targets(bad_build_results, broken_targets)
+ broken_targets_count = len(broken_targets)
+ if not broken_targets_count:
+ return True
+
print('Broken fuzz targets', broken_targets_count)
total_targets_count = len(fuzz_targets)
broken_targets_percentage = 100 * broken_targets_count / total_targets_count
@@ -211,11 +226,8 @@ def get_allowed_broken_targets_percentage():
def main():
"""Does bad_build_check on all fuzz targets in parallel. Returns 0 on success.
Returns 1 on failure."""
- # Set these environment variables here so that stdout
- fuzzing_language = os.getenv('FUZZING_LANGUAGE')
allowed_broken_targets_percentage = get_allowed_broken_targets_percentage()
- if not test_all_outside_out(fuzzing_language,
- allowed_broken_targets_percentage):
+ if not test_all_outside_out(allowed_broken_targets_percentage):
return 1
return 0
diff --git a/infra/base-images/base-runner/test_all_test.py b/infra/base-images/base-runner/test_all_test.py
index 3771ec231..b3077ec1e 100644
--- a/infra/base-images/base-runner/test_all_test.py
+++ b/infra/base-images/base-runner/test_all_test.py
@@ -25,15 +25,13 @@ class TestTestAll(unittest.TestCase):
@mock.patch('test_all.find_fuzz_targets', return_value=[])
@mock.patch('builtins.print')
- def test_test_all_no_fuzz_targets(self, mocked_print, _):
+ def test_test_all_no_fuzz_targets(self, mock_print, _):
"""Tests that test_all returns False when there are no fuzz targets."""
outdir = '/out'
- fuzzing_language = 'c++'
allowed_broken_targets_percentage = 0
self.assertFalse(
- test_all.test_all(outdir, fuzzing_language,
- allowed_broken_targets_percentage))
- mocked_print.assert_called_with('ERROR: No fuzz targets found.')
+ test_all.test_all(outdir, allowed_broken_targets_percentage))
+ mock_print.assert_called_with('ERROR: No fuzz targets found.')
if __name__ == '__main__':
diff --git a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py
deleted file mode 100755
index 04aa4207c..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py
+++ /dev/null
@@ -1,175 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-import os
-import subprocess
-import sys
-
-import msan_build
-
-GCC_ONLY_ARGS = [
- '-aux-info',
-]
-
-
-def InvokedAsGcc():
- """Return whether or not we're pretending to be GCC."""
- return sys.argv[0].endswith('gcc') or sys.argv[0].endswith('g++')
-
-
-def Is32Bit(args):
- """Return whether or not we're 32-bit."""
- M32_BIT_ARGS = [
- '-m32',
- '-mx32',
- ]
-
- return any(arg in M32_BIT_ARGS for arg in args)
-
-
-def FilterWlArg(arg):
- """Remove -z,defs and equivalents from a single -Wl option."""
- parts = arg.split(',')[1:]
-
- filtered = []
- for part in parts:
- if part == 'defs':
- removed = filtered.pop()
- assert removed == '-z'
- continue
-
- if part == '--no-undefined':
- continue
-
- filtered.append(part)
-
- if filtered:
- return '-Wl,' + ','.join(filtered)
-
- # Filtered entire argument.
- return None
-
-
-def _RemoveLastMatching(l, find):
- for i in xrange(len(l) - 1, -1, -1):
- if l[i] == find:
- del l[i]
- return
-
- raise IndexError('Not found')
-
-
-def RemoveZDefs(args):
- """Remove unsupported -Wl,-z,defs linker option."""
- filtered = []
-
- for arg in args:
- if arg == '-Wl,defs':
- _RemoveLastMatching(filtered, '-Wl,-z')
- continue
-
- if arg == '-Wl,--no-undefined':
- continue
-
- if arg.startswith('-Wl,'):
- arg = FilterWlArg(arg)
- if not arg:
- continue
-
- filtered.append(arg)
-
- return filtered
-
-
-def GetCompilerArgs(args, is_cxx):
- """Generate compiler args."""
- compiler_args = args[1:]
-
- if Is32Bit(args):
- # 32 bit builds not supported.
- compiler_args.extend([
- '-fno-sanitize=memory',
- '-fno-sanitize-memory-track-origins',
- ])
-
- return compiler_args
-
- compiler_args = RemoveZDefs(compiler_args)
- compiler_args.extend([
- # FORTIFY_SOURCE is not supported by sanitizers.
- '-U_FORTIFY_SOURCE',
- '-Wp,-U_FORTIFY_SOURCE',
- # Reduce binary size.
- '-gline-tables-only',
- # Disable all warnings.
- '-w',
- # LTO isn't supported.
- '-fno-lto',
- ])
-
- if InvokedAsGcc():
- compiler_args.extend([
- # For better compatibility with flags passed via -Wa,...
- '-fno-integrated-as',
- ])
-
- if '-fsanitize=memory' not in args:
- # If MSan flags weren't added for some reason, add them here.
- compiler_args.extend(msan_build.GetInjectedFlags())
-
- if is_cxx:
- compiler_args.append('-stdlib=libc++')
-
- return compiler_args
-
-
-def FindRealClang():
- """Return path to real clang."""
- return os.environ['REAL_CLANG_PATH']
-
-
-def FallbackToGcc(args):
- """Check whether if we should fall back to GCC."""
- if not InvokedAsGcc():
- return False
-
- return any(arg in GCC_ONLY_ARGS for arg in args[1:])
-
-
-def main(args):
- if FallbackToGcc(args):
- sys.exit(subprocess.call(['/usr/bin/' + os.path.basename(args[0])] +
- args[1:]))
-
- is_cxx = args[0].endswith('++')
- real_clang = FindRealClang()
-
- if is_cxx:
- real_clang += '++'
-
- args = [real_clang] + GetCompilerArgs(args, is_cxx)
- debug_log_path = os.getenv('WRAPPER_DEBUG_LOG_PATH')
- if debug_log_path:
- with open(debug_log_path, 'a') as f:
- f.write(str(args) + '\n')
-
- sys.exit(subprocess.call(args))
-
-
-if __name__ == '__main__':
- main(sys.argv)
diff --git a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py
deleted file mode 100644
index a05592d38..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""Tests for compiler_wrapper."""
-
-from __future__ import print_function
-
-import unittest
-
-import compiler_wrapper
-
-
-class CompilerWrapperTest(unittest.TestCase):
-
- def testFilterZDefs(self):
- self.assertListEqual(
- ['arg'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,defs']))
-
- self.assertListEqual(
- ['arg'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,--no-undefined']))
-
- self.assertListEqual(
- ['arg', '-Wl,-z,relro'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,relro']))
-
- self.assertListEqual(
- ['arg', '-Wl,-soname,lib.so.1,-z,relro'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-soname,lib.so.1,-z,defs,-z,relro']))
-
- self.assertListEqual(
- ['arg', '-Wl,-z,relro'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,relro,-z,defs']))
-
- self.assertListEqual(
- ['arg'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z', '-Wl,defs']))
-
- self.assertListEqual(
- ['arg', 'arg2'],
- compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z', 'arg2', '-Wl,--no-undefined']))
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/infra/base-images/base-sanitizer-libs-builder/msan_build.py b/infra/base-images/base-sanitizer-libs-builder/msan_build.py
deleted file mode 100755
index 5ea00ab10..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/msan_build.py
+++ /dev/null
@@ -1,460 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-import argparse
-import imp
-import os
-import multiprocessing
-import resource
-import shutil
-import subprocess
-import tempfile
-
-import apt
-from apt import debfile
-
-from packages import package
-import wrapper_utils
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-PACKAGES_DIR = os.path.join(SCRIPT_DIR, 'packages')
-
-TRACK_ORIGINS_ARG = '-fsanitize-memory-track-origins='
-
-INJECTED_ARGS = [
- '-fsanitize=memory',
- '-fsanitize-recover=memory',
- '-fPIC',
- '-fno-omit-frame-pointer',
-]
-
-
-class MSanBuildException(Exception):
- """Base exception."""
-
-
-def GetTrackOriginsFlag():
- """Get the track origins flag."""
- if os.getenv('MSAN_NO_TRACK_ORIGINS'):
- return TRACK_ORIGINS_ARG + '0'
-
- return TRACK_ORIGINS_ARG + '2'
-
-
-def GetInjectedFlags():
- return INJECTED_ARGS + [GetTrackOriginsFlag()]
-
-
-def SetUpEnvironment(work_dir):
- """Set up build environment."""
- env = {}
- env['REAL_CLANG_PATH'] = subprocess.check_output(['which', 'clang']).strip()
- print('Real clang at', env['REAL_CLANG_PATH'])
- compiler_wrapper_path = os.path.join(SCRIPT_DIR, 'compiler_wrapper.py')
-
- # Symlink binaries into TMP/bin
- bin_dir = os.path.join(work_dir, 'bin')
- os.mkdir(bin_dir)
-
- dpkg_host_architecture = wrapper_utils.DpkgHostArchitecture()
- wrapper_utils.CreateSymlinks(
- compiler_wrapper_path,
- bin_dir,
- [
- 'clang',
- 'clang++',
- # Not all build rules respect $CC/$CXX, so make additional symlinks.
- 'gcc',
- 'g++',
- 'cc',
- 'c++',
- dpkg_host_architecture + '-gcc',
- dpkg_host_architecture + '-g++',
- ])
-
- env['CC'] = os.path.join(bin_dir, 'clang')
- env['CXX'] = os.path.join(bin_dir, 'clang++')
-
- MSAN_OPTIONS = ' '.join(GetInjectedFlags())
-
- # We don't use nostrip because some build rules incorrectly break when it is
- # passed. Instead we install our own no-op strip binaries.
- env['DEB_BUILD_OPTIONS'] = ('nocheck parallel=%d' %
- multiprocessing.cpu_count())
- env['DEB_CFLAGS_APPEND'] = MSAN_OPTIONS
- env['DEB_CXXFLAGS_APPEND'] = MSAN_OPTIONS + ' -stdlib=libc++'
- env['DEB_CPPFLAGS_APPEND'] = MSAN_OPTIONS
- env['DEB_LDFLAGS_APPEND'] = MSAN_OPTIONS
- env['DPKG_GENSYMBOLS_CHECK_LEVEL'] = '0'
-
- # debian/rules can set DPKG_GENSYMBOLS_CHECK_LEVEL explicitly, so override it.
- gen_symbols_wrapper = ('#!/bin/sh\n'
- 'export DPKG_GENSYMBOLS_CHECK_LEVEL=0\n'
- '/usr/bin/dpkg-gensymbols "$@"\n')
-
- wrapper_utils.InstallWrapper(bin_dir, 'dpkg-gensymbols', gen_symbols_wrapper)
-
- # Install no-op strip binaries.
- no_op_strip = ('#!/bin/sh\n' 'exit 0\n')
- wrapper_utils.InstallWrapper(bin_dir, 'strip', no_op_strip,
- [dpkg_host_architecture + '-strip'])
-
- env['PATH'] = bin_dir + ':' + os.environ['PATH']
-
- # nocheck doesn't disable override_dh_auto_test. So we have this hack to try
- # to disable "make check" or "make test" invocations.
- make_wrapper = ('#!/bin/bash\n'
- 'if [ "$1" = "test" ] || [ "$1" = "check" ]; then\n'
- ' exit 0\n'
- 'fi\n'
- '/usr/bin/make "$@"\n')
- wrapper_utils.InstallWrapper(bin_dir, 'make', make_wrapper)
-
- # Prevent entire build from failing because of bugs/uninstrumented in tools
- # that are part of the build.
- msan_log_dir = os.path.join(work_dir, 'msan')
- os.mkdir(msan_log_dir)
- msan_log_path = os.path.join(msan_log_dir, 'log')
- env['MSAN_OPTIONS'] = ('halt_on_error=0:exitcode=0:report_umrs=0:log_path=' +
- msan_log_path)
-
- # Increase maximum stack size to prevent tests from failing.
- limit = 128 * 1024 * 1024
- resource.setrlimit(resource.RLIMIT_STACK, (limit, limit))
- return env
-
-
-def FindPackageDebs(package_name, work_directory):
- """Find package debs."""
- deb_paths = []
- cache = apt.Cache()
-
- for filename in os.listdir(work_directory):
- file_path = os.path.join(work_directory, filename)
- if not file_path.endswith('.deb'):
- continue
-
- # Matching package name.
- deb = debfile.DebPackage(file_path)
- if deb.pkgname == package_name:
- deb_paths.append(file_path)
- continue
-
- # Also include -dev packages that depend on the runtime package.
- pkg = cache[deb.pkgname]
- if pkg.section != 'libdevel' and pkg.section != 'universe/libdevel':
- continue
-
- # But ignore -dbg packages.
- if deb.pkgname.endswith('-dbg'):
- continue
-
- for dependency in deb.depends:
- if any(dep[0] == package_name for dep in dependency):
- deb_paths.append(file_path)
- break
-
- return deb_paths
-
-
-def ExtractLibraries(deb_paths, work_directory, output_directory):
- """Extract libraries from .deb packages."""
- extract_directory = os.path.join(work_directory, 'extracted')
- if os.path.exists(extract_directory):
- shutil.rmtree(extract_directory, ignore_errors=True)
-
- os.mkdir(extract_directory)
-
- for deb_path in deb_paths:
- subprocess.check_call(['dpkg-deb', '-x', deb_path, extract_directory])
-
- extracted = []
- for root, _, filenames in os.walk(extract_directory):
- if 'libx32' in root or 'lib32' in root:
- continue
-
- for filename in filenames:
- if (not filename.endswith('.so') and '.so.' not in filename and
- not filename.endswith('.a') and '.a' not in filename):
- continue
-
- file_path = os.path.join(root, filename)
- rel_file_path = os.path.relpath(file_path, extract_directory)
- rel_directory = os.path.dirname(rel_file_path)
-
- target_dir = os.path.join(output_directory, rel_directory)
- if not os.path.exists(target_dir):
- os.makedirs(target_dir)
-
- target_file_path = os.path.join(output_directory, rel_file_path)
- extracted.append(target_file_path)
-
- if os.path.lexists(target_file_path):
- os.remove(target_file_path)
-
- if os.path.islink(file_path):
- link_path = os.readlink(file_path)
- if os.path.isabs(link_path):
- # Make absolute links relative.
- link_path = os.path.relpath(link_path,
- os.path.join('/', rel_directory))
-
- os.symlink(link_path, target_file_path)
- else:
- shutil.copy2(file_path, target_file_path)
-
- return extracted
-
-
-def GetPackage(package_name):
- apt_cache = apt.Cache()
- version = apt_cache[package_name].candidate
- source_name = version.source_name
- local_source_name = source_name.replace('.', '_')
-
- custom_package_path = os.path.join(PACKAGES_DIR, local_source_name) + '.py'
- if not os.path.exists(custom_package_path):
- print('Using default package build steps.')
- return package.Package(source_name, version)
-
- print('Using custom package build steps.')
- module = imp.load_source('packages.' + local_source_name, custom_package_path)
- return module.Package(version)
-
-
-def PatchRpath(path, output_directory):
- """Patch rpath to be relative to $ORIGIN."""
- try:
- rpaths = subprocess.check_output(['patchelf', '--print-rpath',
- path]).strip()
- except subprocess.CalledProcessError:
- return
-
- if not rpaths:
- return
-
- processed_rpath = []
- rel_directory = os.path.join(
- '/', os.path.dirname(os.path.relpath(path, output_directory)))
-
- for rpath in rpaths.split(':'):
- if '$ORIGIN' in rpath:
- # Already relative.
- processed_rpath.append(rpath)
- continue
-
- processed_rpath.append(
- os.path.join('$ORIGIN', os.path.relpath(rpath, rel_directory)))
-
- processed_rpath = ':'.join(processed_rpath)
- print('Patching rpath for', path, 'to', processed_rpath)
- subprocess.check_call(
- ['patchelf', '--force-rpath', '--set-rpath', processed_rpath, path])
-
-
-def _CollectDependencies(apt_cache, pkg, cache, dependencies):
- """Collect dependencies that need to be built."""
- C_OR_CXX_DEPS = [
- 'libc++1',
- 'libc6',
- 'libc++abi1',
- 'libgcc1',
- 'libstdc++6',
- ]
-
- BLACKLISTED_PACKAGES = [
- 'libcapnp-0.5.3', # fails to compile on newer clang.
- 'libllvm5.0',
- 'libmircore1',
- 'libmircommon7',
- 'libmirclient9',
- 'libmirprotobuf3',
- 'multiarch-support',
- ]
-
- if pkg.name in BLACKLISTED_PACKAGES:
- return False
-
- if pkg.section != 'libs' and pkg.section != 'universe/libs':
- return False
-
- if pkg.name in C_OR_CXX_DEPS:
- return True
-
- is_c_or_cxx = False
- for dependency in pkg.candidate.dependencies:
- dependency = dependency[0]
-
- if dependency.name in cache:
- is_c_or_cxx |= cache[dependency.name]
- else:
- is_c_or_cxx |= _CollectDependencies(apt_cache, apt_cache[dependency.name],
- cache, dependencies)
- if is_c_or_cxx:
- dependencies.append(pkg.name)
-
- cache[pkg.name] = is_c_or_cxx
- return is_c_or_cxx
-
-
-def GetBuildList(package_name):
- """Get list of packages that need to be built including dependencies."""
- apt_cache = apt.Cache()
- pkg = apt_cache[package_name]
-
- dependencies = []
- _CollectDependencies(apt_cache, pkg, {}, dependencies)
- return dependencies
-
-
-class MSanBuilder(object):
- """MSan builder."""
-
- def __init__(self,
- debug=False,
- log_path=None,
- work_dir=None,
- no_track_origins=False):
- self.debug = debug
- self.log_path = log_path
- self.work_dir = work_dir
- self.no_track_origins = no_track_origins
- self.env = None
-
- def __enter__(self):
- if not self.work_dir:
- self.work_dir = tempfile.mkdtemp(dir=self.work_dir)
-
- if os.path.exists(self.work_dir):
- shutil.rmtree(self.work_dir, ignore_errors=True)
-
- os.makedirs(self.work_dir)
- self.env = SetUpEnvironment(self.work_dir)
-
- if self.debug and self.log_path:
- self.env['WRAPPER_DEBUG_LOG_PATH'] = self.log_path
-
- if self.no_track_origins:
- self.env['MSAN_NO_TRACK_ORIGINS'] = '1'
-
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- if not self.debug:
- shutil.rmtree(self.work_dir, ignore_errors=True)
-
- def Build(self, package_name, output_directory, create_subdirs=False):
- """Build the package and write results into the output directory."""
- deb_paths = FindPackageDebs(package_name, self.work_dir)
- if deb_paths:
- print('Source package already built for', package_name)
- else:
- pkg = GetPackage(package_name)
-
- pkg.InstallBuildDeps()
- source_directory = pkg.DownloadSource(self.work_dir)
- print('Source downloaded to', source_directory)
-
- # custom bin directory for custom build scripts to write wrappers.
- custom_bin_dir = os.path.join(self.work_dir, package_name + '_bin')
- os.mkdir(custom_bin_dir)
- env = self.env.copy()
- env['PATH'] = custom_bin_dir + ':' + env['PATH']
-
- pkg.Build(source_directory, env, custom_bin_dir)
- shutil.rmtree(custom_bin_dir, ignore_errors=True)
-
- deb_paths = FindPackageDebs(package_name, self.work_dir)
-
- if not deb_paths:
- raise MSanBuildException('Failed to find .deb packages.')
-
- print('Extracting', ' '.join(deb_paths))
-
- if create_subdirs:
- extract_directory = os.path.join(output_directory, package_name)
- else:
- extract_directory = output_directory
-
- extracted_paths = ExtractLibraries(deb_paths, self.work_dir,
- extract_directory)
- for extracted_path in extracted_paths:
- if os.path.islink(extracted_path):
- continue
- if os.path.basename(extracted_path) == 'llvm-symbolizer':
- continue
- PatchRpath(extracted_path, extract_directory)
-
-
-def main():
- parser = argparse.ArgumentParser('msan_build.py', description='MSan builder.')
- parser.add_argument('package_names', nargs='+', help='Name of the packages.')
- parser.add_argument('output_dir', help='Output directory.')
- parser.add_argument('--create-subdirs',
- action='store_true',
- help=('Create subdirectories in the output '
- 'directory for each package.'))
- parser.add_argument('--work-dir', help='Work directory.')
- parser.add_argument('--no-build-deps',
- action='store_true',
- help='Don\'t build dependencies.')
- parser.add_argument('--debug', action='store_true', help='Enable debug mode.')
- parser.add_argument('--log-path', help='Log path for debugging.')
- parser.add_argument('--no-track-origins',
- action='store_true',
- help='Build with -fsanitize-memory-track-origins=0.')
- args = parser.parse_args()
-
- if args.no_track_origins:
- os.environ['MSAN_NO_TRACK_ORIGINS'] = '1'
-
- if not os.path.exists(args.output_dir):
- os.makedirs(args.output_dir)
-
- if args.no_build_deps:
- package_names = args.package_names
- else:
- all_packages = set()
- package_names = []
-
- # Get list of packages to build, including all dependencies.
- for package_name in args.package_names:
- for dep in GetBuildList(package_name):
- if dep in all_packages:
- continue
-
- if args.create_subdirs:
- os.mkdir(os.path.join(args.output_dir, dep))
-
- all_packages.add(dep)
- package_names.append(dep)
-
- print('Going to build:')
- for package_name in package_names:
- print('\t', package_name)
-
- with MSanBuilder(debug=args.debug,
- log_path=args.log_path,
- work_dir=args.work_dir,
- no_track_origins=args.no_track_origins) as builder:
- for package_name in package_names:
- builder.Build(package_name, args.output_dir, args.create_subdirs)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/__init__.py b/infra/base-images/base-sanitizer-libs-builder/packages/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/__init__.py
+++ /dev/null
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py b/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py
deleted file mode 100644
index e1b0e2f81..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-import os
-import shutil
-
-import package
-
-
-def AddNoAsmArg(config_path):
- """Add --disable-assembler to config scripts."""
- shutil.move(config_path, config_path + '.real')
- with open(config_path, 'w') as f:
- f.write(
- '#!/bin/sh\n'
- '%s.real --disable-assembler "$@"\n' % config_path)
- os.chmod(config_path, 0755)
-
-
-class Package(package.Package):
- """nettle package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('nettle', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- AddNoAsmArg(os.path.join(source_directory, 'configure'))
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py b/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py
deleted file mode 100644
index e24ccc588..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-import os
-import shutil
-
-import package
-
-
-def AddNoAsmArg(config_path):
- """Add --no-asm to config scripts."""
- shutil.move(config_path, config_path + '.real')
- with open(config_path, 'w') as f:
- f.write(
- '#!/bin/sh\n'
- '%s.real no-asm "$@"\n' % config_path)
- os.chmod(config_path, 0755)
-
-
-class Package(package.Package):
- """openssl package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('openssl', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- AddNoAsmArg(os.path.join(source_directory, 'Configure'))
- AddNoAsmArg(os.path.join(source_directory, 'config'))
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/package.py b/infra/base-images/base-sanitizer-libs-builder/packages/package.py
deleted file mode 100644
index 059c23587..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/package.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-import os
-import subprocess
-
-import apt
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-
-
-def ApplyPatch(source_directory, patch_name):
- """Apply custom patch."""
- subprocess.check_call(['patch', '-p1', '-i',
- os.path.join(SCRIPT_DIR, patch_name)],
- cwd=source_directory)
-
-
-class PackageException(Exception):
- """Base package exception."""
-
-
-class Package(object):
- """Base package."""
-
- def __init__(self, name, apt_version):
- self.name = name
- self.apt_version = apt_version
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- return
-
- def PostBuild(self, source_directory, env, custom_bin_dir):
- return
-
- def PreDownload(self, download_directory):
- return
-
- def PostDownload(self, source_directory):
- return
-
- def InstallBuildDeps(self):
- """Install build dependencies for a package."""
- subprocess.check_call(['apt-get', 'update'])
- subprocess.check_call(['apt-get', 'build-dep', '-y', self.name])
-
- # Reload package after update.
- self.apt_version = (
- apt.Cache()[self.apt_version.package.name].candidate)
-
- def DownloadSource(self, download_directory):
- """Download the source for a package."""
- self.PreDownload(download_directory)
-
- source_directory = self.apt_version.fetch_source(download_directory)
-
- self.PostDownload(source_directory)
- return source_directory
-
- def Build(self, source_directory, env, custom_bin_dir):
- """Build .deb packages."""
- self.PreBuild(source_directory, env, custom_bin_dir)
- subprocess.check_call(
- ['dpkg-buildpackage', '-us', '-uc', '-B'],
- cwd=source_directory, env=env)
- self.PostBuild(source_directory, env, custom_bin_dir)
-
-
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py b/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py
deleted file mode 100644
index 52512461e..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-import os
-import shutil
-
-import package
-
-
-class Package(package.Package):
- """pixman package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('pixman', apt_version)
-
- def PostDownload(self, source_directory):
- # Incorrect checking of GCC vector extension availability.
- os.system('sed s/support_for_gcc_vector_extensions=yes/'
- 'support_for_gcc_vector_extensions=no/ -i %s/configure.ac' %
- source_directory)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- blocklist_flag = ' -fsanitize-blacklist=' + os.path.join(
- os.path.dirname(os.path.abspath(__file__)), 'pixman_blocklist.txt')
- env['DEB_CXXFLAGS_APPEND'] += blocklist_flag
- env['DEB_CFLAGS_APPEND'] += blocklist_flag
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pixman_blocklist.txt b/infra/base-images/base-sanitizer-libs-builder/packages/pixman_blocklist.txt
deleted file mode 100644
index 69cf159dd..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pixman_blocklist.txt
+++ /dev/null
@@ -1 +0,0 @@
-src:*/pixman-sse2.c
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py b/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py
deleted file mode 100644
index d3ce7a113..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-import glob
-import os
-import subprocess
-
-import package
-
-
-class Package(package.Package):
- """PulseAudio package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('pulseaudio', apt_version)
-
- def PostDownload(self, source_directory):
- """Remove blocklisted patches."""
- # Fix *droid* patches.
- bad_patch_path = os.path.join(
- source_directory, 'debian', 'patches',
- '0600-droid-sync-with-upstream-for-Android-5-support-and-b.patch')
- if not os.path.exists(bad_patch_path):
- return
-
- print('Applying custom patches.')
- package.ApplyPatch(source_directory, 'pulseaudio_fix_android.patch')
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio_fix_android.patch b/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio_fix_android.patch
deleted file mode 100644
index e86f79823..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio_fix_android.patch
+++ /dev/null
@@ -1,39 +0,0 @@
---- pulseaudio-8.0/src/modules/droid/module-droid-card.c 2017-11-27 22:09:42.533589970 +0000
-+++ pulseaudio-8.0.fixed/src/modules/droid/module-droid-card.c 2017-11-27 22:28:23.847250467 +0000
-@@ -66,10 +66,11 @@
- #include "droid-extcon.h"
- #endif
-
--#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 2
-+#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 4
- #include "module-droid-card-19-symdef.h"
- #elif ANDROID_VERSION_MAJOR == 5 && ANDROID_VERSION_MINOR == 1
- #include "module-droid-card-22-symdef.h"
-+#else
- #endif
-
- PA_MODULE_AUTHOR("Juho Hämäläinen");
-diff -ru pulseaudio-8.0/src/modules/droid/module-droid-sink.c pulseaudio-8.0.fixed/src/modules/droid/module-droid-sink.c
---- pulseaudio-8.0/src/modules/droid/module-droid-sink.c 2017-11-27 22:09:42.533589970 +0000
-+++ pulseaudio-8.0.fixed/src/modules/droid/module-droid-sink.c 2017-11-27 22:29:53.776348900 +0000
-@@ -40,7 +40,7 @@
- #include "droid-util.h"
- #include "droid-sink.h"
-
--#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 2
-+#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 4
- #include "module-droid-sink-19-symdef.h"
- #elif ANDROID_VERSION_MAJOR == 5 && ANDROID_VERSION_MINOR == 1
- #include "module-droid-sink-22-symdef.h"
-diff -ru pulseaudio-8.0/src/modules/droid/module-droid-source.c pulseaudio-8.0.fixed/src/modules/droid/module-droid-source.c
---- pulseaudio-8.0/src/modules/droid/module-droid-source.c 2017-11-27 22:09:42.533589970 +0000
-+++ pulseaudio-8.0.fixed/src/modules/droid/module-droid-source.c 2017-11-27 22:30:03.920472828 +0000
-@@ -40,7 +40,7 @@
- #include "droid-util.h"
- #include "droid-source.h"
-
--#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 2
-+#if ANDROID_VERSION_MAJOR == 4 && ANDROID_VERSION_MINOR == 4
- #include "module-droid-source-19-symdef.h"
- #elif ANDROID_VERSION_MAJOR == 5 && ANDROID_VERSION_MINOR == 1
- #include "module-droid-source-22-symdef.h"
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py b/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py
deleted file mode 100644
index 5cb6d60be..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-import glob
-import os
-import subprocess
-
-import package
-import wrapper_utils
-
-
-class Package(package.Package):
- """systemd package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('systemd', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- # Hide msan symbols from nm. the systemd build system uses this to find
- # undefined symbols and errors out if it does.
- nm_wrapper = (
- '#!/bin/bash\n'
- '/usr/bin/nm "$@" | grep -E -v "U (__msan|memset)"\n'
- 'exit ${PIPESTATUS[0]}\n')
-
- wrapper_utils.InstallWrapper(custom_bin_dir, 'nm', nm_wrapper,
- [wrapper_utils.DpkgHostArchitecture() + '-nm'])
diff --git a/infra/base-images/base-sanitizer-libs-builder/patch_build.py b/infra/base-images/base-sanitizer-libs-builder/patch_build.py
deleted file mode 100755
index cb1f4b1d7..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/patch_build.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-import argparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-
-INSTRUMENTED_LIBRARIES_DIRNAME = 'instrumented_libraries'
-MSAN_LIBS_PATH = os.getenv('MSAN_LIBS_PATH', '/msan')
-
-
-def IsElf(file_path):
- """Whether if the file is an elf file."""
- with open(file_path) as f:
- return f.read(4) == '\x7fELF'
-
-
-def Ldd(binary_path):
- """Run ldd on a file."""
- try:
- output = subprocess.check_output(['ldd', binary_path], stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- print('Failed to call ldd on', binary_path, file=sys.stderr)
- return []
-
- libs = []
-
- OUTPUT_PATTERN = re.compile(r'\s*([^\s]+)\s*=>\s*([^\s]+)')
- for line in output.splitlines():
- match = OUTPUT_PATTERN.match(line)
- if not match:
- continue
-
- libs.append((match.group(1), match.group(2)))
-
- return libs
-
-
-def FindLib(path):
- """Find instrumented version of lib."""
- candidate_path = os.path.join(MSAN_LIBS_PATH, path[1:])
- if os.path.exists(candidate_path):
- return candidate_path
-
- for lib_dir in os.listdir(MSAN_LIBS_PATH):
- candidate_path = os.path.join(MSAN_LIBS_PATH, lib_dir, path[1:])
- if os.path.exists(candidate_path):
- return candidate_path
-
- return None
-
-
-def PatchBinary(binary_path, instrumented_dir):
- """Patch binary to link to instrumented libs."""
- extra_rpaths = set()
-
- for name, path in Ldd(binary_path):
- if not os.path.isabs(path):
- continue
-
- instrumented_path = FindLib(path)
- if not instrumented_path:
- print('WARNING: Instrumented library not found for', path,
- file=sys.stderr)
- continue
-
- target_path = os.path.join(instrumented_dir, path[1:])
- if not os.path.exists(target_path):
- print('Copying instrumented lib to', target_path)
- target_dir = os.path.dirname(target_path)
- if not os.path.exists(target_dir):
- os.makedirs(target_dir)
- shutil.copy2(instrumented_path, target_path)
-
- extra_rpaths.add(
- os.path.join('$ORIGIN', INSTRUMENTED_LIBRARIES_DIRNAME,
- os.path.dirname(path[1:])))
-
- if not extra_rpaths:
- return
-
- existing_rpaths = subprocess.check_output(
- ['patchelf', '--print-rpath', binary_path]).strip()
- processed_rpaths = ':'.join(extra_rpaths)
- if existing_rpaths:
- processed_rpaths += ':' + existing_rpaths
- print('Patching rpath for', binary_path, 'from', existing_rpaths, 'to',
- processed_rpaths)
-
- subprocess.check_call(
- ['patchelf', '--force-rpath', '--set-rpath',
- processed_rpaths, binary_path])
-
-
-def PatchBuild(output_directory):
- """Patch build to use msan libs."""
- instrumented_dir = os.path.join(output_directory,
- INSTRUMENTED_LIBRARIES_DIRNAME)
- if not os.path.exists(instrumented_dir):
- os.mkdir(instrumented_dir)
-
- for root_dir, _, filenames in os.walk(output_directory):
- for filename in filenames:
- file_path = os.path.join(root_dir, filename)
-
- if os.path.islink(file_path):
- continue
-
- if not IsElf(file_path):
- continue
-
- PatchBinary(file_path, instrumented_dir)
-
-
-def main():
- parser = argparse.ArgumentParser('patch_build.py', description='MSan build patcher.')
- parser.add_argument('output_dir', help='Output directory.')
-
- args = parser.parse_args()
-
- PatchBuild(os.path.abspath(args.output_dir))
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py b/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py
deleted file mode 100644
index 0cbf1677d..000000000
--- a/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-from __future__ import print_function
-
-import contextlib
-import os
-import subprocess
-
-
-def DpkgHostArchitecture():
- """Return the host architecture."""
- return subprocess.check_output(
- ['dpkg-architecture', '-qDEB_HOST_GNU_TYPE']).strip()
-
-
-def InstallWrapper(bin_dir, name, contents, extra_names=None):
- """Install a custom wrapper script into |bin_dir|."""
- path = os.path.join(bin_dir, name)
- with open(path, 'w') as f:
- f.write(contents)
-
- os.chmod(path, 0755)
-
- if extra_names:
- CreateSymlinks(path, bin_dir, extra_names)
-
-
-def CreateSymlinks(original_path, bin_dir, extra_names):
- """Create symlinks."""
- for extra_name in extra_names:
- extra_path = os.path.join(bin_dir, extra_name)
- os.symlink(original_path, extra_path)
diff --git a/infra/base-images/msan-libs-builder/Dockerfile b/infra/base-images/msan-libs-builder/Dockerfile
deleted file mode 100644
index 7780c1f33..000000000
--- a/infra/base-images/msan-libs-builder/Dockerfile
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-FROM gcr.io/oss-fuzz-base/base-sanitizer-libs-builder
-
-# Take all libraries from lib/msan
-RUN cp -R /usr/msan/lib/* /usr/lib/
-
-RUN mkdir /msan
-WORKDIR /msan
-
-ENV PYTHONUNBUFFERED 1
-RUN msan_build.py --work-dir=$WORK \
- libarchive13 \
- libattr1 \
- libbz2-1.0 \
- libfontconfig1 \
- libfreetype6 \
- libfribidi0 \
- libglib2.0-0 \
- libicu55 \
- liblz4-1 \
- liblzma5 \
- liblzo2-2 \
- libnettle6 \
- libpcre2-posix0 \
- libpcre3 \
- libpng12-0 \
- libssl1.0.0 \
- libxml2 \
- zlib1g \
- /msan
diff --git a/infra/bisector.py b/infra/bisector.py
index 1438d0de9..51f93c132 100644
--- a/infra/bisector.py
+++ b/infra/bisector.py
@@ -115,7 +115,10 @@ def main():
'Bisection Error: Both the first and the last commits in'
'the given range have the same behavior, bisection is not possible. ')
return 1
- print('Error was introduced at commit %s' % result.commit)
+ if args.type == 'regressed':
+ print('Error was introduced at commit %s' % result.commit)
+ elif args.type == 'fixed':
+ print('Error was fixed at commit %s' % result.commit)
return 0
@@ -131,7 +134,7 @@ def _get_dedup_token(output):
return None
-def _check_for_crash(project_name, fuzz_target, test_case_path):
+def _check_for_crash(project_name, fuzz_target, testcase_path):
"""Check for crash."""
def docker_run(args):
@@ -142,12 +145,15 @@ def _check_for_crash(project_name, fuzz_target, test_case_path):
return utils.execute(command + args)
logging.info('Checking for crash')
- out, err, return_code = helper.reproduce_impl(project_name,
- fuzz_target,
- False, [], [],
- test_case_path,
- runner=docker_run,
- err_result=(None, None, None))
+ out, err, return_code = helper.reproduce_impl(
+ project=helper.Project(project_name),
+ fuzzer_name=fuzz_target,
+ valgrind=False,
+ env_to_add=[],
+ fuzzer_args=[],
+ testcase_path=testcase_path,
+ run_function=docker_run,
+ err_result=(None, None, None))
if return_code is None:
return None
@@ -167,7 +173,7 @@ def _check_for_crash(project_name, fuzz_target, test_case_path):
# pylint: disable=too-many-locals
# pylint: disable=too-many-arguments
# pylint: disable=too-many-statements
-def _bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
+def _bisect(bisect_type, old_commit, new_commit, testcase_path, fuzz_target,
build_data):
"""Perform the bisect."""
# pylint: disable=too-many-branches
@@ -212,7 +218,7 @@ def _bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
raise BisectError('Invalid bisect type ' + bisect_type, repo_url)
expected_error = _check_for_crash(build_data.project_name, fuzz_target,
- test_case_path)
+ testcase_path)
logging.info('new_commit result = %s', expected_error)
if not should_crash and expected_error:
@@ -231,7 +237,7 @@ def _bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
raise BisectError('Failed to build old_commit', repo_url)
if _check_for_crash(build_data.project_name, fuzz_target,
- test_case_path) == expected_error:
+ testcase_path) == expected_error:
logging.warning('old_commit %s had same result as new_commit %s',
old_commit, new_commit)
# Try again on an slightly older commit.
@@ -266,7 +272,7 @@ def _bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
continue
current_error = _check_for_crash(build_data.project_name, fuzz_target,
- test_case_path)
+ testcase_path)
logging.info('Current result = %s', current_error)
if expected_error == current_error:
new_idx = curr_idx
@@ -277,16 +283,16 @@ def _bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
# pylint: disable=too-many-locals
# pylint: disable=too-many-arguments
-def bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
+def bisect(bisect_type, old_commit, new_commit, testcase_path, fuzz_target,
build_data):
"""From a commit range, this function caluclates which introduced a
- specific error from a fuzz test_case_path.
+ specific error from a fuzz testcase_path.
Args:
bisect_type: The type of the bisect ('regressed' or 'fixed').
old_commit: The oldest commit in the error regression range.
new_commit: The newest commit in the error regression range.
- test_case_path: The file path of the test case that triggers the error
+ testcase_path: The file path of the test case that triggers the error
fuzz_target: The name of the fuzzer to be tested.
build_data: a class holding all of the input parameters for bisection.
@@ -297,7 +303,7 @@ def bisect(bisect_type, old_commit, new_commit, test_case_path, fuzz_target,
ValueError: when a repo url can't be determine from the project.
"""
try:
- return _bisect(bisect_type, old_commit, new_commit, test_case_path,
+ return _bisect(bisect_type, old_commit, new_commit, testcase_path,
fuzz_target, build_data)
finally:
# Clean up projects/ as _bisect may have modified it.
diff --git a/infra/bisector_test.py b/infra/bisector_test.py
index 5e3dc5232..d93ac3239 100644
--- a/infra/bisector_test.py
+++ b/infra/bisector_test.py
@@ -45,7 +45,7 @@ class BisectIntegrationTests(unittest.TestCase):
architecture='x86_64')
with self.assertRaises(ValueError):
bisector.bisect(self.BISECT_TYPE, test_repo.old_commit,
- test_repo.new_commit, test_repo.test_case_path,
+ test_repo.new_commit, test_repo.testcase_path,
test_repo.fuzz_target, build_data)
def test_bisect(self):
@@ -58,7 +58,7 @@ class BisectIntegrationTests(unittest.TestCase):
sanitizer='address',
architecture='x86_64')
result = bisector.bisect(self.BISECT_TYPE, test_repo.old_commit,
- test_repo.new_commit, test_repo.test_case_path,
+ test_repo.new_commit, test_repo.testcase_path,
test_repo.fuzz_target, build_data)
self.assertEqual(result.commit, test_repo.intro_commit)
diff --git a/infra/build/functions/base_images.py b/infra/build/functions/base_images.py
index 8c9b2d85f..593323fc3 100644
--- a/infra/build/functions/base_images.py
+++ b/infra/build/functions/base_images.py
@@ -15,7 +15,6 @@
################################################################################
"""Cloud function to build base images on Google Cloud Builder."""
-import datetime
import logging
import google.auth
@@ -25,14 +24,17 @@ BASE_IMAGES = [
'base-image',
'base-clang',
'base-builder',
+ 'base-builder-go',
+ 'base-builder-jvm',
+ 'base-builder-python',
+ 'base-builder-rust',
+ 'base-builder-swift',
'base-runner',
'base-runner-debug',
]
BASE_PROJECT = 'oss-fuzz-base'
TAG_PREFIX = f'gcr.io/{BASE_PROJECT}/'
-
-BASE_SANITIZER_LIBS_IMAGE = TAG_PREFIX + 'base-sanitizer-libs-builder'
-MSAN_LIBS_IMAGE = TAG_PREFIX + 'msan-libs-builder'
+MAJOR_VERSION = 'v1'
def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
@@ -46,11 +48,14 @@ def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
}]
for base_image in images:
+ image = tag_prefix + base_image
steps.append({
'args': [
'build',
'-t',
- tag_prefix + base_image,
+ image,
+ '-t',
+ f'{image}:{MAJOR_VERSION}',
'.',
],
'dir': 'oss-fuzz/infra/base-images/' + base_image,
@@ -62,9 +67,8 @@ def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
def get_logs_url(build_id, project_id='oss-fuzz-base'):
"""Returns url that displays the build logs."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, project_id)
+ return ('https://console.developers.google.com/logs/viewer?'
+ f'resource=build%2Fbuild_id%2F{build_id}&project={project_id}')
# pylint: disable=no-member
@@ -77,7 +81,7 @@ def run_build(steps, images):
'options': {
'machineType': 'N1_HIGHCPU_32'
},
- 'images': images
+ 'images': images + [f'{image}:{MAJOR_VERSION}' for image in images]
}
cloudbuild = build('cloudbuild',
'v1',
@@ -99,43 +103,3 @@ def base_builder(event, context):
images = [tag_prefix + base_image for base_image in BASE_IMAGES]
run_build(steps, images)
-
-
-def _get_msan_steps(image):
- """Get build steps for msan-libs-builder."""
- timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M')
- upload_name = 'msan-libs-' + timestamp + '.zip'
-
- steps = _get_base_image_steps([
- 'base-sanitizer-libs-builder',
- 'msan-libs-builder',
- ])
- steps.extend([{
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'cd /msan && zip -r /workspace/libs.zip .',
- ],
- }, {
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/libs.zip',
- 'gs://oss-fuzz-msan-libs/' + upload_name,
- ],
- }])
- return steps
-
-
-def base_msan_builder(event, context):
- """Cloud function to build base images."""
- del event, context
- steps = _get_msan_steps(MSAN_LIBS_IMAGE)
- images = [
- BASE_SANITIZER_LIBS_IMAGE,
- MSAN_LIBS_IMAGE,
- ]
-
- run_build(steps, images)
diff --git a/infra/build/functions/build_and_run_coverage.py b/infra/build/functions/build_and_run_coverage.py
index cc2de5a32..1195776d9 100644..100755
--- a/infra/build/functions/build_and_run_coverage.py
+++ b/infra/build/functions/build_and_run_coverage.py
@@ -13,11 +13,11 @@
# limitations under the License.
#
################################################################################
-#!/usr/bin/python2
+#!/usr/bin/env python3
"""Starts and runs coverage build on Google Cloud Builder.
-Usage: build_and_run_coverage.py <project_dir>
+
+Usage: build_and_run_coverage.py <project>.
"""
-import datetime
import json
import logging
import os
@@ -27,119 +27,105 @@ import build_lib
import build_project
SANITIZER = 'coverage'
-CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
+FUZZING_ENGINE = 'libfuzzer'
+ARCHITECTURE = 'x86_64'
+
PLATFORM = 'linux'
-COVERAGE_BUILD_TAG = 'coverage'
+COVERAGE_BUILD_TYPE = 'coverage'
# Where code coverage reports need to be uploaded to.
COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
-# Link to the code coverage report in HTML format.
-HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
- '/{project}/reports/{date}/{platform}/index.html')
-
# This is needed for ClusterFuzz to pick up the most recent reports data.
-LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
- '/latest_report_info/{project}.json')
-LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
-# Link where to upload code coverage report files to.
-UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
+LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'rust']
+LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'jvm', 'rust', 'swift']
+
+
+class Bucket: # pylint: disable=too-few-public-methods
+ """Class representing the coverage GCS bucket."""
+ def __init__(self, project, date, platform, testing):
+ self.coverage_bucket_name = 'oss-fuzz-coverage'
+ if testing:
+ self.coverage_bucket_name += '-testing'
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
- sys.exit(1)
+ self.date = date
+ self.project = project
+ self.html_report_url = (
+ f'{build_lib.GCS_URL_BASENAME}{self.coverage_bucket_name}/{project}'
+ f'/reports/{date}/{platform}/index.html')
+ self.latest_report_info_url = (f'/{COVERAGE_BUCKET_NAME}'
+ f'/latest_report_info/{project}.json')
+ def get_upload_url(self, upload_type):
+ """Returns an upload url for |upload_type|."""
+ return (f'gs://{self.coverage_bucket_name}/{self.project}'
+ f'/{upload_type}/{self.date}')
-# pylint: disable=too-many-locals
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
+
+def get_build_steps( # pylint: disable=too-many-locals, too-many-arguments
+ project_name, project_yaml_contents, dockerfile_lines, image_project,
+ base_images_project, config):
"""Returns build steps for project."""
- project_yaml = build_project.load_project_yaml(project_name,
- project_yaml_file,
- image_project)
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
+ project = build_project.Project(project_name, project_yaml_contents,
+ dockerfile_lines, image_project)
+ if project.disabled:
+ logging.info('Project "%s" is disabled.', project.name)
return []
- if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+ if project.fuzzing_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
logging.info(
'Project "%s" is written in "%s", coverage is not supported yet.',
- project_name, project_yaml['language'])
+ project.name, project.fuzzing_language)
return []
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- report_date = datetime.datetime.now().strftime('%Y%m%d')
-
- build_steps = build_lib.project_image_steps(name, image, language)
+ report_date = build_project.get_datetime_now().strftime('%Y%m%d')
+ bucket = Bucket(project.name, report_date, PLATFORM, config.testing)
- env = CONFIGURATION[:]
- out = '/workspace/out/' + SANITIZER
- env.append('OUT=' + out)
- env.append('FUZZING_LANGUAGE=' + language)
+ build_steps = build_lib.project_image_steps(
+ project.name,
+ project.image,
+ project.fuzzing_language,
+ branch=config.branch,
+ test_image_suffix=config.test_image_suffix)
- workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n' + '*' * 80).format(name=name)
-
- # Compilation step.
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to make sure there are non instrumented binaries.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from the
- # Dockerfile). Container Builder overrides our workdir so we need
- # to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
- 'compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- download_corpora_steps = build_lib.download_corpora_steps(project_name)
+ build = build_project.Build('libfuzzer', 'coverage', 'x86_64')
+ env = build_project.get_env(project.fuzzing_language, build)
+ build_steps.append(
+ build_project.get_compile_step(project, build, env, config.parallel))
+ download_corpora_steps = build_lib.download_corpora_steps(
+ project.name, testing=config.testing)
if not download_corpora_steps:
- logging.info('Skipping code coverage build for %s.', project_name)
+ logging.info('Skipping code coverage build for %s.', project.name)
return []
build_steps.extend(download_corpora_steps)
failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
+ f'python infra/helper.py build_image {project.name}\n'
'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n'
- 'python infra/helper.py coverage {name}\n' +
- '*' * 80).format(name=name)
+ f'{project.name}\n'
+ f'python infra/helper.py coverage {project.name}\n' + '*' * 80)
# Unpack the corpus and run coverage script.
coverage_env = env + [
'HTTP_PORT=',
- 'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
+ f'COVERAGE_EXTRA_ARGS={project.coverage_extra_args.strip()}',
]
- if 'dataflow' in project_yaml['fuzzing_engines']:
+ if 'dataflow' in project.fuzzing_engines:
coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
build_steps.append({
- 'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env': coverage_env,
+ 'name':
+ build_project.get_runner_image_name(base_images_project,
+ config.test_image_suffix),
+ 'env':
+ coverage_env,
'args': [
'bash', '-c',
('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
@@ -158,9 +144,7 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
})
# Upload the report.
- upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='reports',
- date=report_date)
+ upload_report_url = bucket.get_upload_url('reports')
# Delete the existing report as gsutil cannot overwrite it in a useful way due
# to the lack of `-T` option (it creates a subdir in the destination dir).
@@ -172,15 +156,14 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'report'),
+ os.path.join(build.out, 'report'),
upload_report_url,
],
})
# Upload the fuzzer stats. Delete the old ones just in case.
- upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='fuzzer_stats',
- date=report_date)
+ upload_fuzzer_stats_url = bucket.get_upload_url('fuzzer_stats')
+
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
build_steps.append({
'name':
@@ -189,15 +172,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'fuzzer_stats'),
+ os.path.join(build.out, 'fuzzer_stats'),
upload_fuzzer_stats_url,
],
})
# Upload the fuzzer logs. Delete the old ones just in case
- upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='logs',
- date=report_date)
+ upload_fuzzer_logs_url = bucket.get_upload_url('logs')
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
build_steps.append({
'name':
@@ -206,15 +187,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'logs'),
+ os.path.join(build.out, 'logs'),
upload_fuzzer_logs_url,
],
})
# Upload srcmap.
- srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='srcmap',
- date=report_date)
+ srcmap_upload_url = bucket.get_upload_url('srcmap')
srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
build_steps.append({
'name': 'gcr.io/cloud-builders/gsutil',
@@ -227,15 +206,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
# Update the latest report information file for ClusterFuzz.
latest_report_info_url = build_lib.get_signed_url(
- LATEST_REPORT_INFO_URL.format(project=project_name),
+ bucket.latest_report_info_url,
content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
latest_report_info_body = json.dumps({
'fuzzer_stats_dir':
upload_fuzzer_stats_url,
'html_report_url':
- HTML_REPORT_URL_FORMAT.format(project=project_name,
- date=report_date,
- platform=PLATFORM),
+ bucket.html_report_url,
'report_date':
report_date,
'report_summary_path':
@@ -251,25 +228,10 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
def main():
"""Build and run coverage for projects."""
- if len(sys.argv) != 2:
- usage()
-
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- project_name = os.path.basename(project_dir)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
-
- with open(dockerfile_path) as docker_file:
- dockerfile_lines = docker_file.readlines()
-
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
-
- build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
+ return build_project.build_script_main(
+ 'Generates coverage report for project.', get_build_steps,
+ COVERAGE_BUILD_TYPE)
-if __name__ == "__main__":
- main()
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/infra/build/functions/build_and_run_coverage_test.py b/infra/build/functions/build_and_run_coverage_test.py
new file mode 100644
index 000000000..83ea39ecd
--- /dev/null
+++ b/infra/build/functions/build_and_run_coverage_test.py
@@ -0,0 +1,78 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Unit tests for build_and_run_coverage."""
+import json
+import os
+import sys
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+FUNCTIONS_DIR = os.path.dirname(__file__)
+sys.path.append(FUNCTIONS_DIR)
+# pylint: disable=wrong-import-position
+
+import build_and_run_coverage
+import build_project
+import test_utils
+
+# pylint: disable=no-member
+
+
+class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
+ """Unit tests for sync."""
+
+ def setUp(self):
+ self.maxDiff = None # pylint: disable=invalid-name
+ self.setUpPyfakefs()
+
+ @mock.patch('build_lib.get_signed_url', return_value='test_url')
+ @mock.patch('build_lib.download_corpora_steps',
+ return_value=[{
+ 'url': 'test_download'
+ }])
+ @mock.patch('build_project.get_datetime_now',
+ return_value=test_utils.FAKE_DATETIME)
+ def test_get_coverage_build_steps(self, mock_url, mock_corpora_steps,
+ mock_get_datetime_now):
+ """Test for get_build_steps."""
+ del mock_url, mock_corpora_steps, mock_get_datetime_now
+ project_yaml_contents = ('language: c++\n'
+ 'sanitizers:\n'
+ ' - address\n'
+ 'architectures:\n'
+ ' - x86_64\n')
+ self.fs.create_dir(test_utils.PROJECT_DIR)
+ test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
+
+ expected_build_steps_file_path = test_utils.get_test_data_file_path(
+ 'expected_coverage_build_steps.json')
+ self.fs.add_real_file(expected_build_steps_file_path)
+ with open(expected_build_steps_file_path) as expected_build_steps_file:
+ expected_coverage_build_steps = json.load(expected_build_steps_file)
+
+ config = build_project.Config(False, False, None, False)
+ project_yaml, dockerfile = build_project.get_project_data(
+ test_utils.PROJECT)
+ build_steps = build_and_run_coverage.get_build_steps(
+ test_utils.PROJECT, project_yaml, dockerfile, test_utils.IMAGE_PROJECT,
+ test_utils.BASE_IMAGES_PROJECT, config)
+ self.assertEqual(build_steps, expected_coverage_build_steps)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
index 007579ef9..292ef682f 100644
--- a/infra/build/functions/build_lib.py
+++ b/infra/build/functions/build_lib.py
@@ -83,11 +83,23 @@ def get_targets_list_url(bucket, project, sanitizer):
return url
-def _get_targets_list(project_name):
+def get_upload_bucket(engine, architecture, testing):
+ """Returns the upload bucket for |engine| and architecture. Returns the
+ testing bucket if |testing|."""
+ bucket = ENGINE_INFO[engine].upload_bucket
+ if architecture != 'x86_64':
+ bucket += '-' + architecture
+ if testing:
+ bucket += '-testing'
+ return bucket
+
+
+def _get_targets_list(project_name, testing):
"""Returns target list."""
- # libFuzzer ASan is the default configuration, get list of targets from it.
- url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
- project_name, 'address')
+ # libFuzzer ASan 'x86_84' is the default configuration, get list of targets
+ # from it.
+ bucket = get_upload_bucket('libfuzzer', 'x86_64', testing)
+ url = get_targets_list_url(bucket, project_name, 'address')
url = urlparse.urljoin(GCS_URL_BASENAME, url)
response = requests.get(url)
@@ -104,7 +116,7 @@ def _get_targets_list(project_name):
def get_signed_url(path, method='PUT', content_type=''):
"""Returns signed url."""
timestamp = int(time.time() + BUILD_TIMEOUT)
- blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
+ blob = f'{method}\n\n{content_type}\n{timestamp}\n{path}'
service_account_path = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
if service_account_path:
@@ -119,7 +131,7 @@ def get_signed_url(path, method='PUT', content_type=''):
credentials=credentials,
cache_discovery=False)
client_id = project + '@appspot.gserviceaccount.com'
- service_account = 'projects/-/serviceAccounts/{0}'.format(client_id)
+ service_account = f'projects/-/serviceAccounts/{client_id}'
response = iam.projects().serviceAccounts().signBlob(
name=service_account,
body={
@@ -133,14 +145,13 @@ def get_signed_url(path, method='PUT', content_type=''):
'Expires': timestamp,
'Signature': signature,
}
- return ('https://storage.googleapis.com{0}?'.format(path) +
- urlparse.urlencode(values))
+ return f'https://storage.googleapis.com{path}?{urlparse.urlencode(values)}'
-def download_corpora_steps(project_name):
+def download_corpora_steps(project_name, testing):
"""Returns GCB steps for downloading corpora backups for the given project.
"""
- fuzz_targets = _get_targets_list(project_name)
+ fuzz_targets = _get_targets_list(project_name, testing)
if not fuzz_targets:
sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
return None
@@ -206,15 +217,72 @@ def gsutil_rm_rf_step(url):
return step
-def project_image_steps(name, image, language):
+def get_pull_test_images_steps(test_image_suffix):
+ """Returns steps to pull testing versions of base-images and tag them so that
+ they are used in builds."""
+ images = [
+ 'gcr.io/oss-fuzz-base/base-builder',
+ 'gcr.io/oss-fuzz-base/base-builder-swift',
+ 'gcr.io/oss-fuzz-base/base-builder-jvm',
+ 'gcr.io/oss-fuzz-base/base-builder-go',
+ 'gcr.io/oss-fuzz-base/base-builder-python',
+ 'gcr.io/oss-fuzz-base/base-builder-rust',
+ ]
+ steps = []
+ for image in images:
+ test_image = image + '-' + test_image_suffix
+ steps.append({
+ 'name': 'gcr.io/cloud-builders/docker',
+ 'args': [
+ 'pull',
+ test_image,
+ ],
+ 'waitFor': '-' # Start this immediately, don't wait for previous step.
+ })
+
+ # This step is hacky but gives us great flexibility. OSS-Fuzz has hardcoded
+ # references to gcr.io/oss-fuzz-base/base-builder (in dockerfiles, for
+ # example) and gcr.io/oss-fuzz-base-runner (in this build code). But the
+ # testing versions of those images are called e.g.
+ # gcr.io/oss-fuzz-base/base-builder-testing and
+ # gcr.io/oss-fuzz-base/base-runner-testing. How can we get the build to use
+ # the testing images instead of the real ones? By doing this step: tagging
+ # the test image with the non-test version, so that the test version is used
+ # instead of pulling the real one.
+ steps.append({
+ 'name': 'gcr.io/cloud-builders/docker',
+ 'args': ['tag', test_image, image],
+ })
+ return steps
+
+
+def get_srcmap_step_id():
+ """Returns the id for the srcmap step."""
+ return 'srcmap'
+
+
+def project_image_steps(name,
+ image,
+ language,
+ branch=None,
+ test_image_suffix=None):
"""Returns GCB steps to build OSS-Fuzz project image."""
- steps = [{
+ clone_step = {
'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
+ 'clone', 'https://github.com/google/oss-fuzz.git', '--depth', '1'
],
'name': 'gcr.io/cloud-builders/git',
- }, {
+ }
+ if branch:
+ # Do this to support testing other branches.
+ clone_step['args'].extend(['--branch', branch])
+
+ steps = [clone_step]
+ if test_image_suffix:
+ steps.extend(get_pull_test_images_steps(test_image_suffix))
+
+ srcmap_step_id = get_srcmap_step_id()
+ steps += [{
'name': 'gcr.io/cloud-builders/docker',
'args': [
'build',
@@ -224,8 +292,7 @@ def project_image_steps(name, image, language):
],
'dir': 'oss-fuzz/projects/' + name,
}, {
- 'name':
- image,
+ 'name': image,
'args': [
'bash', '-c',
'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
@@ -234,6 +301,7 @@ def project_image_steps(name, image, language):
'OSSFUZZ_REVISION=$REVISION_ID',
'FUZZING_LANGUAGE=%s' % language,
],
+ 'id': srcmap_step_id
}]
return steps
diff --git a/infra/build/functions/build_project.py b/infra/build/functions/build_project.py
index 9115c85fd..bdc7985e1 100644..100755
--- a/infra/build/functions/build_project.py
+++ b/infra/build/functions/build_project.py
@@ -13,7 +13,7 @@
# limitations under the License.
#
################################################################################
-#!/usr/bin/python2
+#!/usr/bin/env python3
"""Starts project build on Google Cloud Builder.
Usage: build_project.py <project_dir>
@@ -21,37 +21,27 @@ Usage: build_project.py <project_dir>
from __future__ import print_function
+import argparse
+import collections
import datetime
import json
import logging
import os
+import posixpath
import re
import sys
+from googleapiclient.discovery import build as cloud_build
+import oauth2client.client
import six
import yaml
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
import build_lib
-FUZZING_BUILD_TAG = 'fuzzing'
+FUZZING_BUILD_TYPE = 'fuzzing'
GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
-CONFIGURATIONS = {
- 'sanitizer-address': ['SANITIZER=address'],
- 'sanitizer-dataflow': ['SANITIZER=dataflow'],
- 'sanitizer-memory': ['SANITIZER=memory'],
- 'sanitizer-undefined': ['SANITIZER=undefined'],
- 'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
- 'engine-afl': ['FUZZING_ENGINE=afl'],
- 'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
- 'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
- 'engine-none': ['FUZZING_ENGINE=none'],
-}
-
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
DEFAULT_SANITIZERS = ['address', 'undefined']
@@ -61,19 +51,100 @@ LATEST_VERSION_CONTENT_TYPE = 'text/plain'
QUEUE_TTL_SECONDS = 60 * 60 * 24 # 24 hours.
+PROJECTS_DIR = os.path.abspath(
+ os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
+ os.path.pardir, 'projects'))
+
+DEFAULT_GCB_OPTIONS = {'machineType': 'N1_HIGHCPU_32'}
+
+Config = collections.namedtuple(
+ 'Config', ['testing', 'test_image_suffix', 'branch', 'parallel'])
+
+WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- sys.exit(1)
+class Build: # pylint: disable=too-few-public-methods
+ """Class representing the configuration for a build."""
+ def __init__(self, fuzzing_engine, sanitizer, architecture):
+ self.fuzzing_engine = fuzzing_engine
+ self.sanitizer = sanitizer
+ self.architecture = architecture
+ self.targets_list_filename = build_lib.get_targets_list_filename(
+ self.sanitizer)
-def set_yaml_defaults(project_name, project_yaml, image_project):
- """Set project.yaml's default parameters."""
+ @property
+ def out(self):
+ """Returns the out directory for the build."""
+ return posixpath.join(
+ '/workspace/out/',
+ f'{self.fuzzing_engine}-{self.sanitizer}-{self.architecture}')
+
+
+def get_project_data(project_name):
+ """Returns a tuple containing the contents of the project.yaml and Dockerfile
+ of |project_name|. Raises a FileNotFoundError if there is no Dockerfile for
+ |project_name|."""
+ project_dir = os.path.join(PROJECTS_DIR, project_name)
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ try:
+ with open(dockerfile_path) as dockerfile:
+ dockerfile = dockerfile.read()
+ except FileNotFoundError:
+ logging.error('Project "%s" does not have a dockerfile.', project_name)
+ raise
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+ with open(project_yaml_path, 'r') as project_yaml_file_handle:
+ project_yaml_contents = project_yaml_file_handle.read()
+ return project_yaml_contents, dockerfile
+
+
+class Project: # pylint: disable=too-many-instance-attributes
+ """Class representing an OSS-Fuzz project."""
+
+ def __init__(self, name, project_yaml_contents, dockerfile, image_project):
+ project_yaml = yaml.safe_load(project_yaml_contents)
+ self.name = name
+ self.image_project = image_project
+ self.workdir = workdir_from_dockerfile(dockerfile)
+ set_yaml_defaults(project_yaml)
+ self._sanitizers = project_yaml['sanitizers']
+ self.disabled = project_yaml['disabled']
+ self.architectures = project_yaml['architectures']
+ self.fuzzing_engines = project_yaml['fuzzing_engines']
+ self.coverage_extra_args = project_yaml['coverage_extra_args']
+ self.labels = project_yaml['labels']
+ self.fuzzing_language = project_yaml['language']
+ self.run_tests = project_yaml['run_tests']
+
+ @property
+ def sanitizers(self):
+ """Returns processed sanitizers."""
+ assert isinstance(self._sanitizers, list)
+ processed_sanitizers = []
+ for sanitizer in self._sanitizers:
+ if isinstance(sanitizer, six.string_types):
+ processed_sanitizers.append(sanitizer)
+ elif isinstance(sanitizer, dict):
+ for key in sanitizer.keys():
+ processed_sanitizers.append(key)
+
+ return processed_sanitizers
+
+ @property
+ def image(self):
+ """Returns the docker image for the project."""
+ return f'gcr.io/{self.image_project}/{self.name}'
+
+
+def get_last_step_id(steps):
+ """Returns the id of the last step in |steps|."""
+ return steps[-1]['id']
+
+
+def set_yaml_defaults(project_yaml):
+ """Sets project.yaml's default parameters."""
project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image',
- 'gcr.io/{0}/{1}'.format(image_project, project_name))
project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
@@ -82,291 +153,310 @@ def set_yaml_defaults(project_name, project_yaml, image_project):
project_yaml.setdefault('labels', {})
-def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
+def is_supported_configuration(build):
"""Check if the given configuration is supported."""
- fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
- if architecture == 'i386' and sanitizer != 'address':
+ fuzzing_engine_info = build_lib.ENGINE_INFO[build.fuzzing_engine]
+ if build.architecture == 'i386' and build.sanitizer != 'address':
return False
- return (sanitizer in fuzzing_engine_info.supported_sanitizers and
- architecture in fuzzing_engine_info.supported_architectures)
-
+ return (build.sanitizer in fuzzing_engine_info.supported_sanitizers and
+ build.architecture in fuzzing_engine_info.supported_architectures)
-def get_sanitizers(project_yaml):
- """Retrieve sanitizers from project.yaml."""
- sanitizers = project_yaml['sanitizers']
- assert isinstance(sanitizers, list)
- processed_sanitizers = []
- for sanitizer in sanitizers:
- if isinstance(sanitizer, six.string_types):
- processed_sanitizers.append(sanitizer)
- elif isinstance(sanitizer, dict):
- for key in sanitizer.keys():
- processed_sanitizers.append(key)
-
- return processed_sanitizers
-
-
-def workdir_from_dockerfile(dockerfile_lines):
- """Parse WORKDIR from the Dockerfile."""
- workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+def workdir_from_dockerfile(dockerfile):
+ """Parses WORKDIR from the Dockerfile."""
+ dockerfile_lines = dockerfile.split('\n')
for line in dockerfile_lines:
- match = re.match(workdir_regex, line)
+ match = re.match(WORKDIR_REGEX, line)
if match:
# We need to escape '$' since they're used for subsitutions in Container
# Builer builds.
return match.group(1).replace('$', '$$')
- return None
+ return '/src'
-def load_project_yaml(project_name, project_yaml_file, image_project):
- """Loads project yaml and sets default values."""
- project_yaml = yaml.safe_load(project_yaml_file)
- set_yaml_defaults(project_name, project_yaml, image_project)
- return project_yaml
+def get_datetime_now():
+ """Returns datetime.datetime.now(). Used for mocking."""
+ return datetime.datetime.now()
-# pylint: disable=too-many-locals, too-many-statements, too-many-branches
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
- """Returns build steps for project."""
- project_yaml = load_project_yaml(project_name, project_yaml_file,
- image_project)
-
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
- return []
-
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- run_tests = project_yaml['run_tests']
- time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
-
- build_steps = build_lib.project_image_steps(name, image, language)
- # Copy over MSan instrumented libraries.
- build_steps.append({
- 'name': 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
+def get_env(fuzzing_language, build):
+ """Returns an environment for building. The environment is returned as a list
+ and is suitable for use as the "env" parameter in a GCB build step. The
+ environment variables are based on the values of |fuzzing_language| and
+  |build|."""
+ env_dict = {
+ 'FUZZING_LANGUAGE': fuzzing_language,
+ 'FUZZING_ENGINE': build.fuzzing_engine,
+ 'SANITIZER': build.sanitizer,
+ 'ARCHITECTURE': build.architecture,
+ # Set HOME so that it doesn't point to a persisted volume (see
+ # https://github.com/google/oss-fuzz/issues/6035).
+ 'HOME': '/root',
+ 'OUT': build.out,
+ }
+ return list(sorted([f'{key}={value}' for key, value in env_dict.items()]))
+
+
+def get_compile_step(project, build, env, parallel):
+  """Returns the GCB step for compiling |project|'s fuzzers using |env|. The type
+ of build is specified by |build|."""
+ failure_msg = (
+ '*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
+ f'python infra/helper.py build_image {project.name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} --architecture '
+ f'{build.architecture} {project.name}\n' + '*' * 80)
+ compile_step = {
+ 'name': project.image,
+ 'env': env,
'args': [
'bash',
'-c',
- 'cp -r /msan /workspace',
+ # Remove /out to make sure there are non instrumented binaries.
+ # `cd /src && cd {workdir}` (where {workdir} is parsed from the
+ # Dockerfile). Container Builder overrides our workdir so we need
+ # to add this step to set it back.
+ (f'rm -r /out && cd /src && cd {project.workdir} && '
+ f'mkdir -p {build.out} && compile || '
+ f'(echo "{failure_msg}" && false)'),
],
- })
+ 'id': get_id('compile', build),
+ }
+ if parallel:
+ maybe_add_parallel(compile_step, build_lib.get_srcmap_step_id(), parallel)
+ return compile_step
- for fuzzing_engine in project_yaml['fuzzing_engines']:
- for sanitizer in get_sanitizers(project_yaml):
- for architecture in project_yaml['architectures']:
- if not is_supported_configuration(fuzzing_engine, sanitizer,
- architecture):
- continue
- env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
- env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
- out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, time_stamp])
- latest_version_file = '-'.join(
- [name, sanitizer, LATEST_VERSION_FILENAME])
- zip_file = stamped_name + '.zip'
- stamped_srcmap_file = stamped_name + '.srcmap.json'
- bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
- if architecture != 'x86_64':
- bucket += '-' + architecture
-
- upload_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
- srcmap_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
- stamped_srcmap_file))
- latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
- bucket, name, latest_version_file)
- latest_version_url = build_lib.get_signed_url(
- latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
-
- targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
- targets_list_url = build_lib.get_signed_url(
- build_lib.get_targets_list_url(bucket, name, sanitizer))
-
- env.append('OUT=' + out)
- env.append('MSAN_LIBS_PATH=/workspace/msan')
- env.append('ARCHITECTURE=' + architecture)
- env.append('FUZZING_LANGUAGE=' + language)
-
- workdir = workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # compile
- {
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to break loudly when a build script
- # incorrectly uses /out instead of $OUT.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from
- # the Dockerfile). Container Builder overrides our workdir
- # so we need to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
- '&& compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
+def maybe_add_parallel(step, wait_for_id, parallel):
+ """Makes |step| run immediately after |wait_for_id| if |parallel|. Mutates
+ |step|."""
+ if not parallel:
+ return
+ step['waitFor'] = wait_for_id
- if sanitizer == 'memory':
- # Patch dynamic libraries to use instrumented ones.
- build_steps.append({
+
+def get_id(step_type, build):
+ """Returns a unique step id based on |step_type| and |build|. Useful for
+ parallelizing builds."""
+ return (f'{step_type}-{build.fuzzing_engine}-{build.sanitizer}'
+ f'-{build.architecture}')
+
+
+def get_build_steps( # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-arguments
+ project_name, project_yaml_contents, dockerfile, image_project,
+ base_images_project, config):
+ """Returns build steps for project."""
+
+ project = Project(project_name, project_yaml_contents, dockerfile,
+ image_project)
+
+ if project.disabled:
+ logging.info('Project "%s" is disabled.', project.name)
+ return []
+
+ timestamp = get_datetime_now().strftime('%Y%m%d%H%M')
+
+ build_steps = build_lib.project_image_steps(
+ project.name,
+ project.image,
+ project.fuzzing_language,
+ branch=config.branch,
+ test_image_suffix=config.test_image_suffix)
+
+ # Sort engines to make AFL first to test if libFuzzer has an advantage in
+ # finding bugs first since it is generally built first.
+ for fuzzing_engine in sorted(project.fuzzing_engines):
+ for sanitizer in project.sanitizers:
+ for architecture in project.architectures:
+ build = Build(fuzzing_engine, sanitizer, architecture)
+ if not is_supported_configuration(build):
+ continue
+
+ env = get_env(project.fuzzing_language, build)
+ compile_step = get_compile_step(project, build, env, config.parallel)
+ build_steps.append(compile_step)
+
+ if project.run_tests:
+ failure_msg = (
+ '*' * 80 + '\nBuild checks failed.\n'
+ 'To reproduce, run:\n'
+ f'python infra/helper.py build_image {project.name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} '
+ f'--architecture {build.architecture} {project.name}\n'
+ 'python infra/helper.py check_build --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} '
+ f'--architecture {build.architecture} {project.name}\n' +
+ '*' * 80)
+ # Test fuzz targets.
+ test_step = {
'name':
- 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
+ get_runner_image_name(base_images_project,
+ config.test_image_suffix),
+ 'env':
+ env,
'args': [
- 'bash',
- '-c',
- # TODO(ochang): Replace with just patch_build.py once
- # permission in image is fixed.
- 'python /usr/local/bin/patch_build.py {0}'.format(out),
+ 'bash', '-c',
+ f'test_all.py || (echo "{failure_msg}" && false)'
],
- })
-
- if run_tests:
- failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n'
- 'python infra/helper.py check_build --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # test binaries
- {
- 'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env':
- env,
- 'args': [
- 'bash', '-c',
- 'test_all.py || (echo "{0}" && false)'.format(failure_msg)
- ],
- })
-
- if project_yaml['labels']:
- # write target labels
+ 'id':
+ get_id('build-check', build)
+ }
+ maybe_add_parallel(test_step, get_last_step_id(build_steps),
+ config.parallel)
+ build_steps.append(test_step)
+
+ if project.labels:
+ # Write target labels.
build_steps.append({
'name':
- image,
+ project.image,
'env':
env,
'args': [
'/usr/local/bin/write_labels.py',
- json.dumps(project_yaml['labels']),
- out,
+ json.dumps(project.labels),
+ build.out,
],
})
- if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
- dataflow_steps = dataflow_post_build_steps(name, env,
- base_images_project)
+ if build.sanitizer == 'dataflow' and build.fuzzing_engine == 'dataflow':
+ dataflow_steps = dataflow_post_build_steps(project.name, env,
+ base_images_project,
+ config.testing,
+ config.test_image_suffix)
if dataflow_steps:
build_steps.extend(dataflow_steps)
else:
sys.stderr.write('Skipping dataflow post build steps.\n')
build_steps.extend([
- # generate targets list
+ # Generate targets list.
{
'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
+ get_runner_image_name(base_images_project,
+ config.test_image_suffix),
'env':
env,
'args': [
- 'bash',
- '-c',
- 'targets_list > /workspace/{0}'.format(
- targets_list_filename),
- ],
- },
- # zip binaries
- {
- 'name':
- image,
- 'args': [
'bash', '-c',
- 'cd {out} && zip -r {zip_file} *'.format(out=out,
- zip_file=zip_file)
- ],
- },
- # upload srcmap
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/srcmap.json',
- srcmap_url,
- ],
- },
- # upload binaries
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- os.path.join(out, zip_file),
- upload_url,
- ],
- },
- # upload targets list
- {
- 'name':
- 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/{0}'.format(targets_list_filename),
- targets_list_url,
- ],
- },
- # upload the latest.version file
- build_lib.http_upload_step(zip_file, latest_version_url,
- LATEST_VERSION_CONTENT_TYPE),
- # cleanup
- {
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'rm -r ' + out,
+ f'targets_list > /workspace/{build.targets_list_filename}'
],
- },
+ }
])
+ upload_steps = get_upload_steps(project, build, timestamp,
+ base_images_project, config.testing)
+ build_steps.extend(upload_steps)
return build_steps
-def dataflow_post_build_steps(project_name, env, base_images_project):
+def get_targets_list_upload_step(bucket, project, build, uploader_image):
+ """Returns the step to upload targets_list for |build| of |project| to
+ |bucket|."""
+ targets_list_url = build_lib.get_signed_url(
+ build_lib.get_targets_list_url(bucket, project.name, build.sanitizer))
+ return {
+ 'name': uploader_image,
+ 'args': [
+ f'/workspace/{build.targets_list_filename}',
+ targets_list_url,
+ ],
+ }
+
+
+def get_uploader_image(base_images_project):
+ """Returns the uploader base image in |base_images_project|."""
+ return f'gcr.io/{base_images_project}/uploader'
+
+
+def get_upload_steps(project, build, timestamp, base_images_project, testing):
+ """Returns the steps for uploading the fuzzer build specified by |project| and
+ |build|. Uses |timestamp| for naming the uploads. Uses |base_images_project|
+ and |testing| for determining which image to use for the upload."""
+ bucket = build_lib.get_upload_bucket(build.fuzzing_engine, build.architecture,
+ testing)
+ stamped_name = '-'.join([project.name, build.sanitizer, timestamp])
+ zip_file = stamped_name + '.zip'
+ upload_url = build_lib.get_signed_url(
+ build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name, zip_file))
+ stamped_srcmap_file = stamped_name + '.srcmap.json'
+ srcmap_url = build_lib.get_signed_url(
+ build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
+ stamped_srcmap_file))
+ latest_version_file = '-'.join(
+ [project.name, build.sanitizer, LATEST_VERSION_FILENAME])
+ latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
+ bucket, project.name, latest_version_file)
+ latest_version_url = build_lib.get_signed_url(
+ latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
+ uploader_image = get_uploader_image(base_images_project)
+
+ upload_steps = [
+ # Zip binaries.
+ {
+ 'name': project.image,
+ 'args': ['bash', '-c', f'cd {build.out} && zip -r {zip_file} *'],
+ },
+ # Upload srcmap.
+ {
+ 'name': uploader_image,
+ 'args': [
+ '/workspace/srcmap.json',
+ srcmap_url,
+ ],
+ },
+ # Upload binaries.
+ {
+ 'name': uploader_image,
+ 'args': [
+ os.path.join(build.out, zip_file),
+ upload_url,
+ ],
+ },
+ # Upload targets list.
+ get_targets_list_upload_step(bucket, project, build, uploader_image),
+ # Upload the latest.version file.
+ build_lib.http_upload_step(zip_file, latest_version_url,
+ LATEST_VERSION_CONTENT_TYPE),
+ # Cleanup.
+ get_cleanup_step(project, build),
+ ]
+ return upload_steps
+
+
+def get_cleanup_step(project, build):
+ """Returns the step for cleaning up after doing |build| of |project|."""
+ return {
+ 'name': project.image,
+ 'args': [
+ 'bash',
+ '-c',
+ 'rm -r ' + build.out,
+ ],
+ }
+
+
+def get_runner_image_name(base_images_project, test_image_suffix):
+ """Returns the runner image that should be used, based on
+ |base_images_project|. Returns the testing image if |test_image_suffix|."""
+ image = f'gcr.io/{base_images_project}/base-runner'
+ if test_image_suffix:
+ image += '-' + test_image_suffix
+ return image
+
+
+def dataflow_post_build_steps(project_name, env, base_images_project, testing,
+ test_image_suffix):
"""Appends dataflow post build steps."""
- steps = build_lib.download_corpora_steps(project_name)
+ steps = build_lib.download_corpora_steps(project_name, testing)
if not steps:
return None
steps.append({
'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
+ get_runner_image_name(base_images_project, test_image_suffix),
'env':
env + [
'COLLECT_DFT_TIMEOUT=2h',
@@ -387,63 +477,126 @@ def dataflow_post_build_steps(project_name, env, base_images_project):
return steps
-def get_logs_url(build_id, image_project='oss-fuzz'):
+def get_logs_url(build_id, cloud_project='oss-fuzz'):
"""Returns url where logs are displayed for the build."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, image_project)
+ return ('https://console.cloud.google.com/logs/viewer?'
+ f'resource=build%2Fbuild_id%2F{build_id}&project={cloud_project}')
+
+
+def get_gcb_url(build_id, cloud_project='oss-fuzz'):
+  """Returns URL of the Cloud Build page for the build."""
+ return (f'https://console.cloud.google.com/cloud-build/builds/{build_id}'
+ f'?project={cloud_project}')
# pylint: disable=no-member
-def run_build(build_steps, project_name, tag):
- """Run the build for given steps on cloud build."""
+def run_build(oss_fuzz_project,
+ build_steps,
+ credentials,
+ build_type,
+ cloud_project='oss-fuzz'):
+ """Run the build for given steps on cloud build. |build_steps| are the steps
+ to run. |credentials| are are used to authenticate to GCB and build in
+ |cloud_project|. |oss_fuzz_project| and |build_type| are used to tag the build
+ in GCB so the build can be queried for debugging purposes."""
options = {}
if 'GCB_OPTIONS' in os.environ:
options = yaml.safe_load(os.environ['GCB_OPTIONS'])
+ else:
+ options = DEFAULT_GCB_OPTIONS
+ tags = [oss_fuzz_project + '-' + build_type, build_type, oss_fuzz_project]
build_body = {
'steps': build_steps,
'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
'options': options,
'logsBucket': GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
+ 'tags': tags,
'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
}
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
+ cloudbuild = cloud_build('cloudbuild',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
+ build_info = cloudbuild.projects().builds().create(projectId=cloud_project,
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
- print('Logs:', get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-def main():
- """Build and run projects."""
- if len(sys.argv) != 2:
- usage()
+ logging.info('Build ID: %s', build_id)
+ logging.info('Logs: %s', get_logs_url(build_id, cloud_project))
+ logging.info('Cloud build page: %s', get_gcb_url(build_id, cloud_project))
+ return build_id
+
+
+def get_args(description):
+ """Parses command line arguments and returns them. Suitable for a build
+ script."""
+ parser = argparse.ArgumentParser(sys.argv[0], description=description)
+ parser.add_argument('projects', help='Projects.', nargs='+')
+ parser.add_argument('--testing',
+ action='store_true',
+ required=False,
+ default=False,
+ help='Upload to testing buckets.')
+ parser.add_argument('--test-image-suffix',
+ required=False,
+ default=None,
+ help='Use testing base-images.')
+ parser.add_argument('--branch',
+ required=False,
+ default=None,
+ help='Use specified OSS-Fuzz branch.')
+ parser.add_argument('--parallel',
+ action='store_true',
+ required=False,
+ default=False,
+ help='Do builds in parallel.')
+ return parser.parse_args()
+
+
+def build_script_main(script_description, get_build_steps_func, build_type):
+ """Gets arguments from command line using |script_description| as helpstring
+ description. Gets build_steps using |get_build_steps_func| and then runs those
+ steps on GCB, tagging the builds with |build_type|. Returns 0 on success, 1 on
+ failure."""
+ args = get_args(script_description)
+ logging.basicConfig(level=logging.INFO)
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- project_name = os.path.basename(project_dir)
- with open(dockerfile_path) as dockerfile:
- dockerfile_lines = dockerfile.readlines()
+ credentials = oauth2client.client.GoogleCredentials.get_application_default()
+ error = False
+ config = Config(args.testing, args.test_image_suffix, args.branch,
+ args.parallel)
+ for project_name in args.projects:
+ logging.info('Getting steps for: "%s".', project_name)
+ try:
+ project_yaml_contents, dockerfile_contents = get_project_data(
+ project_name)
+ except FileNotFoundError:
+ logging.error('Couldn\'t get project data. Skipping %s.', project_name)
+ error = True
+ continue
+
+ steps = get_build_steps_func(project_name, project_yaml_contents,
+ dockerfile_contents, image_project,
+ base_images_project, config)
+ if not steps:
+ logging.error('No steps. Skipping %s.', project_name)
+ error = True
+ continue
+
+ run_build(project_name, steps, credentials, build_type)
+ return 0 if not error else 1
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
- run_build(steps, project_name, FUZZING_BUILD_TAG)
+def main():
+ """Build and run projects."""
+ return build_script_main('Builds a project on GCB.', get_build_steps,
+ FUZZING_BUILD_TYPE)
if __name__ == '__main__':
- main()
+ sys.exit(main())
diff --git a/infra/build/functions/build_project_test.py b/infra/build/functions/build_project_test.py
new file mode 100644
index 000000000..43f6c1cfa
--- /dev/null
+++ b/infra/build/functions/build_project_test.py
@@ -0,0 +1,77 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Unit tests for build_project."""
+import json
+import os
+import sys
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+FUNCTIONS_DIR = os.path.dirname(__file__)
+sys.path.append(FUNCTIONS_DIR)
+# pylint: disable=wrong-import-position
+
+import build_project
+import test_utils
+
+# pylint: disable=no-member
+
+
+class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
+ """Unit tests for sync."""
+
+ def setUp(self):
+ self.maxDiff = None # pylint: disable=invalid-name
+ self.setUpPyfakefs()
+
+ @mock.patch('build_lib.get_signed_url', return_value='test_url')
+ @mock.patch('build_project.get_datetime_now',
+ return_value=test_utils.FAKE_DATETIME)
+ def test_get_build_steps(self, mock_url, mock_get_datetime_now):
+ """Test for get_build_steps."""
+ del mock_url, mock_get_datetime_now
+ project_yaml_contents = ('language: c++\n'
+ 'sanitizers:\n'
+ ' - address\n'
+ ' - memory\n'
+ ' - undefined\n'
+ 'architectures:\n'
+ ' - x86_64\n'
+ ' - i386\n')
+ self.fs.create_dir(test_utils.PROJECT_DIR)
+ test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
+
+ expected_build_steps_file_path = test_utils.get_test_data_file_path(
+ 'expected_build_steps.json')
+ self.fs.add_real_file(expected_build_steps_file_path)
+ with open(expected_build_steps_file_path) as expected_build_steps_file:
+ expected_build_steps = json.load(expected_build_steps_file)
+
+ config = build_project.Config(False, False, None, False)
+ project_yaml, dockerfile = build_project.get_project_data(
+ test_utils.PROJECT)
+ build_steps = build_project.get_build_steps(test_utils.PROJECT,
+ project_yaml, dockerfile,
+ test_utils.IMAGE_PROJECT,
+ test_utils.BASE_IMAGES_PROJECT,
+ config)
+ self.assertEqual(build_steps, expected_build_steps)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/infra/build/functions/deploy.sh b/infra/build/functions/deploy.sh
index ea094e3b2..3edf6ee17 100755
--- a/infra/build/functions/deploy.sh
+++ b/infra/build/functions/deploy.sh
@@ -80,9 +80,10 @@ function deploy_cloud_function {
--runtime python38 \
--project $project \
--timeout 540 \
- --region us-central1 \
- --set-env-vars GCP_PROJECT=$project,FUNCTION_REGION=us-central1 \
- --max-instances 1
+ --region us-central1 \
+ --set-env-vars GCP_PROJECT=$project,FUNCTION_REGION=us-central1 \
+ --max-instances 1 \
+ --memory 2048MB
}
if [ $# == 1 ]; then
@@ -135,11 +136,6 @@ deploy_cloud_function base-image-build \
$BASE_IMAGE_JOB_TOPIC \
$PROJECT_ID
-deploy_cloud_function base-msan-build \
- build_msan \
- $BASE_IMAGE_JOB_TOPIC \
- $PROJECT_ID
-
deploy_cloud_function request-build \
build_project \
$BUILD_JOB_TOPIC \
diff --git a/infra/build/functions/expected_build_steps.json b/infra/build/functions/expected_build_steps.json
deleted file mode 100644
index da9c63654..000000000
--- a/infra/build/functions/expected_build_steps.json
+++ /dev/null
@@ -1,330 +0,0 @@
-[
- {
- "args": [
- "clone",
- "https://github.com/google/oss-fuzz.git"
- ],
- "name": "gcr.io/cloud-builders/git"
- },
- {
- "name": "gcr.io/cloud-builders/docker",
- "args": [
- "build",
- "-t",
- "gcr.io/oss-fuzz/test-project",
- "."
- ],
- "dir": "oss-fuzz/projects/test-project"
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json"
- ],
- "env": [
- "OSSFUZZ_REVISION=$REVISION_ID",
- "FUZZING_LANGUAGE=c++"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/msan-libs-builder",
- "args": [
- "bash",
- "-c",
- "cp -r /msan /workspace"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- }
-]
diff --git a/infra/build/functions/main.py b/infra/build/functions/main.py
index 1bfd35818..c34dc1329 100644
--- a/infra/build/functions/main.py
+++ b/infra/build/functions/main.py
@@ -45,8 +45,3 @@ def coverage_build(event, context):
def builds_status(event, context):
"""Entry point for builds status cloud function."""
update_build_status.update_status(event, context)
-
-
-def build_msan(event, context):
- """Entry point for base msan builder."""
- base_images.base_msan_builder(event, context)
diff --git a/infra/build/functions/project_sync.py b/infra/build/functions/project_sync.py
index debdbbd9a..7b30cae2d 100644
--- a/infra/build/functions/project_sync.py
+++ b/infra/build/functions/project_sync.py
@@ -94,8 +94,8 @@ def update_scheduler(cloud_scheduler_client, project, schedule, tag):
def delete_project(cloud_scheduler_client, project):
"""Delete the given project."""
logging.info('Deleting project %s', project.name)
- for tag in (build_project.FUZZING_BUILD_TAG,
- build_and_run_coverage.COVERAGE_BUILD_TAG):
+ for tag in (build_project.FUZZING_BUILD_TYPE,
+ build_and_run_coverage.COVERAGE_BUILD_TYPE):
try:
delete_scheduler(cloud_scheduler_client, project.name, tag)
except exceptions.NotFound:
@@ -124,9 +124,9 @@ def sync_projects(cloud_scheduler_client, projects):
try:
create_scheduler(cloud_scheduler_client, project_name,
projects[project_name].schedule,
- build_project.FUZZING_BUILD_TAG, FUZZING_BUILD_TOPIC)
+ build_project.FUZZING_BUILD_TYPE, FUZZING_BUILD_TOPIC)
create_scheduler(cloud_scheduler_client, project_name, COVERAGE_SCHEDULE,
- build_and_run_coverage.COVERAGE_BUILD_TAG,
+ build_and_run_coverage.COVERAGE_BUILD_TYPE,
COVERAGE_BUILD_TOPIC)
project_metadata = projects[project_name]
Project(name=project_name,
@@ -149,7 +149,7 @@ def sync_projects(cloud_scheduler_client, projects):
logging.info('Schedule changed.')
update_scheduler(cloud_scheduler_client, project,
projects[project.name].schedule,
- build_project.FUZZING_BUILD_TAG)
+ build_project.FUZZING_BUILD_TYPE)
project.schedule = project_metadata.schedule
project_changed = True
except exceptions.GoogleAPICallError as error:
@@ -232,7 +232,7 @@ def get_github_creds():
def sync(event, context):
"""Sync projects with cloud datastore."""
- del event, context #unused
+ del event, context # Unused.
with ndb.Client().context():
git_creds = get_github_creds()
diff --git a/infra/build/functions/project_sync_test.py b/infra/build/functions/project_sync_test.py
index f90733810..ad1330eaf 100644
--- a/infra/build/functions/project_sync_test.py
+++ b/infra/build/functions/project_sync_test.py
@@ -71,7 +71,7 @@ class CloudSchedulerClient:
# pylint: disable=no-self-use
def location_path(self, project_id, location_id):
"""Return project path."""
- return 'projects/{}/location/{}'.format(project_id, location_id)
+ return f'projects/{project_id}/location/{location_id}'
def create_job(self, parent, job):
"""Simulate create job."""
@@ -81,8 +81,7 @@ class CloudSchedulerClient:
# pylint: disable=no-self-use
def job_path(self, project_id, location_id, name):
"""Return job path."""
- return 'projects/{}/location/{}/jobs/{}'.format(project_id, location_id,
- name)
+ return f'projects/{project_id}/location/{location_id}/jobs/{name}'
def delete_job(self, name):
"""Simulate delete jobs."""
diff --git a/infra/build/functions/request_build.py b/infra/build/functions/request_build.py
index 6f0ab62a3..543bafb33 100644
--- a/infra/build/functions/request_build.py
+++ b/infra/build/functions/request_build.py
@@ -15,13 +15,10 @@
################################################################################
"""Cloud function to request builds."""
import base64
-import logging
import google.auth
-from googleapiclient.discovery import build
from google.cloud import ndb
-import build_lib
import build_project
from datastore_entities import BuildsHistory
from datastore_entities import Project
@@ -55,46 +52,33 @@ def get_project_data(project_name):
project = query.get()
if not project:
raise RuntimeError(
- 'Project {0} not available in cloud datastore'.format(project_name))
- project_yaml_contents = project.project_yaml_contents
- dockerfile_lines = project.dockerfile_contents.split('\n')
+ f'Project {project_name} not available in cloud datastore')
- return (project_yaml_contents, dockerfile_lines)
+ return project.project_yaml_contents, project.dockerfile_contents
+
+
+def get_empty_config():
+ """Returns an empty build config."""
+ return build_project.Config(False, None, None, False)
def get_build_steps(project_name, image_project, base_images_project):
"""Retrieve build steps."""
+ # TODO(metzman): Figure out if we need this.
project_yaml_contents, dockerfile_lines = get_project_data(project_name)
+ build_config = get_empty_config()
return build_project.get_build_steps(project_name, project_yaml_contents,
dockerfile_lines, image_project,
- base_images_project)
+ base_images_project, build_config)
-# pylint: disable=no-member
-def run_build(project_name, image_project, build_steps, credentials, tag):
- """Execute build on cloud build."""
- build_body = {
- 'steps': build_steps,
- 'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
- 'options': {
- 'machineType': 'N1_HIGHCPU_32'
- },
- 'logsBucket': build_project.GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
- 'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
- }
-
- cloudbuild = build('cloudbuild',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- build_info = cloudbuild.projects().builds().create(projectId=image_project,
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- update_build_history(project_name, build_id, tag)
- logging.info('Build ID: %s', build_id)
- logging.info('Logs: %s', build_project.get_logs_url(build_id, image_project))
+def run_build(oss_fuzz_project, build_steps, credentials, build_type,
+ cloud_project):
+ """Execute build on cloud build. Wrapper around build_project.py that also
+ updates the db."""
+ build_id = build_project.run_build(oss_fuzz_project, build_steps, credentials,
+ build_type, cloud_project)
+ update_build_history(oss_fuzz_project, build_id, build_type)
# pylint: disable=no-member
@@ -107,9 +91,14 @@ def request_build(event, context):
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
- credentials, image_project = google.auth.default()
- build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
+ credentials, cloud_project = google.auth.default()
+ build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
- run_build(project_name, image_project, build_steps, credentials,
- build_project.FUZZING_BUILD_TAG)
+ run_build(
+ project_name,
+ build_steps,
+ credentials,
+ build_project.FUZZING_BUILD_TYPE,
+ cloud_project=cloud_project,
+ )
diff --git a/infra/build/functions/request_build_test.py b/infra/build/functions/request_build_test.py
index 22a4a1056..1eb1d8efc 100644
--- a/infra/build/functions/request_build_test.py
+++ b/infra/build/functions/request_build_test.py
@@ -14,23 +14,17 @@
#
################################################################################
"""Unit tests for Cloud Function request builds which builds projects."""
-import json
-import datetime
import os
import sys
import unittest
-from unittest import mock
from google.cloud import ndb
sys.path.append(os.path.dirname(__file__))
# pylint: disable=wrong-import-position
-from datastore_entities import BuildsHistory
-from datastore_entities import Project
-from request_build import get_build_steps
-from request_build import get_project_data
-from request_build import update_build_history
+import datastore_entities
+import request_build
import test_utils
# pylint: disable=no-member
@@ -48,65 +42,42 @@ class TestRequestBuilds(unittest.TestCase):
def setUp(self):
test_utils.reset_ds_emulator()
-
- @mock.patch('build_lib.get_signed_url', return_value='test_url')
- @mock.patch('datetime.datetime')
- def test_get_build_steps(self, mocked_url, mocked_time):
- """Test for get_build_steps."""
- del mocked_url, mocked_time
- datetime.datetime = test_utils.SpoofedDatetime
- project_yaml_contents = ('language: c++\n'
- 'sanitizers:\n'
- ' - address\n'
- 'architectures:\n'
- ' - x86_64\n')
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- testcase_path = os.path.join(os.path.dirname(__file__),
- 'expected_build_steps.json')
- with open(testcase_path) as testcase_file:
- expected_build_steps = json.load(testcase_file)
-
- with ndb.Client().context():
- Project(name='test-project',
- project_yaml_contents=project_yaml_contents,
- dockerfile_contents='test line').put()
- build_steps = get_build_steps('test-project', image_project,
- base_images_project)
- self.assertEqual(build_steps, expected_build_steps)
+ self.maxDiff = None # pylint: disable=invalid-name
def test_get_build_steps_no_project(self):
"""Test for when project isn't available in datastore."""
with ndb.Client().context():
- self.assertRaises(RuntimeError, get_build_steps, 'test-project',
- 'oss-fuzz', 'oss-fuzz-base')
+ self.assertRaises(RuntimeError, request_build.get_build_steps,
+ 'test-project', 'oss-fuzz', 'oss-fuzz-base')
def test_build_history(self):
"""Testing build history."""
with ndb.Client().context():
- BuildsHistory(id='test-project-fuzzing',
- build_tag='fuzzing',
- project='test-project',
- build_ids=[str(i) for i in range(1, 65)]).put()
- update_build_history('test-project', '65', 'fuzzing')
+ datastore_entities.BuildsHistory(id='test-project-fuzzing',
+ build_tag='fuzzing',
+ project='test-project',
+ build_ids=[str(i) for i in range(1, 65)
+ ]).put()
+ request_build.update_build_history('test-project', '65', 'fuzzing')
expected_build_ids = [str(i) for i in range(2, 66)]
- self.assertEqual(BuildsHistory.query().get().build_ids,
+ self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_build_history_no_existing_project(self):
"""Testing build history when build history object is missing."""
with ndb.Client().context():
- update_build_history('test-project', '1', 'fuzzing')
+ request_build.update_build_history('test-project', '1', 'fuzzing')
expected_build_ids = ['1']
- self.assertEqual(BuildsHistory.query().get().build_ids,
+ self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_get_project_data(self):
"""Testing get project data."""
with ndb.Client().context():
- self.assertRaises(RuntimeError, get_project_data, 'test-project')
+ self.assertRaises(RuntimeError, request_build.get_project_data,
+ 'test-project')
@classmethod
def tearDownClass(cls):
diff --git a/infra/build/functions/request_coverage_build.py b/infra/build/functions/request_coverage_build.py
index 1b4ac0e47..a3890cb32 100644
--- a/infra/build/functions/request_coverage_build.py
+++ b/infra/build/functions/request_coverage_build.py
@@ -27,27 +27,31 @@ BASE_PROJECT = 'oss-fuzz-base'
def get_build_steps(project_name, image_project, base_images_project):
"""Retrieve build steps."""
+ build_config = request_build.get_empty_config()
project_yaml_contents, dockerfile_lines = request_build.get_project_data(
project_name)
return build_and_run_coverage.get_build_steps(project_name,
project_yaml_contents,
dockerfile_lines, image_project,
- base_images_project)
+ base_images_project,
+ build_config)
def request_coverage_build(event, context):
"""Entry point for coverage build cloud function."""
- del context #unused
+ del context # Unused.
if 'data' in event:
project_name = base64.b64decode(event['data']).decode('utf-8')
else:
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
- credentials, image_project = google.auth.default()
- build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
+ credentials, cloud_project = google.auth.default()
+ build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
- request_build.run_build(project_name, image_project, build_steps,
+ request_build.run_build(project_name,
+ build_steps,
credentials,
- build_and_run_coverage.COVERAGE_BUILD_TAG)
+ build_and_run_coverage.COVERAGE_BUILD_TYPE,
+ cloud_project=cloud_project)
diff --git a/infra/build/functions/request_coverage_build_test.py b/infra/build/functions/request_coverage_build_test.py
deleted file mode 100644
index 1327e36a0..000000000
--- a/infra/build/functions/request_coverage_build_test.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-"""Unit tests for Cloud Function that builds coverage reports."""
-import json
-import datetime
-import os
-import sys
-import unittest
-from unittest import mock
-
-from google.cloud import ndb
-
-sys.path.append(os.path.dirname(__file__))
-# pylint: disable=wrong-import-position
-
-from datastore_entities import Project
-from build_and_run_coverage import get_build_steps
-import test_utils
-
-# pylint: disable=no-member
-
-
-class TestRequestCoverageBuilds(unittest.TestCase):
- """Unit tests for sync."""
-
- @classmethod
- def setUpClass(cls):
- cls.ds_emulator = test_utils.start_datastore_emulator()
- test_utils.wait_for_emulator_ready(cls.ds_emulator, 'datastore',
- test_utils.DATASTORE_READY_INDICATOR)
- test_utils.set_gcp_environment()
-
- def setUp(self):
- test_utils.reset_ds_emulator()
-
- @mock.patch('build_lib.get_signed_url', return_value='test_url')
- @mock.patch('build_lib.download_corpora_steps',
- return_value=[{
- 'url': 'test_download'
- }])
- @mock.patch('datetime.datetime')
- def test_get_coverage_build_steps(self, mocked_url, mocked_corpora_steps,
- mocked_time):
- """Test for get_build_steps."""
- del mocked_url, mocked_corpora_steps, mocked_time
- datetime.datetime = test_utils.SpoofedDatetime
- project_yaml_contents = ('language: c++\n'
- 'sanitizers:\n'
- ' - address\n'
- 'architectures:\n'
- ' - x86_64\n')
- dockerfile_contents = 'test line'
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- testcase_path = os.path.join(os.path.dirname(__file__),
- 'expected_coverage_build_steps.json')
- with open(testcase_path) as testcase_file:
- expected_coverage_build_steps = json.load(testcase_file)
-
- with ndb.Client().context():
- Project(name='test-project',
- project_yaml_contents=project_yaml_contents,
- dockerfile_contents=dockerfile_contents).put()
-
- dockerfile_lines = dockerfile_contents.split('\n')
- build_steps = get_build_steps('test-project', project_yaml_contents,
- dockerfile_lines, image_project,
- base_images_project)
- self.assertEqual(build_steps, expected_coverage_build_steps)
-
- @classmethod
- def tearDownClass(cls):
- test_utils.cleanup_emulator(cls.ds_emulator)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/infra/build/functions/test_data/expected_build_steps.json b/infra/build/functions/test_data/expected_build_steps.json
new file mode 100644
index 000000000..f0e39832b
--- /dev/null
+++ b/infra/build/functions/test_data/expected_build_steps.json
@@ -0,0 +1,628 @@
+[
+ {
+ "args": [
+ "clone",
+ "https://github.com/google/oss-fuzz.git",
+ "--depth",
+ "1"
+ ],
+ "name": "gcr.io/cloud-builders/git"
+ },
+ {
+ "name": "gcr.io/cloud-builders/docker",
+ "args": [
+ "build",
+ "-t",
+ "gcr.io/oss-fuzz/test-project",
+ "."
+ ],
+ "dir": "oss-fuzz/projects/test-project"
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json"
+ ],
+ "env": [
+ "OSSFUZZ_REVISION=$REVISION_ID",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "id": "srcmap"
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/afl-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-afl-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-afl-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/afl-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/afl-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/afl-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/honggfuzz-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-honggfuzz-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-honggfuzz-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/honggfuzz-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/honggfuzz-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/honggfuzz-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-i386 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-address-i386"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-address-i386"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-address-i386 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-address-i386/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-address-i386"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-memory-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-memory-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-memory-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.memory"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-memory-x86_64 && zip -r test-project-memory-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-memory-x86_64/test-project-memory-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.memory",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-memory-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-memory-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-undefined-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-undefined-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-undefined-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.undefined"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-undefined-x86_64 && zip -r test-project-undefined-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-undefined-x86_64/test-project-undefined-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.undefined",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-undefined-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-undefined-x86_64"
+ ]
+ }
+]
diff --git a/infra/build/functions/expected_coverage_build_steps.json b/infra/build/functions/test_data/expected_coverage_build_steps.json
index 19b1d5b81..2af48f58d 100644
--- a/infra/build/functions/expected_coverage_build_steps.json
+++ b/infra/build/functions/test_data/expected_coverage_build_steps.json
@@ -2,7 +2,9 @@
{
"args": [
"clone",
- "https://github.com/google/oss-fuzz.git"
+ "https://github.com/google/oss-fuzz.git",
+ "--depth",
+ "1"
],
"name": "gcr.io/cloud-builders/git"
},
@@ -26,21 +28,25 @@
"env": [
"OSSFUZZ_REVISION=$REVISION_ID",
"FUZZING_LANGUAGE=c++"
- ]
+ ],
+ "id": "srcmap"
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
+ "ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
- "SANITIZER=coverage",
- "OUT=/workspace/out/coverage",
- "FUZZING_LANGUAGE=c++"
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-coverage-x86_64",
+ "SANITIZER=coverage"
],
"args": [
"bash",
"-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/coverage && compile || (echo \"********************************************************************************\nCoverage build failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage test-project\n********************************************************************************\" && false)"
- ]
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-coverage-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-coverage-x86_64"
},
{
"url": "test_download"
@@ -48,10 +54,12 @@
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
+ "ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
- "SANITIZER=coverage",
- "OUT=/workspace/out/coverage",
"FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-coverage-x86_64",
+ "SANITIZER=coverage",
"HTTP_PORT=",
"COVERAGE_EXTRA_ARGS="
],
@@ -81,7 +89,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/report",
+ "/workspace/out/libfuzzer-coverage-x86_64/report",
"gs://oss-fuzz-coverage/test-project/reports/20200101"
]
},
@@ -99,7 +107,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/fuzzer_stats",
+ "/workspace/out/libfuzzer-coverage-x86_64/fuzzer_stats",
"gs://oss-fuzz-coverage/test-project/fuzzer_stats/20200101"
]
},
@@ -117,7 +125,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/logs",
+ "/workspace/out/libfuzzer-coverage-x86_64/logs",
"gs://oss-fuzz-coverage/test-project/logs/20200101"
]
},
@@ -141,4 +149,4 @@
"test_url"
]
}
-] \ No newline at end of file
+]
diff --git a/infra/build/functions/test_utils.py b/infra/build/functions/test_utils.py
index 9aac8eac8..a093bcfa0 100644
--- a/infra/build/functions/test_utils.py
+++ b/infra/build/functions/test_utils.py
@@ -24,16 +24,31 @@ import requests
DATASTORE_READY_INDICATOR = b'is now running'
DATASTORE_EMULATOR_PORT = 8432
EMULATOR_TIMEOUT = 20
-TEST_PROJECT_ID = 'test-project'
+FUNCTIONS_DIR = os.path.dirname(__file__)
+OSS_FUZZ_DIR = os.path.dirname(os.path.dirname(os.path.dirname(FUNCTIONS_DIR)))
+PROJECTS_DIR = os.path.join(OSS_FUZZ_DIR, 'projects')
-# pylint: disable=arguments-differ
-class SpoofedDatetime(datetime.datetime):
- """Mocking Datetime class for now() function."""
+FAKE_DATETIME = datetime.datetime(2020, 1, 1, 0, 0, 0)
+IMAGE_PROJECT = 'oss-fuzz'
+BASE_IMAGES_PROJECT = 'oss-fuzz-base'
+PROJECT = 'test-project'
+PROJECT_DIR = os.path.join(PROJECTS_DIR, PROJECT)
- @classmethod
- def now(cls):
- return datetime.datetime(2020, 1, 1, 0, 0, 0)
+
+def create_project_data(project,
+ project_yaml_contents,
+ dockerfile_contents='test line'):
+ """Creates a project.yaml with |project_yaml_contents| and a Dockerfile with
+ |dockerfile_contents| for |project|."""
+ project_dir = os.path.join(PROJECTS_DIR, project)
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+ with open(project_yaml_path, 'w') as project_yaml_handle:
+ project_yaml_handle.write(project_yaml_contents)
+
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ with open(dockerfile_path, 'w') as dockerfile_handle:
+ dockerfile_handle.write(dockerfile_contents)
def start_datastore_emulator():
@@ -46,7 +61,7 @@ def start_datastore_emulator():
'start',
'--consistency=1.0',
'--host-port=localhost:' + str(DATASTORE_EMULATOR_PORT),
- '--project=' + TEST_PROJECT_ID,
+ '--project=' + PROJECT,
'--no-store-on-disk',
],
stdout=subprocess.PIPE,
@@ -76,15 +91,13 @@ def wait_for_emulator_ready(proc,
thread.daemon = True
thread.start()
if not ready_event.wait(timeout):
- raise RuntimeError(
- '{} emulator did not get ready in time.'.format(emulator))
+ raise RuntimeError(f'{emulator} emulator did not get ready in time.')
return thread
def reset_ds_emulator():
"""Reset ds emulator/clean all entities."""
- req = requests.post(
- 'http://localhost:{}/reset'.format(DATASTORE_EMULATOR_PORT))
+ req = requests.post(f'http://localhost:{DATASTORE_EMULATOR_PORT}/reset')
req.raise_for_status()
@@ -98,7 +111,12 @@ def set_gcp_environment():
"""Set environment variables for simulating in google cloud platform."""
os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
DATASTORE_EMULATOR_PORT)
- os.environ['GOOGLE_CLOUD_PROJECT'] = TEST_PROJECT_ID
- os.environ['DATASTORE_DATASET'] = TEST_PROJECT_ID
- os.environ['GCP_PROJECT'] = TEST_PROJECT_ID
+ os.environ['GOOGLE_CLOUD_PROJECT'] = PROJECT
+ os.environ['DATASTORE_DATASET'] = PROJECT
+ os.environ['GCP_PROJECT'] = PROJECT
os.environ['FUNCTION_REGION'] = 'us-central1'
+
+
+def get_test_data_file_path(filename):
+ """Returns the path to a test data file with name |filename|."""
+ return os.path.join(os.path.dirname(__file__), 'test_data', filename)
diff --git a/infra/build/functions/update_build_status.py b/infra/build/functions/update_build_status.py
index af65a41ab..927216628 100644
--- a/infra/build/functions/update_build_status.py
+++ b/infra/build/functions/update_build_status.py
@@ -145,8 +145,8 @@ def get_build_history(build_ids):
}
if not upload_log(build_id):
- log_name = 'log-{0}'.format(build_id)
- raise MissingBuildLogError('Missing build log file {0}'.format(log_name))
+ log_name = f'log-{build_id}'
+ raise MissingBuildLogError(f'Missing build log file {log_name}')
history.append({
'build_id': build_id,
@@ -203,19 +203,15 @@ def update_build_badges(project, last_build_successful,
if not last_build_successful:
badge = 'failing'
- print("[badge] {}: {}".format(project, badge))
+ print(f'[badge] {project}: {badge}')
for extension in BADGE_IMAGE_TYPES:
- badge_name = '{badge}.{extension}'.format(badge=badge, extension=extension)
+ badge_name = f'{badge}.{extension}'
# Copy blob from badge_images/badge_name to badges/project/
- blob_name = '{badge_dir}/{badge_name}'.format(badge_dir=BADGE_DIR,
- badge_name=badge_name)
+ blob_name = f'{BADGE_DIR}/{badge_name}'
- destination_blob_name = '{badge_dir}/{project_name}.{extension}'.format(
- badge_dir=DESTINATION_BADGE_DIR,
- project_name=project,
- extension=extension)
+ destination_blob_name = f'{DESTINATION_BADGE_DIR}/{project}.{extension}'
status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
badge_blob = status_bucket.blob(blob_name)
@@ -228,12 +224,12 @@ def upload_log(build_id):
"""Upload log file to GCS."""
status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
gcb_bucket = get_storage_client().get_bucket(build_project.GCB_LOGS_BUCKET)
- log_name = 'log-{0}.txt'.format(build_id)
+ log_name = f'log-{build_id}.txt'
log = gcb_bucket.blob(log_name)
dest_log = status_bucket.blob(log_name)
if not log.exists():
- print('Failed to find build log {0}'.format(log_name), file=sys.stderr)
+ print('Failed to find build log', log_name, file=sys.stderr)
return False
if dest_log.exists():
@@ -258,10 +254,10 @@ def update_status(event, context):
return
if status_type == 'fuzzing':
- tag = build_project.FUZZING_BUILD_TAG
+ tag = build_project.FUZZING_BUILD_TYPE
status_filename = FUZZING_STATUS_FILENAME
elif status_type == 'coverage':
- tag = build_and_run_coverage.COVERAGE_BUILD_TAG
+ tag = build_and_run_coverage.COVERAGE_BUILD_TYPE
status_filename = COVERAGE_STATUS_FILENAME
else:
raise RuntimeError('Invalid build status type ' + status_type)
diff --git a/infra/build/functions/update_build_status_test.py b/infra/build/functions/update_build_status_test.py
index 6784fac2d..24a32f676 100644
--- a/infra/build/functions/update_build_status_test.py
+++ b/infra/build/functions/update_build_status_test.py
@@ -56,14 +56,14 @@ class MockGetBuild:
class TestGetBuildHistory(unittest.TestCase):
"""Unit tests for get_build_history."""
- def test_get_build_history(self, mocked_upload_log, mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history(self, mock_upload_log, mock_cloud_build,
+ mock_google_auth):
"""Test for get_build_steps."""
- del mocked_cloud_build, mocked_google_auth
- mocked_upload_log.return_value = True
+ del mock_cloud_build, mock_google_auth
+ mock_upload_log.return_value = True
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'SUCCESS'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
expected_projects = {
'history': [{
@@ -79,27 +79,26 @@ class TestGetBuildHistory(unittest.TestCase):
self.assertDictEqual(update_build_status.get_build_history(['1']),
expected_projects)
- def test_get_build_history_missing_log(self, mocked_upload_log,
- mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history_missing_log(self, mock_upload_log,
+ mock_cloud_build, mock_google_auth):
"""Test for missing build log file."""
- del mocked_cloud_build, mocked_google_auth
+ del mock_cloud_build, mock_google_auth
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'SUCCESS'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
- mocked_upload_log.return_value = False
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
+ mock_upload_log.return_value = False
self.assertRaises(update_build_status.MissingBuildLogError,
update_build_status.get_build_history, ['1'])
- def test_get_build_history_no_last_success(self, mocked_upload_log,
- mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history_no_last_success(self, mock_upload_log,
+ mock_cloud_build,
+ mock_google_auth):
"""Test when there is no last successful build."""
- del mocked_cloud_build, mocked_google_auth
+ del mock_cloud_build, mock_google_auth
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'FAILURE'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
- mocked_upload_log.return_value = True
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
+ mock_upload_log.return_value = True
expected_projects = {
'history': [{
@@ -229,12 +228,12 @@ class TestUpdateBuildStatus(unittest.TestCase):
@mock.patch('google.auth.default', return_value=['temp', 'temp'])
@mock.patch('update_build_status.build', return_value='cloudbuild')
@mock.patch('update_build_status.upload_log')
- def test_update_build_status(self, mocked_upload_log, mocked_cloud_build,
- mocked_google_auth):
+ def test_update_build_status(self, mock_upload_log, mock_cloud_build,
+ mock_google_auth):
"""Testing update build status as a whole."""
- del self, mocked_cloud_build, mocked_google_auth
+ del self, mock_cloud_build, mock_google_auth
update_build_status.upload_status = MagicMock()
- mocked_upload_log.return_value = True
+ mock_upload_log.return_value = True
status_filename = 'status.json'
with ndb.Client().context():
BuildsHistory(id='test-project-1-fuzzing',
@@ -264,8 +263,8 @@ class TestUpdateBuildStatus(unittest.TestCase):
'build_id': '3',
'status': 'WORKING'
}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
expected_data = {
'projects': [{
diff --git a/infra/build_and_push_test_images.py b/infra/build_and_push_test_images.py
new file mode 100755
index 000000000..44c65ae2c
--- /dev/null
+++ b/infra/build_and_push_test_images.py
@@ -0,0 +1,92 @@
+#! /usr/bin/env python3
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Script for building and pushing base-images to gcr.io/oss-fuzz-base/ with a
+test suffix (passed as the first argument). This is useful for reusing the
+build infra to test image changes."""
+import logging
+import multiprocessing
+import os
+import subprocess
+import sys
+
+TAG_PREFIX = 'gcr.io/oss-fuzz-base/'
+INFRA_DIR = os.path.dirname(__file__)
+IMAGES_DIR = os.path.join(INFRA_DIR, 'base-images')
+
+
+def push_image(tag):
+ """Pushes image with |tag| to docker registry."""
+ logging.info('Pushing: %s', tag)
+ command = ['docker', 'push', tag]
+ subprocess.run(command, check=True)
+ logging.info('Pushed: %s', tag)
+
+
+def build_and_push_image(image, test_image_suffix):
+ """Builds |image| and pushes it to the docker registry with the |test_image_suffix| suffix."""
+ main_tag = TAG_PREFIX + image
+ testing_tag = main_tag + '-' + test_image_suffix
+ tags = [main_tag, testing_tag]
+ build_image(image, tags)
+ push_image(testing_tag)
+
+
+def build_image(image, tags):
+ """Builds |image| and tags it with |tags|."""
+ logging.info('Building: %s', image)
+ command = ['docker', 'build']
+ for tag in tags:
+ command.extend(['--tag', tag])
+ path = os.path.join(IMAGES_DIR, image)
+ command.append(path)
+ subprocess.run(command, check=True)
+ logging.info('Built: %s', image)
+
+
+def build_and_push_images(test_image_suffix):
+ """Builds and pushes base-images."""
+ images = [
+ ['base-image'],
+ ['base-clang'],
+ # base-runner is also dependent on base-clang.
+ ['base-builder', 'base-runner'],
+ [
+ 'base-runner-debug', 'base-builder-go', 'base-builder-jvm',
+ 'base-builder-python', 'base-builder-rust', 'base-builder-swift'
+ ],
+ ]
+ max_parallelization = max([len(image_list) for image_list in images])
+ proc_count = min(multiprocessing.cpu_count(), max_parallelization)
+ logging.info('Using %d parallel processes.', proc_count)
+ pool = multiprocessing.Pool(proc_count)
+ for image_list in images:
+ args_list = [(image, test_image_suffix) for image in image_list]
+ pool.starmap(build_and_push_image, args_list)
+
+
+def main():
+ """Builds base-images, tags them with the test suffix (in addition to the
+ normal tag) and pushes the suffixed images to the docker registry."""
+ test_image_suffix = sys.argv[1]
+ logging.basicConfig(level=logging.DEBUG)
+ logging.info('Doing simple gcloud command to ensure 2FA passes.')
+ subprocess.run(['gcloud', 'projects', 'list', '--limit=1'], check=True)
+ build_and_push_images(test_image_suffix)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/infra/build_fuzzers.Dockerfile b/infra/build_fuzzers.Dockerfile
index df06ff754..77a34ae40 100644
--- a/infra/build_fuzzers.Dockerfile
+++ b/infra/build_fuzzers.Dockerfile
@@ -13,7 +13,8 @@
# limitations under the License.
#
################################################################################
-# Docker image to run the CIFuzz action build_fuzzers in.
+# Docker image to build fuzzers for CIFuzz (the build_fuzzers action on GitHub
+# actions).
FROM gcr.io/oss-fuzz-base/cifuzz-base
@@ -22,5 +23,9 @@ FROM gcr.io/oss-fuzz-base/cifuzz-base
# just expand to '/opt/oss-fuzz'.
ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/build_fuzzers_entrypoint.py"]
+WORKDIR ${OSS_FUZZ_ROOT}/infra
+
# Update infra source code.
ADD . ${OSS_FUZZ_ROOT}/infra
+
+RUN python3 -m pip install -r ${OSS_FUZZ_ROOT}/infra/cifuzz/requirements.txt \ No newline at end of file
diff --git a/infra/build_specified_commit.py b/infra/build_specified_commit.py
index b2130ea85..d7b667004 100644
--- a/infra/build_specified_commit.py
+++ b/infra/build_specified_commit.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Module to build a image from a specific commit, branch or pull request
+"""Module to build a image from a specific commit, branch or pull request.
This module is allows each of the OSS Fuzz projects fuzzers to be built
from a specific point in time. This feature can be used for implementations
@@ -147,7 +147,7 @@ def copy_src_from_docker(project_name, host_dir):
@retry.wrap(_IMAGE_BUILD_TRIES, 2)
def _build_image_with_retries(project_name):
"""Build image with retries."""
- return helper.build_image_impl(project_name)
+ return helper.build_image_impl(helper.Project(project_name))
def get_required_post_checkout_steps(dockerfile_path):
@@ -223,15 +223,16 @@ def build_fuzzers_from_commit(commit,
post_checkout_step,
])
- result = helper.build_fuzzers_impl(project_name=build_data.project_name,
+ project = helper.Project(build_data.project_name)
+ result = helper.build_fuzzers_impl(project=project,
clean=True,
engine=build_data.engine,
sanitizer=build_data.sanitizer,
architecture=build_data.architecture,
env_to_add=None,
source_path=host_src_path,
- mount_location='/src')
- if result == 0 or i == num_retry:
+ mount_path='/src')
+ if result or i == num_retry:
break
# Retry with an OSS-Fuzz builder container that's closer to the project
@@ -285,7 +286,7 @@ def build_fuzzers_from_commit(commit,
cleanup()
cleanup()
- return result == 0
+ return result
def detect_main_repo(project_name, repo_name=None, commit=None):
@@ -297,10 +298,9 @@ def detect_main_repo(project_name, repo_name=None, commit=None):
project_name: The name of the oss-fuzz project.
repo_name: The name of the main repo in an OSS-Fuzz project.
commit: A commit SHA that is associated with the main repo.
- src_dir: The location of the projects source on the docker image.
Returns:
- The repo's origin, the repo's path.
+ A tuple containing (the repo's origin, the repo's path).
"""
if not repo_name and not commit:
diff --git a/infra/build_specified_commit_test.py b/infra/build_specified_commit_test.py
index a86504580..00f50947f 100644
--- a/infra/build_specified_commit_test.py
+++ b/infra/build_specified_commit_test.py
@@ -16,7 +16,6 @@ The will consist of the following functional tests:
1. The inference of the main repo for a specific project.
2. The building of a projects fuzzers from a specific commit.
-IMPORTANT: This test needs to be run with root privileges.
"""
import os
import tempfile
@@ -27,7 +26,7 @@ import helper
import repo_manager
import test_repos
-# Necessary because __file__ changes with os.chdir
+# necessary because __file__ changes with os.chdir
TEST_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
@@ -45,32 +44,39 @@ class BuildImageIntegrationTest(unittest.TestCase):
should not.
"""
with tempfile.TemporaryDirectory() as tmp_dir:
- test_case = test_repos.TEST_REPOS[1]
- self.assertTrue(helper.build_image_impl(test_case.project_name))
+ test_repo = test_repos.TEST_REPOS[1]
+ self.assertTrue(helper.build_image_impl(test_repo.project_name))
host_src_dir = build_specified_commit.copy_src_from_docker(
- test_case.project_name, tmp_dir)
+ test_repo.project_name, tmp_dir)
test_repo_manager = repo_manager.clone_repo_and_get_manager(
- test_case.git_url, host_src_dir, test_case.oss_repo_name)
+ test_repo.git_url, host_src_dir, test_repo.oss_repo_name)
build_data = build_specified_commit.BuildData(
sanitizer='address',
architecture='x86_64',
engine='libfuzzer',
- project_name=test_case.project_name)
+ project_name=test_repo.project_name)
- build_specified_commit.build_fuzzers_from_commit(test_case.old_commit,
+ build_specified_commit.build_fuzzers_from_commit(test_repo.old_commit,
test_repo_manager,
host_src_dir, build_data)
- old_error_code = helper.reproduce_impl(test_case.project_name,
- test_case.fuzz_target, False, [],
- [], test_case.test_case_path)
- build_specified_commit.build_fuzzers_from_commit(test_case.new_commit,
+ project = helper.Project(test_repo.project_name)
+ old_result = helper.reproduce_impl(project=project,
+ fuzzer_name=test_repo.fuzz_target,
+ valgrind=False,
+ env_to_add=[],
+ fuzzer_args=[],
+ testcase_path=test_repo.testcase_path)
+ build_specified_commit.build_fuzzers_from_commit(test_repo.new_commit,
test_repo_manager,
host_src_dir, build_data)
- new_error_code = helper.reproduce_impl(test_case.project_name,
- test_case.fuzz_target, False, [],
- [], test_case.test_case_path)
- self.assertNotEqual(new_error_code, old_error_code)
+ new_result = helper.reproduce_impl(project=project,
+ fuzzer_name=test_repo.fuzz_target,
+ valgrind=False,
+ env_to_add=[],
+ fuzzer_args=[],
+ testcase_path=test_repo.testcase_path)
+ self.assertNotEqual(new_result, old_result)
def test_detect_main_repo_from_commit(self):
"""Test the detect main repo function from build specific commit module."""
diff --git a/infra/ci/build.py b/infra/ci/build.py
index addeb7879..7a02d6001 100755
--- a/infra/ci/build.py
+++ b/infra/ci/build.py
@@ -25,20 +25,22 @@ import sys
import subprocess
import yaml
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import constants
+
CANARY_PROJECT = 'skcms'
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['afl', 'honggfuzz', 'libfuzzer']
DEFAULT_SANITIZERS = ['address', 'undefined']
-# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'rust']
-
def get_changed_files_output():
"""Returns the output of a git command that discovers changed files."""
branch_commit_hash = subprocess.check_output(
- ['git', 'merge-base', 'FETCH_HEAD', 'origin/HEAD']).strip().decode()
+ ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()
return subprocess.check_output(
['git', 'diff', '--name-only', branch_commit_hash + '..']).decode()
@@ -112,7 +114,7 @@ def should_build_coverage(project_yaml):
return False
language = project_yaml.get('language')
- if language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+ if language not in constants.LANGUAGES_WITH_COVERAGE_SUPPORT:
print(('Project is written in "{language}", '
'coverage is not supported yet.').format(language=language))
return False
@@ -213,11 +215,16 @@ def build_base_images():
images = [
'base-image',
'base-builder',
+ 'base-builder-go',
+ 'base-builder-jvm',
+ 'base-builder-python',
+ 'base-builder-rust',
+ 'base-builder-swift',
'base-runner',
]
for image in images:
try:
- execute_helper_command(['build_image', image, '--no-pull'])
+ execute_helper_command(['build_image', image, '--no-pull', '--cache'])
except subprocess.CalledProcessError:
return 1
@@ -239,6 +246,7 @@ def build_canary_project():
def main():
"""Build modified projects or canary project."""
+ os.environ['OSS_FUZZ_CI'] = '1'
infra_changed = is_infra_changed()
if infra_changed:
print('Pulling and building base images first.')
diff --git a/infra/ci/requirements.txt b/infra/ci/requirements.txt
index f0a8be0b5..310ba53e8 100644
--- a/infra/ci/requirements.txt
+++ b/infra/ci/requirements.txt
@@ -6,3 +6,5 @@ pytest==6.2.1
pytest-xdist==2.2.0
PyYAML==5.4
yapf==0.30.0
+# Needed for cifuzz tests.
+Jinja2==2.11.3
diff --git a/infra/cifuzz/actions/build_fuzzers/action.yml b/infra/cifuzz/actions/build_fuzzers/action.yml
index 835b7b430..5cc35a15f 100644
--- a/infra/cifuzz/actions/build_fuzzers/action.yml
+++ b/infra/cifuzz/actions/build_fuzzers/action.yml
@@ -21,9 +21,6 @@ inputs:
project-src-path:
description: "The path to the project's source code checkout."
required: false
- build-integration-path:
- description: "The path to the the project's build integration."
- required: false
bad-build-check:
description: "Whether or not OSS-Fuzz's check for bad builds should be done."
required: false
@@ -38,6 +35,5 @@ runs:
ALLOWED_BROKEN_TARGETS_PERCENTAGE: ${{ inputs.allowed-broken-targets-percentage}}
SANITIZER: ${{ inputs.sanitizer }}
PROJECT_SRC_PATH: ${{ inputs.project-src-path }}
- BUILD_INTEGRATION_PATH: ${{ inputs.build-integration-path }}
LOW_DISK_SPACE: 'True'
BAD_BUILD_CHECK: ${{ inputs.bad-build-check }}
diff --git a/infra/cifuzz/actions/run_fuzzers/action.yml b/infra/cifuzz/actions/run_fuzzers/action.yml
index d1c03c833..b56dbce3d 100644
--- a/infra/cifuzz/actions/run_fuzzers/action.yml
+++ b/infra/cifuzz/actions/run_fuzzers/action.yml
@@ -19,9 +19,6 @@ inputs:
sanitizer:
description: 'The sanitizer to run the fuzzers with.'
default: 'address'
- build-integration-path:
- description: "The path to the the project's build integration."
- required: false
run-fuzzers-mode:
description: |
The mode to run the fuzzers with ("ci" or "batch").
@@ -30,6 +27,18 @@ inputs:
"batch" is in alpha and should not be used in production.
required: false
default: 'ci'
+ github-token:
+ description: |
+ Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
+ You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
+ hardcode the token.
+ TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
+ Document locking this down.
+ required: false
+ report-unreproducible-crashes:
+ description: 'If True, then unreproducible crashes will be reported by CIFuzz.'
+ required: false
+ default: false
runs:
using: 'docker'
image: '../../../run_fuzzers.Dockerfile'
@@ -40,8 +49,6 @@ runs:
DRY_RUN: ${{ inputs.dry-run}}
SANITIZER: ${{ inputs.sanitizer }}
RUN_FUZZERS_MODE: ${{ inputs.run-fuzzers-mode }}
- # TODO(metzman): Even though this param is used for building, it's needed
- # for running because we use it to distinguish OSS-Fuzz from non-OSS-Fuzz.
- # We should do something explicit instead.
- BUILD_INTEGRATION_PATH: ${{ inputs.build-integration-path }}
+ GITHUB_TOKEN: ${{ inputs.github-token }}
LOW_DISK_SPACE: 'True'
+ REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }}
diff --git a/infra/cifuzz/affected_fuzz_targets.py b/infra/cifuzz/affected_fuzz_targets.py
index f9f2242a3..be35c5cc5 100644
--- a/infra/cifuzz/affected_fuzz_targets.py
+++ b/infra/cifuzz/affected_fuzz_targets.py
@@ -17,19 +17,17 @@ import logging
import os
import sys
-import coverage
-
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import utils
-def remove_unaffected_fuzz_targets(project_name, out_dir, files_changed,
- repo_path):
+def remove_unaffected_fuzz_targets(clusterfuzz_deployment, out_dir,
+ files_changed, repo_path):
"""Removes all non affected fuzz targets in the out directory.
Args:
- project_name: The name of the relevant OSS-Fuzz project.
+ clusterfuzz_deployment: The ClusterFuzz deployment object.
out_dir: The location of the fuzz target binaries.
files_changed: A list of files changed compared to HEAD.
repo_path: The location of the OSS-Fuzz repo in the docker image.
@@ -38,7 +36,6 @@ def remove_unaffected_fuzz_targets(project_name, out_dir, files_changed,
targets are unaffected. For example, this means that fuzz targets which don't
have coverage data on will not be deleted.
"""
- # TODO(metzman): Make this use clusterfuzz deployment.
if not files_changed:
# Don't remove any fuzz targets if there is no difference from HEAD.
logging.info('No files changed compared to HEAD.')
@@ -52,14 +49,13 @@ def remove_unaffected_fuzz_targets(project_name, out_dir, files_changed,
logging.error('No fuzz targets found in out dir.')
return
- coverage_getter = coverage.OssFuzzCoverageGetter(project_name, repo_path)
- if not coverage_getter.fuzzer_stats_url:
+ coverage = clusterfuzz_deployment.get_coverage(repo_path)
+ if not coverage:
# Don't remove any fuzz targets unless we have data.
logging.error('Could not find latest coverage report.')
return
- affected_fuzz_targets = get_affected_fuzz_targets(coverage_getter,
- fuzz_target_paths,
+ affected_fuzz_targets = get_affected_fuzz_targets(coverage, fuzz_target_paths,
files_changed)
if not affected_fuzz_targets:
@@ -79,11 +75,11 @@ def remove_unaffected_fuzz_targets(project_name, out_dir, files_changed,
fuzz_target_path)
-def is_fuzz_target_affected(coverage_getter, fuzz_target_path, files_changed):
+def is_fuzz_target_affected(coverage, fuzz_target_path, files_changed):
"""Returns True if a fuzz target (|fuzz_target_path|) is affected by
|files_changed|."""
fuzz_target = os.path.basename(fuzz_target_path)
- covered_files = coverage_getter.get_files_covered_by_target(fuzz_target)
+ covered_files = coverage.get_files_covered_by_target(fuzz_target)
if not covered_files:
# Assume a fuzz target is affected if we can't get its coverage from
# OSS-Fuzz.
@@ -104,13 +100,11 @@ def is_fuzz_target_affected(coverage_getter, fuzz_target_path, files_changed):
return False
-def get_affected_fuzz_targets(coverage_getter, fuzz_target_paths,
- files_changed):
+def get_affected_fuzz_targets(coverage, fuzz_target_paths, files_changed):
"""Returns a list of paths of affected targets."""
affected_fuzz_targets = set()
for fuzz_target_path in fuzz_target_paths:
- if is_fuzz_target_affected(coverage_getter, fuzz_target_path,
- files_changed):
+ if is_fuzz_target_affected(coverage, fuzz_target_path, files_changed):
affected_fuzz_targets.add(fuzz_target_path)
return affected_fuzz_targets
diff --git a/infra/cifuzz/affected_fuzz_targets_test.py b/infra/cifuzz/affected_fuzz_targets_test.py
index 05f27c072..823654d11 100644
--- a/infra/cifuzz/affected_fuzz_targets_test.py
+++ b/infra/cifuzz/affected_fuzz_targets_test.py
@@ -21,6 +21,9 @@ from unittest import mock
import parameterized
import affected_fuzz_targets
+import clusterfuzz_deployment
+import test_helpers
+import workspace_utils
# pylint: disable=protected-access
@@ -30,15 +33,15 @@ EXAMPLE_PROJECT = 'example'
EXAMPLE_FILE_CHANGED = 'test.txt'
-TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'test_data')
+TEST_DATA_OUT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'test_data', 'build-out')
class RemoveUnaffectedFuzzTargets(unittest.TestCase):
"""Tests remove_unaffected_fuzzers."""
- TEST_FUZZER_1 = os.path.join(TEST_DATA_PATH, 'out', 'example_crash_fuzzer')
- TEST_FUZZER_2 = os.path.join(TEST_DATA_PATH, 'out', 'example_nocrash_fuzzer')
+ TEST_FUZZER_1 = os.path.join(TEST_DATA_OUT_PATH, 'example_crash_fuzzer')
+ TEST_FUZZER_2 = os.path.join(TEST_DATA_OUT_PATH, 'example_nocrash_fuzzer')
# yapf: disable
@parameterized.parameterized.expand([
@@ -57,18 +60,27 @@ class RemoveUnaffectedFuzzTargets(unittest.TestCase):
# yapf: enable
def test_remove_unaffected_fuzz_targets(self, side_effect, expected_dir_len):
"""Tests that remove_unaffected_fuzzers has the intended effect."""
+ config = test_helpers.create_run_config(
+ is_github=True,
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ workspace='/workspace')
+ workspace = workspace_utils.Workspace(config)
+ deployment = clusterfuzz_deployment.get_clusterfuzz_deployment(
+ config, workspace)
# We can't use fakefs in this test because this test executes
# utils.is_fuzz_target_local. This function relies on the executable bit
# being set, which doesn't work properly in fakefs.
with tempfile.TemporaryDirectory() as tmp_dir, mock.patch(
- 'coverage.OssFuzzCoverageGetter.get_files_covered_by_target'
- ) as mocked_get_files:
- with mock.patch('coverage._get_fuzzer_stats_dir_url', return_value=1):
- mocked_get_files.side_effect = side_effect
+ 'get_coverage.OSSFuzzCoverage.get_files_covered_by_target'
+ ) as mock_get_files:
+ with mock.patch('get_coverage._get_oss_fuzz_fuzzer_stats_dir_url',
+ return_value=1):
+ mock_get_files.side_effect = side_effect
shutil.copy(self.TEST_FUZZER_1, tmp_dir)
shutil.copy(self.TEST_FUZZER_2, tmp_dir)
+
affected_fuzz_targets.remove_unaffected_fuzz_targets(
- EXAMPLE_PROJECT, tmp_dir, [EXAMPLE_FILE_CHANGED], '')
+ deployment, tmp_dir, [EXAMPLE_FILE_CHANGED], '')
self.assertEqual(expected_dir_len, len(os.listdir(tmp_dir)))
diff --git a/infra/cifuzz/base_runner_utils.py b/infra/cifuzz/base_runner_utils.py
new file mode 100644
index 000000000..246375481
--- /dev/null
+++ b/infra/cifuzz/base_runner_utils.py
@@ -0,0 +1,33 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utilities for scripts from gcr.io/oss-fuzz-base/base-runner."""
+
+import os
+
+import config_utils
+
+
+def get_env(config, workspace):
+ """Returns a dictionary containing the current environment with additional env
+ vars set to values needed to run a fuzzer."""
+ env = os.environ.copy()
+ env['SANITIZER'] = config.sanitizer
+ env['FUZZING_LANGUAGE'] = config.language
+ env['OUT'] = workspace.out
+ env['CIFUZZ'] = 'True'
+ env['FUZZING_ENGINE'] = config_utils.DEFAULT_ENGINE
+ env['ARCHITECTURE'] = config_utils.DEFAULT_ARCHITECTURE
+ # Do this so we don't fail in tests.
+ env['FUZZER_ARGS'] = '-rss_limit_mb=2560 -timeout=25'
+ return env
diff --git a/infra/cifuzz/build_fuzzers.py b/infra/cifuzz/build_fuzzers.py
index 78180b52b..6722be5e9 100644
--- a/infra/cifuzz/build_fuzzers.py
+++ b/infra/cifuzz/build_fuzzers.py
@@ -19,18 +19,17 @@ import os
import sys
import affected_fuzz_targets
+import base_runner_utils
+import clusterfuzz_deployment
import continuous_integration
import docker
+import workspace_utils
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import helper
import utils
-# Default fuzz configuration.
-DEFAULT_ENGINE = 'libfuzzer'
-DEFAULT_ARCHITECTURE = 'x86_64'
-
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.DEBUG)
@@ -55,89 +54,82 @@ class Builder: # pylint: disable=too-many-instance-attributes
def __init__(self, config, ci_system):
self.config = config
self.ci_system = ci_system
- self.out_dir = os.path.join(config.workspace, 'out')
- os.makedirs(self.out_dir, exist_ok=True)
- self.work_dir = os.path.join(config.workspace, 'work')
- os.makedirs(self.work_dir, exist_ok=True)
+ self.workspace = workspace_utils.Workspace(config)
+ self.workspace.initialize_dir(self.workspace.out)
+ self.workspace.initialize_dir(self.workspace.work)
+ self.clusterfuzz_deployment = (
+ clusterfuzz_deployment.get_clusterfuzz_deployment(
+ self.config, self.workspace))
self.image_repo_path = None
self.host_repo_path = None
self.repo_manager = None
def build_image_and_checkout_src(self):
"""Builds the project builder image and checkout source code for the patch
- we want to fuzz (if necessary). Returns True on success.
- Must be implemented by child classes."""
+ we want to fuzz (if necessary). Returns True on success."""
result = self.ci_system.prepare_for_fuzzer_build()
if not result.success:
return False
self.image_repo_path = result.image_repo_path
self.repo_manager = result.repo_manager
+ logging.info('repo_dir: %s.', self.repo_manager.repo_dir)
self.host_repo_path = self.repo_manager.repo_dir
return True
def build_fuzzers(self):
"""Moves the source code we want to fuzz into the project builder and builds
the fuzzers from that source code. Returns True on success."""
- docker_args = get_common_docker_args(self.config.sanitizer,
- self.config.language)
- container = utils.get_container_name()
-
- if container:
- docker_args.extend(
- _get_docker_build_fuzzers_args_container(self.out_dir, container))
- else:
+ docker_args, docker_container = docker.get_base_docker_run_args(
+ self.workspace, self.config.sanitizer, self.config.language,
+ self.config.docker_in_docker)
+ if not docker_container:
docker_args.extend(
- _get_docker_build_fuzzers_args_not_container(self.out_dir,
- self.host_repo_path))
-
- if self.config.sanitizer == 'memory':
- docker_args.extend(_get_docker_build_fuzzers_args_msan(self.work_dir))
- self.handle_msan_prebuild(container)
+ _get_docker_build_fuzzers_args_not_container(self.host_repo_path))
docker_args.extend([
- docker.get_project_image_name(self.config.project_name),
+ docker.get_project_image_name(self.config.oss_fuzz_project_name),
'/bin/bash',
'-c',
])
- rm_path = os.path.join(self.image_repo_path, '*')
- image_src_path = os.path.dirname(self.image_repo_path)
- bash_command = 'rm -rf {0} && cp -r {1} {2} && compile'.format(
- rm_path, self.host_repo_path, image_src_path)
- docker_args.append(bash_command)
+ build_command = self.ci_system.get_build_command(self.host_repo_path,
+ self.image_repo_path)
+ docker_args.append(build_command)
logging.info('Building with %s sanitizer.', self.config.sanitizer)
- if helper.docker_run(docker_args):
- # docker_run returns nonzero on failure.
+
+ # TODO(metzman): Stop using helper.docker_run so we can get rid of
+ # docker.get_base_docker_run_args and merge its contents into
+ # docker.get_base_docker_run_command.
+ if not helper.docker_run(docker_args):
logging.error('Building fuzzers failed.')
return False
- if self.config.sanitizer == 'memory':
- self.handle_msan_postbuild(container)
return True
- def handle_msan_postbuild(self, container):
- """Post-build step for MSAN builds. Patches the build to use MSAN
- libraries."""
- helper.docker_run([
- '--volumes-from', container, '-e',
- 'WORK={work_dir}'.format(work_dir=self.work_dir),
- docker.MSAN_LIBS_BUILDER_TAG, 'patch_build.py', '/out'
- ])
+ def upload_build(self):
+ """Upload build."""
+ if self.config.upload_build:
+ self.clusterfuzz_deployment.upload_build(
+ self.repo_manager.get_current_commit())
- def handle_msan_prebuild(self, container):
- """Pre-build step for MSAN builds. Copies MSAN libs to |msan_libs_dir| and
- returns docker arguments to use that directory for MSAN libs."""
- logging.info('Copying MSAN libs.')
- helper.docker_run([
- '--volumes-from', container, docker.MSAN_LIBS_BUILDER_TAG, 'bash', '-c',
- 'cp -r /msan {work_dir}'.format(work_dir=self.work_dir)
- ])
+ return True
+
+ def check_fuzzer_build(self):
+ """Checks the fuzzer build. Returns True on success or if config specifies
+ to skip check."""
+ if not self.config.bad_build_check:
+ return True
+
+ return check_fuzzer_build(self.config)
def build(self):
"""Builds the image, checkouts the source (if needed), builds the fuzzers
and then removes the unaffectted fuzzers. Returns True on success."""
methods = [
- self.build_image_and_checkout_src, self.build_fuzzers,
- self.remove_unaffected_fuzz_targets
+ self.build_image_and_checkout_src,
+ self.build_fuzzers,
+ self.remove_unaffected_fuzz_targets,
+ self.check_fuzzer_build,
+ self.upload_build,
]
for method in methods:
if not method():
@@ -154,7 +146,7 @@ class Builder: # pylint: disable=too-many-instance-attributes
changed_files = self.ci_system.get_changed_code_under_test(
self.repo_manager)
affected_fuzz_targets.remove_unaffected_fuzz_targets(
- self.config.project_name, self.out_dir, changed_files,
+ self.clusterfuzz_deployment, self.workspace.out, changed_files,
self.image_repo_path)
return True
@@ -186,94 +178,39 @@ def build_fuzzers(config):
return builder.build()
-def get_common_docker_args(sanitizer, language):
- """Returns a list of common docker arguments."""
- return [
- '--cap-add',
- 'SYS_PTRACE',
- '-e',
- 'FUZZING_ENGINE=' + DEFAULT_ENGINE,
- '-e',
- 'SANITIZER=' + sanitizer,
- '-e',
- 'ARCHITECTURE=' + DEFAULT_ARCHITECTURE,
- '-e',
- 'CIFUZZ=True',
- '-e',
- 'FUZZING_LANGUAGE=' + language,
- ]
-
-
-def check_fuzzer_build(out_dir,
- sanitizer,
- language,
- allowed_broken_targets_percentage=None):
+def check_fuzzer_build(config):
"""Checks the integrity of the built fuzzers.
Args:
- out_dir: The directory containing the fuzzer binaries.
- sanitizer: The sanitizer the fuzzers are built with.
+ config: The config object.
Returns:
- True if fuzzers are correct.
+ True if fuzzers pass OSS-Fuzz's build check.
"""
- if not os.path.exists(out_dir):
- logging.error('Invalid out directory: %s.', out_dir)
+ workspace = workspace_utils.Workspace(config)
+ if not os.path.exists(workspace.out):
+ logging.error('Invalid out directory: %s.', workspace.out)
return False
- if not os.listdir(out_dir):
- logging.error('No fuzzers found in out directory: %s.', out_dir)
+ if not os.listdir(workspace.out):
+ logging.error('No fuzzers found in out directory: %s.', workspace.out)
return False
- command = get_common_docker_args(sanitizer, language)
-
- if allowed_broken_targets_percentage is not None:
- command += [
- '-e',
- ('ALLOWED_BROKEN_TARGETS_PERCENTAGE=' +
- allowed_broken_targets_percentage)
- ]
+ env = base_runner_utils.get_env(config, workspace)
+ if config.allowed_broken_targets_percentage is not None:
+ env['ALLOWED_BROKEN_TARGETS_PERCENTAGE'] = (
+ config.allowed_broken_targets_percentage)
- container = utils.get_container_name()
- if container:
- command += ['-e', 'OUT=' + out_dir, '--volumes-from', container]
- else:
- command += ['-v', '%s:/out' % out_dir]
- command.extend(['-t', docker.BASE_RUNNER_TAG, 'test_all.py'])
- exit_code = helper.docker_run(command)
- logging.info('check fuzzer build exit code: %d', exit_code)
- if exit_code:
- logging.error('Check fuzzer build failed.')
- return False
- return True
-
-
-def _get_docker_build_fuzzers_args_container(host_out_dir, container):
- """Returns arguments to the docker build arguments that are needed to use
- |host_out_dir| when the host of the OSS-Fuzz builder container is another
- container."""
- return ['-e', 'OUT=' + host_out_dir, '--volumes-from', container]
+ stdout, stderr, retcode = utils.execute('test_all.py', env=env)
+ print(f'Build check: stdout: {stdout}\nstderr: {stderr}')
+ if retcode == 0:
+ logging.info('Build check passed.')
+ return True
+ logging.error('Build check failed.')
+ return False
-def _get_docker_build_fuzzers_args_not_container(host_out_dir, host_repo_path):
+def _get_docker_build_fuzzers_args_not_container(host_repo_path):
"""Returns arguments to the docker build arguments that are needed to use
- |host_out_dir| when the host of the OSS-Fuzz builder container is not
+ |host_repo_path| when the host of the OSS-Fuzz builder container is not
another container."""
- image_out_dir = '/out'
- return [
- '-e',
- 'OUT=' + image_out_dir,
- '-v',
- '%s:%s' % (host_out_dir, image_out_dir),
- '-v',
- '%s:%s' % (host_repo_path, host_repo_path),
- ]
-
-
-def _get_docker_build_fuzzers_args_msan(work_dir):
- """Returns arguments to the docker build command that are needed to use
- MSAN."""
- # TODO(metzman): MSAN is broken, fix.
- return [
- '-e', 'MSAN_LIBS_PATH={msan_libs_path}'.format(
- msan_libs_path=os.path.join(work_dir, 'msan'))
- ]
+ return ['-v', f'{host_repo_path}:{host_repo_path}']
diff --git a/infra/cifuzz/build_fuzzers_entrypoint.py b/infra/cifuzz/build_fuzzers_entrypoint.py
index 04f562068..e8e368f1b 100644
--- a/infra/cifuzz/build_fuzzers_entrypoint.py
+++ b/infra/cifuzz/build_fuzzers_entrypoint.py
@@ -13,7 +13,6 @@
# limitations under the License.
"""Builds a specific OSS-Fuzz project's fuzzers for CI tools."""
import logging
-import os
import sys
import build_fuzzers
@@ -22,19 +21,34 @@ import config_utils
# pylint: disable=c-extension-no-member
# pylint gets confused because of the relative import of cifuzz.
-# TODO: Turn default logging to INFO when CIFuzz is stable
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.DEBUG)
+def build_fuzzers_entrypoint():
+ """Builds OSS-Fuzz project's fuzzers for CI tools."""
+ config = config_utils.BuildFuzzersConfig()
+
+ if config.dry_run:
+ # Sets the default return code on error to success.
+ returncode = 0
+ else:
+ # The default return code when an error occurs.
+ returncode = 1
+
+ if not build_fuzzers.build_fuzzers(config):
+ logging.error('Error building fuzzers for (commit: %s, pr_ref: %s).',
+ config.commit_sha, config.pr_ref)
+ return returncode
+
+ return 0
+
+
def main():
- """Build OSS-Fuzz project's fuzzers for CI tools.
- This script is used to kick off the Github Actions CI tool. It is the
- entrypoint of the Dockerfile in this directory. This action can be added to
- any OSS-Fuzz project's workflow that uses Github.
+ """Builds OSS-Fuzz project's fuzzers for CI tools.
- Note: The resulting clusterfuzz binaries of this build are placed in
+ Note: The resulting fuzz target binaries of this build are placed in
the directory: ${GITHUB_WORKSPACE}/out
Required environment variables:
@@ -50,44 +64,9 @@ def main():
SANITIZER: The sanitizer to use when running fuzzers.
Returns:
- 0 on success or 1 on failure.
+ 0 on success or nonzero on failure.
"""
- config = config_utils.BuildFuzzersConfig()
-
- if config.dry_run:
- # Sets the default return code on error to success.
- returncode = 0
- else:
- # The default return code when an error occurs.
- returncode = 1
-
- if not config.workspace:
- logging.error('This script needs to be run within Github actions.')
- return returncode
-
- if not build_fuzzers.build_fuzzers(config):
- logging.error(
- 'Error building fuzzers for project %s (commit: %s, pr_ref: %s).',
- config.project_name, config.commit_sha, config.pr_ref)
- return returncode
-
- out_dir = os.path.join(config.workspace, 'out')
-
- if not config.bad_build_check:
- # If we've gotten to this point and we don't need to do bad_build_check,
- # then the build has succeeded.
- returncode = 0
- # yapf: disable
- elif build_fuzzers.check_fuzzer_build(
- out_dir,
- config.sanitizer,
- config.language,
- allowed_broken_targets_percentage=config.allowed_broken_targets_percentage
- ):
- # yapf: enable
- returncode = 0
-
- return returncode
+ return build_fuzzers_entrypoint()
if __name__ == '__main__':
diff --git a/infra/cifuzz/build_fuzzers_test.py b/infra/cifuzz/build_fuzzers_test.py
index 298778867..5c068ac4d 100644
--- a/infra/cifuzz/build_fuzzers_test.py
+++ b/infra/cifuzz/build_fuzzers_test.py
@@ -28,8 +28,8 @@ sys.path.append(INFRA_DIR)
OSS_FUZZ_DIR = os.path.dirname(INFRA_DIR)
import build_fuzzers
-import config_utils
import continuous_integration
+import repo_manager
import test_helpers
# NOTE: This integration test relies on
@@ -53,23 +53,7 @@ EXAMPLE_NOCRASH_FUZZER = 'example_nocrash_fuzzer'
# A fuzzer to be built in build_fuzzers integration tests.
EXAMPLE_BUILD_FUZZER = 'do_stuff_fuzzer'
-# pylint: disable=no-self-use,protected-access,too-few-public-methods
-
-
-def create_config(**kwargs):
- """Creates a config object and then sets every attribute that is a key in
- |kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
- attribute of Config."""
- with mock.patch('os.path.basename', return_value=None), mock.patch(
- 'config_utils.get_project_src_path',
- return_value=None), mock.patch('config_utils._is_dry_run',
- return_value=True):
- config = config_utils.BuildFuzzersConfig()
-
- for key, value in kwargs.items():
- assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
- setattr(config, key, value)
- return config
+# pylint: disable=no-self-use,protected-access,too-few-public-methods,unused-argument
class BuildFuzzersTest(unittest.TestCase):
@@ -79,17 +63,19 @@ class BuildFuzzersTest(unittest.TestCase):
return_value=('example.com', '/path'))
@mock.patch('repo_manager._clone', return_value=None)
@mock.patch('continuous_integration.checkout_specified_commit')
- @mock.patch('helper.docker_run')
- def test_cifuzz_env_var(self, mocked_docker_run, _, __, ___):
+ @mock.patch('helper.docker_run', return_value=False) # We want to quit early.
+ def test_cifuzz_env_var(self, mock_docker_run, _, __, ___):
"""Tests that the CIFUZZ env var is set."""
with tempfile.TemporaryDirectory() as tmp_dir:
build_fuzzers.build_fuzzers(
- create_config(project_name=EXAMPLE_PROJECT,
- project_repo_name=EXAMPLE_PROJECT,
- workspace=tmp_dir,
- pr_ref='refs/pull/1757/merge'))
- docker_run_command = mocked_docker_run.call_args_list[0][0][0]
+ test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ project_repo_name=EXAMPLE_PROJECT,
+ workspace=tmp_dir,
+ pr_ref='refs/pull/1757/merge'))
+
+ docker_run_command = mock_docker_run.call_args_list[0][0][0]
def command_has_env_var_arg(command, env_var_arg):
for idx, element in enumerate(command):
@@ -105,23 +91,25 @@ class BuildFuzzersTest(unittest.TestCase):
class InternalGithubBuildTest(unittest.TestCase):
"""Tests for building OSS-Fuzz projects on GitHub actions."""
- PROJECT_NAME = 'myproject'
PROJECT_REPO_NAME = 'myproject'
SANITIZER = 'address'
COMMIT_SHA = 'fake'
PR_REF = 'fake'
- def _create_builder(self, tmp_dir):
+ def _create_builder(self, tmp_dir, oss_fuzz_project_name='myproject'):
"""Creates an InternalGithubBuilder and returns it."""
- config = create_config(project_name=self.PROJECT_NAME,
- project_repo_name=self.PROJECT_REPO_NAME,
- workspace=tmp_dir,
- sanitizer=self.SANITIZER,
- commit_sha=self.COMMIT_SHA,
- pr_ref=self.PR_REF,
- is_github=True)
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=oss_fuzz_project_name,
+ project_repo_name=self.PROJECT_REPO_NAME,
+ workspace=tmp_dir,
+ sanitizer=self.SANITIZER,
+ commit_sha=self.COMMIT_SHA,
+ pr_ref=self.PR_REF,
+ is_github=True)
ci_system = continuous_integration.get_ci(config)
- return build_fuzzers.Builder(config, ci_system)
+ builder = build_fuzzers.Builder(config, ci_system)
+ builder.repo_manager = repo_manager.RepoManager('/fake')
+ return builder
@mock.patch('repo_manager._clone', side_effect=None)
@mock.patch('continuous_integration.checkout_specified_commit',
@@ -141,6 +129,29 @@ class InternalGithubBuildTest(unittest.TestCase):
self.assertEqual(os.path.basename(builder.host_repo_path),
os.path.basename(image_repo_path))
+ @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build',
+ return_value=True)
+ def test_upload_build_disabled(self, mock_upload_build):
+ """Test upload build (disabled)."""
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ builder = self._create_builder(tmp_dir)
+ builder.upload_build()
+
+ mock_upload_build.assert_not_called()
+
+ @mock.patch('repo_manager.RepoManager.get_current_commit',
+ return_value='commit')
+ @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build',
+ return_value=True)
+ def test_upload_build(self, mock_upload_build, mock_get_current_commit):
+ """Test upload build."""
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ builder = self._create_builder(tmp_dir, oss_fuzz_project_name='')
+ builder.config.upload_build = True
+ builder.upload_build()
+
+ mock_upload_build.assert_called_with('commit')
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
'INTEGRATION_TESTS=1 not set')
@@ -148,36 +159,59 @@ class BuildFuzzersIntegrationTest(unittest.TestCase):
"""Integration tests for build_fuzzers."""
def setUp(self):
- self.tmp_dir_obj = tempfile.TemporaryDirectory()
- self.workspace = self.tmp_dir_obj.name
- self.out_dir = os.path.join(self.workspace, 'out')
+ self.temp_dir_obj = tempfile.TemporaryDirectory()
+ self.workspace = self.temp_dir_obj.name
+ self.out_dir = os.path.join(self.workspace, 'build-out')
test_helpers.patch_environ(self)
+ base_runner_path = os.path.join(INFRA_DIR, 'base-images', 'base-runner')
+ os.environ['PATH'] = os.environ['PATH'] + os.pathsep + base_runner_path
+
def tearDown(self):
- self.tmp_dir_obj.cleanup()
+ self.temp_dir_obj.cleanup()
def test_external_github_project(self):
"""Tests building fuzzers from an external project on Github."""
- project_name = 'external-project'
- build_integration_path = 'fuzzer-build-integration'
+ project_repo_name = 'external-project'
git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git'
# This test is dependant on the state of
# github.com/jonathanmetzman/cifuzz-external-example.
- config = create_config(project_name=project_name,
- project_repo_name=project_name,
- workspace=self.workspace,
- build_integration_path=build_integration_path,
- git_url=git_url,
- commit_sha='HEAD',
- base_commit='HEAD^1')
+ config = test_helpers.create_build_config(
+ project_repo_name=project_repo_name,
+ workspace=self.workspace,
+ git_url=git_url,
+ commit_sha='HEAD',
+ is_github=True,
+ base_commit='HEAD^1')
+ self.assertTrue(build_fuzzers.build_fuzzers(config))
+ self.assertTrue(
+ os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER)))
+
+ def test_external_generic_project(self):
+ """Tests building fuzzers from an external project not on Github."""
+ project_repo_name = 'cifuzz-external-example'
+ git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git'
+    # This test is dependent on the state of
+ # github.com/jonathanmetzman/cifuzz-external-example.
+ manager = repo_manager.clone_repo_and_get_manager(
+ 'https://github.com/jonathanmetzman/cifuzz-external-example',
+ self.temp_dir_obj.name)
+ project_src_path = manager.repo_dir
+ config = test_helpers.create_build_config(
+ project_repo_name=project_repo_name,
+ workspace=self.workspace,
+ git_url=git_url,
+ commit_sha='HEAD',
+ project_src_path=project_src_path,
+ base_commit='HEAD^1')
self.assertTrue(build_fuzzers.build_fuzzers(config))
self.assertTrue(
os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER)))
def test_valid_commit(self):
"""Tests building fuzzers with valid inputs."""
- config = create_config(
- project_name=EXAMPLE_PROJECT,
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
project_repo_name='oss-fuzz',
workspace=self.workspace,
commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523',
@@ -189,31 +223,32 @@ class BuildFuzzersIntegrationTest(unittest.TestCase):
def test_valid_pull_request(self):
"""Tests building fuzzers with valid pull request."""
- # TODO(metzman): What happens when this branch closes?
- config = create_config(project_name=EXAMPLE_PROJECT,
- project_repo_name='oss-fuzz',
- workspace=self.workspace,
- pr_ref='refs/pull/1757/merge',
- base_ref='master',
- is_github=True)
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ project_repo_name='oss-fuzz',
+ workspace=self.workspace,
+ pr_ref='refs/pull/1757/merge',
+ base_ref='master',
+ is_github=True)
self.assertTrue(build_fuzzers.build_fuzzers(config))
self.assertTrue(
os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER)))
def test_invalid_pull_request(self):
"""Tests building fuzzers with invalid pull request."""
- config = create_config(project_name=EXAMPLE_PROJECT,
- project_repo_name='oss-fuzz',
- workspace=self.workspace,
- pr_ref='ref-1/merge',
- base_ref='master',
- is_github=True)
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ project_repo_name='oss-fuzz',
+ workspace=self.workspace,
+ pr_ref='ref-1/merge',
+ base_ref='master',
+ is_github=True)
self.assertTrue(build_fuzzers.build_fuzzers(config))
- def test_invalid_project_name(self):
+ def test_invalid_oss_fuzz_project_name(self):
"""Tests building fuzzers with invalid project name."""
- config = create_config(
- project_name='not_a_valid_project',
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name='not_a_valid_project',
project_repo_name='oss-fuzz',
workspace=self.workspace,
commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523')
@@ -221,8 +256,8 @@ class BuildFuzzersIntegrationTest(unittest.TestCase):
def test_invalid_repo_name(self):
"""Tests building fuzzers with invalid repo name."""
- config = create_config(
- project_name=EXAMPLE_PROJECT,
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
project_repo_name='not-real-repo',
workspace=self.workspace,
commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523')
@@ -230,18 +265,19 @@ class BuildFuzzersIntegrationTest(unittest.TestCase):
def test_invalid_commit_sha(self):
"""Tests building fuzzers with invalid commit SHA."""
- config = create_config(project_name=EXAMPLE_PROJECT,
- project_repo_name='oss-fuzz',
- workspace=self.workspace,
- commit_sha='',
- is_github=True)
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ project_repo_name='oss-fuzz',
+ workspace=self.workspace,
+ commit_sha='',
+ is_github=True)
with self.assertRaises(AssertionError):
build_fuzzers.build_fuzzers(config)
def test_invalid_workspace(self):
"""Tests building fuzzers with invalid workspace."""
- config = create_config(
- project_name=EXAMPLE_PROJECT,
+ config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
project_repo_name='oss-fuzz',
workspace=os.path.join(self.workspace, 'not', 'a', 'dir'),
commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523')
@@ -255,44 +291,47 @@ class CheckFuzzerBuildTest(unittest.TestCase):
LANGUAGE = 'c++'
def setUp(self):
- self.tmp_dir_obj = tempfile.TemporaryDirectory()
- self.test_files_path = os.path.join(self.tmp_dir_obj.name, 'test_files')
- shutil.copytree(TEST_DATA_PATH, self.test_files_path)
+ self.temp_dir_obj = tempfile.TemporaryDirectory()
+ workspace_path = os.path.join(self.temp_dir_obj.name, 'workspace')
+ self.config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ sanitizer=self.SANITIZER,
+ language=self.LANGUAGE,
+ workspace=workspace_path,
+ pr_ref='refs/pull/1757/merge')
+ self.workspace = test_helpers.create_workspace(workspace_path)
+ shutil.copytree(TEST_DATA_PATH, workspace_path)
+ test_helpers.patch_environ(self, runner=True)
def tearDown(self):
- self.tmp_dir_obj.cleanup()
+ self.temp_dir_obj.cleanup()
def test_correct_fuzzer_build(self):
"""Checks check_fuzzer_build function returns True for valid fuzzers."""
- test_fuzzer_dir = os.path.join(self.test_files_path, 'out')
- self.assertTrue(
- build_fuzzers.check_fuzzer_build(test_fuzzer_dir, self.SANITIZER,
- self.LANGUAGE))
-
- def test_not_a_valid_fuzz_path(self):
- """Tests that False is returned when a bad path is given."""
- self.assertFalse(
- build_fuzzers.check_fuzzer_build('not/a/valid/path', self.SANITIZER,
- self.LANGUAGE))
-
- def test_not_a_valid_fuzzer(self):
- """Checks a directory that exists but does not have fuzzers is False."""
- self.assertFalse(
- build_fuzzers.check_fuzzer_build(self.test_files_path, self.SANITIZER,
- self.LANGUAGE))
-
- @mock.patch('helper.docker_run')
- def test_allow_broken_fuzz_targets_percentage(self, mocked_docker_run):
+ self.assertTrue(build_fuzzers.check_fuzzer_build(self.config))
+
+ def test_not_a_valid_path(self):
+ """Tests that False is returned when a nonexistent path is given."""
+ self.config.workspace = 'not/a/valid/path'
+ self.assertFalse(build_fuzzers.check_fuzzer_build(self.config))
+
+ def test_no_valid_fuzzers(self):
+ """Tests that False is returned when an empty directory is given."""
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ self.config.workspace = tmp_dir
+ os.mkdir(os.path.join(self.config.workspace, 'build-out'))
+ self.assertFalse(build_fuzzers.check_fuzzer_build(self.config))
+
+ @mock.patch('utils.execute', return_value=(None, None, 0))
+ def test_allow_broken_fuzz_targets_percentage(self, mock_execute):
"""Tests that ALLOWED_BROKEN_TARGETS_PERCENTAGE is set when running
docker if passed to check_fuzzer_build."""
- mocked_docker_run.return_value = 0
- test_fuzzer_dir = os.path.join(TEST_DATA_PATH, 'out')
- build_fuzzers.check_fuzzer_build(test_fuzzer_dir,
- self.SANITIZER,
- self.LANGUAGE,
- allowed_broken_targets_percentage='0')
- self.assertIn('-e ALLOWED_BROKEN_TARGETS_PERCENTAGE=0',
- ' '.join(mocked_docker_run.call_args[0][0]))
+ percentage = '0'
+ self.config.allowed_broken_targets_percentage = percentage
+ build_fuzzers.check_fuzzer_build(self.config)
+ self.assertEqual(
+ mock_execute.call_args[1]['env']['ALLOWED_BROKEN_TARGETS_PERCENTAGE'],
+ percentage)
@unittest.skip('Test is too long to be run with presubmit.')
@@ -304,11 +343,12 @@ class BuildSantizerIntegrationTest(unittest.TestCase):
@classmethod
def _create_config(cls, tmp_dir, sanitizer):
- return create_config(project_name=cls.PROJECT_NAME,
- project_repo_name=cls.PROJECT_NAME,
- workspace=tmp_dir,
- pr_ref=cls.PR_REF,
- sanitizer=sanitizer)
+ return test_helpers.create_build_config(
+ oss_fuzz_project_name=cls.PROJECT_NAME,
+ project_repo_name=cls.PROJECT_NAME,
+ workspace=tmp_dir,
+ pr_ref=cls.PR_REF,
+ sanitizer=sanitizer)
@parameterized.parameterized.expand([('memory',), ('undefined',)])
def test_valid_project_curl(self, sanitizer):
@@ -318,18 +358,6 @@ class BuildSantizerIntegrationTest(unittest.TestCase):
build_fuzzers.build_fuzzers(self._create_config(tmp_dir, sanitizer)))
-class GetDockerBuildFuzzersArgsContainerTest(unittest.TestCase):
- """Tests that _get_docker_build_fuzzers_args_container works as intended."""
-
- def test_get_docker_build_fuzzers_args_container(self):
- """Tests that _get_docker_build_fuzzers_args_container works as intended."""
- out_dir = '/my/out'
- container = 'my-container'
- result = build_fuzzers._get_docker_build_fuzzers_args_container(
- out_dir, container)
- self.assertEqual(result, ['-e', 'OUT=/my/out', '--volumes-from', container])
-
-
class GetDockerBuildFuzzersArgsNotContainerTest(unittest.TestCase):
"""Tests that _get_docker_build_fuzzers_args_not_container works as
intended."""
@@ -337,25 +365,10 @@ class GetDockerBuildFuzzersArgsNotContainerTest(unittest.TestCase):
def test_get_docker_build_fuzzers_args_no_container(self):
"""Tests that _get_docker_build_fuzzers_args_not_container works
as intended."""
- host_out_dir = '/cifuzz/out'
host_repo_path = '/host/repo'
result = build_fuzzers._get_docker_build_fuzzers_args_not_container(
- host_out_dir, host_repo_path)
- expected_result = [
- '-e', 'OUT=/out', '-v', '/cifuzz/out:/out', '-v',
- '/host/repo:/host/repo'
- ]
- self.assertEqual(result, expected_result)
-
-
-class GetDockerBuildFuzzersArgsMsanTest(unittest.TestCase):
- """Tests that _get_docker_build_fuzzers_args_msan works as intended."""
-
- def test_get_docker_build_fuzzers_args_msan(self):
- """Tests that _get_docker_build_fuzzers_args_msan works as intended."""
- work_dir = '/work_dir'
- result = build_fuzzers._get_docker_build_fuzzers_args_msan(work_dir)
- expected_result = ['-e', 'MSAN_LIBS_PATH=/work_dir/msan']
+ host_repo_path)
+ expected_result = ['-v', '/host/repo:/host/repo']
self.assertEqual(result, expected_result)
diff --git a/infra/cifuzz/cifuzz-base/Dockerfile b/infra/cifuzz/cifuzz-base/Dockerfile
index e0599dbbe..bb1431dd8 100644
--- a/infra/cifuzz/cifuzz-base/Dockerfile
+++ b/infra/cifuzz/cifuzz-base/Dockerfile
@@ -14,19 +14,20 @@
#
################################################################################
-# Don't bother with a slimmer base image.
-# When we pull base-builder to build project builder image we need to pull
-# ubuntu:16.04 anyway. So in the long run we probably would waste time if
-# we pulled something like alpine here instead.
-FROM ubuntu:16.04
+FROM gcr.io/oss-fuzz-base/base-runner
RUN apt-get update && \
- apt-get install ca-certificates wget python3 git-core --no-install-recommends -y && \
- wget https://download.docker.com/linux/ubuntu/dists/xenial/pool/stable/amd64/docker-ce-cli_20.10.5~3-0~ubuntu-xenial_amd64.deb -O /tmp/docker-ce.deb && \
- dpkg -i /tmp/docker-ce.deb && rm /tmp/docker-ce.deb && \
- apt-get remove wget -y --purge
-
+ apt-get install -y systemd && \
+ apt-get install -y --no-install-recommends nodejs npm && \
+ wget https://download.docker.com/linux/ubuntu/dists/focal/pool/stable/amd64/docker-ce-cli_20.10.8~3-0~ubuntu-focal_amd64.deb -O /tmp/docker-ce.deb && \
+ dpkg -i /tmp/docker-ce.deb && rm /tmp/docker-ce.deb
ENV OSS_FUZZ_ROOT=/opt/oss-fuzz
ADD . ${OSS_FUZZ_ROOT}
-RUN rm -rf ${OSS_FUZZ_ROOT}/infra \ No newline at end of file
+RUN python3 -m pip install -r ${OSS_FUZZ_ROOT}/infra/cifuzz/requirements.txt
+RUN npm install ${OSS_FUZZ_ROOT}/infra/cifuzz
+
+# Python file to execute when the docker container starts up.
+# We can't use the env var $OSS_FUZZ_ROOT here. Since it's a constant env var,
+# just expand to '/opt/oss-fuzz'.
+ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/cifuzz_combined_entrypoint.py"]
diff --git a/infra/cifuzz/cifuzz_combined_entrypoint.py b/infra/cifuzz/cifuzz_combined_entrypoint.py
new file mode 100644
index 000000000..008ce1088
--- /dev/null
+++ b/infra/cifuzz/cifuzz_combined_entrypoint.py
@@ -0,0 +1,53 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Builds fuzzers and runs fuzzers. Entrypoint used for external users"""
+import logging
+import sys
+
+import build_fuzzers_entrypoint
+import run_fuzzers_entrypoint
+
+
+def main():
+ """Builds and runs fuzzers for CI tools.
+
+ NOTE: Any crash report will be in the filepath:
+ ${GITHUB_WORKSPACE}/out/testcase
+ This can be used with GitHub's upload-artifact action to surface the logs.
+
+ Required environment variables:
+ OSS_FUZZ_PROJECT_NAME: The name of OSS-Fuzz project.
+ GITHUB_REPOSITORY: The name of the Github repo that called this script.
+ GITHUB_SHA: The commit SHA that triggered this script.
+ GITHUB_EVENT_NAME: The name of the hook event that triggered this script.
+ GITHUB_EVENT_PATH:
+ The path to the file containing the POST payload of the webhook:
+ https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners#filesystems-on-github-hosted-runners
+ GITHUB_WORKSPACE: The shared volume directory where input artifacts are.
+ DRY_RUN: If true, no failures will surface.
+ SANITIZER: The sanitizer to use when running fuzzers.
+ FUZZ_SECONDS: The length of time in seconds that fuzzers are to be run.
+
+ Returns:
+ 0 on success or 1 on failure.
+ """
+ logging.debug("Using cifuzz_combined_entrypoint.")
+ result = build_fuzzers_entrypoint.build_fuzzers_entrypoint()
+ if result != 0:
+ return result
+ return run_fuzzers_entrypoint.run_fuzzers_entrypoint()
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/infra/cifuzz/cifuzz_end_to_end_test.py b/infra/cifuzz/cifuzz_end_to_end_test.py
new file mode 100644
index 000000000..2a4234faf
--- /dev/null
+++ b/infra/cifuzz/cifuzz_end_to_end_test.py
@@ -0,0 +1,46 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""End-to-End tests for CIFuzz."""
+import os
+import unittest
+
+import run_cifuzz
+import test_helpers
+
+CIFUZZ_DIR = os.path.dirname(os.path.abspath(__file__))
+EXTERNAL_PROJECT_PATH = os.path.join(CIFUZZ_DIR, 'test_data',
+ 'external-project')
+
+
+# This test will fail if not run as root because the fuzzer build process
+# creates binaries that only root can write to.
+# Use a separate env var to keep this separate from integration tests which
+# don't have this annoying property.
+@unittest.skipIf(not os.getenv('END_TO_END_TESTS'),
+ 'END_TO_END_TESTS=1 not set')
+class EndToEndTest(unittest.TestCase):
+ """End-to-End tests for CIFuzz."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self, runner=True)
+
+ def test_simple(self):
+ """Simple end-to-end test using run_cifuzz.main()."""
+ os.environ['REPOSITORY'] = 'external-project'
+ os.environ['PROJECT_SRC_PATH'] = EXTERNAL_PROJECT_PATH
+
+ with test_helpers.docker_temp_dir() as temp_dir:
+ os.environ['WORKSPACE'] = temp_dir
+ # TODO(metzman): Verify the crash, affected fuzzers, and other things.
+ self.assertEqual(run_cifuzz.main(), 1)
diff --git a/infra/cifuzz/cloudbuild.yaml b/infra/cifuzz/cloudbuild.yaml
new file mode 100644
index 000000000..6f38ccecd
--- /dev/null
+++ b/infra/cifuzz/cloudbuild.yaml
@@ -0,0 +1,39 @@
+steps:
+- name: 'gcr.io/cloud-builders/docker'
+ args:
+ - build
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-base
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-base:v1
+ - '-f'
+ - infra/cifuzz/cifuzz-base/Dockerfile
+ - .
+- name: 'gcr.io/cloud-builders/docker'
+ args:
+ - build
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-build-fuzzers
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-build-fuzzers:v1
+ - '-f'
+ - infra/build_fuzzers.Dockerfile
+ - infra
+- name: 'gcr.io/cloud-builders/docker'
+ args:
+ - build
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-run-fuzzers
+ - '-t'
+ - gcr.io/oss-fuzz-base/cifuzz-run-fuzzers:v1
+ - '-f'
+ - infra/run_fuzzers.Dockerfile
+ - infra
+images:
+- gcr.io/oss-fuzz-base/cifuzz-base
+- gcr.io/oss-fuzz-base/cifuzz-base:v1
+- gcr.io/oss-fuzz-base/cifuzz-run-fuzzers
+- gcr.io/oss-fuzz-base/cifuzz-run-fuzzers:v1
+- gcr.io/oss-fuzz-base/cifuzz-build-fuzzers
+- gcr.io/oss-fuzz-base/cifuzz-build-fuzzers:v1
+timeout: 1800s
diff --git a/infra/cifuzz/clusterfuzz_deployment.py b/infra/cifuzz/clusterfuzz_deployment.py
index 8c46e9d4e..fdc3738df 100644
--- a/infra/cifuzz/clusterfuzz_deployment.py
+++ b/infra/cifuzz/clusterfuzz_deployment.py
@@ -11,15 +11,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Module for interacting with the "ClusterFuzz deployment."""
+"""Module for interacting with the ClusterFuzz deployment."""
import logging
import os
import sys
-import tempfile
-import time
import urllib.error
import urllib.request
-import zipfile
+
+import config_utils
+import continuous_integration
+import filestore
+import filestore_utils
+import http_utils
+import get_coverage
+import repo_manager
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -29,13 +34,12 @@ import utils
class BaseClusterFuzzDeployment:
"""Base class for ClusterFuzz deployments."""
- CORPUS_DIR_NAME = 'cifuzz-corpus'
- BUILD_DIR_NAME = 'cifuzz-latest-build'
-
- def __init__(self, config):
+ def __init__(self, config, workspace):
self.config = config
+ self.workspace = workspace
+ self.ci_system = continuous_integration.get_ci(config)
- def download_latest_build(self, out_dir):
+ def download_latest_build(self):
"""Downloads the latest build from ClusterFuzz.
Returns:
@@ -43,23 +47,167 @@ class BaseClusterFuzzDeployment:
"""
raise NotImplementedError('Child class must implement method.')
- def download_corpus(self, target_name, out_dir):
- """Downloads the corpus for |target_name| from ClusterFuzz to |out_dir|.
+ def upload_build(self, commit):
+ """Uploads the build with the given commit sha to the filestore."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def download_corpus(self, target_name, corpus_dir):
+ """Downloads the corpus for |target_name| from ClusterFuzz to |corpus_dir|.
Returns:
A path to where the OSS-Fuzz build was stored, or None if it wasn't.
"""
raise NotImplementedError('Child class must implement method.')
+ def upload_crashes(self):
+ """Uploads crashes in |crashes_dir| to filestore."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def upload_corpus(self, target_name, corpus_dir, replace=False): # pylint: disable=no-self-use,unused-argument
+ """Uploads the corpus for |target_name| to filestore."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def upload_coverage(self):
+ """Uploads the coverage report to the filestore."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def get_coverage(self, repo_path):
+ """Returns the project coverage object for the project."""
+ raise NotImplementedError('Child class must implement method.')
+
+
+def _make_empty_dir_if_nonexistent(path):
+ """Makes an empty directory at |path| if it does not exist."""
+ os.makedirs(path, exist_ok=True)
+
class ClusterFuzzLite(BaseClusterFuzzDeployment):
"""Class representing a deployment of ClusterFuzzLite."""
- def download_latest_build(self, out_dir):
- logging.info('download_latest_build not implemented for ClusterFuzzLite.')
+ COVERAGE_NAME = 'latest'
+ LATEST_BUILD_WINDOW = 3
+
+ def __init__(self, config, workspace):
+ super().__init__(config, workspace)
+ self.filestore = filestore_utils.get_filestore(self.config)
+
+ def download_latest_build(self):
+ if os.path.exists(self.workspace.clusterfuzz_build):
+ # This path is necessary because download_latest_build can be called
+      # multiple times. That is the case because it is called only when we need
+ # to see if a bug is novel, i.e. until we want to check a bug is novel we
+ # don't want to waste time calling this, but therefore this method can be
+ # called if multiple bugs are found.
+ return self.workspace.clusterfuzz_build
+
+ repo_dir = self.ci_system.repo_dir()
+ if not repo_dir:
+ raise RuntimeError('Repo checkout does not exist.')
+
+ _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_build)
+ repo = repo_manager.RepoManager(repo_dir)
+
+ # Builds are stored by commit, so try the latest |LATEST_BUILD_WINDOW|
+ # commits before the current.
+ # TODO(ochang): If API usage becomes an issue, this can be optimized by the
+ # filestore accepting a list of filenames to try.
+ for old_commit in repo.get_commit_list('HEAD^',
+ limit=self.LATEST_BUILD_WINDOW):
+ logging.info('Trying to downloading previous build %s.', old_commit)
+ build_name = self._get_build_name(old_commit)
+ try:
+ if self.filestore.download_build(build_name,
+ self.workspace.clusterfuzz_build):
+ logging.info('Done downloading previus build.')
+ return self.workspace.clusterfuzz_build
+
+ logging.info('Build for %s does not exist.', old_commit)
+ except Exception as err: # pylint: disable=broad-except
+ logging.error('Could not download build for %s because of: %s',
+ old_commit, err)
- def download_corpus(self, target_name, out_dir):
- logging.info('download_corpus not implemented for ClusterFuzzLite.')
+ return None
+
+ def download_corpus(self, target_name, corpus_dir):
+ _make_empty_dir_if_nonexistent(corpus_dir)
+ logging.info('Downloading corpus for %s to %s.', target_name, corpus_dir)
+ corpus_name = self._get_corpus_name(target_name)
+ try:
+ self.filestore.download_corpus(corpus_name, corpus_dir)
+ logging.info('Done downloading corpus. Contains %d elements.',
+ len(os.listdir(corpus_dir)))
+ except Exception as err: # pylint: disable=broad-except
+ logging.error('Failed to download corpus for target: %s. Error: %s',
+ target_name, str(err))
+ return corpus_dir
+
+ def _get_build_name(self, name):
+ return f'{self.config.sanitizer}-{name}'
+
+ def _get_corpus_name(self, target_name): # pylint: disable=no-self-use
+ """Returns the name of the corpus artifact."""
+ return target_name
+
+ def _get_crashes_artifact_name(self): # pylint: disable=no-self-use
+ """Returns the name of the crashes artifact."""
+ return 'current'
+
+ def upload_corpus(self, target_name, corpus_dir, replace=False):
+ """Upload the corpus produced by |target_name|."""
+ logging.info('Uploading corpus in %s for %s.', corpus_dir, target_name)
+ name = self._get_corpus_name(target_name)
+ try:
+ self.filestore.upload_corpus(name, corpus_dir, replace=replace)
+ logging.info('Done uploading corpus.')
+ except Exception as error: # pylint: disable=broad-except
+ logging.error('Failed to upload corpus for target: %s. Error: %s.',
+ target_name, error)
+
+ def upload_build(self, commit):
+ """Upload the build produced by CIFuzz as the latest build."""
+ logging.info('Uploading latest build in %s.', self.workspace.out)
+ build_name = self._get_build_name(commit)
+ try:
+ result = self.filestore.upload_build(build_name, self.workspace.out)
+ logging.info('Done uploading latest build.')
+ return result
+ except Exception as error: # pylint: disable=broad-except
+ logging.error('Failed to upload latest build: %s. Error: %s',
+ self.workspace.out, error)
+
+ def upload_crashes(self):
+ """Uploads crashes."""
+ if not os.listdir(self.workspace.artifacts):
+ logging.info('No crashes in %s. Not uploading.', self.workspace.artifacts)
+ return
+
+ crashes_artifact_name = self._get_crashes_artifact_name()
+
+ logging.info('Uploading crashes in %s.', self.workspace.artifacts)
+ try:
+ self.filestore.upload_crashes(crashes_artifact_name,
+ self.workspace.artifacts)
+ logging.info('Done uploading crashes.')
+ except Exception as error: # pylint: disable=broad-except
+ logging.error('Failed to upload crashes. Error: %s', error)
+
+ def upload_coverage(self):
+ """Uploads the coverage report to the filestore."""
+ self.filestore.upload_coverage(self.COVERAGE_NAME,
+ self.workspace.coverage_report)
+
+ def get_coverage(self, repo_path):
+ """Returns the project coverage object for the project."""
+ try:
+ if not self.filestore.download_coverage(
+ self.COVERAGE_NAME, self.workspace.clusterfuzz_coverage):
+ logging.error('Could not download coverage.')
+ return None
+ return get_coverage.FilesystemCoverage(
+ repo_path, self.workspace.clusterfuzz_coverage)
+ except (get_coverage.CoverageError, filestore.FilestoreError):
+ logging.error('Could not get coverage.')
+ return None
class OSSFuzz(BaseClusterFuzzDeployment):
@@ -68,9 +216,6 @@ class OSSFuzz(BaseClusterFuzzDeployment):
# Location of clusterfuzz builds on GCS.
CLUSTERFUZZ_BUILDS = 'clusterfuzz-builds'
- # Format string for the latest version of a project's build.
- VERSION_STRING = '{project_name}-{sanitizer}-latest.version'
-
# Zip file name containing the corpus.
CORPUS_ZIP_NAME = 'public.zip'
@@ -80,139 +225,148 @@ class OSSFuzz(BaseClusterFuzzDeployment):
Returns:
A string with the latest build version or None.
"""
- version_file = self.VERSION_STRING.format(
- project_name=self.config.project_name, sanitizer=self.config.sanitizer)
+ version_file = (
+ f'{self.config.oss_fuzz_project_name}-{self.config.sanitizer}'
+ '-latest.version')
version_url = utils.url_join(utils.GCS_BASE_URL, self.CLUSTERFUZZ_BUILDS,
- self.config.project_name, version_file)
+ self.config.oss_fuzz_project_name,
+ version_file)
try:
response = urllib.request.urlopen(version_url)
except urllib.error.HTTPError:
logging.error('Error getting latest build version for %s from: %s.',
- self.config.project_name, version_url)
+ self.config.oss_fuzz_project_name, version_url)
return None
return response.read().decode()
- def download_latest_build(self, out_dir):
+ def download_latest_build(self):
"""Downloads the latest OSS-Fuzz build from GCS.
Returns:
A path to where the OSS-Fuzz build was stored, or None if it wasn't.
"""
- build_dir = os.path.join(out_dir, self.BUILD_DIR_NAME)
- if os.path.exists(build_dir):
- return build_dir
+ if os.path.exists(self.workspace.clusterfuzz_build):
+ # This function can be called multiple times, don't download the build
+ # again.
+ return self.workspace.clusterfuzz_build
- os.makedirs(build_dir, exist_ok=True)
+ _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_build)
latest_build_name = self.get_latest_build_name()
if not latest_build_name:
return None
+ logging.info('Downloading latest build.')
oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL,
self.CLUSTERFUZZ_BUILDS,
- self.config.project_name,
+ self.config.oss_fuzz_project_name,
latest_build_name)
- if download_and_unpack_zip(oss_fuzz_build_url, build_dir):
- return build_dir
+ if http_utils.download_and_unpack_zip(oss_fuzz_build_url,
+ self.workspace.clusterfuzz_build):
+ logging.info('Done downloading latest build.')
+ return self.workspace.clusterfuzz_build
return None
- def download_corpus(self, target_name, out_dir):
+ def upload_build(self, commit): # pylint: disable=no-self-use
+ """Noop Implementation of upload_build."""
+ logging.info('Not uploading latest build because on OSS-Fuzz.')
+
+ def upload_corpus(self, target_name, corpus_dir, replace=False): # pylint: disable=no-self-use,unused-argument
+ """Noop Implementation of upload_corpus."""
+ logging.info('Not uploading corpus because on OSS-Fuzz.')
+
+ def upload_crashes(self): # pylint: disable=no-self-use
+ """Noop Implementation of upload_crashes."""
+ logging.info('Not uploading crashes because on OSS-Fuzz.')
+
+ def download_corpus(self, target_name, corpus_dir):
"""Downloads the latest OSS-Fuzz corpus for the target.
Returns:
The local path to to corpus or None if download failed.
"""
- corpus_dir = os.path.join(out_dir, self.CORPUS_DIR_NAME, target_name)
- os.makedirs(corpus_dir, exist_ok=True)
- # TODO(metzman): Clean up this code.
+ _make_empty_dir_if_nonexistent(corpus_dir)
project_qualified_fuzz_target_name = target_name
- qualified_name_prefix = self.config.project_name + '_'
-
+ qualified_name_prefix = self.config.oss_fuzz_project_name + '_'
if not target_name.startswith(qualified_name_prefix):
project_qualified_fuzz_target_name = qualified_name_prefix + target_name
- corpus_url = utils.url_join(
- utils.GCS_BASE_URL,
- '{0}-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/'.format(
- self.config.project_name), project_qualified_fuzz_target_name,
- self.CORPUS_ZIP_NAME)
+ corpus_url = (f'{utils.GCS_BASE_URL}{self.config.oss_fuzz_project_name}'
+ '-backup.clusterfuzz-external.appspot.com/corpus/'
+ f'libFuzzer/{project_qualified_fuzz_target_name}/'
+ f'{self.CORPUS_ZIP_NAME}')
- if download_and_unpack_zip(corpus_url, corpus_dir):
- return corpus_dir
-
- return None
+ if not http_utils.download_and_unpack_zip(corpus_url, corpus_dir):
+ logging.warning('Failed to download corpus for %s.', target_name)
+ return corpus_dir
+ def upload_coverage(self):
+ """Noop Implementation of upload_coverage_report."""
+ logging.info('Not uploading coverage report because on OSS-Fuzz.')
-def download_url(url, filename, num_attempts=3):
- """Downloads the file located at |url|, using HTTP to |filename|.
-
- Args:
- url: A url to a file to download.
- filename: The path the file should be downloaded to.
- num_retries: The number of times to retry the download on
- ConnectionResetError.
+ def get_coverage(self, repo_path):
+ """Returns the project coverage object for the project."""
+ try:
+ return get_coverage.OSSFuzzCoverage(repo_path,
+ self.config.oss_fuzz_project_name)
+ except get_coverage.CoverageError:
+ return None
- Returns:
- True on success.
- """
- sleep_time = 1
- # Don't use retry wrapper since we don't want this to raise any exceptions.
- for _ in range(num_attempts):
- try:
- urllib.request.urlretrieve(url, filename)
- return True
- except urllib.error.HTTPError:
- # In these cases, retrying probably wont work since the error probably
- # means there is nothing at the URL to download.
- logging.error('Unable to download from: %s.', url)
- return False
- except ConnectionResetError:
- # These errors are more likely to be transient. Retry.
- pass
- time.sleep(sleep_time)
+class NoClusterFuzzDeployment(BaseClusterFuzzDeployment):
+ """ClusterFuzzDeployment implementation used when there is no deployment of
+ ClusterFuzz to use."""
- logging.error('Failed to download %s, %d times.', url, num_attempts)
+ def upload_build(self, commit): # pylint: disable=no-self-use
+ """Noop Implementation of upload_build."""
+ logging.info('Not uploading latest build because no ClusterFuzz '
+ 'deployment.')
- return False
+ def upload_corpus(self, target_name, corpus_dir, replace=False): # pylint: disable=no-self-use,unused-argument
+ """Noop Implementation of upload_corpus."""
+ logging.info('Not uploading corpus because no ClusterFuzz deployment.')
+ def upload_crashes(self): # pylint: disable=no-self-use
+ """Noop Implementation of upload_crashes."""
+ logging.info('Not uploading crashes because no ClusterFuzz deployment.')
-def download_and_unpack_zip(url, extract_directory):
- """Downloads and unpacks a zip file from an HTTP URL.
+ def download_corpus(self, target_name, corpus_dir):
+ """Noop Implementation of download_corpus."""
+ logging.info('Not downloading corpus because no ClusterFuzz deployment.')
+ return _make_empty_dir_if_nonexistent(corpus_dir)
- Args:
- url: A url to the zip file to be downloaded and unpacked.
- out_dir: The path where the zip file should be extracted to.
+ def download_latest_build(self): # pylint: disable=no-self-use
+ """Noop Implementation of download_latest_build."""
+ logging.info(
+ 'Not downloading latest build because no ClusterFuzz deployment.')
- Returns:
- True on success.
- """
- if not os.path.exists(extract_directory):
- logging.error('Extract directory: %s does not exist.', extract_directory)
- return False
+ def upload_coverage(self):
+ """Noop Implementation of upload_coverage."""
+ logging.info(
+ 'Not uploading coverage report because no ClusterFuzz deployment.')
- # Gives the temporary zip file a unique identifier in the case that
- # that download_and_unpack_zip is done in parallel.
- with tempfile.NamedTemporaryFile(suffix='.zip') as tmp_file:
- if not download_url(url, tmp_file.name):
- return False
+ def get_coverage(self, repo_path):
+ """Noop Implementation of get_coverage."""
+ logging.info(
+ 'Not getting project coverage because no ClusterFuzz deployment.')
- try:
- with zipfile.ZipFile(tmp_file.name, 'r') as zip_file:
- zip_file.extractall(extract_directory)
- except zipfile.BadZipFile:
- logging.error('Error unpacking zip from %s. Bad Zipfile.', url)
- return False
- return True
+_PLATFORM_CLUSTERFUZZ_DEPLOYMENT_MAPPING = {
+ config_utils.BaseConfig.Platform.INTERNAL_GENERIC_CI:
+ OSSFuzz,
+ config_utils.BaseConfig.Platform.INTERNAL_GITHUB:
+ OSSFuzz,
+ config_utils.BaseConfig.Platform.EXTERNAL_GENERIC_CI:
+ NoClusterFuzzDeployment,
+ config_utils.BaseConfig.Platform.EXTERNAL_GITHUB:
+ ClusterFuzzLite,
+}
-def get_clusterfuzz_deployment(config):
+def get_clusterfuzz_deployment(config, workspace):
"""Returns object reprsenting deployment of ClusterFuzz used by |config|."""
- if (config.platform == config.Platform.INTERNAL_GENERIC_CI or
- config.platform == config.Platform.INTERNAL_GITHUB):
- logging.info('Using OSS-Fuzz as ClusterFuzz deployment.')
- return OSSFuzz(config)
- logging.info('Using ClusterFuzzLite as ClusterFuzz deployment.')
- return ClusterFuzzLite(config)
+ deployment_cls = _PLATFORM_CLUSTERFUZZ_DEPLOYMENT_MAPPING[config.platform]
+ result = deployment_cls(config, workspace)
+ logging.info('ClusterFuzzDeployment: %s.', result)
+ return result
diff --git a/infra/cifuzz/clusterfuzz_deployment_test.py b/infra/cifuzz/clusterfuzz_deployment_test.py
index 06ff78476..247678548 100644
--- a/infra/cifuzz/clusterfuzz_deployment_test.py
+++ b/infra/cifuzz/clusterfuzz_deployment_test.py
@@ -16,12 +16,14 @@
import os
import unittest
from unittest import mock
-import urllib.error
+import parameterized
from pyfakefs import fake_filesystem_unittest
import clusterfuzz_deployment
import config_utils
+import test_helpers
+import workspace_utils
# NOTE: This integration test relies on
# https://github.com/google/oss-fuzz/tree/master/projects/example project.
@@ -30,62 +32,60 @@ EXAMPLE_PROJECT = 'example'
# An example fuzzer that triggers an error.
EXAMPLE_FUZZER = 'example_crash_fuzzer'
+WORKSPACE = '/workspace'
+EXPECTED_LATEST_BUILD_PATH = os.path.join(WORKSPACE, 'cifuzz-prev-build')
+
+# pylint: disable=unused-argument
+
def _create_config(**kwargs):
"""Creates a config object and then sets every attribute that is a key in
|kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
attribute of Config."""
- defaults = {'is_github': True, 'project_name': EXAMPLE_PROJECT}
+ defaults = {
+ 'is_github': True,
+ 'oss_fuzz_project_name': EXAMPLE_PROJECT,
+ 'workspace': WORKSPACE,
+ }
for default_key, default_value in defaults.items():
if default_key not in kwargs:
kwargs[default_key] = default_value
- with mock.patch('os.path.basename', return_value=None), mock.patch(
- 'config_utils.get_project_src_path',
- return_value=None), mock.patch('config_utils._is_dry_run',
- return_value=True):
- config = config_utils.RunFuzzersConfig()
-
- for key, value in kwargs.items():
- assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
- setattr(config, key, value)
- return config
+ return test_helpers.create_run_config(**kwargs)
def _create_deployment(**kwargs):
config = _create_config(**kwargs)
- return clusterfuzz_deployment.get_clusterfuzz_deployment(config)
+ workspace = workspace_utils.Workspace(config)
+ return clusterfuzz_deployment.get_clusterfuzz_deployment(config, workspace)
class OSSFuzzTest(fake_filesystem_unittest.TestCase):
"""Tests OSSFuzz."""
- OUT_DIR = '/out'
-
def setUp(self):
self.setUpPyfakefs()
self.deployment = _create_deployment()
+ self.corpus_dir = os.path.join(self.deployment.workspace.corpora,
+ EXAMPLE_FUZZER)
- @mock.patch('clusterfuzz_deployment.download_and_unpack_zip',
- return_value=True)
- def test_download_corpus(self, mocked_download_and_unpack_zip):
+ @mock.patch('http_utils.download_and_unpack_zip', return_value=True)
+ def test_download_corpus(self, mock_download_and_unpack_zip):
"""Tests that we can download a corpus for a valid project."""
- result = self.deployment.download_corpus(EXAMPLE_FUZZER, self.OUT_DIR)
- self.assertIsNotNone(result)
- expected_corpus_dir = os.path.join(self.OUT_DIR, 'cifuzz-corpus',
- EXAMPLE_FUZZER)
+ self.deployment.download_corpus(EXAMPLE_FUZZER, self.corpus_dir)
expected_url = ('https://storage.googleapis.com/example-backup.'
'clusterfuzz-external.appspot.com/corpus/libFuzzer/'
'example_crash_fuzzer/public.zip')
- call_args, _ = mocked_download_and_unpack_zip.call_args
- self.assertEqual(call_args, (expected_url, expected_corpus_dir))
+ call_args, _ = mock_download_and_unpack_zip.call_args
+ self.assertEqual(call_args, (expected_url, self.corpus_dir))
+ self.assertTrue(os.path.exists(self.corpus_dir))
- @mock.patch('clusterfuzz_deployment.download_and_unpack_zip',
- return_value=False)
- def test_download_fail(self, _):
- """Tests that when downloading fails, None is returned."""
- corpus_path = self.deployment.download_corpus(EXAMPLE_FUZZER, self.OUT_DIR)
- self.assertIsNone(corpus_path)
+ @mock.patch('http_utils.download_and_unpack_zip', return_value=False)
+ def test_download_corpus_fail(self, _):
+ """Tests that when downloading fails, an empty corpus directory is still
+ returned."""
+ self.deployment.download_corpus(EXAMPLE_FUZZER, self.corpus_dir)
+ self.assertEqual(os.listdir(self.corpus_dir), [])
def test_get_latest_build_name(self):
"""Tests that the latest build name can be retrieved from GCS."""
@@ -93,58 +93,170 @@ class OSSFuzzTest(fake_filesystem_unittest.TestCase):
self.assertTrue(latest_build_name.endswith('.zip'))
self.assertTrue('address' in latest_build_name)
+ @parameterized.parameterized.expand([
+ ('upload_build', ('commit',),
+ 'Not uploading latest build because on OSS-Fuzz.'),
+ ('upload_corpus', ('target', 'corpus-dir'),
+ 'Not uploading corpus because on OSS-Fuzz.'),
+ ('upload_crashes', tuple(), 'Not uploading crashes because on OSS-Fuzz.'),
+ ])
+ def test_noop_methods(self, method, method_args, expected_message):
+ """Tests that certain methods are noops for OSS-Fuzz."""
+ with mock.patch('logging.info') as mock_info:
+ method = getattr(self.deployment, method)
+ self.assertIsNone(method(*method_args))
+ mock_info.assert_called_with(expected_message)
+
+ @mock.patch('http_utils.download_and_unpack_zip', return_value=True)
+ def test_download_latest_build(self, mock_download_and_unpack_zip):
+ """Tests that downloading the latest build works as intended under normal
+ circumstances."""
+ self.assertEqual(self.deployment.download_latest_build(),
+ EXPECTED_LATEST_BUILD_PATH)
+ expected_url = ('https://storage.googleapis.com/clusterfuzz-builds/example/'
+ 'example-address-202008030600.zip')
+ mock_download_and_unpack_zip.assert_called_with(expected_url,
+ EXPECTED_LATEST_BUILD_PATH)
+
+ @mock.patch('http_utils.download_and_unpack_zip', return_value=False)
+ def test_download_latest_build_fail(self, _):
+ """Tests that download_latest_build returns None when it fails to download a
+ build."""
+ self.assertIsNone(self.deployment.download_latest_build())
+
+
+class ClusterFuzzLiteTest(fake_filesystem_unittest.TestCase):
+ """Tests for ClusterFuzzLite."""
+
+ def setUp(self):
+ self.setUpPyfakefs()
+ self.deployment = _create_deployment(run_fuzzers_mode='batch',
+ oss_fuzz_project_name='',
+ is_github=True)
+ self.corpus_dir = os.path.join(self.deployment.workspace.corpora,
+ EXAMPLE_FUZZER)
+
+ @mock.patch('filestore.github_actions.GithubActionsFilestore.download_corpus',
+ return_value=True)
+ def test_download_corpus(self, mock_download_corpus):
+ """Tests that download_corpus works for a valid project."""
+ self.deployment.download_corpus(EXAMPLE_FUZZER, self.corpus_dir)
+ mock_download_corpus.assert_called_with('example_crash_fuzzer',
+ self.corpus_dir)
+ self.assertTrue(os.path.exists(self.corpus_dir))
+
+ @mock.patch('filestore.github_actions.GithubActionsFilestore.download_corpus',
+ side_effect=Exception)
+ def test_download_corpus_fail(self, _):
+ """Tests that when downloading fails, an empty corpus directory is still
+ returned."""
+ self.deployment.download_corpus(EXAMPLE_FUZZER, self.corpus_dir)
+ self.assertEqual(os.listdir(self.corpus_dir), [])
+
+ @mock.patch('filestore.github_actions.GithubActionsFilestore.download_build',
+ side_effect=[False, True])
+ @mock.patch('repo_manager.RepoManager.get_commit_list',
+ return_value=['commit1', 'commit2'])
+ @mock.patch('continuous_integration.BaseCi.repo_dir',
+ return_value='/path/to/repo')
+ def test_download_latest_build(self, mock_repo_dir, mock_get_commit_list,
+ mock_download_build):
+ """Tests that downloading the latest build works as intended under normal
+ circumstances."""
+ self.assertEqual(self.deployment.download_latest_build(),
+ EXPECTED_LATEST_BUILD_PATH)
+ expected_artifact_name = 'address-commit2'
+ mock_download_build.assert_called_with(expected_artifact_name,
+ EXPECTED_LATEST_BUILD_PATH)
+
+ @mock.patch('filestore.github_actions.GithubActionsFilestore.download_build',
+ side_effect=Exception)
+ @mock.patch('repo_manager.RepoManager.get_commit_list',
+ return_value=['commit1', 'commit2'])
+ @mock.patch('continuous_integration.BaseCi.repo_dir',
+ return_value='/path/to/repo')
+ def test_download_latest_build_fail(self, mock_repo_dir, mock_get_commit_list,
+ _):
+ """Tests that download_latest_build returns None when it fails to download a
+ build."""
+ self.assertIsNone(self.deployment.download_latest_build())
-class DownloadUrlTest(unittest.TestCase):
- """Tests that download_url works."""
- URL = 'example.com/file'
- FILE_PATH = '/tmp/file'
-
- @mock.patch('time.sleep')
- @mock.patch('urllib.request.urlretrieve', return_value=True)
- def test_download_url_no_error(self, mocked_urlretrieve, _):
- """Tests that download_url works when there is no error."""
- self.assertTrue(
- clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
- self.assertEqual(1, mocked_urlretrieve.call_count)
-
- @mock.patch('time.sleep')
- @mock.patch('logging.error')
- @mock.patch('urllib.request.urlretrieve',
- side_effect=urllib.error.HTTPError(None, None, None, None, None))
- def test_download_url_http_error(self, mocked_urlretrieve, mocked_error, _):
- """Tests that download_url doesn't retry when there is an HTTP error."""
- self.assertFalse(
- clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
- mocked_error.assert_called_with('Unable to download from: %s.', self.URL)
- self.assertEqual(1, mocked_urlretrieve.call_count)
-
- @mock.patch('time.sleep')
- @mock.patch('logging.error')
- @mock.patch('urllib.request.urlretrieve', side_effect=ConnectionResetError)
- def test_download_url_connection_error(self, mocked_urlretrieve, mocked_error,
- mocked_sleep):
- """Tests that download_url doesn't retry when there is an HTTP error."""
- self.assertFalse(
- clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
- self.assertEqual(3, mocked_urlretrieve.call_count)
- self.assertEqual(3, mocked_sleep.call_count)
- mocked_error.assert_called_with('Failed to download %s, %d times.',
- self.URL, 3)
-
-
-class DownloadAndUnpackZipTest(fake_filesystem_unittest.TestCase):
- """Tests download_and_unpack_zip."""
+ @mock.patch('filestore.github_actions.GithubActionsFilestore.upload_build')
+ def test_upload_build(self, mock_upload_build):
+ """Tests that upload_build works as intended."""
+ self.deployment.upload_build('commit')
+ mock_upload_build.assert_called_with('address-commit',
+ '/workspace/build-out')
+
+
+class NoClusterFuzzDeploymentTest(fake_filesystem_unittest.TestCase):
+ """Tests for NoClusterFuzzDeployment."""
def setUp(self):
self.setUpPyfakefs()
+ config = test_helpers.create_run_config(workspace=WORKSPACE,
+ is_github=False)
+ workspace = workspace_utils.Workspace(config)
+ self.deployment = clusterfuzz_deployment.get_clusterfuzz_deployment(
+ config, workspace)
+ self.corpus_dir = os.path.join(workspace.corpora, EXAMPLE_FUZZER)
+
+ @mock.patch('logging.info')
+ def test_download_corpus(self, mock_info):
+ """Tests that download corpus returns the path to the empty corpus
+ directory."""
+ self.deployment.download_corpus(EXAMPLE_FUZZER, self.corpus_dir)
+ mock_info.assert_called_with(
+ 'Not downloading corpus because no ClusterFuzz deployment.')
+ self.assertTrue(os.path.exists(self.corpus_dir))
+
+ @parameterized.parameterized.expand([
+ ('upload_build', ('commit',),
+ 'Not uploading latest build because no ClusterFuzz deployment.'),
+ ('upload_corpus', ('target', 'corpus-dir'),
+ 'Not uploading corpus because no ClusterFuzz deployment.'),
+ ('upload_crashes', tuple(),
+ 'Not uploading crashes because no ClusterFuzz deployment.'),
+ ('download_latest_build', tuple(),
+ 'Not downloading latest build because no ClusterFuzz deployment.')
+ ])
+ def test_noop_methods(self, method, method_args, expected_message):
+ """Tests that certain methods are noops for NoClusterFuzzDeployment."""
+ with mock.patch('logging.info') as mock_info:
+ method = getattr(self.deployment, method)
+ self.assertIsNone(method(*method_args))
+ mock_info.assert_called_with(expected_message)
+
+
+class GetClusterFuzzDeploymentTest(unittest.TestCase):
+ """Tests for get_clusterfuzz_deployment."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+ os.environ['GITHUB_REPOSITORY'] = 'owner/myproject'
+
+ @parameterized.parameterized.expand([
+ (config_utils.BaseConfig.Platform.INTERNAL_GENERIC_CI,
+ clusterfuzz_deployment.OSSFuzz),
+ (config_utils.BaseConfig.Platform.INTERNAL_GITHUB,
+ clusterfuzz_deployment.OSSFuzz),
+ (config_utils.BaseConfig.Platform.EXTERNAL_GENERIC_CI,
+ clusterfuzz_deployment.NoClusterFuzzDeployment),
+ (config_utils.BaseConfig.Platform.EXTERNAL_GITHUB,
+ clusterfuzz_deployment.ClusterFuzzLite),
+ ])
+ def test_get_clusterfuzz_deployment(self, platform, expected_deployment_cls):
+ """Tests that get_clusterfuzz_deployment returns the correct value."""
+ with mock.patch('config_utils.BaseConfig.platform',
+ return_value=platform,
+ new_callable=mock.PropertyMock):
+ with mock.patch('filestore_utils.get_filestore', return_value=None):
+ config = _create_config()
+ workspace = workspace_utils.Workspace(config)
- @mock.patch('urllib.request.urlretrieve', return_value=True)
- def test_bad_zip_download(self, _):
- """Tests download_and_unpack_zip returns none when a bad zip is passed."""
- self.fs.create_file('/url_tmp.zip', contents='Test file.')
- self.assertFalse(
- clusterfuzz_deployment.download_and_unpack_zip('/not/a/real/url',
- '/extract-directory'))
+ self.assertIsInstance(
+ clusterfuzz_deployment.get_clusterfuzz_deployment(
+ config, workspace), expected_deployment_cls)
if __name__ == '__main__':
diff --git a/infra/cifuzz/config_utils.py b/infra/cifuzz/config_utils.py
index ad2cd36c6..bc73536bd 100644
--- a/infra/cifuzz/config_utils.py
+++ b/infra/cifuzz/config_utils.py
@@ -16,55 +16,44 @@
import logging
import enum
import os
+import sys
import json
import environment
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-def _get_project_repo_name():
- return os.path.basename(environment.get('GITHUB_REPOSITORY', ''))
+import constants
+
+RUN_FUZZERS_MODES = ['batch', 'ci', 'coverage', 'prune']
+SANITIZERS = ['address', 'memory', 'undefined', 'coverage']
+
+# TODO(metzman): Set these on config objects so there's one source of truth.
+DEFAULT_ENGINE = 'libfuzzer'
+DEFAULT_ARCHITECTURE = 'x86_64'
+
+# This module deals a lot with env variables. Many of these will be set by users
+# and others beyond CIFuzz's control. Thus, you should be careful about using
+# the environment.py helpers for getting env vars, since it can cause values
+# that should be interpreted as strings to be returned as other types (bools or
+# ints for example). The environment.py helpers should not be used for values
+# that are supposed to be strings.
def _get_pr_ref(event):
if event == 'pull_request':
- return environment.get('GITHUB_REF')
+ return os.getenv('GITHUB_REF')
return None
def _get_sanitizer():
- return os.getenv('SANITIZER', 'address').lower()
-
-
-def _get_project_name():
- # TODO(metzman): Remove OSS-Fuzz reference.
- return os.getenv('OSS_FUZZ_PROJECT_NAME')
+ return os.getenv('SANITIZER', constants.DEFAULT_SANITIZER).lower()
def _is_dry_run():
"""Returns True if configured to do a dry run."""
- return environment.get_bool('DRY_RUN', 'false')
-
-
-def get_project_src_path(workspace):
- """Returns the manually checked out path of the project's source if specified
- or None."""
- # TODO(metzman): Get rid of MANUAL_SRC_PATH when Skia switches to
- # PROJECT_SRC_PATH.
- path = os.getenv('PROJECT_SRC_PATH', os.getenv('MANUAL_SRC_PATH'))
- if not path:
- logging.debug('No PROJECT_SRC_PATH.')
- return path
-
- logging.debug('PROJECT_SRC_PATH set.')
- if os.path.isabs(path):
- return path
-
- # If |src| is not absolute, assume we are running in GitHub actions.
- # TODO(metzman): Don't make this assumption.
- return os.path.join(workspace, path)
-
-
-DEFAULT_LANGUAGE = 'c++'
+ return environment.get_bool('DRY_RUN', False)
def _get_language():
@@ -74,12 +63,114 @@ def _get_language():
# getting it from the project.yaml) is outweighed by the complexity in
# implementing this. A lot of the complexity comes from our unittests not
# setting a proper projet at this point.
- return os.getenv('LANGUAGE', DEFAULT_LANGUAGE)
+ return os.getenv('LANGUAGE', constants.DEFAULT_LANGUAGE)
# pylint: disable=too-few-public-methods,too-many-instance-attributes
+class BaseCiEnvironment:
+ """Base class for CiEnvironment subclasses."""
+
+ @property
+ def workspace(self):
+ """Returns the workspace."""
+    raise NotImplementedError('Child class must implement method.')
+
+ @property
+ def git_sha(self):
+ """Returns the Git SHA to diff against."""
+    raise NotImplementedError('Child class must implement method.')
+
+ @property
+ def token(self):
+ """Returns the CI API token."""
+    raise NotImplementedError('Child class must implement method.')
+
+ @property
+ def project_src_path(self):
+ """Returns the manually checked out path of the project's source if
+ specified or None."""
+
+ path = os.getenv('PROJECT_SRC_PATH')
+ if not path:
+ logging.debug('No PROJECT_SRC_PATH.')
+ return path
+
+ logging.debug('PROJECT_SRC_PATH set: %s.', path)
+ return path
+
+
+class GenericCiEnvironment(BaseCiEnvironment):
+ """CI Environment for generic CI systems."""
+
+ @property
+ def workspace(self):
+ """Returns the workspace."""
+ return os.getenv('WORKSPACE')
+
+ @property
+ def git_sha(self):
+ """Returns the Git SHA to diff against."""
+ return os.getenv('GIT_SHA')
+
+ @property
+ def token(self):
+ """Returns the CI API token."""
+ return os.getenv('TOKEN')
+
+ @property
+ def project_repo_owner_and_name(self):
+ """Returns a tuple containing the project repo owner and None."""
+ repository = os.getenv('REPOSITORY')
+ # Repo owner is a githubism.
+ return None, repository
+
+
+class GithubEnvironment(BaseCiEnvironment):
+ """CI environment for GitHub."""
+
+ @property
+ def workspace(self):
+ """Returns the workspace."""
+ return os.getenv('GITHUB_WORKSPACE')
+
+ @property
+ def git_sha(self):
+ """Returns the Git SHA to diff against."""
+ return os.getenv('GITHUB_SHA')
+
+ @property
+ def token(self):
+ """Returns the CI API token."""
+ return os.getenv('GITHUB_TOKEN')
+
+ @property
+ def project_src_path(self):
+ """Returns the manually checked out path of the project's source if
+ specified or None. The path returned is relative to |self.workspace| since
+ on github the checkout will be relative to there."""
+ # On GitHub, they don't know the absolute path, it is relative to
+ # |workspace|.
+ project_src_path = super().project_src_path
+ if project_src_path is None:
+ return project_src_path
+ return os.path.join(self.workspace, project_src_path)
+
+ @property
+ def project_repo_owner_and_name(self):
+ """Returns a tuple containing the project repo owner and the name of the
+ repo."""
+ # On GitHub this includes owner and repo name.
+ repository = os.getenv('GITHUB_REPOSITORY')
+ # Use os.path.split to split owner from repo.
+ return os.path.split(repository)
+
+
+class ConfigError(Exception):
+ """Error for invalid configuration."""
+
+
class BaseConfig:
"""Object containing constant configuration for CIFuzz."""
@@ -88,66 +179,144 @@ class BaseConfig:
EXTERNAL_GITHUB = 0 # Non-OSS-Fuzz on GitHub actions.
INTERNAL_GITHUB = 1 # OSS-Fuzz on GitHub actions.
INTERNAL_GENERIC_CI = 2 # OSS-Fuzz on any CI.
+ EXTERNAL_GENERIC_CI = 3 # Non-OSS-Fuzz on any CI.
def __init__(self):
- self.workspace = os.getenv('GITHUB_WORKSPACE')
- self.project_name = _get_project_name()
+ # Need to set these before calling self.platform.
+ self._github_event_path = os.getenv('GITHUB_EVENT_PATH')
+ self.is_github = bool(self._github_event_path)
+ logging.debug('Is github: %s.', self.is_github)
+ self.oss_fuzz_project_name = os.getenv('OSS_FUZZ_PROJECT_NAME')
+
+ self._ci_env = _get_ci_environment(self.platform)
+ self.workspace = self._ci_env.workspace
+
+ self.project_repo_owner, self.project_repo_name = (
+ self._ci_env.project_repo_owner_and_name)
+
# Check if failures should not be reported.
self.dry_run = _is_dry_run()
+
self.sanitizer = _get_sanitizer()
- self.build_integration_path = os.getenv('BUILD_INTEGRATION_PATH')
+
+ self.build_integration_path = (
+ constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH)
self.language = _get_language()
- event_path = os.getenv('GITHUB_EVENT_PATH')
- self.is_github = bool(event_path)
- logging.debug('Is github: %s.', self.is_github)
- # TODO(metzman): Parse env like we do in ClusterFuzz.
- self.low_disk_space = environment.get('LOW_DISK_SPACE', False)
+ self.low_disk_space = environment.get_bool('LOW_DISK_SPACE', False)
+
+ self.token = self._ci_env.token
+ self.git_store_repo = os.environ.get('GIT_STORE_REPO')
+ self.git_store_branch = os.environ.get('GIT_STORE_BRANCH')
+ self.git_store_branch_coverage = os.environ.get('GIT_STORE_BRANCH_COVERAGE',
+ self.git_store_branch)
+ self.docker_in_docker = os.environ.get('DOCKER_IN_DOCKER')
+
+ # TODO(metzman): Fix tests to create valid configurations and get rid of
+ # CIFUZZ_TEST here and in presubmit.py.
+ if not os.getenv('CIFUZZ_TEST') and not self.validate():
+ raise ConfigError('Invalid Configuration.')
+
+ def validate(self):
+ """Returns False if the configuration is invalid."""
+ # Do validation here so that unittests don't need to make a fully-valid
+ # config.
+ if not self.workspace:
+ logging.error('Must set WORKSPACE.')
+ return False
+
+ if self.sanitizer not in SANITIZERS:
+ logging.error('Invalid SANITIZER: %s. Must be one of: %s.',
+ self.sanitizer, SANITIZERS)
+ return False
+
+ if self.language not in constants.LANGUAGES:
+ logging.error('Invalid LANGUAGE: %s. Must be one of: %s.', self.language,
+ constants.LANGUAGES)
+ return False
+
+ return True
@property
def is_internal(self):
"""Returns True if this is an OSS-Fuzz project."""
- return not self.build_integration_path
+ return bool(self.oss_fuzz_project_name)
@property
def platform(self):
"""Returns the platform CIFuzz is runnning on."""
if not self.is_internal:
+ if not self.is_github:
+ return self.Platform.EXTERNAL_GENERIC_CI
return self.Platform.EXTERNAL_GITHUB
+
if self.is_github:
return self.Platform.INTERNAL_GITHUB
return self.Platform.INTERNAL_GENERIC_CI
+ @property
+ def is_coverage(self):
+ """Returns True if this CIFuzz run (building fuzzers and running them) for
+ generating a coverage report."""
+ return self.sanitizer == 'coverage'
+
+
+_CI_ENVIRONMENT_MAPPING = {
+ BaseConfig.Platform.EXTERNAL_GITHUB: GithubEnvironment,
+ BaseConfig.Platform.INTERNAL_GITHUB: GithubEnvironment,
+ BaseConfig.Platform.INTERNAL_GENERIC_CI: GenericCiEnvironment,
+ BaseConfig.Platform.EXTERNAL_GENERIC_CI: GenericCiEnvironment,
+}
+
+
+def _get_ci_environment(platform):
+ """Returns the CI environment object for |platform|."""
+ return _CI_ENVIRONMENT_MAPPING[platform]()
+
class RunFuzzersConfig(BaseConfig):
"""Class containing constant configuration for running fuzzers in CIFuzz."""
- RUN_FUZZERS_MODES = {'batch', 'ci'}
-
def __init__(self):
super().__init__()
+ # TODO(metzman): Pick a better default for pruning.
self.fuzz_seconds = int(os.environ.get('FUZZ_SECONDS', 600))
self.run_fuzzers_mode = os.environ.get('RUN_FUZZERS_MODE', 'ci').lower()
- if self.run_fuzzers_mode not in self.RUN_FUZZERS_MODES:
- raise Exception(
- ('Invalid RUN_FUZZERS_MODE %s not one of allowed choices: %s.' %
- self.run_fuzzers_mode, self.RUN_FUZZERS_MODES))
+ if self.is_coverage:
+ self.run_fuzzers_mode = 'coverage'
+
+ self.report_unreproducible_crashes = environment.get_bool(
+ 'REPORT_UNREPRODUCIBLE_CRASHES', False)
+
+ # TODO(metzman): Fix tests to create valid configurations and get rid of
+ # CIFUZZ_TEST here and in presubmit.py.
+ if not os.getenv('CIFUZZ_TEST') and not self._run_config_validate():
+ raise ConfigError('Invalid Run Configuration.')
+
+ def _run_config_validate(self):
+ """Do extra validation on RunFuzzersConfig.__init__(). Do not name this
+ validate or else it will be called when using the parent's __init__ and will
+ fail. Returns True if valid."""
+ if self.run_fuzzers_mode not in RUN_FUZZERS_MODES:
+ logging.error('Invalid RUN_FUZZERS_MODE: %s. Must be one of %s.',
+ self.run_fuzzers_mode, RUN_FUZZERS_MODES)
+ return False
+
+ return True
class BuildFuzzersConfig(BaseConfig):
"""Class containing constant configuration for building fuzzers in CIFuzz."""
def _get_config_from_event_path(self, event):
- event_path = os.getenv('GITHUB_EVENT_PATH')
- if not event_path:
+ if not self._github_event_path:
return
- with open(event_path, encoding='utf-8') as file_handle:
+ with open(self._github_event_path, encoding='utf-8') as file_handle:
event_data = json.load(file_handle)
if event == 'push':
self.base_commit = event_data['before']
logging.debug('base_commit: %s', self.base_commit)
- else:
- self.pr_ref = 'refs/pull/{0}/merge'.format(
- event_data['pull_request']['number'])
+ elif event == 'pull_request':
+ self.pr_ref = f'refs/pull/{event_data["pull_request"]["number"]}/merge'
logging.debug('pr_ref: %s', self.pr_ref)
self.git_url = event_data['repository']['html_url']
@@ -155,11 +324,8 @@ class BuildFuzzersConfig(BaseConfig):
def __init__(self):
"""Get the configuration from CIFuzz from the environment. These variables
are set by GitHub or the user."""
- # TODO(metzman): Some of this config is very CI-specific. Move it into the
- # CI class.
super().__init__()
- self.project_repo_name = _get_project_repo_name()
- self.commit_sha = os.getenv('GITHUB_SHA')
+ self.commit_sha = self._ci_env.git_sha
event = os.getenv('GITHUB_EVENT_NAME')
self.pr_ref = None
@@ -168,13 +334,21 @@ class BuildFuzzersConfig(BaseConfig):
self._get_config_from_event_path(event)
self.base_ref = os.getenv('GITHUB_BASE_REF')
- self.project_src_path = get_project_src_path(self.workspace)
+ self.project_src_path = self._ci_env.project_src_path
self.allowed_broken_targets_percentage = os.getenv(
'ALLOWED_BROKEN_TARGETS_PERCENTAGE')
- self.bad_build_check = environment.get_bool('BAD_BUILD_CHECK', 'true')
-
- # TODO(metzman): Use better system for interpreting env vars. What if env
- # var is set to '0'?
- self.keep_unaffected_fuzz_targets = bool(
- os.getenv('KEEP_UNAFFECTED_FUZZERS'))
+ self.bad_build_check = environment.get_bool('BAD_BUILD_CHECK', True)
+ # pylint: disable=consider-using-ternary
+ self.keep_unaffected_fuzz_targets = (
+ # Not from a commit or PR.
+ (not self.base_ref and not self.base_commit) or
+ environment.get_bool('KEEP_UNAFFECTED_FUZZERS'))
+ self.upload_build = environment.get_bool('UPLOAD_BUILD', False)
+ if self.upload_build:
+ logging.info('Keeping all fuzzers because we are uploading build.')
+ self.keep_unaffected_fuzz_targets = True
+
+ if self.sanitizer == 'coverage':
+ self.keep_unaffected_fuzz_targets = True
+ self.bad_build_check = False
diff --git a/infra/cifuzz/config_utils_test.py b/infra/cifuzz/config_utils_test.py
index 6f87bd4c5..32499bfd0 100644
--- a/infra/cifuzz/config_utils_test.py
+++ b/infra/cifuzz/config_utils_test.py
@@ -14,11 +14,13 @@
"""Module for getting the configuration CIFuzz needs to run."""
import os
import unittest
+from unittest import mock
import config_utils
+import constants
import test_helpers
-# pylint: disable=no-self-use
+# pylint: disable=no-self-use,protected-access
class BaseConfigTest(unittest.TestCase):
@@ -32,18 +34,66 @@ class BaseConfigTest(unittest.TestCase):
def test_language_default(self):
"""Tests that the correct default language is set."""
- os.environ['BUILD_INTEGRATION_PATH'] = '/path'
config = self._create_config()
self.assertEqual(config.language, 'c++')
def test_language(self):
"""Tests that the correct language is set."""
- os.environ['BUILD_INTEGRATION_PATH'] = '/path'
language = 'python'
os.environ['LANGUAGE'] = language
config = self._create_config()
self.assertEqual(config.language, language)
+ def test_is_coverage(self):
+ """Tests that is_coverage is set correctly."""
+ # Test it is set when it is supposed to be.
+ os.environ['SANITIZER'] = 'coverage'
+ config = self._create_config()
+ self.assertTrue(config.is_coverage)
+
+ # Test it is not set when it is not supposed to be.
+ os.environ['SANITIZER'] = 'address'
+ config = self._create_config()
+ self.assertFalse(config.is_coverage)
+
+ @mock.patch('logging.error')
+ def test_validate_no_workspace(self, mock_error):
+ """Tests that validate returns False if GITHUB_WORKSPACE isn't set."""
+ os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example'
+ config = self._create_config()
+ self.assertFalse(config.validate())
+ mock_error.assert_called_with('Must set WORKSPACE.')
+
+ @mock.patch('logging.error')
+ def test_validate_invalid_language(self, mock_error):
+ """Tests that validate returns False if GITHUB_WORKSPACE isn't set."""
+ os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example'
+ os.environ['WORKSPACE'] = '/workspace'
+ os.environ['LANGUAGE'] = 'invalid-language'
+ config = self._create_config()
+ self.assertFalse(config.validate())
+ mock_error.assert_called_with('Invalid LANGUAGE: %s. Must be one of: %s.',
+ os.environ['LANGUAGE'], constants.LANGUAGES)
+
+ @mock.patch('logging.error')
+ def test_validate_invalid_sanitizer(self, mock_error):
+ """Tests that validate returns False if GITHUB_WORKSPACE isn't set."""
+ os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example'
+ os.environ['WORKSPACE'] = '/workspace'
+ os.environ['SANITIZER'] = 'invalid-sanitizer'
+ config = self._create_config()
+ self.assertFalse(config.validate())
+ mock_error.assert_called_with('Invalid SANITIZER: %s. Must be one of: %s.',
+ os.environ['SANITIZER'],
+ config_utils.SANITIZERS)
+
+ def test_validate(self):
+ """Tests that validate returns True if config is valid."""
+ os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example'
+ os.environ['WORKSPACE'] = '/workspace'
+ config = self._create_config()
+ self.assertTrue(config.validate())
+
class BuildFuzzersConfigTest(unittest.TestCase):
"""Tests for BuildFuzzersConfig."""
@@ -61,11 +111,151 @@ class BuildFuzzersConfigTest(unittest.TestCase):
config = self._create_config()
self.assertEqual(config.base_ref, expected_base_ref)
- def test_keep_unaffected_defaults_to_false(self):
- """Tests that keep_unaffected_fuzz_targets defaults to false."""
+ def test_keep_unaffected_defaults_to_true(self):
+ """Tests that keep_unaffected_fuzz_targets defaults to true."""
+ config = self._create_config()
+ self.assertTrue(config.keep_unaffected_fuzz_targets)
+
+ def test_keep_unaffected_defaults_to_false_when_pr(self):
+    """Tests that keep_unaffected_fuzz_targets defaults to false when
+    triggered by a PR."""
+ os.environ['GITHUB_BASE_REF'] = 'base-ref'
config = self._create_config()
self.assertFalse(config.keep_unaffected_fuzz_targets)
+class RunFuzzersConfigTest(unittest.TestCase):
+ """Tests for RunFuzzersConfig."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+
+ def _create_config(self):
+ return config_utils.RunFuzzersConfig()
+
+ def test_coverage(self):
+    """Tests that run_fuzzers_mode is overridden properly based on
+    is_coverage."""
+    # Test that it is overridden when it is supposed to be.
+ os.environ['SANITIZER'] = 'coverage'
+ os.environ['RUN_FUZZERS_MODE'] = 'ci'
+ config = self._create_config()
+ self.assertEqual(config.run_fuzzers_mode, 'coverage')
+
+    # Test that it isn't overridden when it isn't supposed to be.
+ os.environ['SANITIZER'] = 'address'
+ run_fuzzers_mode = 'ci'
+ os.environ['RUN_FUZZERS_MODE'] = run_fuzzers_mode
+ config = self._create_config()
+ self.assertEqual(config.run_fuzzers_mode, run_fuzzers_mode)
+
+ def test_run_config_validate(self):
+ """Tests that _run_config_validate returns True when the config is valid."""
+ self.assertTrue(self._create_config()._run_config_validate())
+
+ @mock.patch('logging.error')
+ def test_run_config_invalid_mode(self, mock_error):
+ """Tests that _run_config_validate returns False when run_fuzzers_mode is
+ invalid."""
+ fake_mode = 'fake-mode'
+ os.environ['RUN_FUZZERS_MODE'] = fake_mode
+ self.assertFalse(self._create_config()._run_config_validate())
+ mock_error.assert_called_with(
+ 'Invalid RUN_FUZZERS_MODE: %s. Must be one of %s.', fake_mode,
+ config_utils.RUN_FUZZERS_MODES)
+
+
+class GetProjectRepoOwnerAndNameTest(unittest.TestCase):
+ """Tests for BaseCiEnv.get_project_repo_owner_and_name."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+ self.repo_owner = 'repo-owner'
+ self.repo_name = 'repo-name'
+ self.github_env = config_utils.GithubEnvironment()
+ self.generic_ci_env = config_utils.GenericCiEnvironment()
+
+ def test_unset_repository(self):
+ """Tests that the correct result is returned when repository is not set."""
+ self.assertEqual(self.generic_ci_env.project_repo_owner_and_name,
+ (None, None))
+
+ def test_empty_repository(self):
+ """Tests that the correct result is returned when repository is an empty
+ string."""
+ os.environ['REPOSITORY'] = ''
+ self.assertEqual(self.generic_ci_env.project_repo_owner_and_name,
+ (None, ''))
+
+ def test_github_repository(self):
+ """Tests that the correct result is returned when repository contains the
+ owner and repo name (as it does on GitHub)."""
+ os.environ['GITHUB_REPOSITORY'] = f'{self.repo_owner}/{self.repo_name}'
+ self.assertEqual(self.github_env.project_repo_owner_and_name,
+ (self.repo_owner, self.repo_name))
+
+ def test_nongithub_repository(self):
+    """Tests that the correct result is returned when repository contains
+ just the repo name (as it does outside of GitHub)."""
+ os.environ['REPOSITORY'] = self.repo_name
+ self.assertEqual(self.generic_ci_env.project_repo_owner_and_name,
+ (None, self.repo_name))
+
+
+class GetSanitizerTest(unittest.TestCase):
+ """Tests for _get_sanitizer."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+ self.sanitizer = 'memory'
+
+ def test_default_value(self):
+ """Tests that the default value returned by _get_sanitizer is correct."""
+ self.assertEqual(config_utils._get_sanitizer(), 'address')
+
+ def test_normal_case(self):
+ """Tests that _get_sanitizer returns the correct value in normal cases."""
+ os.environ['SANITIZER'] = self.sanitizer
+ self.assertEqual(config_utils._get_sanitizer(), self.sanitizer)
+
+ def test_capitalization(self):
+    """Tests that _get_sanitizer handles capitalization properly."""
+ os.environ['SANITIZER'] = self.sanitizer.upper()
+ self.assertEqual(config_utils._get_sanitizer(), self.sanitizer)
+
+
+class ProjectSrcPathTest(unittest.TestCase):
+ """Tests for project_src_path."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+ self.workspace = '/workspace'
+ os.environ['GITHUB_WORKSPACE'] = self.workspace
+
+ self.project_src_dir_name = 'project-src'
+
+ def test_unset(self):
+ """Tests that project_src_path returns None when no PROJECT_SRC_PATH is
+ set."""
+ github_env = config_utils.GithubEnvironment()
+ self.assertIsNone(github_env.project_src_path)
+
+ def test_github(self):
+ """Tests that project_src_path returns the correct result on GitHub."""
+ os.environ['PROJECT_SRC_PATH'] = self.project_src_dir_name
+ expected_project_src_path = os.path.join(self.workspace,
+ self.project_src_dir_name)
+ github_env = config_utils.GithubEnvironment()
+ self.assertEqual(github_env.project_src_path, expected_project_src_path)
+
+ def test_not_github(self):
+    """Tests that project_src_path returns the correct result when not on
+ GitHub."""
+ project_src_path = os.path.join('/', self.project_src_dir_name)
+ os.environ['PROJECT_SRC_PATH'] = project_src_path
+ generic_ci_env = config_utils.GenericCiEnvironment()
+ self.assertEqual(generic_ci_env.project_src_path, project_src_path)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/infra/cifuzz/continuous_integration.py b/infra/cifuzz/continuous_integration.py
index b2e8af28e..47c4a7cbf 100644
--- a/infra/cifuzz/continuous_integration.py
+++ b/infra/cifuzz/continuous_integration.py
@@ -21,16 +21,21 @@ import logging
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import build_specified_commit
+import docker
import helper
import repo_manager
import retry
import utils
+import workspace_utils
# pylint: disable=too-few-public-methods
BuildPreparationResult = collections.namedtuple(
'BuildPreparationResult', ['success', 'image_repo_path', 'repo_manager'])
+_IMAGE_BUILD_TRIES = 3
+_IMAGE_BUILD_BACKOFF = 2
+
def fix_git_repo_for_diff(repo_manager_obj):
"""Fixes git repos cloned by the "checkout" action so that diffing works on
@@ -47,16 +52,34 @@ class BaseCi:
def __init__(self, config):
self.config = config
+ self.workspace = workspace_utils.Workspace(config)
+
+ def repo_dir(self):
+ """Returns the source repo path, if it has been checked out. None is
+ returned otherwise."""
+ if not os.path.exists(self.workspace.repo_storage):
+ return None
+
+ # Note: this assumes there is only one repo checked out here.
+ listing = os.listdir(self.workspace.repo_storage)
+ if len(listing) != 1:
+ raise RuntimeError('Invalid repo storage.')
+
+ repo_path = os.path.join(self.workspace.repo_storage, listing[0])
+ if not os.path.isdir(repo_path):
+ raise RuntimeError('Repo is not a directory.')
+
+ return repo_path
def prepare_for_fuzzer_build(self):
"""Builds the fuzzer builder image and gets the source code we need to
fuzz."""
- raise NotImplementedError('Children must implement this method.')
+ raise NotImplementedError('Child class must implement method.')
def get_diff_base(self):
"""Returns the base to diff against with git to get the change under
test."""
- raise NotImplementedError('Children must implement this method.')
+ raise NotImplementedError('Child class must implement method.')
def get_changed_code_under_test(self, repo_manager_obj):
"""Returns the changed files that need to be tested."""
@@ -65,10 +88,38 @@ class BaseCi:
logging.info('Diffing against %s.', base)
return repo_manager_obj.get_git_diff(base)
+ def get_build_command(self, host_repo_path, image_repo_path):
+ """Returns the command for building the project that is run inside the
+ project builder container."""
+ raise NotImplementedError('Child class must implement method.')
+
+
+def get_build_command():
+ """Returns the command to build the project inside the project builder
+ container."""
+ return 'compile'
+
+
+def get_replace_repo_and_build_command(host_repo_path, image_repo_path):
+ """Returns the command to replace the repo located at |image_repo_path| with
+ |host_repo_path| and build the project inside the project builder
+ container."""
+ rm_path = os.path.join(image_repo_path, '*')
+ image_src_path = os.path.dirname(image_repo_path)
+ build_command = get_build_command()
+ command = (f'cd / && rm -rf {rm_path} && cp -r {host_repo_path} '
+ f'{image_src_path} && cd - && {build_command}')
+ return command
+
def get_ci(config):
"""Determines what kind of CI is being used and returns the object
representing that system."""
+
+ if config.platform == config.Platform.EXTERNAL_GENERIC_CI:
+ # Non-OSS-Fuzz projects must bring their own source and their own build
+ # integration (which is relative to that source).
+ return ExternalGeneric(config)
if config.platform == config.Platform.EXTERNAL_GITHUB:
# Non-OSS-Fuzz projects must bring their own source and their own build
# integration (which is relative to that source).
@@ -127,27 +178,35 @@ class InternalGithub(GithubCiMixin, BaseCi):
assert self.config.pr_ref or self.config.commit_sha
# detect_main_repo builds the image as a side effect.
inferred_url, image_repo_path = (build_specified_commit.detect_main_repo(
- self.config.project_name, repo_name=self.config.project_repo_name))
+ self.config.oss_fuzz_project_name,
+ repo_name=self.config.project_repo_name))
if not inferred_url or not image_repo_path:
- logging.error('Could not detect repo from project %s.',
- self.config.project_name)
- return BuildPreparationResult(False, None, None)
+ logging.error('Could not detect repo.')
+ return BuildPreparationResult(success=False,
+ image_repo_path=None,
+ repo_manager=None)
- git_workspace = os.path.join(self.config.workspace, 'storage')
- os.makedirs(git_workspace, exist_ok=True)
+ os.makedirs(self.workspace.repo_storage, exist_ok=True)
# Use the same name used in the docker image so we can overwrite it.
image_repo_name = os.path.basename(image_repo_path)
# Checkout project's repo in the shared volume.
- manager = repo_manager.clone_repo_and_get_manager(inferred_url,
- git_workspace,
- repo_name=image_repo_name)
+ manager = repo_manager.clone_repo_and_get_manager(
+ inferred_url, self.workspace.repo_storage, repo_name=image_repo_name)
checkout_specified_commit(manager, self.config.pr_ref,
self.config.commit_sha)
- return BuildPreparationResult(True, image_repo_path, manager)
+ return BuildPreparationResult(success=True,
+ image_repo_path=image_repo_path,
+ repo_manager=manager)
+
+ def get_build_command(self, host_repo_path, image_repo_path): # pylint: disable=no-self-use
+ """Returns the command for building the project that is run inside the
+ project builder container. Command also replaces |image_repo_path| with
+ |host_repo_path|."""
+ return get_replace_repo_and_build_command(host_repo_path, image_repo_path)
class InternalGeneric(BaseCi):
@@ -162,35 +221,71 @@ class InternalGeneric(BaseCi):
logging.info('Building OSS-Fuzz project.')
# detect_main_repo builds the image as a side effect.
_, image_repo_path = (build_specified_commit.detect_main_repo(
- self.config.project_name, repo_name=self.config.project_repo_name))
+ self.config.oss_fuzz_project_name,
+ repo_name=self.config.project_repo_name))
if not image_repo_path:
- logging.error('Could not detect repo from project %s.',
- self.config.project_name)
- return BuildPreparationResult(False, None, None)
+ logging.error('Could not detect repo.')
+ return BuildPreparationResult(success=False,
+ image_repo_path=None,
+ repo_manager=None)
manager = repo_manager.RepoManager(self.config.project_src_path)
- return BuildPreparationResult(True, image_repo_path, manager)
+ return BuildPreparationResult(success=True,
+ image_repo_path=image_repo_path,
+ repo_manager=manager)
def get_diff_base(self):
return 'origin...'
-
-_IMAGE_BUILD_TRIES = 3
-_IMAGE_BUILD_BACKOFF = 2
+ def get_build_command(self, host_repo_path, image_repo_path): # pylint: disable=no-self-use
+ """Returns the command for building the project that is run inside the
+ project builder container. Command also replaces |image_repo_path| with
+ |host_repo_path|."""
+ return get_replace_repo_and_build_command(host_repo_path, image_repo_path)
@retry.wrap(_IMAGE_BUILD_TRIES, _IMAGE_BUILD_BACKOFF)
-def build_external_project_docker_image(project_name, project_src,
- build_integration_path):
+def build_external_project_docker_image(project_src, build_integration_path):
"""Builds the project builder image for an external (non-OSS-Fuzz) project.
Returns True on success."""
dockerfile_path = os.path.join(build_integration_path, 'Dockerfile')
- tag = 'gcr.io/oss-fuzz/{project_name}'.format(project_name=project_name)
- command = ['-t', tag, '-f', dockerfile_path, project_src]
+ command = [
+ '-t', docker.EXTERNAL_PROJECT_IMAGE, '-f', dockerfile_path, project_src
+ ]
return helper.docker_build(command)
+class ExternalGeneric(BaseCi):
+ """CI implementation for generic CI for external (non-OSS-Fuzz) projects."""
+
+ def get_diff_base(self):
+ return 'origin...'
+
+ def prepare_for_fuzzer_build(self):
+ logging.info('ExternalGeneric: preparing for fuzzer build.')
+ manager = repo_manager.RepoManager(self.config.project_src_path)
+ build_integration_abs_path = os.path.join(
+ manager.repo_dir, self.config.build_integration_path)
+ if not build_external_project_docker_image(manager.repo_dir,
+ build_integration_abs_path):
+ logging.error('Failed to build external project: %s.',
+ self.config.oss_fuzz_project_name)
+ return BuildPreparationResult(success=False,
+ image_repo_path=None,
+ repo_manager=None)
+
+ image_repo_path = os.path.join('/src', self.config.project_repo_name)
+ return BuildPreparationResult(success=True,
+ image_repo_path=image_repo_path,
+ repo_manager=manager)
+
+ def get_build_command(self, host_repo_path, image_repo_path): # pylint: disable=no-self-use
+ """Returns the command for building the project that is run inside the
+ project builder container."""
+ return get_build_command()
+
+
class ExternalGithub(GithubCiMixin, BaseCi):
"""Class representing CI for a non-OSS-Fuzz project on Github Actions."""
@@ -200,24 +295,32 @@ class ExternalGithub(GithubCiMixin, BaseCi):
projects are expected to bring their own source code to CIFuzz. Returns True
on success."""
logging.info('Building external project.')
- git_workspace = os.path.join(self.config.workspace, 'storage')
- os.makedirs(git_workspace, exist_ok=True)
+ os.makedirs(self.workspace.repo_storage, exist_ok=True)
# Checkout before building, so we don't need to rely on copying the source
# into the image.
# TODO(metzman): Figure out if we want second copy at all.
manager = repo_manager.clone_repo_and_get_manager(
self.config.git_url,
- git_workspace,
+ self.workspace.repo_storage,
repo_name=self.config.project_repo_name)
checkout_specified_commit(manager, self.config.pr_ref,
self.config.commit_sha)
- build_integration_path = os.path.join(manager.repo_dir,
- self.config.build_integration_path)
- if not build_external_project_docker_image(
- self.config.project_name, manager.repo_dir, build_integration_path):
+ build_integration_abs_path = os.path.join(
+ manager.repo_dir, self.config.build_integration_path)
+ if not build_external_project_docker_image(manager.repo_dir,
+ build_integration_abs_path):
logging.error('Failed to build external project.')
- return BuildPreparationResult(False, None, None)
+ return BuildPreparationResult(success=False,
+ image_repo_path=None,
+ repo_manager=None)
image_repo_path = os.path.join('/src', self.config.project_repo_name)
- return BuildPreparationResult(True, image_repo_path, manager)
+ return BuildPreparationResult(success=True,
+ image_repo_path=image_repo_path,
+ repo_manager=manager)
+
+ def get_build_command(self, host_repo_path, image_repo_path): # pylint: disable=no-self-use
+ """Returns the command for building the project that is run inside the
+ project builder container."""
+ return get_build_command()
diff --git a/infra/cifuzz/continuous_integration_test.py b/infra/cifuzz/continuous_integration_test.py
new file mode 100644
index 000000000..7c7e3eefd
--- /dev/null
+++ b/infra/cifuzz/continuous_integration_test.py
@@ -0,0 +1,87 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for the continuous_integration module."""
+import os
+import sys
+import unittest
+from unittest import mock
+
+import continuous_integration
+
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import repo_manager
+
+# pylint: disable=no-self-use
+
+
+class FixGitRepoForDiffTest(unittest.TestCase):
+ """Tests for fix_git_repo_for_diff."""
+
+ @mock.patch('utils.execute')
+ def test_fix_git_repo_for_diff(self, mock_execute):
+ """Tests that fix_git_repo_for_diff works as intended."""
+ repo_dir = '/dir'
+ repo_manager_obj = repo_manager.RepoManager(repo_dir)
+ continuous_integration.fix_git_repo_for_diff(repo_manager_obj)
+ expected_command = [
+ 'git', 'symbolic-ref', 'refs/remotes/origin/HEAD',
+ 'refs/remotes/origin/master'
+ ]
+
+ mock_execute.assert_called_with(expected_command, location=repo_dir)
+
+
+class GetBuildCommand(unittest.TestCase):
+ """Tests for get_build_command."""
+
+ def test_build_command(self):
+ """Tests that get_build_command works as intended."""
+ self.assertEqual(continuous_integration.get_build_command(), 'compile')
+
+
+class GetReplaceRepoAndBuildCommand(unittest.TestCase):
+ """Tests for get_replace_repo_and_build_command."""
+
+ def test_get_replace_repo_and_build_command(self):
+ """Tests that get_replace_repo_and_build_command works as intended."""
+ host_repo_path = '/path/on/host/to/repo'
+ image_repo_path = '/src/repo'
+ command = continuous_integration.get_replace_repo_and_build_command(
+ host_repo_path, image_repo_path)
+ expected_command = ('cd / && rm -rf /src/repo/* && '
+ 'cp -r /path/on/host/to/repo /src && cd - '
+ '&& compile')
+ self.assertEqual(command, expected_command)
+
+
+class BuildExternalProjetDockerImage(unittest.TestCase):
+ """Tests for build_external_project_docker_image."""
+
+ @mock.patch('helper.docker_build')
+ def test_build_external_project_docker_image(self, mock_docker_build):
+ """Tests that build_external_project_docker_image works as intended."""
+ build_integration_path = '.clusterfuzzlite'
+ project_src = '/path/to/project/src'
+ continuous_integration.build_external_project_docker_image(
+ project_src, build_integration_path)
+
+ mock_docker_build.assert_called_with([
+ '-t', 'external-project', '-f',
+ os.path.join('.clusterfuzzlite', 'Dockerfile'), project_src
+ ])
+
+
+# TODO(metzman): Write tests for the rest of continuous_integration.py.
diff --git a/infra/cifuzz/coverage_test.py b/infra/cifuzz/coverage_test.py
deleted file mode 100644
index 1b24d798c..000000000
--- a/infra/cifuzz/coverage_test.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests for coverage.py"""
-import os
-import json
-import unittest
-from unittest import mock
-
-import coverage
-
-# pylint: disable=protected-access
-
-TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'test_data')
-
-PROJECT_NAME = 'curl'
-REPO_PATH = '/src/curl'
-FUZZ_TARGET = 'curl_fuzzer'
-PROJECT_COV_JSON_FILENAME = 'example_curl_cov.json'
-FUZZ_TARGET_COV_JSON_FILENAME = 'example_curl_fuzzer_cov.json'
-INVALID_TARGET = 'not-a-fuzz-target'
-
-with open(os.path.join(TEST_DATA_PATH,
- PROJECT_COV_JSON_FILENAME),) as cov_file_handle:
- PROJECT_COV_INFO = json.loads(cov_file_handle.read())
-
-
-class GetFuzzerStatsDirUrlTest(unittest.TestCase):
- """Tests _get_fuzzer_stats_dir_url."""
-
- @mock.patch('coverage.get_json_from_url',
- return_value={
- 'fuzzer_stats_dir':
- 'gs://oss-fuzz-coverage/systemd/fuzzer_stats/20210303'
- })
- def test_get_valid_project(self, mocked_get_json_from_url):
- """Tests that a project's coverage report can be downloaded and parsed.
-
- NOTE: This test relies on the PROJECT_NAME repo's coverage report.
- The "example" project was not used because it has no coverage reports.
- """
- result = coverage._get_fuzzer_stats_dir_url(PROJECT_NAME)
- (url,), _ = mocked_get_json_from_url.call_args
- self.assertEqual(
- 'https://storage.googleapis.com/oss-fuzz-coverage/'
- 'latest_report_info/curl.json', url)
-
- expected_result = (
- 'https://storage.googleapis.com/oss-fuzz-coverage/systemd/fuzzer_stats/'
- '20210303')
- self.assertEqual(result, expected_result)
-
- def test_get_invalid_project(self):
- """Tests that passing a bad project returns None."""
- self.assertIsNone(coverage._get_fuzzer_stats_dir_url('not-a-proj'))
-
-
-class GetTargetCoverageReportTest(unittest.TestCase):
- """Tests get_target_coverage_report."""
-
- def setUp(self):
- with mock.patch('coverage._get_latest_cov_report_info',
- return_value=PROJECT_COV_INFO):
- self.coverage_getter = coverage.OssFuzzCoverageGetter(
- PROJECT_NAME, REPO_PATH)
-
- @mock.patch('coverage.get_json_from_url', return_value={})
- def test_valid_target(self, mocked_get_json_from_url):
- """Tests that a target's coverage report can be downloaded and parsed."""
- self.coverage_getter.get_target_coverage_report(FUZZ_TARGET)
- (url,), _ = mocked_get_json_from_url.call_args
- self.assertEqual(
- 'https://storage.googleapis.com/oss-fuzz-coverage/'
- 'curl/fuzzer_stats/20200226/curl_fuzzer.json', url)
-
- def test_invalid_target(self):
- """Tests that passing an invalid target coverage report returns None."""
- self.assertIsNone(
- self.coverage_getter.get_target_coverage_report(INVALID_TARGET))
-
- @mock.patch('coverage._get_latest_cov_report_info', return_value=None)
- def test_invalid_project_json(self, _):
- """Tests an invalid project JSON results in None being returned."""
- coverage_getter = coverage.OssFuzzCoverageGetter(PROJECT_NAME, REPO_PATH)
- self.assertIsNone(coverage_getter.get_target_coverage_report(FUZZ_TARGET))
-
-
-class GetFilesCoveredByTargetTest(unittest.TestCase):
- """Tests get_files_covered_by_target."""
-
- def setUp(self):
- with mock.patch('coverage._get_latest_cov_report_info',
- return_value=PROJECT_COV_INFO):
- self.coverage_getter = coverage.OssFuzzCoverageGetter(
- PROJECT_NAME, REPO_PATH)
-
- def test_valid_target(self):
- """Tests that covered files can be retrieved from a coverage report."""
- with open(os.path.join(TEST_DATA_PATH,
- FUZZ_TARGET_COV_JSON_FILENAME),) as file_handle:
- fuzzer_cov_info = json.loads(file_handle.read())
-
- with mock.patch('coverage.OssFuzzCoverageGetter.get_target_coverage_report',
- return_value=fuzzer_cov_info):
- file_list = self.coverage_getter.get_files_covered_by_target(FUZZ_TARGET)
-
- curl_files_list_path = os.path.join(TEST_DATA_PATH,
- 'example_curl_file_list.json')
- with open(curl_files_list_path) as file_handle:
- expected_file_list = json.loads(file_handle.read())
- self.assertCountEqual(file_list, expected_file_list)
-
- def test_invalid_target(self):
- """Tests passing invalid fuzz target returns None."""
- self.assertIsNone(
- self.coverage_getter.get_files_covered_by_target(INVALID_TARGET))
-
-
-class IsFileCoveredTest(unittest.TestCase):
- """Tests for is_file_covered."""
-
- def test_is_file_covered_covered(self):
- """Tests that is_file_covered returns True for a covered file."""
- file_coverage = {
- 'filename': '/src/systemd/src/basic/locale-util.c',
- 'summary': {
- 'regions': {
- 'count': 204,
- 'covered': 200,
- 'notcovered': 200,
- 'percent': 98.03
- }
- }
- }
- self.assertTrue(coverage.is_file_covered(file_coverage))
-
- def test_is_file_covered_not_covered(self):
- """Tests that is_file_covered returns False for a not covered file."""
- file_coverage = {
- 'filename': '/src/systemd/src/basic/locale-util.c',
- 'summary': {
- 'regions': {
- 'count': 204,
- 'covered': 0,
- 'notcovered': 0,
- 'percent': 0
- }
- }
- }
- self.assertFalse(coverage.is_file_covered(file_coverage))
-
-
-class GetLatestCovReportInfo(unittest.TestCase):
- """Tests that _get_latest_cov_report_info works as intended."""
-
- PROJECT = 'project'
- LATEST_REPORT_INFO_URL = ('https://storage.googleapis.com/oss-fuzz-coverage/'
- 'latest_report_info/project.json')
-
- @mock.patch('logging.error')
- @mock.patch('coverage.get_json_from_url', return_value={'coverage': 1})
- def test_get_latest_cov_report_info(self, mocked_get_json_from_url,
- mocked_error):
- """Tests that _get_latest_cov_report_info works as intended."""
- result = coverage._get_latest_cov_report_info(self.PROJECT)
- self.assertEqual(result, {'coverage': 1})
- mocked_error.assert_not_called()
- mocked_get_json_from_url.assert_called_with(self.LATEST_REPORT_INFO_URL)
-
- @mock.patch('logging.error')
- @mock.patch('coverage.get_json_from_url', return_value=None)
- def test_get_latest_cov_report_info_fail(self, _, mocked_error):
- """Tests that _get_latest_cov_report_info works as intended when we can't
- get latest report info."""
- result = coverage._get_latest_cov_report_info('project')
- self.assertIsNone(result)
- mocked_error.assert_called_with(
- 'Could not get the coverage report json from url: %s.',
- self.LATEST_REPORT_INFO_URL)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/infra/cifuzz/docker.py b/infra/cifuzz/docker.py
index eb993e28d..935773d92 100644
--- a/infra/cifuzz/docker.py
+++ b/infra/cifuzz/docker.py
@@ -12,23 +12,52 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for dealing with docker."""
+import logging
import os
import sys
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+import constants
import utils
BASE_BUILDER_TAG = 'gcr.io/oss-fuzz-base/base-builder'
-BASE_RUNNER_TAG = 'gcr.io/oss-fuzz-base/base-runner'
-MSAN_LIBS_BUILDER_TAG = 'gcr.io/oss-fuzz-base/msan-libs-builder'
PROJECT_TAG_PREFIX = 'gcr.io/oss-fuzz/'
+# Default fuzz configuration.
+_DEFAULT_DOCKER_RUN_ARGS = [
+ '--cap-add', 'SYS_PTRACE', '-e',
+ 'FUZZING_ENGINE=' + constants.DEFAULT_ENGINE, '-e',
+ 'ARCHITECTURE=' + constants.DEFAULT_ARCHITECTURE, '-e', 'CIFUZZ=True'
+]
+
+EXTERNAL_PROJECT_IMAGE = 'external-project'
+
+_DEFAULT_DOCKER_RUN_COMMAND = [
+ 'docker',
+ 'run',
+ '--rm',
+ '--privileged',
+]
+
+
+def get_docker_env_vars(env_mapping):
+ """Returns a list of docker arguments that sets each key in |env_mapping| as
+ an env var and the value of that key in |env_mapping| as the value."""
+ env_var_args = []
+ for env_var, env_var_val in env_mapping.items():
+ env_var_args.extend(['-e', f'{env_var}={env_var_val}'])
+ return env_var_args
+
def get_project_image_name(project):
"""Returns the name of the project builder image for |project_name|."""
- return PROJECT_TAG_PREFIX + project
+ # TODO(ochang): We may need unique names to support parallel fuzzing.
+ if project:
+ return PROJECT_TAG_PREFIX + project
+
+ return EXTERNAL_PROJECT_IMAGE
def delete_images(images):
@@ -36,3 +65,49 @@ def delete_images(images):
command = ['docker', 'rmi', '-f'] + images
utils.execute(command)
utils.execute(['docker', 'builder', 'prune', '-f'])
+
+
+def get_base_docker_run_args(workspace,
+ sanitizer=constants.DEFAULT_SANITIZER,
+ language=constants.DEFAULT_LANGUAGE,
+ docker_in_docker=False):
+ """Returns arguments that should be passed to every invocation of 'docker
+ run'."""
+ docker_args = _DEFAULT_DOCKER_RUN_ARGS.copy()
+ env_mapping = {
+ 'SANITIZER': sanitizer,
+ 'FUZZING_LANGUAGE': language,
+ 'OUT': workspace.out
+ }
+ docker_args += get_docker_env_vars(env_mapping)
+ docker_container = utils.get_container_name()
+ logging.info('Docker container: %s.', docker_container)
+ if docker_container and not docker_in_docker:
+ # Don't map specific volumes if in a docker container, it breaks when
+ # running a sibling container.
+ docker_args += ['--volumes-from', docker_container]
+ else:
+ docker_args += _get_args_mapping_host_path_to_container(workspace.workspace)
+ return docker_args, docker_container
+
+
+def get_base_docker_run_command(workspace,
+ sanitizer=constants.DEFAULT_SANITIZER,
+ language=constants.DEFAULT_LANGUAGE,
+ docker_in_docker=False):
+  """Returns part of the command that should be used every time 'docker run'
+  is invoked."""
+ docker_args, docker_container = get_base_docker_run_args(
+ workspace, sanitizer, language, docker_in_docker=docker_in_docker)
+ command = _DEFAULT_DOCKER_RUN_COMMAND.copy() + docker_args
+ return command, docker_container
+
+
+def _get_args_mapping_host_path_to_container(host_path, container_path=None):
+  """Get arguments to docker run that will map |host_path|, a path on the host,
+ a path in the container. If |container_path| is specified, that path is mapped
+ to. If not, then |host_path| is mapped to itself in the container."""
+  # WARNING: Do not use this function when running in production (where
+  # --volumes-from is used for mapping volumes). It will break production.
+ container_path = host_path if container_path is None else container_path
+ return ['-v', f'{host_path}:{container_path}']
diff --git a/infra/cifuzz/docker_test.py b/infra/cifuzz/docker_test.py
new file mode 100644
index 000000000..b356138cb
--- /dev/null
+++ b/infra/cifuzz/docker_test.py
@@ -0,0 +1,122 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests the functionality of the docker module."""
+import unittest
+from unittest import mock
+
+import docker
+import test_helpers
+import workspace_utils
+
+CONTAINER_NAME = 'example-container'
+config = test_helpers.create_run_config(oss_fuzz_project_name='project',
+ workspace='/workspace')
+config.workspace = '/workspace'
+WORKSPACE = workspace_utils.Workspace(config)
+SANITIZER = 'example-sanitizer'
+LANGUAGE = 'example-language'
+
+
+class GetProjectImageTest(unittest.TestCase):
+ """Tests for get_project_image."""
+
+ def test_get_project_image(self):
+ """Tests that get_project_image_name works as intended."""
+ project = 'my-project'
+ self.assertEqual(docker.get_project_image_name(project),
+ 'gcr.io/oss-fuzz/my-project')
+
+
+class GetDeleteImagesTest(unittest.TestCase):
+ """Tests for delete_images."""
+
+ @mock.patch('utils.execute')
+ def test_delete_images(self, mock_execute): # pylint: disable=no-self-use
+    """Tests that delete_images works as intended."""
+ images = ['image']
+ docker.delete_images(images)
+ expected_calls = [
+ mock.call(['docker', 'rmi', '-f'] + images),
+ mock.call(['docker', 'builder', 'prune', '-f'])
+ ]
+
+ mock_execute.assert_has_calls(expected_calls)
+
+
+class GetBaseDockerRunArgsTest(unittest.TestCase):
+ """Tests get_base_docker_run_args."""
+
+ @mock.patch('utils.get_container_name', return_value=CONTAINER_NAME)
+ def test_get_base_docker_run_args_container(self, _):
+ """Tests that get_base_docker_run_args works as intended when inside a
+ container."""
+ docker_args, docker_container = docker.get_base_docker_run_args(
+ WORKSPACE, SANITIZER, LANGUAGE)
+ self.assertEqual(docker_container, CONTAINER_NAME)
+ expected_docker_args = []
+ expected_docker_args = [
+ '--cap-add',
+ 'SYS_PTRACE',
+ '-e',
+ 'FUZZING_ENGINE=libfuzzer',
+ '-e',
+ 'ARCHITECTURE=x86_64',
+ '-e',
+ 'CIFUZZ=True',
+ '-e',
+ f'SANITIZER={SANITIZER}',
+ '-e',
+ f'FUZZING_LANGUAGE={LANGUAGE}',
+ '-e',
+ f'OUT={WORKSPACE.out}',
+ '--volumes-from',
+ CONTAINER_NAME,
+ ]
+ self.assertEqual(docker_args, expected_docker_args)
+
+ @mock.patch('utils.get_container_name', return_value=None)
+ def test_get_base_docker_run_args_no_container(self, _):
+ """Tests that get_base_docker_run_args works as intended when not inside a
+ container."""
+ docker_args, docker_container = docker.get_base_docker_run_args(
+ WORKSPACE, SANITIZER, LANGUAGE)
+ self.assertEqual(docker_container, None)
+ expected_docker_args = [
+ '--cap-add', 'SYS_PTRACE', '-e', 'FUZZING_ENGINE=libfuzzer', '-e',
+ 'ARCHITECTURE=x86_64', '-e', 'CIFUZZ=True', '-e',
+ f'SANITIZER={SANITIZER}', '-e', f'FUZZING_LANGUAGE={LANGUAGE}', '-e',
+ f'OUT={WORKSPACE.out}', '-v',
+ f'{WORKSPACE.workspace}:{WORKSPACE.workspace}'
+ ]
+ self.assertEqual(docker_args, expected_docker_args)
+
+
+class GetBaseDockerRunCommandTest(unittest.TestCase):
+  """Tests get_base_docker_run_command."""
+
+ @mock.patch('utils.get_container_name', return_value=None)
+ def test_get_base_docker_run_command_no_container(self, _):
+    """Tests that get_base_docker_run_command works as intended when not inside
+    a container."""
+ docker_args, docker_container = docker.get_base_docker_run_command(
+ WORKSPACE, SANITIZER, LANGUAGE)
+ self.assertEqual(docker_container, None)
+ expected_docker_command = [
+ 'docker', 'run', '--rm', '--privileged', '--cap-add', 'SYS_PTRACE',
+ '-e', 'FUZZING_ENGINE=libfuzzer', '-e', 'ARCHITECTURE=x86_64', '-e',
+ 'CIFUZZ=True', '-e', f'SANITIZER={SANITIZER}', '-e',
+ f'FUZZING_LANGUAGE={LANGUAGE}', '-e', f'OUT={WORKSPACE.out}', '-v',
+ f'{WORKSPACE.workspace}:{WORKSPACE.workspace}'
+ ]
+ self.assertEqual(docker_args, expected_docker_command)
diff --git a/infra/cifuzz/environment.py b/infra/cifuzz/environment.py
index 4cc0f846b..e99a67910 100644
--- a/infra/cifuzz/environment.py
+++ b/infra/cifuzz/environment.py
@@ -46,9 +46,6 @@ def get_bool(env_var, default_value=None):
lower_value = value.lower()
allowed_values = {'true', 'false'}
if lower_value not in allowed_values:
- raise Exception(('Bool env var {env_var} value {value} is invalid. '
- 'Must be one of {allowed_values}').format(
- env_var=env_var,
- value=value,
- allowed_values=allowed_values))
+ raise Exception(f'Bool env var {env_var} value {value} is invalid. '
+ f'Must be one of {allowed_values}.')
return lower_value == 'true'
diff --git a/infra/cifuzz/external-actions/build_fuzzers/action.yml b/infra/cifuzz/external-actions/build_fuzzers/action.yml
new file mode 100644
index 000000000..f45d02e20
--- /dev/null
+++ b/infra/cifuzz/external-actions/build_fuzzers/action.yml
@@ -0,0 +1,63 @@
+# action.yml
+name: 'build-fuzzers'
+description: "Builds an OSS-Fuzz project's fuzzers."
+inputs:
+ language:
+ description: 'Programming language project is written in.'
+ required: false
+ default: 'c++'
+ dry-run:
+ description: 'If set, run the action without actually reporting a failure.'
+ default: false
+ allowed-broken-targets-percentage:
+ description: 'The percentage of broken targets allowed in bad_build_check.'
+ required: false
+ sanitizer:
+ description: 'The sanitizer to build the fuzzers with.'
+ default: 'address'
+ project-src-path:
+ description: "The path to the project's source code checkout."
+ required: false
+ bad-build-check:
+ description: "Whether or not OSS-Fuzz's check for bad builds should be done."
+ required: false
+ default: true
+ storage-repo:
+ description: |
+ The git repo to use for storing certain artifacts from fuzzing.
+ required: false
+ storage-repo-branch:
+ description: |
+ The branch of the git repo to use for storing certain artifacts from
+ fuzzing.
+ required: false
+ storage-repo-branch-coverage:
+ description: |
+ The branch of the git repo to use for storing coverage reports.
+ required: false
+ upload-build:
+ description: |
+ If set, will upload the build.
+ default: false
+ github-token:
+ description: |
+ Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
+ You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
+ hardcode the token.
+ TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
+ Document locking this down.
+ required: false
+runs:
+ using: 'docker'
+ image: '../../../build_fuzzers.Dockerfile'
+ env:
+ OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
+ LANGUAGE: ${{ inputs.language }}
+    DRY_RUN: ${{ inputs.dry-run }}
+    ALLOWED_BROKEN_TARGETS_PERCENTAGE: ${{ inputs.allowed-broken-targets-percentage }}
+ SANITIZER: ${{ inputs.sanitizer }}
+ PROJECT_SRC_PATH: ${{ inputs.project-src-path }}
+ GITHUB_TOKEN: ${{ inputs.github-token }}
+ LOW_DISK_SPACE: 'True'
+ BAD_BUILD_CHECK: ${{ inputs.bad-build-check }}
+ UPLOAD_BUILD: ${{ inputs.upload-build }}
diff --git a/infra/cifuzz/external-actions/run_fuzzers/action.yml b/infra/cifuzz/external-actions/run_fuzzers/action.yml
new file mode 100644
index 000000000..cdefb5675
--- /dev/null
+++ b/infra/cifuzz/external-actions/run_fuzzers/action.yml
@@ -0,0 +1,69 @@
+# action.yml
+name: 'run-fuzzers'
+description: 'Runs fuzz target binaries for a specified length of time.'
+inputs:
+ language:
+ description: 'Programming language project is written in.'
+ required: false
+ default: 'c++'
+ fuzz-seconds:
+ description: 'The total time allotted for fuzzing in seconds.'
+ required: true
+ default: 600
+ dry-run:
+ description: 'If set, run the action without actually reporting a failure.'
+ default: false
+ sanitizer:
+ description: 'The sanitizer to run the fuzzers with.'
+ default: 'address'
+ run-fuzzers-mode:
+ description: |
+ The mode to run the fuzzers with ("ci" or "batch").
+ "ci" is for fuzzing a pull request or commit.
+ "batch" is for non-interactive fuzzing of an entire project.
+ "batch" is in alpha and should not be used in production.
+ required: false
+ default: 'ci'
+ github-token:
+ description: |
+ Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
+ You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
+ hardcode the token.
+ TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
+ Document locking this down.
+ required: true
+ storage-repo:
+ description: |
+ The git repo to use for storing certain artifacts from fuzzing.
+ required: false
+ storage-repo-branch:
+ description: |
+ The branch of the git repo to use for storing certain artifacts from
+ fuzzing.
+ default: main
+ required: false
+ storage-repo-branch-coverage:
+ description: |
+ The branch of the git repo to use for storing coverage reports.
+ default: gh-pages
+ required: false
+ report-unreproducible-crashes:
+ description: 'If True, then unreproducible crashes will be reported by CIFuzz.'
+ required: false
+ default: false
+runs:
+ using: 'docker'
+ image: '../../../run_fuzzers.Dockerfile'
+ env:
+ OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
+ LANGUAGE: ${{ inputs.language }}
+ FUZZ_SECONDS: ${{ inputs.fuzz-seconds }}
+    DRY_RUN: ${{ inputs.dry-run }}
+ SANITIZER: ${{ inputs.sanitizer }}
+ RUN_FUZZERS_MODE: ${{ inputs.run-fuzzers-mode }}
+ GITHUB_TOKEN: ${{ inputs.github-token }}
+ LOW_DISK_SPACE: 'True'
+ GIT_STORE_REPO: ${{ inputs.storage-repo }}
+ GIT_STORE_BRANCH: ${{ inputs.storage-repo-branch }}
+ GIT_STORE_BRANCH_COVERAGE: ${{ inputs.storage-repo-branch-coverage }}
+ REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }}
diff --git a/infra/cifuzz/filestore/__init__.py b/infra/cifuzz/filestore/__init__.py
new file mode 100644
index 000000000..d112f7b8c
--- /dev/null
+++ b/infra/cifuzz/filestore/__init__.py
@@ -0,0 +1,54 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for a generic filestore."""
+
+
+class FilestoreError(Exception):
+ """Error using the filestore."""
+
+
+# pylint: disable=unused-argument,no-self-use
+class BaseFilestore:
+ """Base class for a filestore."""
+
+ def __init__(self, config):
+ self.config = config
+
+ def upload_crashes(self, name, directory):
+ """Uploads the crashes at |directory| to |name|."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def upload_corpus(self, name, directory, replace=False):
+ """Uploads the corpus at |directory| to |name|."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def upload_build(self, name, directory):
+ """Uploads the build at |directory| to |name|."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def upload_coverage(self, name, directory):
+ """Uploads the coverage report at |directory| to |name|."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def download_corpus(self, name, dst_directory):
+ """Downloads the corpus located at |name| to |dst_directory|."""
+ raise NotImplementedError('Child class must implement method.')
+
+ def download_build(self, name, dst_directory):
+ """Downloads the build with |name| to |dst_directory|."""
+ raise NotImplementedError('Child class must implement method.')
+
+  def download_coverage(self, name, dst_directory):
+    """Downloads the coverage report located at |name| to |dst_directory|."""
+ raise NotImplementedError('Child class must implement method.')
diff --git a/infra/cifuzz/filestore/git/__init__.py b/infra/cifuzz/filestore/git/__init__.py
new file mode 100644
index 000000000..5414003da
--- /dev/null
+++ b/infra/cifuzz/filestore/git/__init__.py
@@ -0,0 +1,159 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for a git based filestore."""
+
+from distutils import dir_util
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import filestore
+
+# pylint: disable=wrong-import-position
+INFRA_DIR = os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+sys.path.append(INFRA_DIR)
+
+import retry
+
+_PUSH_RETRIES = 3
+_PUSH_BACKOFF = 1
+_GIT_EMAIL = 'cifuzz@clusterfuzz.com'
+_GIT_NAME = 'CIFuzz'
+_CORPUS_DIR = 'corpus'
+_COVERAGE_DIR = 'coverage'
+
+
+def git_runner(repo_path):
+  """Returns a git runner for the repo_path."""
+
+ def func(*args):
+ return subprocess.check_call(('git', '-C', repo_path) + args)
+
+ return func
+
+
+# pylint: disable=unused-argument,no-self-use
+class GitFilestore(filestore.BaseFilestore):
+ """Generic git filestore. This still relies on another filestore provided by
+ the CI for larger artifacts or artifacts which make sense to be included as
+ the result of a workflow run."""
+
+ def __init__(self, config, ci_filestore):
+ super().__init__(config)
+ self.repo_path = tempfile.mkdtemp()
+ self._git = git_runner(self.repo_path)
+ self._clone(self.config.git_store_repo)
+
+ self._ci_filestore = ci_filestore
+
+ def __del__(self):
+ shutil.rmtree(self.repo_path)
+
+ def _clone(self, repo_url):
+ """Clones repo URL."""
+ self._git('clone', repo_url, '.')
+ self._git('config', '--local', 'user.email', _GIT_EMAIL)
+ self._git('config', '--local', 'user.name', _GIT_NAME)
+
+ def _reset_git(self, branch):
+ """Resets the git repo."""
+ self._git('fetch', 'origin')
+ try:
+ self._git('checkout', '-B', branch, 'origin/' + branch)
+ self._git('reset', '--hard', 'HEAD')
+ except subprocess.CalledProcessError:
+ self._git('checkout', '--orphan', branch)
+
+ self._git('clean', '-fxd')
+
+ # pylint: disable=too-many-arguments
+ @retry.wrap(_PUSH_RETRIES, _PUSH_BACKOFF)
+ def _upload_to_git(self,
+ message,
+ branch,
+ upload_path,
+ local_path,
+ replace=False):
+ """Uploads a directory to git. If `replace` is True, then existing contents
+ in the upload_path is deleted."""
+ self._reset_git(branch)
+
+ full_repo_path = os.path.join(self.repo_path, upload_path)
+ if replace and os.path.exists(full_repo_path):
+ shutil.rmtree(full_repo_path)
+
+ dir_util.copy_tree(local_path, full_repo_path)
+ self._git('add', '.')
+ try:
+ self._git('commit', '-m', message)
+ except subprocess.CalledProcessError:
+ logging.debug('No changes, skipping git push.')
+ return
+
+ self._git('push', 'origin', branch)
+
+ def upload_crashes(self, name, directory):
+ """Uploads the crashes at |directory| to |name|."""
+ return self._ci_filestore.upload_crashes(name, directory)
+
+ def upload_corpus(self, name, directory, replace=False):
+ """Uploads the corpus at |directory| to |name|."""
+ self._upload_to_git('Corpus upload',
+ self.config.git_store_branch,
+ os.path.join(_CORPUS_DIR, name),
+ directory,
+ replace=replace)
+
+ def upload_build(self, name, directory):
+ """Uploads the build at |directory| to |name|."""
+ return self._ci_filestore.upload_build(name, directory)
+
+ def upload_coverage(self, name, directory):
+ """Uploads the coverage report at |directory| to |name|."""
+ self._upload_to_git('Coverage upload',
+ self.config.git_store_branch_coverage,
+ os.path.join(_COVERAGE_DIR, name),
+ directory,
+ replace=True)
+
+ def download_corpus(self, name, dst_directory):
+ """Downloads the corpus located at |name| to |dst_directory|."""
+ self._reset_git(self.config.git_store_branch)
+ path = os.path.join(self.repo_path, _CORPUS_DIR, name)
+ if not os.path.exists(path):
+ logging.debug('Corpus does not exist at %s.', path)
+ return False
+
+ dir_util.copy_tree(path, dst_directory)
+ return True
+
+ def download_build(self, name, dst_directory):
+ """Downloads the build with |name| to |dst_directory|."""
+ return self._ci_filestore.download_build(name, dst_directory)
+
+ def download_coverage(self, name, dst_directory):
+ """Downloads the latest project coverage report."""
+ self._reset_git(self.config.git_store_branch_coverage)
+ path = os.path.join(self.repo_path, _COVERAGE_DIR, name)
+ if not os.path.exists(path):
+ logging.debug('Coverage does not exist at %s.', path)
+ return False
+
+ dir_util.copy_tree(path, dst_directory)
+ return True
diff --git a/infra/cifuzz/filestore/git/git_test.py b/infra/cifuzz/filestore/git/git_test.py
new file mode 100644
index 000000000..56be23bac
--- /dev/null
+++ b/infra/cifuzz/filestore/git/git_test.py
@@ -0,0 +1,122 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for git."""
+import filecmp
+import os
+import tempfile
+import subprocess
+import sys
+import unittest
+from unittest import mock
+
+# pylint: disable=wrong-import-position
+INFRA_DIR = os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+sys.path.append(INFRA_DIR)
+
+from filestore import git
+import test_helpers
+
+# pylint: disable=protected-access,no-self-use
+
+
+class GitFilestoreTest(unittest.TestCase):
+ """Tests for GitFilestore."""
+
+ def setUp(self):
+ self.git_dir = tempfile.TemporaryDirectory()
+ self.addCleanup(self.git_dir.cleanup)
+
+ self.local_dir = tempfile.TemporaryDirectory()
+ self.addCleanup(self.local_dir.cleanup)
+
+ self.download_dir = tempfile.TemporaryDirectory()
+ self.addCleanup(self.download_dir.cleanup)
+
+ with open(os.path.join(self.local_dir.name, 'a'), 'w') as handle:
+ handle.write('')
+
+ os.makedirs(os.path.join(self.local_dir.name, 'b'))
+
+ with open(os.path.join(self.local_dir.name, 'b', 'c'), 'w') as handle:
+ handle.write('')
+
+ self.git_repo = git.git_runner(self.git_dir.name)
+ self.git_repo('init', '--bare')
+
+ self.config = test_helpers.create_run_config(
+ git_store_repo='file://' + self.git_dir.name,
+ git_store_branch='main',
+ git_store_branch_coverage='cov-branch')
+
+ self.mock_ci_filestore = mock.MagicMock()
+ self.git_store = git.GitFilestore(self.config, self.mock_ci_filestore)
+
+  def assert_dirs_same(self, first, second):
+    """Asserts two dirs are the same."""
+    dcmp = filecmp.dircmp(first, second)
+    # Fail loudly instead of returning False, which callers ignore.
+    self.assertFalse(dcmp.diff_files or dcmp.left_only or dcmp.right_only)
+
+    for subdir in dcmp.common_dirs:
+      self.assert_dirs_same(os.path.join(first, subdir),
+                            os.path.join(second, subdir))
+    return True
+
+ def get_repo_filelist(self, branch):
+ """Get files in repo."""
+ return subprocess.check_output([
+ 'git', '-C', self.git_dir.name, 'ls-tree', '-r', '--name-only', branch
+ ]).decode().splitlines()
+
+ def test_upload_download_corpus(self):
+ """Tests uploading and downloading corpus."""
+ self.git_store.upload_corpus('target', self.local_dir.name)
+ self.git_store.download_corpus('target', self.download_dir.name)
+ self.assert_dirs_same(self.local_dir.name, self.download_dir.name)
+
+ self.assertCountEqual([
+ 'corpus/target/a',
+ 'corpus/target/b/c',
+ ], self.get_repo_filelist('main'))
+
+ def test_upload_download_coverage(self):
+    """Tests uploading and downloading coverage."""
+ self.git_store.upload_coverage('latest', self.local_dir.name)
+ self.git_store.download_coverage('latest', self.download_dir.name)
+ self.assert_dirs_same(self.local_dir.name, self.download_dir.name)
+
+ self.assertCountEqual([
+ 'coverage/latest/a',
+ 'coverage/latest/b/c',
+ ], self.get_repo_filelist('cov-branch'))
+
+ def test_upload_crashes(self):
+ """Tests uploading crashes."""
+ self.git_store.upload_crashes('current', self.local_dir.name)
+ self.mock_ci_filestore.upload_crashes.assert_called_with(
+ 'current', self.local_dir.name)
+
+ def test_upload_build(self):
+ """Tests uploading build."""
+ self.git_store.upload_build('sanitizer', self.local_dir.name)
+ self.mock_ci_filestore.upload_build.assert_called_with(
+ 'sanitizer', self.local_dir.name)
+
+ def test_download_build(self):
+ """Tests downloading build."""
+ self.git_store.download_build('sanitizer', self.download_dir.name)
+ self.mock_ci_filestore.download_build.assert_called_with(
+ 'sanitizer', self.download_dir.name)
diff --git a/infra/cifuzz/filestore/github_actions/__init__.py b/infra/cifuzz/filestore/github_actions/__init__.py
new file mode 100644
index 000000000..3b03f9c0b
--- /dev/null
+++ b/infra/cifuzz/filestore/github_actions/__init__.py
@@ -0,0 +1,177 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Implementation of a filestore using Github actions artifacts."""
+import logging
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(
+ os.path.join(os.path.pardir, os.path.pardir, os.path.pardir,
+ os.path.dirname(os.path.abspath(__file__))))
+
+import utils
+import http_utils
+import filestore
+from filestore.github_actions import github_api
+
+UPLOAD_JS = os.path.join(os.path.dirname(__file__), 'upload.js')
+
+
+def tar_directory(directory, archive_path):
+ """Tars a |directory| and stores archive at |archive_path|. |archive_path|
+ must end in .tar"""
+ assert archive_path.endswith('.tar')
+ # Do this because make_archive will append the extension to archive_path.
+ archive_path = os.path.splitext(archive_path)[0]
+
+ root_directory = os.path.abspath(directory)
+ shutil.make_archive(archive_path,
+ 'tar',
+ root_dir=root_directory,
+ base_dir='./')
+
+
+class GithubActionsFilestore(filestore.BaseFilestore):
+ """Implementation of BaseFilestore using Github actions artifacts. Relies on
+ github_actions_toolkit for using the GitHub actions API and the github_api
+ module for using GitHub's standard API. We need to use both because the GitHub
+ actions API is the only way to upload an artifact but it does not support
+ downloading artifacts from other runs. The standard GitHub API does support
+ this however."""
+
+ ARTIFACT_PREFIX = 'cifuzz-'
+ BUILD_PREFIX = 'build-'
+ CRASHES_PREFIX = 'crashes-'
+ CORPUS_PREFIX = 'corpus-'
+ COVERAGE_PREFIX = 'coverage-'
+
+ def __init__(self, config):
+ super().__init__(config)
+ self.github_api_http_headers = github_api.get_http_auth_headers(config)
+
+ def _get_artifact_name(self, name):
+    """Returns |name| prefixed with |self.ARTIFACT_PREFIX| if it isn't already
+ prefixed. Otherwise returns |name|."""
+ if name.startswith(self.ARTIFACT_PREFIX):
+ return name
+ return f'{self.ARTIFACT_PREFIX}{name}'
+
+ def _upload_directory(self, name, directory): # pylint: disable=no-self-use
+ """Uploads |directory| as artifact with |name|."""
+ name = self._get_artifact_name(name)
+ with tempfile.TemporaryDirectory() as temp_dir:
+ archive_path = os.path.join(temp_dir, name + '.tar')
+ tar_directory(directory, archive_path)
+ _raw_upload_directory(name, temp_dir)
+
+ def upload_crashes(self, name, directory):
+ """Uploads the crashes at |directory| to |name|."""
+ return _raw_upload_directory(self.CRASHES_PREFIX + name, directory)
+
+ def upload_corpus(self, name, directory, replace=False):
+ """Uploads the corpus at |directory| to |name|."""
+    # Not applicable as the entire corpus is uploaded under a single
+ # artifact name.
+ del replace
+ return self._upload_directory(self.CORPUS_PREFIX + name, directory)
+
+ def upload_build(self, name, directory):
+ """Uploads the build at |directory| to |name|."""
+ return self._upload_directory(self.BUILD_PREFIX + name, directory)
+
+ def upload_coverage(self, name, directory):
+ """Uploads the coverage report at |directory| to |name|."""
+ return self._upload_directory(self.COVERAGE_PREFIX + name, directory)
+
+ def download_corpus(self, name, dst_directory): # pylint: disable=unused-argument,no-self-use
+ """Downloads the corpus located at |name| to |dst_directory|."""
+ return self._download_artifact(self.CORPUS_PREFIX + name, dst_directory)
+
+ def _find_artifact(self, name):
+ """Finds an artifact using the GitHub API and returns it."""
+ logging.debug('Listing artifacts.')
+ artifacts = self._list_artifacts()
+ artifact = github_api.find_artifact(name, artifacts)
+ logging.debug('Artifact: %s.', artifact)
+ return artifact
+
+ def _download_artifact(self, name, dst_directory):
+ """Downloads artifact with |name| to |dst_directory|. Returns True on
+ success."""
+ name = self._get_artifact_name(name)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ if not self._raw_download_artifact(name, temp_dir):
+ logging.warning('Could not download artifact: %s.', name)
+ return False
+
+ artifact_tarfile_path = os.path.join(temp_dir, name + '.tar')
+ if not os.path.exists(artifact_tarfile_path):
+ logging.error('Artifact zip did not contain a tarfile.')
+ return False
+
+ # TODO(jonathanmetzman): Replace this with archive.unpack from
+ # libClusterFuzz so we can avoid path traversal issues.
+ with tarfile.TarFile(artifact_tarfile_path) as artifact_tarfile:
+ artifact_tarfile.extractall(dst_directory)
+ return True
+
+ def _raw_download_artifact(self, name, dst_directory):
+ """Downloads the artifact with |name| to |dst_directory|. Returns True on
+ success. Does not do any untarring or adding prefix to |name|."""
+ artifact = self._find_artifact(name)
+ if not artifact:
+ logging.warning('Could not find artifact: %s.', name)
+ return False
+ download_url = artifact['archive_download_url']
+ return http_utils.download_and_unpack_zip(
+ download_url, dst_directory, headers=self.github_api_http_headers)
+
+ def _list_artifacts(self):
+ """Returns a list of artifacts."""
+ return github_api.list_artifacts(self.config.project_repo_owner,
+ self.config.project_repo_name,
+ self.github_api_http_headers)
+
+ def download_build(self, name, dst_directory):
+ """Downloads the build with name |name| to |dst_directory|."""
+ return self._download_artifact(self.BUILD_PREFIX + name, dst_directory)
+
+ def download_coverage(self, name, dst_directory):
+ """Downloads the latest project coverage report."""
+ return self._download_artifact(self.COVERAGE_PREFIX + name, dst_directory)
+
+
+def _upload_artifact_with_upload_js(name, artifact_paths, directory):
+ """Uploads the artifacts in |artifact_paths| that are located in |directory|
+ to |name|, using the upload.js script."""
+ command = [UPLOAD_JS, name, directory] + artifact_paths
+ _, _, retcode = utils.execute(command)
+ return retcode == 0
+
+
+def _raw_upload_directory(name, directory):
+ """Uploads the artifacts located in |directory| to |name|. Does not do any
+ tarring or adding prefixes to |name|."""
+ # Get file paths.
+ artifact_paths = []
+ for root, _, curr_file_paths in os.walk(directory):
+ for file_path in curr_file_paths:
+ artifact_paths.append(os.path.join(root, file_path))
+ logging.debug('Artifact paths: %s.', artifact_paths)
+ return _upload_artifact_with_upload_js(name, artifact_paths, directory)
diff --git a/infra/cifuzz/filestore/github_actions/github_actions_test.py b/infra/cifuzz/filestore/github_actions/github_actions_test.py
new file mode 100644
index 000000000..7745065a9
--- /dev/null
+++ b/infra/cifuzz/filestore/github_actions/github_actions_test.py
@@ -0,0 +1,281 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for github_actions."""
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+# pylint: disable=wrong-import-position
+INFRA_DIR = os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+sys.path.append(INFRA_DIR)
+
+from filestore import github_actions
+import test_helpers
+
+# pylint: disable=protected-access,no-self-use
+
+
+class GithubActionsFilestoreTest(fake_filesystem_unittest.TestCase):
+ """Tests for GithubActionsFilestore."""
+
+ def setUp(self):
+ test_helpers.patch_environ(self)
+ self.token = 'example githubtoken'
+ self.owner = 'exampleowner'
+ self.repo = 'examplerepo'
+ os.environ['GITHUB_REPOSITORY'] = f'{self.owner}/{self.repo}'
+ os.environ['GITHUB_EVENT_PATH'] = '/fake'
+ self.config = test_helpers.create_run_config(token=self.token)
+ self.local_dir = '/local-dir'
+ self.testcase = os.path.join(self.local_dir, 'testcase')
+
+ def _get_expected_http_headers(self):
+ return {
+ 'Authorization': f'token {self.token}',
+ 'Accept': 'application/vnd.github.v3+json',
+ }
+
+ @mock.patch('filestore.github_actions.github_api.list_artifacts')
+ def test_list_artifacts(self, mock_list_artifacts):
+ """Tests that _list_artifacts works as intended."""
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ filestore._list_artifacts()
+ mock_list_artifacts.assert_called_with(self.owner, self.repo,
+ self._get_expected_http_headers())
+
+ @mock.patch('logging.warning')
+ @mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts',
+ return_value=None)
+ @mock.patch('filestore.github_actions.github_api.find_artifact',
+ return_value=None)
+ def test_download_build_no_artifact(self, _, __, mock_warning):
+ """Tests that download_build returns None and doesn't exception when
+ find_artifact can't find an artifact."""
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ name = 'name'
+ build_dir = 'build-dir'
+ self.assertFalse(filestore.download_build(name, build_dir))
+ mock_warning.assert_called_with('Could not download artifact: %s.',
+ 'cifuzz-build-' + name)
+
+ @mock.patch('logging.warning')
+ @mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts',
+ return_value=None)
+ @mock.patch('filestore.github_actions.github_api.find_artifact',
+ return_value=None)
+ def test_download_corpus_no_artifact(self, _, __, mock_warning):
+ """Tests that download_corpus_build returns None and doesn't exception when
+ find_artifact can't find an artifact."""
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ name = 'name'
+ dst_dir = 'local-dir'
+ self.assertFalse(filestore.download_corpus(name, dst_dir))
+ mock_warning.assert_called_with('Could not download artifact: %s.',
+ 'cifuzz-corpus-' + name)
+
+ @mock.patch('filestore.github_actions.tar_directory')
+ @mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
+ def test_upload_corpus(self, mock_upload_artifact, mock_tar_directory):
+ """Test uploading corpus."""
+ self._create_local_dir()
+
+ def mock_tar_directory_impl(_, archive_path):
+ self.fs.create_file(archive_path)
+
+ mock_tar_directory.side_effect = mock_tar_directory_impl
+
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ filestore.upload_corpus('target', self.local_dir)
+ self.assert_upload(mock_upload_artifact, mock_tar_directory,
+ 'corpus-target')
+
+ @mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
+ def test_upload_crashes(self, mock_upload_artifact):
+ """Test uploading crashes."""
+ self._create_local_dir()
+
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ filestore.upload_crashes('current', self.local_dir)
+ mock_upload_artifact.assert_has_calls(
+ [mock.call('crashes-current', ['/local-dir/testcase'], '/local-dir')])
+
+ @mock.patch('filestore.github_actions.tar_directory')
+ @mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
+ def test_upload_build(self, mock_upload_artifact, mock_tar_directory):
+ """Test uploading build."""
+ self._create_local_dir()
+
+ def mock_tar_directory_impl(_, archive_path):
+ self.fs.create_file(archive_path)
+
+ mock_tar_directory.side_effect = mock_tar_directory_impl
+
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ filestore.upload_build('sanitizer', self.local_dir)
+ self.assert_upload(mock_upload_artifact, mock_tar_directory,
+ 'build-sanitizer')
+
+ @mock.patch('filestore.github_actions.tar_directory')
+ @mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
+ def test_upload_coverage(self, mock_upload_artifact, mock_tar_directory):
+ """Test uploading coverage."""
+ self._create_local_dir()
+
+ def mock_tar_directory_impl(_, archive_path):
+ self.fs.create_file(archive_path)
+
+ mock_tar_directory.side_effect = mock_tar_directory_impl
+
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ filestore.upload_coverage('latest', self.local_dir)
+ self.assert_upload(mock_upload_artifact, mock_tar_directory,
+ 'coverage-latest')
+
+ def assert_upload(self, mock_upload_artifact, mock_tar_directory,
+ expected_artifact_name):
+ """Tests that upload_directory invokes tar_directory and
+ artifact_client.upload_artifact properly."""
+ # Don't assert what second argument will be since it's a temporary
+ # directory.
+ self.assertEqual(mock_tar_directory.call_args_list[0][0][0], self.local_dir)
+
+ # Don't assert what second and third arguments will be since they are
+ # temporary directories.
+ expected_artifact_name = 'cifuzz-' + expected_artifact_name
+ self.assertEqual(mock_upload_artifact.call_args_list[0][0][0],
+ expected_artifact_name)
+
+ # Assert artifacts list contains one tarfile.
+ artifacts_list = mock_upload_artifact.call_args_list[0][0][1]
+ self.assertEqual(len(artifacts_list), 1)
+ self.assertEqual(os.path.basename(artifacts_list[0]),
+ expected_artifact_name + '.tar')
+
+ def _create_local_dir(self):
+ """Sets up pyfakefs and creates a corpus directory containing
+ self.testcase."""
+ self.setUpPyfakefs()
+ self.fs.create_file(self.testcase, contents='hi')
+
+ @mock.patch('filestore.github_actions.GithubActionsFilestore._find_artifact')
+ @mock.patch('http_utils.download_and_unpack_zip')
+ def test_download_artifact(self, mock_download_and_unpack_zip,
+ mock_find_artifact):
+ """Tests that _download_artifact works as intended."""
+ artifact_download_url = 'http://example.com/download'
+ artifact_listing = {
+ 'expired': False,
+ 'name': 'corpus',
+ 'archive_download_url': artifact_download_url
+ }
+ mock_find_artifact.return_value = artifact_listing
+
+ self._create_local_dir()
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # Create a tarball.
+ archive_path = os.path.join(temp_dir, 'cifuzz-corpus.tar')
+ github_actions.tar_directory(self.local_dir, archive_path)
+
+ artifact_download_dst_dir = os.path.join(temp_dir, 'dst')
+ os.mkdir(artifact_download_dst_dir)
+
+ def mock_download_and_unpack_zip_impl(url, download_artifact_temp_dir,
+ headers):
+ self.assertEqual(url, artifact_download_url)
+ self.assertEqual(headers, self._get_expected_http_headers())
+ shutil.copy(
+ archive_path,
+ os.path.join(download_artifact_temp_dir,
+ os.path.basename(archive_path)))
+ return True
+
+ mock_download_and_unpack_zip.side_effect = (
+ mock_download_and_unpack_zip_impl)
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ self.assertTrue(
+ filestore._download_artifact('corpus', artifact_download_dst_dir))
+ mock_find_artifact.assert_called_with('cifuzz-corpus')
+ self.assertTrue(
+ os.path.exists(
+ os.path.join(artifact_download_dst_dir,
+ os.path.basename(self.testcase))))
+
+ @mock.patch('filestore.github_actions.github_api.list_artifacts')
+ def test_find_artifact(self, mock_list_artifacts):
+ """Tests that _find_artifact works as intended."""
+ artifact_listing_1 = {
+ 'expired': False,
+ 'name': 'other',
+ 'archive_download_url': 'http://download1'
+ }
+ artifact_listing_2 = {
+ 'expired': False,
+ 'name': 'artifact',
+ 'archive_download_url': 'http://download2'
+ }
+ artifact_listing_3 = {
+ 'expired': True,
+ 'name': 'artifact',
+ 'archive_download_url': 'http://download3'
+ }
+ artifact_listing_4 = {
+ 'expired': False,
+ 'name': 'artifact',
+ 'archive_download_url': 'http://download4'
+ }
+ artifacts = [
+ artifact_listing_1, artifact_listing_2, artifact_listing_3,
+ artifact_listing_4
+ ]
+ mock_list_artifacts.return_value = artifacts
+ filestore = github_actions.GithubActionsFilestore(self.config)
+ # Test that find_artifact will return the most recent unexpired artifact
+ # with the correct name.
+ self.assertEqual(filestore._find_artifact('artifact'), artifact_listing_2)
+ mock_list_artifacts.assert_called_with(self.owner, self.repo,
+ self._get_expected_http_headers())
+
+
+class TarDirectoryTest(unittest.TestCase):
+ """Tests for tar_directory."""
+
+ def test_tar_directory(self):
+ """Tests that tar_directory writes the archive to the correct location and
+ archives properly."""
+ with tempfile.TemporaryDirectory() as temp_dir:
+ archive_path = os.path.join(temp_dir, 'myarchive.tar')
+ archived_dir = os.path.join(temp_dir, 'toarchive')
+ os.mkdir(archived_dir)
+ archived_filename = 'file1'
+ archived_file_path = os.path.join(archived_dir, archived_filename)
+ with open(archived_file_path, 'w') as file_handle:
+ file_handle.write('hi')
+ github_actions.tar_directory(archived_dir, archive_path)
+ self.assertTrue(os.path.exists(archive_path))
+
+ # Now check it archives correctly.
+ unpacked_directory = os.path.join(temp_dir, 'unpacked')
+ with tarfile.TarFile(archive_path) as artifact_tarfile:
+ artifact_tarfile.extractall(unpacked_directory)
+ unpacked_archived_file_path = os.path.join(unpacked_directory,
+ archived_filename)
+ self.assertTrue(os.path.exists(unpacked_archived_file_path))
diff --git a/infra/cifuzz/filestore/github_actions/github_api.py b/infra/cifuzz/filestore/github_actions/github_api.py
new file mode 100644
index 000000000..191b75058
--- /dev/null
+++ b/infra/cifuzz/filestore/github_actions/github_api.py
@@ -0,0 +1,108 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for dealing with the GitHub API. This is different from
+github_actions_toolkit which only deals with the actions API. We need to use
+both."""
+import logging
+import os
+import sys
+
+import requests
+
+import filestore
+
+# pylint: disable=wrong-import-position,import-error
+
+sys.path.append(
+ os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
+ os.path.pardir))
+import retry
+
+_MAX_ITEMS_PER_PAGE = 100
+
+_GET_ATTEMPTS = 3
+_GET_BACKOFF = 1
+
+
+def get_http_auth_headers(config):
+ """Returns HTTP headers for authentication to the API."""
+ authorization = f'token {config.token}'
+ return {
+ 'Authorization': authorization,
+ 'Accept': 'application/vnd.github.v3+json'
+ }
+
+
+def _get_artifacts_list_api_url(repo_owner, repo_name):
+ """Returns the artifacts_api_url for |repo_name| owned by |repo_owner|."""
+ return (f'https://api.github.com/repos/{repo_owner}/'
+ f'{repo_name}/actions/artifacts')
+
+
+@retry.wrap(_GET_ATTEMPTS, _GET_BACKOFF)
+def _do_get_request(*args, **kwargs):
+ """Wrapped version of requests.get that does retries."""
+ return requests.get(*args, **kwargs)
+
+
+def _get_items(url, headers):
+ """Generator that gets and yields items from a GitHub API endpoint (specified
+  by |url|) sending |headers| with the get request."""
+ # Github API response pages are 1-indexed.
+ page_counter = 1
+
+ # Set to infinity so we run loop at least once.
+ total_num_items = float('inf')
+
+ item_num = 0
+ while item_num < total_num_items:
+ params = {'per_page': _MAX_ITEMS_PER_PAGE, 'page': str(page_counter)}
+ response = _do_get_request(url, params=params, headers=headers)
+ response_json = response.json()
+ if not response.status_code == 200:
+ # Check that request was successful.
+ logging.error('Request to %s failed. Code: %d. Response: %s',
+ response.request.url, response.status_code, response_json)
+ raise filestore.FilestoreError('Github API request failed.')
+
+ if total_num_items == float('inf'):
+ # Set proper total_num_items
+ total_num_items = response_json['total_count']
+
+ # Get the key for the items we are after.
+ keys = [key for key in response_json.keys() if key != 'total_count']
+ assert len(keys) == 1, keys
+ items_key = keys[0]
+
+ for item in response_json[items_key]:
+ yield item
+ item_num += 1
+
+ page_counter += 1
+
+
+def find_artifact(artifact_name, artifacts):
+ """Find the artifact with the name |artifact_name| in |artifacts|."""
+ for artifact in artifacts:
+ # TODO(metzman): Handle multiple by making sure we download the latest.
+ if artifact['name'] == artifact_name and not artifact['expired']:
+ return artifact
+ return None
+
+
+def list_artifacts(owner, repo, headers):
+ """Returns a generator of all the artifacts for |owner|/|repo|."""
+ url = _get_artifacts_list_api_url(owner, repo)
+ logging.debug('Getting artifacts from: %s', url)
+ return _get_items(url, headers)
diff --git a/infra/cifuzz/filestore/github_actions/github_api_test.py b/infra/cifuzz/filestore/github_actions/github_api_test.py
new file mode 100644
index 000000000..c7cad6db0
--- /dev/null
+++ b/infra/cifuzz/filestore/github_actions/github_api_test.py
@@ -0,0 +1,33 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for github_api."""
+import unittest
+
+from filestore.github_actions import github_api
+import test_helpers
+
+
+class GetHttpAuthHeaders(unittest.TestCase):
+ """Tests for get_http_auth_headers."""
+
+ def test_get_http_auth_headers(self):
+ """Tests that get_http_auth_headers returns the correct result."""
+ token = 'example githubtoken'
+ run_config = test_helpers.create_run_config(token=token)
+ expected_headers = {
+ 'Authorization': f'token {token}',
+ 'Accept': 'application/vnd.github.v3+json',
+ }
+ self.assertEqual(expected_headers,
+ github_api.get_http_auth_headers(run_config))
diff --git a/infra/cifuzz/filestore/github_actions/upload.js b/infra/cifuzz/filestore/github_actions/upload.js
new file mode 100755
index 000000000..cd025e560
--- /dev/null
+++ b/infra/cifuzz/filestore/github_actions/upload.js
@@ -0,0 +1,33 @@
+#!/usr/bin/env node
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Script for uploading an artifact. Returns 0 on success.
+// Usage: upload.js <artifactName> <rootDirectory> <file 1>...<file N>
+
+const fs = require('fs');
+const artifact = require('@actions/artifact');
+const artifactClient = artifact.create()
+const artifactName = process.argv[2];
+const rootDirectory = process.argv[3]
+const files = process.argv.slice(4);
+const options = {
+ continueOnError: true
+}
+
+const uploadResult = artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
+console.log(uploadResult);
+if (uploadResult['failedItems']) {
+ return 1;
+}
+return 0;
diff --git a/infra/cifuzz/filestore_utils.py b/infra/cifuzz/filestore_utils.py
new file mode 100644
index 000000000..d3aaecd82
--- /dev/null
+++ b/infra/cifuzz/filestore_utils.py
@@ -0,0 +1,31 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""External filestore interface. Cannot be depended on by filestore code."""
+import filestore
+import filestore.git
+import filestore.github_actions
+
+
+def get_filestore(config):
+ """Returns the correct filestore based on the platform in |config|.
+ Raises an exception if there is no correct filestore for the platform."""
+ # TODO(metzman): Force specifying of filestore.
+ if config.platform == config.Platform.EXTERNAL_GITHUB:
+ ci_filestore = filestore.github_actions.GithubActionsFilestore(config)
+ if not config.git_store_repo:
+ return ci_filestore
+
+ return filestore.git.GitFilestore(config, ci_filestore)
+
+ raise filestore.FilestoreError('Filestore doesn\'t support platform.')
diff --git a/infra/cifuzz/filestore_utils_test.py b/infra/cifuzz/filestore_utils_test.py
new file mode 100644
index 000000000..db5fc5bc1
--- /dev/null
+++ b/infra/cifuzz/filestore_utils_test.py
@@ -0,0 +1,50 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for filestore_utils."""
+import unittest
+from unittest import mock
+
+import parameterized
+
+import config_utils
+import filestore
+from filestore import github_actions
+import filestore_utils
+import test_helpers
+
+
+class GetFilestoreTest(unittest.TestCase):
+ """Tests for get_filestore."""
+
+ @parameterized.parameterized.expand([
+ ({
+ 'is_github': True,
+ }, github_actions.GithubActionsFilestore),
+ ])
+ def test_get_filestore(self, config_kwargs, filestore_cls):
+ """Tests that get_filestore returns the right filestore given a certain
+ platform."""
+ run_config = test_helpers.create_run_config(**config_kwargs)
+ filestore_impl = filestore_utils.get_filestore(run_config)
+ self.assertIsInstance(filestore_impl, filestore_cls)
+
+ @mock.patch('config_utils.BaseConfig.platform', return_value='other')
+ @mock.patch('config_utils._get_ci_environment',
+ return_value=config_utils.GenericCiEnvironment())
+ def test_get_filestore_unsupported_platform(self, _, __):
+ """Tests that get_filestore exceptions given a platform it doesn't
+ support."""
+ run_config = test_helpers.create_run_config()
+ with self.assertRaises(filestore.FilestoreError):
+ filestore_utils.get_filestore(run_config)
diff --git a/infra/cifuzz/fuzz_target.py b/infra/cifuzz/fuzz_target.py
index c623bf60d..ae92b14c9 100644
--- a/infra/cifuzz/fuzz_target.py
+++ b/infra/cifuzz/fuzz_target.py
@@ -15,17 +15,13 @@
import collections
import logging
import os
-import re
import shutil
import stat
-import subprocess
-import sys
-import docker
+import clusterfuzz.environment
+import clusterfuzz.fuzz
-# pylint: disable=wrong-import-position,import-error
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-import utils
+import config_utils
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
@@ -33,46 +29,59 @@ logging.basicConfig(
# Use a fixed seed for determinism. Use len_control=0 since we don't have enough
# time fuzzing for len_control to make sense (probably).
-LIBFUZZER_OPTIONS = '-seed=1337 -len_control=0'
+LIBFUZZER_OPTIONS = ['-seed=1337', '-len_control=0']
# The number of reproduce attempts for a crash.
REPRODUCE_ATTEMPTS = 10
+REPRODUCE_TIME_SECONDS = 30
+
# Seconds on top of duration until a timeout error is raised.
BUFFER_TIME = 10
# Log message if we can't check if crash reproduces on an recent build.
-COULD_NOT_TEST_ON_RECENT_MESSAGE = (
- 'Crash is reproducible. Could not run recent build of '
- 'target to determine if this code change (pr/commit) introduced crash. '
- 'Assuming this code change introduced crash.')
+COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE = (
+ 'Could not run previous build of target to determine if this code change '
+ '(pr/commit) introduced crash. Assuming crash was newly introduced.')
-FuzzResult = collections.namedtuple('FuzzResult', ['testcase', 'stacktrace'])
+FuzzResult = collections.namedtuple('FuzzResult',
+ ['testcase', 'stacktrace', 'corpus_path'])
class ReproduceError(Exception):
"""Error for when we can't attempt to reproduce a crash."""
-class FuzzTarget:
+def get_fuzz_target_corpus_dir(workspace, target_name):
+ """Returns the directory for storing |target_name|'s corpus in |workspace|."""
+ return os.path.join(workspace.corpora, target_name)
+
+
+def get_fuzz_target_pruned_corpus_dir(workspace, target_name):
+ """Returns the directory for storing |target_name|'s puned corpus in
+ |workspace|."""
+ return os.path.join(workspace.pruned_corpora, target_name)
+
+
+class FuzzTarget: # pylint: disable=too-many-instance-attributes
"""A class to manage a single fuzz target.
Attributes:
target_name: The name of the fuzz target.
duration: The length of time in seconds that the target should run.
target_path: The location of the fuzz target binary.
- out_dir: The location of where output artifacts are stored.
+ workspace: The workspace for storing things related to fuzzing.
"""
# pylint: disable=too-many-arguments
- def __init__(self, target_path, duration, out_dir, clusterfuzz_deployment,
+ def __init__(self, target_path, duration, workspace, clusterfuzz_deployment,
config):
"""Represents a single fuzz target.
Args:
target_path: The location of the fuzz target binary.
duration: The length of time in seconds the target should run.
- out_dir: The location of where the output from crashes should be stored.
+ workspace: The path used for storing things needed for fuzzing.
clusterfuzz_deployment: The object representing the ClusterFuzz
deployment.
config: The config of this project.
@@ -80,10 +89,39 @@ class FuzzTarget:
self.target_path = target_path
self.target_name = os.path.basename(self.target_path)
self.duration = int(duration)
- self.out_dir = out_dir
+ self.workspace = workspace
self.clusterfuzz_deployment = clusterfuzz_deployment
self.config = config
- self.latest_corpus_path = None
+ self.latest_corpus_path = get_fuzz_target_corpus_dir(
+ self.workspace, self.target_name)
+ os.makedirs(self.latest_corpus_path, exist_ok=True)
+ self.pruned_corpus_path = get_fuzz_target_pruned_corpus_dir(
+ self.workspace, self.target_name)
+ os.makedirs(self.pruned_corpus_path, exist_ok=True)
+
+ def _download_corpus(self):
+ """Downloads the corpus for the target from ClusterFuzz and returns the path
+ to the corpus. An empty directory is provided if the corpus can't be
+ downloaded or is empty."""
+ self.clusterfuzz_deployment.download_corpus(self.target_name,
+ self.latest_corpus_path)
+ return self.latest_corpus_path
+
+ def prune(self):
+ """Prunes the corpus and returns the result."""
+ self._download_corpus()
+ with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
+ self.config.sanitizer,
+ self.target_path,
+ interactive=True):
+ engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
+ result = engine_impl.minimize_corpus(self.target_path, [],
+ [self.latest_corpus_path],
+ self.pruned_corpus_path,
+ self.workspace.artifacts,
+ self.duration)
+
+ return FuzzResult(None, result.logs, self.pruned_corpus_path)
def fuzz(self):
"""Starts the fuzz target run for the length of time specified by duration.
@@ -91,85 +129,65 @@ class FuzzTarget:
Returns:
FuzzResult namedtuple with stacktrace and testcase if applicable.
"""
- logging.info('Fuzzer %s, started.', self.target_name)
- docker_container = utils.get_container_name()
- command = ['docker', 'run', '--rm', '--privileged']
- if docker_container:
- command += [
- '--volumes-from', docker_container, '-e', 'OUT=' + self.out_dir
- ]
- else:
- command += ['-v', '%s:%s' % (self.out_dir, '/out')]
-
- command += [
- '-e', 'FUZZING_ENGINE=libfuzzer', '-e',
- 'SANITIZER=' + self.config.sanitizer, '-e', 'CIFUZZ=True', '-e',
- 'RUN_FUZZER_MODE=interactive', docker.BASE_RUNNER_TAG, 'bash', '-c'
- ]
-
- run_fuzzer_command = 'run_fuzzer {fuzz_target} {options}'.format(
- fuzz_target=self.target_name,
- options=LIBFUZZER_OPTIONS + ' -max_total_time=' + str(self.duration))
-
- # If corpus can be downloaded use it for fuzzing.
- self.latest_corpus_path = self.clusterfuzz_deployment.download_corpus(
- self.target_name, self.out_dir)
- if self.latest_corpus_path:
- run_fuzzer_command = run_fuzzer_command + ' ' + self.latest_corpus_path
- command.append(run_fuzzer_command)
-
- logging.info('Running command: %s', ' '.join(command))
- process = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ logging.info('Running fuzzer: %s.', self.target_name)
- try:
- _, stderr = process.communicate(timeout=self.duration + BUFFER_TIME)
- except subprocess.TimeoutExpired:
- logging.error('Fuzzer %s timed out, ending fuzzing.', self.target_name)
- return FuzzResult(None, None)
+ self._download_corpus()
+ corpus_path = self.latest_corpus_path
+
+ logging.info('Starting fuzzing')
+ with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
+ self.config.sanitizer,
+ self.target_path,
+ interactive=True) as env:
+ engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
+ options = engine_impl.prepare(corpus_path, env.target_path, env.build_dir)
+ options.merge_back_new_testcases = False
+ options.analyze_dictionary = False
+ options.arguments.extend(LIBFUZZER_OPTIONS)
+
+ result = engine_impl.fuzz(self.target_path, options,
+ self.workspace.artifacts, self.duration)
# Libfuzzer timeout was reached.
- if not process.returncode:
+ if not result.crashes:
logging.info('Fuzzer %s finished with no crashes discovered.',
self.target_name)
- return FuzzResult(None, None)
-
- # Crash was discovered.
- logging.info('Fuzzer %s, ended before timeout.', self.target_name)
- testcase = self.get_testcase(stderr)
- if not testcase:
- logging.error(b'No testcase found in stacktrace: %s.', stderr)
- return FuzzResult(None, None)
-
- utils.binary_print(b'Fuzzer: %s. Detected bug:\n%s' %
- (self.target_name.encode(), stderr))
- if self.is_crash_reportable(testcase):
+ return FuzzResult(None, None, self.latest_corpus_path)
+
+ # Only report first crash.
+ crash = result.crashes[0]
+ logging.info('Fuzzer: %s. Detected bug:\n%s', self.target_name,
+ crash.stacktrace)
+
+ if self.is_crash_reportable(crash.input_path):
# We found a bug in the fuzz target and we will report it.
- return FuzzResult(testcase, stderr)
+ return FuzzResult(crash.input_path, result.logs, self.latest_corpus_path)
# We found a bug but we won't report it.
- return FuzzResult(None, None)
+ return FuzzResult(None, None, self.latest_corpus_path)
- def free_disk_if_needed(self):
+ def free_disk_if_needed(self, delete_fuzz_target=True):
"""Deletes things that are no longer needed from fuzzing this fuzz target to
save disk space if needed."""
if not self.config.low_disk_space:
+ logging.info('Not freeing disk space after running fuzz target.')
return
- logging.info(
- 'Deleting corpus, seed corpus and fuzz target of %s to save disk.',
- self.target_name)
+ logging.info('Deleting corpus and seed corpus of %s to save disk.',
+ self.target_name)
# Delete the seed corpus, corpus, and fuzz target.
- if self.latest_corpus_path and os.path.exists(self.latest_corpus_path):
+ for corpus_path in [self.latest_corpus_path, self.pruned_corpus_path]:
# Use ignore_errors=True to fix
# https://github.com/google/oss-fuzz/issues/5383.
- shutil.rmtree(self.latest_corpus_path, ignore_errors=True)
+ shutil.rmtree(corpus_path, ignore_errors=True)
- os.remove(self.target_path)
target_seed_corpus_path = self.target_path + '_seed_corpus.zip'
if os.path.exists(target_seed_corpus_path):
os.remove(target_seed_corpus_path)
+
+ if delete_fuzz_target:
+ logging.info('Deleting fuzz target: %s.', self.target_name)
+ os.remove(self.target_path)
logging.info('Done deleting.')
def is_reproducible(self, testcase, target_path):
@@ -186,41 +204,31 @@ class FuzzTarget:
Raises:
ReproduceError if we can't attempt to reproduce the crash.
"""
-
if not os.path.exists(target_path):
- raise ReproduceError('Target %s not found.' % target_path)
+ logging.info('Target: %s does not exist.', target_path)
+ raise ReproduceError(f'Target {target_path} not found.')
os.chmod(target_path, stat.S_IRWXO)
- target_dirname = os.path.dirname(target_path)
- command = ['docker', 'run', '--rm', '--privileged']
- container = utils.get_container_name()
- if container:
- command += [
- '--volumes-from', container, '-e', 'OUT=' + target_dirname, '-e',
- 'TESTCASE=' + testcase
- ]
- else:
- command += [
- '-v',
- '%s:/out' % target_dirname, '-v',
- '%s:/testcase' % testcase
- ]
-
- command += [
- '-t', docker.BASE_RUNNER_TAG, 'reproduce', self.target_name, '-runs=100'
- ]
-
- logging.info('Running reproduce command: %s.', ' '.join(command))
- for _ in range(REPRODUCE_ATTEMPTS):
- _, _, returncode = utils.execute(command)
- if returncode != 0:
- logging.info('Reproduce command returned: %s. Reproducible on %s.',
- returncode, target_path)
-
- return True
-
- logging.info('Reproduce command returned 0. Not reproducible on %s.',
+ logging.info('Trying to reproduce crash using: %s.', testcase)
+ with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
+ self.config.sanitizer,
+ target_path,
+ interactive=True):
+ for _ in range(REPRODUCE_ATTEMPTS):
+ engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
+ result = engine_impl.reproduce(target_path,
+ testcase,
+ arguments=[],
+ max_time=REPRODUCE_TIME_SECONDS)
+
+ if result.return_code != 0:
+ logging.info('Reproduce command returned: %s. Reproducible on %s.',
+ result.return_code, target_path)
+
+ return True
+
+ logging.info('Reproduce command returned: 0. Not reproducible on %s.',
target_path)
return False
@@ -239,60 +247,52 @@ class FuzzTarget:
ReproduceError if we can't attempt to reproduce the crash on the PR build.
"""
if not os.path.exists(testcase):
- raise ReproduceError('Testcase %s not found.' % testcase)
+ raise ReproduceError(f'Testcase {testcase} not found.')
try:
reproducible_on_code_change = self.is_reproducible(
testcase, self.target_path)
except ReproduceError as error:
- logging.error('Could not run target when checking for reproducibility.'
+ logging.error('Could not check for crash reproducibility.'
'Please file an issue:'
'https://github.com/google/oss-fuzz/issues/new.')
raise error
if not reproducible_on_code_change:
- logging.info('Failed to reproduce the crash using the obtained testcase.')
- return False
+ logging.info('Crash is not reproducible.')
+ return self.config.report_unreproducible_crashes
- clusterfuzz_build_dir = self.clusterfuzz_deployment.download_latest_build(
- self.out_dir)
+ logging.info('Crash is reproducible.')
+ return self.is_crash_novel(testcase)
+
+ def is_crash_novel(self, testcase):
+ """Returns whether or not the crash is new. A crash is considered new if it
+ can't be reproduced on an older ClusterFuzz build of the target."""
+ if not os.path.exists(testcase):
+ raise ReproduceError('Testcase %s not found.' % testcase)
+ clusterfuzz_build_dir = self.clusterfuzz_deployment.download_latest_build()
if not clusterfuzz_build_dir:
# Crash is reproducible on PR build and we can't test on a recent
# ClusterFuzz/OSS-Fuzz build.
- logging.info(COULD_NOT_TEST_ON_RECENT_MESSAGE)
+ logging.info(COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE)
return True
clusterfuzz_target_path = os.path.join(clusterfuzz_build_dir,
self.target_name)
+
try:
reproducible_on_clusterfuzz_build = self.is_reproducible(
testcase, clusterfuzz_target_path)
except ReproduceError:
# This happens if the project has ClusterFuzz builds, but the fuzz target
# is not in it (e.g. because the fuzz target is new).
- logging.info(COULD_NOT_TEST_ON_RECENT_MESSAGE)
+ logging.info(COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE)
return True
- if not reproducible_on_clusterfuzz_build:
- logging.info('The crash is reproducible. The crash doesn\'t reproduce '
- 'on old builds. This code change probably introduced the '
- 'crash.')
- return True
-
- logging.info('The crash is reproducible on old builds '
- '(without the current code change).')
- return False
-
- def get_testcase(self, error_bytes):
- """Gets the file from a fuzzer run stacktrace.
-
- Args:
- error_bytes: The bytes containing the output from the fuzzer.
-
- Returns:
- The path to the testcase or None if not found.
- """
- match = re.search(rb'\bTest unit written to \.\/([^\s]+)', error_bytes)
- if match:
- return os.path.join(self.out_dir, match.group(1).decode('utf-8'))
- return None
+ if reproducible_on_clusterfuzz_build:
+ logging.info('The crash is reproducible on previous build. '
+ 'Code change (pr/commit) did not introduce crash.')
+ return False
+ logging.info('The crash is not reproducible on previous build. '
+ 'Code change (pr/commit) introduced crash.')
+ return True
diff --git a/infra/cifuzz/fuzz_target_test.py b/infra/cifuzz/fuzz_target_test.py
index 8bec234dc..ecea6fbbf 100644
--- a/infra/cifuzz/fuzz_target_test.py
+++ b/infra/cifuzz/fuzz_target_test.py
@@ -18,12 +18,18 @@ import tempfile
import unittest
from unittest import mock
+import certifi
+# Importing this later causes import failures with pytest for some reason.
+# TODO(ochang): Figure out why.
import parameterized
+import google.cloud.ndb # pylint: disable=unused-import
from pyfakefs import fake_filesystem_unittest
+from clusterfuzz.fuzz import engine
import clusterfuzz_deployment
-import config_utils
import fuzz_target
+import test_helpers
+import workspace_utils
# NOTE: This integration test relies on
# https://github.com/google/oss-fuzz/tree/master/projects/example project.
@@ -32,153 +38,118 @@ EXAMPLE_PROJECT = 'example'
# An example fuzzer that triggers an error.
EXAMPLE_FUZZER = 'example_crash_fuzzer'
-# The return value of a successful call to utils.execute.
-EXECUTE_SUCCESS_RETVAL = ('', '', 0)
-
-# The return value of a failed call to utils.execute.
-EXECUTE_FAILURE_RETVAL = ('', '', 1)
+# Mock return values for engine_impl.reproduce.
+EXECUTE_SUCCESS_RESULT = engine.ReproduceResult([], 0, 0, '')
+EXECUTE_FAILURE_RESULT = engine.ReproduceResult([], 1, 0, '')
def _create_config(**kwargs):
"""Creates a config object and then sets every attribute that is a key in
|kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
attribute of Config."""
- defaults = {'is_github': True, 'project_name': EXAMPLE_PROJECT}
+ defaults = {
+ 'is_github': True,
+ 'oss_fuzz_project_name': EXAMPLE_PROJECT,
+ 'workspace': '/workspace'
+ }
for default_key, default_value in defaults.items():
if default_key not in kwargs:
kwargs[default_key] = default_value
- with mock.patch('os.path.basename', return_value=None), mock.patch(
- 'config_utils.get_project_src_path',
- return_value=None), mock.patch('config_utils._is_dry_run',
- return_value=True):
- config = config_utils.RunFuzzersConfig()
-
- for key, value in kwargs.items():
- assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
- setattr(config, key, value)
- return config
+ return test_helpers.create_run_config(**kwargs)
def _create_deployment(**kwargs):
config = _create_config(**kwargs)
- return clusterfuzz_deployment.get_clusterfuzz_deployment(config)
+ workspace = workspace_utils.Workspace(config)
+ return clusterfuzz_deployment.get_clusterfuzz_deployment(config, workspace)
-# TODO(metzman): Use patch from test_libs/helpers.py in clusterfuzz so that we
-# don't need to accept this as an argument in every test method.
@mock.patch('utils.get_container_name', return_value='container')
class IsReproducibleTest(fake_filesystem_unittest.TestCase):
"""Tests the is_reproducible method in the fuzz_target.FuzzTarget class."""
def setUp(self):
"""Sets up example fuzz target to test is_reproducible method."""
- self.fuzz_target_path = '/example/path'
- self.testcase_path = '/testcase'
+ self.fuzz_target_name = 'fuzz-target'
deployment = _create_deployment()
- self.test_target = fuzz_target.FuzzTarget(self.fuzz_target_path,
- fuzz_target.REPRODUCE_ATTEMPTS,
- '/example/outdir', deployment,
- deployment.config)
+ self.config = deployment.config
+ self.workspace = deployment.workspace
+ self.fuzz_target_path = os.path.join(self.workspace.out,
+ self.fuzz_target_name)
+ self.setUpPyfakefs()
+ self.fs.create_file(self.fuzz_target_path)
+ self.testcase_path = '/testcase'
+ self.fs.create_file(self.testcase_path)
+
+ self.target = fuzz_target.FuzzTarget(self.fuzz_target_path,
+ fuzz_target.REPRODUCE_ATTEMPTS,
+ self.workspace, deployment,
+ deployment.config)
+
+ # ClusterFuzz requires ROOT_DIR.
+ root_dir = os.environ['ROOT_DIR']
+ test_helpers.patch_environ(self, empty=True)
+ os.environ['ROOT_DIR'] = root_dir
def test_reproducible(self, _):
"""Tests that is_reproducible returns True if crash is detected and that
is_reproducible uses the correct command to reproduce a crash."""
- self._set_up_fakefs()
- all_repro = [EXECUTE_FAILURE_RETVAL] * fuzz_target.REPRODUCE_ATTEMPTS
- with mock.patch('utils.execute', side_effect=all_repro) as mocked_execute:
- result = self.test_target.is_reproducible(self.testcase_path,
- self.fuzz_target_path)
- mocked_execute.assert_called_once_with([
- 'docker', 'run', '--rm', '--privileged', '--volumes-from',
- 'container', '-e', 'OUT=/example', '-e',
- 'TESTCASE=' + self.testcase_path, '-t',
- 'gcr.io/oss-fuzz-base/base-runner', 'reproduce', 'path', '-runs=100'
- ])
+ all_repro = [EXECUTE_FAILURE_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS
+ with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
+ mock_get_engine().reproduce.side_effect = all_repro
+
+ result = self.target.is_reproducible(self.testcase_path,
+ self.fuzz_target_path)
+ mock_get_engine().reproduce.assert_called_once_with(
+ '/workspace/build-out/fuzz-target',
+ '/testcase',
+ arguments=[],
+ max_time=30)
self.assertTrue(result)
- self.assertEqual(1, mocked_execute.call_count)
-
- def _set_up_fakefs(self):
- """Helper to setup pyfakefs and add important files to the fake
- filesystem."""
- self.setUpPyfakefs()
- self.fs.create_file(self.fuzz_target_path)
- self.fs.create_file(self.testcase_path)
+ self.assertEqual(1, mock_get_engine().reproduce.call_count)
def test_flaky(self, _):
"""Tests that is_reproducible returns True if crash is detected on the last
attempt."""
- self._set_up_fakefs()
- last_time_repro = [EXECUTE_SUCCESS_RETVAL] * 9 + [EXECUTE_FAILURE_RETVAL]
- with mock.patch('utils.execute',
- side_effect=last_time_repro) as mocked_execute:
+ last_time_repro = [EXECUTE_SUCCESS_RESULT] * 9 + [EXECUTE_FAILURE_RESULT]
+ with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
+ mock_get_engine().reproduce.side_effect = last_time_repro
self.assertTrue(
- self.test_target.is_reproducible(self.testcase_path,
- self.fuzz_target_path))
+ self.target.is_reproducible(self.testcase_path,
+ self.fuzz_target_path))
self.assertEqual(fuzz_target.REPRODUCE_ATTEMPTS,
- mocked_execute.call_count)
+ mock_get_engine().reproduce.call_count)
def test_nonexistent_fuzzer(self, _):
"""Tests that is_reproducible raises an error if it could not attempt
reproduction because the fuzzer doesn't exist."""
with self.assertRaises(fuzz_target.ReproduceError):
- self.test_target.is_reproducible(self.testcase_path, '/non-existent-path')
+ self.target.is_reproducible(self.testcase_path, '/non-existent-path')
def test_unreproducible(self, _):
"""Tests that is_reproducible returns False for a crash that did not
reproduce."""
- all_unrepro = [EXECUTE_SUCCESS_RETVAL] * fuzz_target.REPRODUCE_ATTEMPTS
- self._set_up_fakefs()
- with mock.patch('utils.execute', side_effect=all_unrepro):
- result = self.test_target.is_reproducible(self.testcase_path,
- self.fuzz_target_path)
+ all_unrepro = [EXECUTE_SUCCESS_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS
+ with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
+ mock_get_engine().reproduce.side_effect = all_unrepro
+ result = self.target.is_reproducible(self.testcase_path,
+ self.fuzz_target_path)
self.assertFalse(result)
-class GetTestCaseTest(unittest.TestCase):
- """Tests get_testcase."""
-
- def setUp(self):
- """Sets up example fuzz target to test get_testcase method."""
- deployment = _create_deployment()
- self.test_target = fuzz_target.FuzzTarget('/example/path', 10,
- '/example/outdir', deployment,
- deployment.config)
-
- def test_valid_error_string(self):
- """Tests that get_testcase returns the correct testcase give an error."""
- testcase_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'test_data', 'example_crash_fuzzer_output.txt')
- with open(testcase_path, 'rb') as test_fuzz_output:
- parsed_testcase = self.test_target.get_testcase(test_fuzz_output.read())
- self.assertEqual(
- parsed_testcase,
- '/example/outdir/crash-ad6700613693ef977ff3a8c8f4dae239c3dde6f5')
-
- def test_invalid_error_string(self):
- """Tests that get_testcase returns None with a bad error string."""
- self.assertIsNone(self.test_target.get_testcase(b''))
- self.assertIsNone(self.test_target.get_testcase(b' Example crash string.'))
-
- def test_encoding(self):
- """Tests that get_testcase accepts bytes and returns a string."""
- fuzzer_output = b'\x8fTest unit written to ./crash-1'
- result = self.test_target.get_testcase(fuzzer_output)
- self.assertTrue(isinstance(result, str))
-
-
class IsCrashReportableTest(fake_filesystem_unittest.TestCase):
"""Tests the is_crash_reportable method of FuzzTarget."""
def setUp(self):
"""Sets up example fuzz target to test is_crash_reportable method."""
+ self.setUpPyfakefs()
self.fuzz_target_path = '/example/do_stuff_fuzzer'
deployment = _create_deployment()
- self.test_target = fuzz_target.FuzzTarget(self.fuzz_target_path, 100,
- '/example/outdir', deployment,
- deployment.config)
+ self.target = fuzz_target.FuzzTarget(self.fuzz_target_path, 100,
+ deployment.workspace, deployment,
+ deployment.config)
self.oss_fuzz_build_path = '/oss-fuzz-build'
- self.setUpPyfakefs()
self.fs.create_file(self.fuzz_target_path)
self.oss_fuzz_target_path = os.path.join(
self.oss_fuzz_build_path, os.path.basename(self.fuzz_target_path))
@@ -186,18 +157,20 @@ class IsCrashReportableTest(fake_filesystem_unittest.TestCase):
self.testcase_path = '/testcase'
self.fs.create_file(self.testcase_path, contents='')
+ # Do this to prevent pyfakefs from messing with requests.
+ self.fs.add_real_directory(os.path.dirname(certifi.__file__))
+
@mock.patch('fuzz_target.FuzzTarget.is_reproducible',
side_effect=[True, False])
@mock.patch('logging.info')
- def test_new_reproducible_crash(self, mocked_info, _):
+ def test_new_reproducible_crash(self, mock_info, _):
"""Tests that a new reproducible crash returns True."""
with tempfile.TemporaryDirectory() as tmp_dir:
- self.test_target.out_dir = tmp_dir
- self.assertTrue(self.test_target.is_crash_reportable(self.testcase_path))
- mocked_info.assert_called_with(
- 'The crash is reproducible. The crash doesn\'t reproduce '
- 'on old builds. This code change probably introduced the '
- 'crash.')
+ self.target.out_dir = tmp_dir
+ self.assertTrue(self.target.is_crash_reportable(self.testcase_path))
+ mock_info.assert_called_with(
+ 'The crash is not reproducible on previous build. '
+ 'Code change (pr/commit) introduced crash.')
# yapf: disable
@parameterized.parameterized.expand([
@@ -218,12 +191,11 @@ class IsCrashReportableTest(fake_filesystem_unittest.TestCase):
side_effect=is_reproducible_retvals):
with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build',
return_value=self.oss_fuzz_build_path):
- self.assertFalse(
- self.test_target.is_crash_reportable(self.testcase_path))
+ self.assertFalse(self.target.is_crash_reportable(self.testcase_path))
@mock.patch('logging.info')
@mock.patch('fuzz_target.FuzzTarget.is_reproducible', return_value=[True])
- def test_reproducible_no_oss_fuzz_target(self, _, mocked_info):
+ def test_reproducible_no_oss_fuzz_target(self, _, mock_info):
"""Tests that is_crash_reportable returns True when a crash reproduces on
the PR build but the target is not in the OSS-Fuzz build (usually because it
is new)."""
@@ -236,17 +208,16 @@ class IsCrashReportableTest(fake_filesystem_unittest.TestCase):
with mock.patch(
'fuzz_target.FuzzTarget.is_reproducible',
- side_effect=is_reproducible_side_effect) as mocked_is_reproducible:
+ side_effect=is_reproducible_side_effect) as mock_is_reproducible:
with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build',
return_value=self.oss_fuzz_build_path):
- self.assertTrue(self.test_target.is_crash_reportable(
- self.testcase_path))
- mocked_is_reproducible.assert_any_call(self.testcase_path,
- self.oss_fuzz_target_path)
- mocked_info.assert_called_with(
- 'Crash is reproducible. Could not run recent build of '
- 'target to determine if this code change (pr/commit) introduced crash. '
- 'Assuming this code change introduced crash.')
+ self.assertTrue(self.target.is_crash_reportable(self.testcase_path))
+ mock_is_reproducible.assert_any_call(self.testcase_path,
+ self.oss_fuzz_target_path)
+ mock_info.assert_called_with(
+ 'Could not run previous build of target to determine if this code '
+ 'change (pr/commit) introduced crash. Assuming crash was newly '
+ 'introduced.')
if __name__ == '__main__':
diff --git a/infra/cifuzz/generate_coverage_report.py b/infra/cifuzz/generate_coverage_report.py
new file mode 100644
index 000000000..9901c452a
--- /dev/null
+++ b/infra/cifuzz/generate_coverage_report.py
@@ -0,0 +1,48 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for generating coverage reports."""
+import os
+
+import base_runner_utils
+import fuzz_target
+import utils
+
+
+def run_coverage_command(config, workspace):
+ """Runs the coverage command in base-runner to generate a coverage report."""
+ env = base_runner_utils.get_env(config, workspace)
+ env['HTTP_PORT'] = ''
+ env['COVERAGE_EXTRA_ARGS'] = ''
+ env['CORPUS_DIR'] = workspace.corpora
+ env['COVERAGE_OUTPUT_DIR'] = workspace.coverage_report
+ command = 'coverage'
+ return utils.execute(command, env=env)
+
+
+def download_corpora(fuzz_target_paths, clusterfuzz_deployment):
+ """Downloads corpora for fuzz targets in |fuzz_target_paths| using
+ |clusterfuzz_deployment| to download corpora from ClusterFuzz/OSS-Fuzz."""
+ for target_path in fuzz_target_paths:
+ target_name = os.path.basename(target_path)
+ corpus_dir = fuzz_target.get_fuzz_target_corpus_dir(
+ clusterfuzz_deployment.workspace, target_name)
+ clusterfuzz_deployment.download_corpus(target_name, corpus_dir)
+
+
+def generate_coverage_report(fuzz_target_paths, workspace,
+ clusterfuzz_deployment, config):
+ """Generates a coverage report using Clang's source based coverage."""
+ download_corpora(fuzz_target_paths, clusterfuzz_deployment)
+ run_coverage_command(config, workspace)
+ clusterfuzz_deployment.upload_coverage()
diff --git a/infra/cifuzz/generate_coverage_report_test.py b/infra/cifuzz/generate_coverage_report_test.py
new file mode 100644
index 000000000..df2c9b206
--- /dev/null
+++ b/infra/cifuzz/generate_coverage_report_test.py
@@ -0,0 +1,71 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for generate_coverage_report."""
+
+import unittest
+from unittest import mock
+
+import generate_coverage_report
+import test_helpers
+
+OUT_DIR = '/outdir'
+PROJECT = 'example-project'
+SANITIZER = 'coverage'
+
+
+class TestRunCoverageCommand(unittest.TestCase):
+ """Tests run_coverage_command"""
+
+ def setUp(self):
+ test_helpers.patch_environ(self, empty=True)
+
+ @mock.patch('utils.execute')
+ def test_run_coverage_command(self, mock_execute): # pylint: disable=no-self-use
+ """Tests that run_coverage_command works as intended."""
+ config = test_helpers.create_run_config(oss_fuzz_project_name=PROJECT,
+ sanitizer=SANITIZER)
+ workspace = test_helpers.create_workspace()
+ generate_coverage_report.run_coverage_command(config, workspace)
+ expected_command = 'coverage'
+ expected_env = {
+ 'SANITIZER': config.sanitizer,
+ 'FUZZING_LANGUAGE': config.language,
+ 'OUT': workspace.out,
+ 'CIFUZZ': 'True',
+ 'FUZZING_ENGINE': 'libfuzzer',
+ 'ARCHITECTURE': 'x86_64',
+ 'FUZZER_ARGS': '-rss_limit_mb=2560 -timeout=25',
+ 'HTTP_PORT': '',
+ 'COVERAGE_EXTRA_ARGS': '',
+ 'CORPUS_DIR': workspace.corpora,
+ 'COVERAGE_OUTPUT_DIR': workspace.coverage_report
+ }
+ mock_execute.assert_called_with(expected_command, env=expected_env)
+
+
+class DownloadCorporaTest(unittest.TestCase):
+ """Tests for download_corpora."""
+
+ def test_download_corpora(self): # pylint: disable=no-self-use
+ """Tests that download_corpora works as intended."""
+ clusterfuzz_deployment = mock.Mock()
+ clusterfuzz_deployment.workspace = test_helpers.create_workspace()
+ fuzz_target_paths = ['/path/to/fuzzer1', '/path/to/fuzzer2']
+ expected_calls = [
+ mock.call('fuzzer1', '/workspace/cifuzz-corpus/fuzzer1'),
+ mock.call('fuzzer2', '/workspace/cifuzz-corpus/fuzzer2')
+ ]
+ generate_coverage_report.download_corpora(fuzz_target_paths,
+ clusterfuzz_deployment)
+ clusterfuzz_deployment.download_corpus.assert_has_calls(expected_calls)
diff --git a/infra/cifuzz/coverage.py b/infra/cifuzz/get_coverage.py
index 9a179c59d..b4b2d25d6 100644
--- a/infra/cifuzz/coverage.py
+++ b/infra/cifuzz/get_coverage.py
@@ -12,57 +12,44 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for determining coverage of fuzz targets."""
+import json
import logging
import os
import sys
-import json
-import urllib.error
-import urllib.request
+
+import http_utils
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import utils
-# The path to get project's latest report json file.
-LATEST_REPORT_INFO_PATH = 'oss-fuzz-coverage/latest_report_info/'
+# The path to get OSS-Fuzz project's latest report json file.
+OSS_FUZZ_LATEST_COVERAGE_INFO_PATH = 'oss-fuzz-coverage/latest_report_info/'
-class OssFuzzCoverageGetter:
- """Gets coverage data for a project from OSS-Fuzz."""
+# pylint: disable=too-few-public-methods
+class CoverageError(Exception):
+ """Exceptions for project coverage."""
- def __init__(self, project_name, repo_path):
- """Constructor for OssFuzzCoverageGetter. Callers should check that
- fuzzer_stats_url is initialized."""
- self.project_name = project_name
- self.repo_path = _normalize_repo_path(repo_path)
- self.fuzzer_stats_url = _get_fuzzer_stats_dir_url(self.project_name)
- def get_target_coverage_report(self, target):
- """Get the coverage report for a specific fuzz target.
+class BaseCoverage:
+ """Gets coverage data for a project."""
- Args:
- target: The name of the fuzz target whose coverage is requested.
-
- Returns:
- The target's coverage json dict or None on failure.
- """
- if not self.fuzzer_stats_url:
- return None
-
- target_url = utils.url_join(self.fuzzer_stats_url, target + '.json')
- return get_json_from_url(target_url)
+ def __init__(self, repo_path):
+ self.repo_path = _normalize_repo_path(repo_path)
def get_files_covered_by_target(self, target):
- """Gets a list of source files covered by the specific fuzz target.
+ """Returns a list of source files covered by the specific fuzz target.
Args:
target: The name of the fuzz target whose coverage is requested.
Returns:
- A list of files that the fuzz targets covers or None.
+ A list of files that the fuzz target covers or None.
"""
- target_cov = self.get_target_coverage_report(target)
+ target_cov = self.get_target_coverage(target)
if not target_cov:
+ logging.info('No coverage available for %s', target)
return None
coverage_per_file = get_coverage_per_file(target_cov)
@@ -88,33 +75,53 @@ class OssFuzzCoverageGetter:
return affected_file_list
+ def get_target_coverage(self, target):
+ """Get the coverage report for a specific fuzz target.
-def is_file_covered(file_cov):
- """Returns whether the file is covered."""
- return file_cov['summary']['regions']['covered']
+ Args:
+ target: The name of the fuzz target whose coverage is requested.
+ Returns:
+ The target's coverage json dict or None on failure.
+ """
+ raise NotImplementedError('Child class must implement method.')
-def get_coverage_per_file(target_cov):
- """Returns the coverage per file within |target_cov|."""
- return target_cov['data'][0]['files']
+class OSSFuzzCoverage(BaseCoverage):
+ """Gets coverage data for a project from OSS-Fuzz."""
-def _normalize_repo_path(repo_path):
- """Normalizes and returns |repo_path| to make sure cases like /src/curl and
- /src/curl/ are both handled."""
- repo_path = os.path.normpath(repo_path)
- if not repo_path.endswith('/'):
- repo_path += '/'
- return repo_path
+ def __init__(self, repo_path, oss_fuzz_project_name):
+ """Constructor for OSSFuzzCoverage."""
+ super().__init__(repo_path)
+ self.oss_fuzz_project_name = oss_fuzz_project_name
+ self.fuzzer_stats_url = _get_oss_fuzz_fuzzer_stats_dir_url(
+ self.oss_fuzz_project_name)
+ if self.fuzzer_stats_url is None:
+ raise CoverageError('Could not get latest coverage.')
+ def get_target_coverage(self, target):
+ """Get the coverage report for a specific fuzz target.
-def _get_latest_cov_report_info(project_name):
+ Args:
+ target: The name of the fuzz target whose coverage is requested.
+
+ Returns:
+ The target's coverage json dict or None on failure.
+ """
+ if not self.fuzzer_stats_url:
+ return None
+
+ target_url = utils.url_join(self.fuzzer_stats_url, target + '.json')
+ return http_utils.get_json_from_url(target_url)
+
+
+def _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name):
"""Gets and returns a dictionary containing the latest coverage report info
for |project|."""
latest_report_info_url = utils.url_join(utils.GCS_BASE_URL,
- LATEST_REPORT_INFO_PATH,
- project_name + '.json')
- latest_cov_info = get_json_from_url(latest_report_info_url)
+ OSS_FUZZ_LATEST_COVERAGE_INFO_PATH,
+ oss_fuzz_project_name + '.json')
+ latest_cov_info = http_utils.get_json_from_url(latest_report_info_url)
if latest_cov_info is None:
logging.error('Could not get the coverage report json from url: %s.',
latest_report_info_url)
@@ -122,16 +129,17 @@ def _get_latest_cov_report_info(project_name):
return latest_cov_info
-def _get_fuzzer_stats_dir_url(project_name):
- """Gets latest coverage report info for a specific OSS-Fuzz project from GCS.
+def _get_oss_fuzz_fuzzer_stats_dir_url(oss_fuzz_project_name):
+ """Gets latest coverage report info for a specific OSS-Fuzz project from
+ GCS.
Args:
- project_name: The name of the relevant OSS-Fuzz project.
+ oss_fuzz_project_name: The name of the project.
Returns:
The projects coverage report info in json dict or None on failure.
"""
- latest_cov_info = _get_latest_cov_report_info(project_name)
+ latest_cov_info = _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name)
if not latest_cov_info:
return None
@@ -145,25 +153,52 @@ def _get_fuzzer_stats_dir_url(project_name):
return fuzzer_stats_dir_url
-def get_json_from_url(url):
- """Gets a json object from a specified HTTP URL.
+class FilesystemCoverage(BaseCoverage):
+ """Class that gets a project's coverage from the filesystem."""
- Args:
- url: The url of the json to be downloaded.
+ def __init__(self, repo_path, project_coverage_dir):
+ super().__init__(repo_path)
+ self.project_coverage_dir = project_coverage_dir
- Returns:
- A dictionary deserialized from JSON or None on failure.
- """
- try:
- response = urllib.request.urlopen(url)
- except urllib.error.HTTPError:
- logging.error('HTTP error with url %s.', url)
- return None
+ def get_target_coverage(self, target):
+ """Get the coverage report for a specific fuzz target.
- try:
- # read().decode() fixes compatibility issue with urllib response object.
- result_json = json.loads(response.read().decode())
- except (ValueError, TypeError, json.JSONDecodeError) as err:
- logging.error('Loading json from url %s failed with: %s.', url, str(err))
- return None
- return result_json
+ Args:
+ target: The name of the fuzz target whose coverage is requested.
+
+ Returns:
+ The target's coverage json dict or None on failure.
+ """
+ logging.info('Getting coverage for %s from filesystem.', target)
+ fuzzer_stats_json_path = os.path.join(self.project_coverage_dir,
+ 'fuzzer_stats', target + '.json')
+ if not os.path.exists(fuzzer_stats_json_path):
+ logging.warning('%s does not exist.', fuzzer_stats_json_path)
+ return None
+
+ with open(fuzzer_stats_json_path) as fuzzer_stats_json_file_handle:
+ try:
+ return json.load(fuzzer_stats_json_file_handle)
+ except json.decoder.JSONDecodeError as err:
+ logging.error('Could not decode: %s. Error: %s.',
+ fuzzer_stats_json_path, err)
+ return None
+
+
+def is_file_covered(file_cov):
+ """Returns whether the file is covered."""
+ return file_cov['summary']['regions']['covered']
+
+
+def get_coverage_per_file(target_cov):
+ """Returns the coverage per file within |target_cov|."""
+ return target_cov['data'][0]['files']
+
+
+def _normalize_repo_path(repo_path):
+ """Normalizes and returns |repo_path| to make sure cases like /src/curl and
+ /src/curl/ are both handled."""
+ repo_path = os.path.normpath(repo_path)
+ if not repo_path.endswith('/'):
+ repo_path += '/'
+ return repo_path
diff --git a/infra/cifuzz/get_coverage_test.py b/infra/cifuzz/get_coverage_test.py
new file mode 100644
index 000000000..fcfc9bd25
--- /dev/null
+++ b/infra/cifuzz/get_coverage_test.py
@@ -0,0 +1,239 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for get_coverage.py"""
+import os
+import json
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+import pytest
+
+import get_coverage
+
+# pylint: disable=protected-access
+
+TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'test_data')
+
+PROJECT_NAME = 'curl'
+REPO_PATH = '/src/curl'
+FUZZ_TARGET = 'curl_fuzzer'
+PROJECT_COV_JSON_FILENAME = 'example_curl_cov.json'
+FUZZ_TARGET_COV_JSON_FILENAME = 'example_curl_fuzzer_cov.json'
+INVALID_TARGET = 'not-a-fuzz-target'
+
+with open(os.path.join(TEST_DATA_PATH,
+ PROJECT_COV_JSON_FILENAME),) as cov_file_handle:
+ PROJECT_COV_INFO = json.loads(cov_file_handle.read())
+
+
+class GetOssFuzzFuzzerStatsDirUrlTest(unittest.TestCase):
+ """Tests _get_oss_fuzz_fuzzer_stats_dir_url."""
+
+ @mock.patch('http_utils.get_json_from_url',
+ return_value={
+ 'fuzzer_stats_dir':
+ 'gs://oss-fuzz-coverage/systemd/fuzzer_stats/20210303'
+ })
+ def test_get_valid_project(self, mock_get_json_from_url):
+    """Tests that the fuzzer stats dir URL is built for a valid project.
+
+ NOTE: This test relies on the PROJECT_NAME repo's coverage report.
+ The "example" project was not used because it has no coverage reports.
+ """
+ result = get_coverage._get_oss_fuzz_fuzzer_stats_dir_url(PROJECT_NAME)
+ (url,), _ = mock_get_json_from_url.call_args
+ self.assertEqual(
+ 'https://storage.googleapis.com/oss-fuzz-coverage/'
+ 'latest_report_info/curl.json', url)
+
+ expected_result = (
+ 'https://storage.googleapis.com/oss-fuzz-coverage/systemd/fuzzer_stats/'
+ '20210303')
+ self.assertEqual(result, expected_result)
+
+ def test_get_invalid_project(self):
+ """Tests that passing a bad project returns None."""
+ self.assertIsNone(
+ get_coverage._get_oss_fuzz_fuzzer_stats_dir_url('not-a-proj'))
+
+
+class OSSFuzzCoverageGetTargetCoverageTest(unittest.TestCase):
+ """Tests OSSFuzzCoverage.get_target_coverage."""
+
+ def setUp(self):
+ with mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info',
+ return_value=PROJECT_COV_INFO):
+ self.oss_fuzz_coverage = get_coverage.OSSFuzzCoverage(
+ REPO_PATH, PROJECT_NAME)
+
+ @mock.patch('http_utils.get_json_from_url', return_value={})
+ def test_valid_target(self, mock_get_json_from_url):
+ """Tests that a target's coverage report can be downloaded and parsed."""
+ self.oss_fuzz_coverage.get_target_coverage(FUZZ_TARGET)
+ (url,), _ = mock_get_json_from_url.call_args
+ self.assertEqual(
+ 'https://storage.googleapis.com/oss-fuzz-coverage/'
+ 'curl/fuzzer_stats/20200226/curl_fuzzer.json', url)
+
+ def test_invalid_target(self):
+ """Tests that passing an invalid target coverage report returns None."""
+ self.assertIsNone(
+ self.oss_fuzz_coverage.get_target_coverage(INVALID_TARGET))
+
+ @mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info',
+ return_value=None)
+ def test_invalid_project_json(self, _): # pylint: disable=no-self-use
+ """Tests an invalid project JSON results in None being returned."""
+ with pytest.raises(get_coverage.CoverageError):
+ get_coverage.OSSFuzzCoverage(REPO_PATH, PROJECT_NAME)
+
+
+def _get_expected_curl_covered_file_list():
+ """Returns the expected covered file list for
+ FUZZ_TARGET_COV_JSON_FILENAME."""
+ curl_files_list_path = os.path.join(TEST_DATA_PATH,
+ 'example_curl_file_list.json')
+ with open(curl_files_list_path) as file_handle:
+ return json.loads(file_handle.read())
+
+
+def _get_example_curl_coverage():
+ """Returns the contents of the fuzzer stats JSON file for
+ FUZZ_TARGET_COV_JSON_FILENAME."""
+ with open(os.path.join(TEST_DATA_PATH,
+ FUZZ_TARGET_COV_JSON_FILENAME)) as file_handle:
+ return json.loads(file_handle.read())
+
+
+class OSSFuzzCoverageGetFilesCoveredByTargetTest(unittest.TestCase):
+ """Tests OSSFuzzCoverage.get_files_covered_by_target."""
+
+ def setUp(self):
+ with mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info',
+ return_value=PROJECT_COV_INFO):
+ self.oss_fuzz_coverage = get_coverage.OSSFuzzCoverage(
+ REPO_PATH, PROJECT_NAME)
+
+ def test_valid_target(self):
+ """Tests that covered files can be retrieved from a coverage report."""
+ fuzzer_cov_data = _get_example_curl_coverage()
+ with mock.patch('get_coverage.OSSFuzzCoverage.get_target_coverage',
+ return_value=fuzzer_cov_data):
+ file_list = self.oss_fuzz_coverage.get_files_covered_by_target(
+ FUZZ_TARGET)
+
+ expected_file_list = _get_expected_curl_covered_file_list()
+ self.assertCountEqual(file_list, expected_file_list)
+
+ def test_invalid_target(self):
+    """Tests that passing an invalid fuzz target returns None."""
+ self.assertIsNone(
+ self.oss_fuzz_coverage.get_files_covered_by_target(INVALID_TARGET))
+
+
+class FilesystemCoverageGetFilesCoveredByTargetTest(
+ fake_filesystem_unittest.TestCase):
+ """Tests FilesystemCoverage.get_files_covered_by_target."""
+
+ def setUp(self):
+ _fuzzer_cov_data = _get_example_curl_coverage()
+ self._expected_file_list = _get_expected_curl_covered_file_list()
+ self.coverage_path = '/coverage'
+ self.filesystem_coverage = get_coverage.FilesystemCoverage(
+ REPO_PATH, self.coverage_path)
+ self.setUpPyfakefs()
+ self.fs.create_file(os.path.join(self.coverage_path, 'fuzzer_stats',
+ FUZZ_TARGET + '.json'),
+ contents=json.dumps(_fuzzer_cov_data))
+
+ def test_valid_target(self):
+ """Tests that covered files can be retrieved from a coverage report."""
+ file_list = self.filesystem_coverage.get_files_covered_by_target(
+ FUZZ_TARGET)
+ self.assertCountEqual(file_list, self._expected_file_list)
+
+ def test_invalid_target(self):
+ """Tests passing invalid fuzz target returns None."""
+ self.assertIsNone(
+ self.filesystem_coverage.get_files_covered_by_target(INVALID_TARGET))
+
+
+class IsFileCoveredTest(unittest.TestCase):
+ """Tests for is_file_covered."""
+
+ def test_is_file_covered_covered(self):
+ """Tests that is_file_covered returns True for a covered file."""
+ file_coverage = {
+ 'filename': '/src/systemd/src/basic/locale-util.c',
+ 'summary': {
+ 'regions': {
+ 'count': 204,
+ 'covered': 200,
+ 'notcovered': 200,
+ 'percent': 98.03
+ }
+ }
+ }
+ self.assertTrue(get_coverage.is_file_covered(file_coverage))
+
+ def test_is_file_covered_not_covered(self):
+ """Tests that is_file_covered returns False for a not covered file."""
+ file_coverage = {
+ 'filename': '/src/systemd/src/basic/locale-util.c',
+ 'summary': {
+ 'regions': {
+ 'count': 204,
+ 'covered': 0,
+ 'notcovered': 0,
+ 'percent': 0
+ }
+ }
+ }
+ self.assertFalse(get_coverage.is_file_covered(file_coverage))
+
+
+class GetOssFuzzLatestCovReportInfo(unittest.TestCase):
+ """Tests that _get_oss_fuzz_latest_cov_report_info works as
+ intended."""
+
+ PROJECT = 'project'
+ LATEST_REPORT_INFO_URL = ('https://storage.googleapis.com/oss-fuzz-coverage/'
+ 'latest_report_info/project.json')
+
+ @mock.patch('logging.error')
+ @mock.patch('http_utils.get_json_from_url', return_value={'coverage': 1})
+ def test_get_oss_fuzz_latest_cov_report_info(self, mock_get_json_from_url,
+ mock_error):
+ """Tests that _get_oss_fuzz_latest_cov_report_info works as intended."""
+ result = get_coverage._get_oss_fuzz_latest_cov_report_info(self.PROJECT)
+ self.assertEqual(result, {'coverage': 1})
+ mock_error.assert_not_called()
+ mock_get_json_from_url.assert_called_with(self.LATEST_REPORT_INFO_URL)
+
+ @mock.patch('logging.error')
+ @mock.patch('http_utils.get_json_from_url', return_value=None)
+ def test_get_oss_fuzz_latest_cov_report_info_fail(self, _, mock_error):
+ """Tests that _get_oss_fuzz_latest_cov_report_info works as intended when we
+ can't get latest report info."""
+ result = get_coverage._get_oss_fuzz_latest_cov_report_info('project')
+ self.assertIsNone(result)
+ mock_error.assert_called_with(
+ 'Could not get the coverage report json from url: %s.',
+ self.LATEST_REPORT_INFO_URL)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/infra/cifuzz/http_utils.py b/infra/cifuzz/http_utils.py
new file mode 100644
index 000000000..931183593
--- /dev/null
+++ b/infra/cifuzz/http_utils.py
@@ -0,0 +1,117 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Utility module for HTTP."""
+import json
+import logging
+import os
+import sys
+import tempfile
+import zipfile
+
+import requests
+
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+import retry
+
+_DOWNLOAD_URL_RETRIES = 3
+_DOWNLOAD_URL_BACKOFF = 1
+
+
+def download_and_unpack_zip(url, extract_directory, headers=None):
+ """Downloads and unpacks a zip file from an HTTP URL.
+
+ Args:
+ url: A url to the zip file to be downloaded and unpacked.
+ extract_directory: The path where the zip file should be extracted to.
+ headers: (Optional) HTTP headers to send with the download request.
+
+ Returns:
+ True on success.
+ """
+ if headers is None:
+ headers = {}
+
+ if not os.path.exists(extract_directory):
+ logging.error('Extract directory: %s does not exist.', extract_directory)
+ return False
+
+  # Gives the temporary zip file a unique identifier in the case that
+  # download_and_unpack_zip is done in parallel.
+ with tempfile.NamedTemporaryFile(suffix='.zip') as tmp_file:
+ if not download_url(url, tmp_file.name, headers=headers):
+ return False
+
+ try:
+ with zipfile.ZipFile(tmp_file.name, 'r') as zip_file:
+ zip_file.extractall(extract_directory)
+ except zipfile.BadZipFile:
+ logging.error('Error unpacking zip from %s. Bad Zipfile.', url)
+ return False
+
+ return True
+
+
+def download_url(*args, **kwargs):
+  """Wrapper around _download_url that returns False if _download_url
+  raises an exception."""
+ try:
+ return _download_url(*args, **kwargs)
+ except Exception: # pylint: disable=broad-except
+ return False
+
+
+def get_json_from_url(url):
+ """Gets a json object from a specified HTTP URL.
+
+ Args:
+ url: The url of the json to be downloaded.
+
+ Returns:
+ A dictionary deserialized from JSON or None on failure.
+ """
+ response = requests.get(url)
+ try:
+ return response.json()
+ except (ValueError, TypeError, json.JSONDecodeError) as err:
+ logging.error('Loading json from url %s failed with: %s.', url, str(err))
+ return None
+
+
+@retry.wrap(_DOWNLOAD_URL_RETRIES, _DOWNLOAD_URL_BACKOFF)
+def _download_url(url, filename, headers=None):
+ """Downloads the file located at |url|, using HTTP to |filename|.
+
+ Args:
+ url: A url to a file to download.
+ filename: The path the file should be downloaded to.
+ headers: (Optional) HTTP headers to send with the download request.
+
+ Returns:
+ True on success.
+ """
+ if headers is None:
+ headers = {}
+
+ response = requests.get(url, headers=headers)
+
+ if response.status_code != 200:
+ logging.error('Unable to download from: %s. Code: %d. Content: %s.', url,
+ response.status_code, response.content)
+ return False
+
+ with open(filename, 'wb') as file_handle:
+ file_handle.write(response.content)
+
+ return True
diff --git a/infra/cifuzz/http_utils_test.py b/infra/cifuzz/http_utils_test.py
new file mode 100644
index 000000000..64d0598ac
--- /dev/null
+++ b/infra/cifuzz/http_utils_test.py
@@ -0,0 +1,71 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for http_utils.py"""
+
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+import http_utils
+
+mock_get_response = mock.MagicMock(status_code=200, content=b'')
+
+
+class DownloadUrlTest(unittest.TestCase):
+ """Tests that download_url works."""
+ URL = 'https://example.com/file'
+ FILE_PATH = '/tmp/file'
+
+ @mock.patch('time.sleep')
+ @mock.patch('requests.get', return_value=mock_get_response)
+ def test_download_url_no_error(self, mock_urlretrieve, _):
+ """Tests that download_url works when there is no error."""
+ self.assertTrue(http_utils.download_url(self.URL, self.FILE_PATH))
+ self.assertEqual(1, mock_urlretrieve.call_count)
+
+ @mock.patch('time.sleep')
+ @mock.patch('logging.error')
+ @mock.patch('requests.get',
+ return_value=mock.MagicMock(status_code=404, content=b''))
+ def test_download_url_http_error(self, mock_get, mock_error, _):
+ """Tests that download_url doesn't retry when there is an HTTP error."""
+ self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH))
+ mock_error.assert_called_with(
+ 'Unable to download from: %s. Code: %d. Content: %s.', self.URL, 404,
+ b'')
+ self.assertEqual(1, mock_get.call_count)
+
+ @mock.patch('time.sleep')
+ @mock.patch('requests.get', side_effect=ConnectionResetError)
+ def test_download_url_connection_error(self, mock_get, mock_sleep):
+    """Tests that download_url retries when there is a connection error."""
+ self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH))
+ self.assertEqual(4, mock_get.call_count)
+ self.assertEqual(3, mock_sleep.call_count)
+
+
+class DownloadAndUnpackZipTest(fake_filesystem_unittest.TestCase):
+ """Tests download_and_unpack_zip."""
+
+ def setUp(self):
+ self.setUpPyfakefs()
+
+ @mock.patch('requests.get', return_value=mock_get_response)
+ def test_bad_zip_download(self, _):
+    """Tests download_and_unpack_zip returns False when a bad zip is passed."""
+ self.fs.create_file('/url_tmp.zip', contents='Test file.')
+ self.assertFalse(
+ http_utils.download_and_unpack_zip('/not/a/real/url',
+ '/extract-directory'))
diff --git a/infra/cifuzz/package-lock.json b/infra/cifuzz/package-lock.json
new file mode 100644
index 000000000..9ee58404c
--- /dev/null
+++ b/infra/cifuzz/package-lock.json
@@ -0,0 +1,316 @@
+{
+ "name": "cifuzz",
+ "version": "1.0.0",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "version": "1.0.0",
+ "license": "Apache2",
+ "dependencies": {
+ "@actions/artifact": "^0.5.2"
+ }
+ },
+ "node_modules/@actions/artifact": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.5.2.tgz",
+ "integrity": "sha512-q/r8WSqyxBJ0ffLCRrtjCBTGnAYqP+ID4yG7f7YSlhrQ4thNg/d+Tq9f1YkLPKX46ZR97OWtGDY+oU/nxcqvLw==",
+ "dependencies": {
+ "@actions/core": "^1.2.6",
+ "@actions/http-client": "^1.0.11",
+ "@types/tmp": "^0.1.0",
+ "tmp": "^0.1.0",
+ "tmp-promise": "^2.0.2"
+ }
+ },
+ "node_modules/@actions/core": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz",
+ "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==",
+ "dependencies": {
+ "@actions/http-client": "^1.0.11"
+ }
+ },
+ "node_modules/@actions/http-client": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
+ "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
+ "dependencies": {
+ "tunnel": "0.0.6"
+ }
+ },
+ "node_modules/@types/tmp": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz",
+ "integrity": "sha512-6IwZ9HzWbCq6XoQWhxLpDjuADodH/MKXRUIDFudvgjcVdjFknvmR+DNsoUeer4XPrEnrZs04Jj+kfV9pFsrhmA=="
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
+ },
+ "node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
+ },
+ "node_modules/fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
+ },
+ "node_modules/glob": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
+ "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "dependencies": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
+ },
+ "node_modules/minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "dependencies": {
+ "wrappy": "1"
+ }
+ },
+ "node_modules/path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/rimraf": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "dependencies": {
+ "glob": "^7.1.3"
+ },
+ "bin": {
+ "rimraf": "bin.js"
+ }
+ },
+ "node_modules/tmp": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz",
+ "integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==",
+ "dependencies": {
+ "rimraf": "^2.6.3"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/tmp-promise": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.1.1.tgz",
+ "integrity": "sha512-Z048AOz/w9b6lCbJUpevIJpRpUztENl8zdv1bmAKVHimfqRFl92ROkmT9rp7TVBnrEw2gtMTol/2Cp2S2kJa4Q==",
+ "dependencies": {
+ "tmp": "0.1.0"
+ }
+ },
+ "node_modules/tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
+ "engines": {
+ "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+ }
+ },
+ "node_modules/wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
+ }
+ },
+ "dependencies": {
+ "@actions/artifact": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.5.2.tgz",
+ "integrity": "sha512-q/r8WSqyxBJ0ffLCRrtjCBTGnAYqP+ID4yG7f7YSlhrQ4thNg/d+Tq9f1YkLPKX46ZR97OWtGDY+oU/nxcqvLw==",
+ "requires": {
+ "@actions/core": "^1.2.6",
+ "@actions/http-client": "^1.0.11",
+ "@types/tmp": "^0.1.0",
+ "tmp": "^0.1.0",
+ "tmp-promise": "^2.0.2"
+ }
+ },
+ "@actions/core": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz",
+ "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==",
+ "requires": {
+ "@actions/http-client": "^1.0.11"
+ }
+ },
+ "@actions/http-client": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
+ "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
+ "requires": {
+ "tunnel": "0.0.6"
+ }
+ },
+ "@types/tmp": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz",
+ "integrity": "sha512-6IwZ9HzWbCq6XoQWhxLpDjuADodH/MKXRUIDFudvgjcVdjFknvmR+DNsoUeer4XPrEnrZs04Jj+kfV9pFsrhmA=="
+ },
+ "balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
+ },
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
+ },
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
+ },
+ "glob": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
+ "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
+ },
+ "minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ }
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "requires": {
+ "wrappy": "1"
+ }
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
+ },
+ "rimraf": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "tmp": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz",
+ "integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==",
+ "requires": {
+ "rimraf": "^2.6.3"
+ }
+ },
+ "tmp-promise": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.1.1.tgz",
+ "integrity": "sha512-Z048AOz/w9b6lCbJUpevIJpRpUztENl8zdv1bmAKVHimfqRFl92ROkmT9rp7TVBnrEw2gtMTol/2Cp2S2kJa4Q==",
+ "requires": {
+ "tmp": "0.1.0"
+ }
+ },
+ "tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+ },
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
+ }
+ }
+}
diff --git a/infra/cifuzz/package.json b/infra/cifuzz/package.json
new file mode 100644
index 000000000..5823747dd
--- /dev/null
+++ b/infra/cifuzz/package.json
@@ -0,0 +1,10 @@
+{
+ "name": "cifuzz",
+ "version": "1.0.0",
+ "description": "",
+ "author": "Google",
+ "license": "Apache2",
+ "dependencies": {
+ "@actions/artifact": "^0.5.2"
+ }
+}
diff --git a/infra/cifuzz/requirements.txt b/infra/cifuzz/requirements.txt
new file mode 100644
index 000000000..270c15547
--- /dev/null
+++ b/infra/cifuzz/requirements.txt
@@ -0,0 +1,2 @@
+clusterfuzz==2.5.6
+requests==2.25.1
diff --git a/infra/cifuzz/run_cifuzz.py b/infra/cifuzz/run_cifuzz.py
new file mode 100644
index 000000000..0382d78a8
--- /dev/null
+++ b/infra/cifuzz/run_cifuzz.py
@@ -0,0 +1,88 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Script for running CIFuzz end-to-end. This is meant to work outside any
+docker image. This cannot depend on any CIFuzz code or third party packages."""
+import os
+import subprocess
+import sys
+import tempfile
+import logging
+
+INFRA_DIR = os.path.dirname(os.path.dirname(__file__))
+DEFAULT_ENVS = [('DRY_RUN', '0'), ('SANITIZER', 'address')]
+BASE_CIFUZZ_DOCKER_TAG = 'gcr.io/oss-fuzz-base'
+
+
+def set_default_env_var_if_unset(env_var, default_value):
+ """Sets the value of |env_var| in the environment to |default_value| if it was
+ not already set."""
+ if env_var not in os.environ:
+ os.environ[env_var] = default_value
+
+
+def docker_run(name, workspace, project_src_path):
+ """Runs a CIFuzz docker container with |name|."""
+ command = [
+ 'docker', 'run', '--name', name, '--rm', '-e', 'PROJECT_SRC_PATH', '-e',
+ 'OSS_FUZZ_PROJECT_NAME', '-e', 'WORKSPACE', '-e', 'REPOSITORY', '-e',
+ 'DRY_RUN', '-e', 'CI', '-e', 'SANITIZER', '-e', 'GIT_SHA'
+ ]
+ if project_src_path:
+ command += ['-v', f'{project_src_path}:{project_src_path}']
+ command += [
+ '-v', '/var/run/docker.sock:/var/run/docker.sock', '-v',
+ f'{workspace}:{workspace}', f'{BASE_CIFUZZ_DOCKER_TAG}/{name}'
+ ]
+ print('Running docker command:', command)
+ subprocess.run(command, check=True)
+
+
+def docker_build(image):
+ """Builds the CIFuzz |image|. Only suitable for building CIFuzz images."""
+ command = [
+ 'docker', 'build', '-t', f'{BASE_CIFUZZ_DOCKER_TAG}/{image}', '--file',
+ f'{image}.Dockerfile', '.'
+ ]
+ subprocess.run(command, check=True, cwd=INFRA_DIR)
+
+
+def main():
+ """Builds and runs fuzzers using CIFuzz."""
+ for env_var, default_value in DEFAULT_ENVS:
+ set_default_env_var_if_unset(env_var, default_value)
+
+ repository = os.getenv('REPOSITORY')
+ assert repository
+
+ project_src_path = os.getenv('PROJECT_SRC_PATH')
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ if 'WORKSPACE' not in os.environ:
+ os.environ['WORKSPACE'] = temp_dir
+
+ workspace = os.environ['WORKSPACE']
+
+ docker_build('build_fuzzers')
+ docker_run('build_fuzzers', workspace, project_src_path)
+ docker_build('run_fuzzers')
+ try:
+ docker_run('run_fuzzers', workspace, project_src_path)
+ except subprocess.CalledProcessError:
+ logging.error('run_fuzzers failed.')
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/infra/cifuzz/run_fuzzers.py b/infra/cifuzz/run_fuzzers.py
index 513cfb6fa..67c4c66fd 100644
--- a/infra/cifuzz/run_fuzzers.py
+++ b/infra/cifuzz/run_fuzzers.py
@@ -21,7 +21,9 @@ import time
import clusterfuzz_deployment
import fuzz_target
+import generate_coverage_report
import stack_parser
+import workspace_utils
# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -41,12 +43,17 @@ class BaseFuzzTargetRunner:
def __init__(self, config):
self.config = config
+ self.workspace = workspace_utils.Workspace(config)
self.clusterfuzz_deployment = (
- clusterfuzz_deployment.get_clusterfuzz_deployment(self.config))
+ clusterfuzz_deployment.get_clusterfuzz_deployment(
+ self.config, self.workspace))
+
# Set by the initialize method.
- self.out_dir = None
self.fuzz_target_paths = None
- self.artifacts_dir = None
+
+ def get_fuzz_targets(self):
+ """Returns fuzz targets in out directory."""
+ return utils.get_fuzz_targets(self.workspace.out)
def initialize(self):
"""Initialization method. Must be called before calling run_fuzz_targets.
@@ -64,55 +71,51 @@ class BaseFuzzTargetRunner:
self.config.fuzz_seconds)
return False
- self.out_dir = os.path.join(self.config.workspace, 'out')
- if not os.path.exists(self.out_dir):
- logging.error('Out directory: %s does not exist.', self.out_dir)
+ if not os.path.exists(self.workspace.out):
+ logging.error('Out directory: %s does not exist.', self.workspace.out)
return False
- self.artifacts_dir = os.path.join(self.out_dir, 'artifacts')
- if not os.path.exists(self.artifacts_dir):
- os.mkdir(self.artifacts_dir)
- elif (not os.path.isdir(self.artifacts_dir) or
- os.listdir(self.artifacts_dir)):
+ if not os.path.exists(self.workspace.artifacts):
+ os.makedirs(self.workspace.artifacts)
+ elif (not os.path.isdir(self.workspace.artifacts) or
+ os.listdir(self.workspace.artifacts)):
logging.error('Artifacts path: %s exists and is not an empty directory.',
- self.artifacts_dir)
+ self.workspace.artifacts)
return False
- self.fuzz_target_paths = utils.get_fuzz_targets(self.out_dir)
+ self.fuzz_target_paths = self.get_fuzz_targets()
logging.info('Fuzz targets: %s', self.fuzz_target_paths)
if not self.fuzz_target_paths:
logging.error('No fuzz targets were found in out directory: %s.',
- self.out_dir)
+ self.workspace.out)
return False
return True
+ def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use
+ """Cleans up after running |fuzz_target_obj|."""
+ raise NotImplementedError('Child class must implement method.')
+
def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use
"""Fuzzes with |fuzz_target_obj| and returns the result."""
- # TODO(metzman): Make children implement this so that the batch runner can
- # do things differently.
- result = fuzz_target_obj.fuzz()
- fuzz_target_obj.free_disk_if_needed()
- return result
+ raise NotImplementedError('Child class must implement method.')
@property
def quit_on_bug_found(self):
"""Property that is checked to determine if fuzzing should quit after first
bug is found."""
- raise NotImplementedError('Child class must implement method')
+ raise NotImplementedError('Child class must implement method.')
def get_fuzz_target_artifact(self, target, artifact_name):
"""Returns the path of a fuzzing artifact named |artifact_name| for
|fuzz_target|."""
- artifact_name = '{target_name}-{sanitizer}-{artifact_name}'.format(
- target_name=target.target_name,
- sanitizer=self.config.sanitizer,
- artifact_name=artifact_name)
- return os.path.join(self.artifacts_dir, artifact_name)
+ artifact_name = (f'{target.target_name}-{self.config.sanitizer}-'
+ f'{artifact_name}')
+ return os.path.join(self.workspace.artifacts, artifact_name)
def create_fuzz_target_obj(self, target_path, run_seconds):
"""Returns a fuzz target object."""
- return fuzz_target.FuzzTarget(target_path, run_seconds, self.out_dir,
+ return fuzz_target.FuzzTarget(target_path, run_seconds, self.workspace,
self.clusterfuzz_deployment, self.config)
def run_fuzz_targets(self):
@@ -134,6 +137,7 @@ class BaseFuzzTargetRunner:
target = self.create_fuzz_target_obj(target_path, run_seconds)
start_time = time.time()
result = self.run_fuzz_target(target)
+ self.cleanup_after_fuzz_target_run(target)
# It's OK if this goes negative since we take max when determining
# run_seconds.
@@ -162,6 +166,60 @@ class BaseFuzzTargetRunner:
return bug_found
+class PruneTargetRunner(BaseFuzzTargetRunner):
+ """Runner that prunes corpora."""
+
+ @property
+ def quit_on_bug_found(self):
+ return False
+
+ def run_fuzz_target(self, fuzz_target_obj):
+ """Prunes with |fuzz_target_obj| and returns the result."""
+ result = fuzz_target_obj.prune()
+ logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path))
+ self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name,
+ result.corpus_path,
+ replace=True)
+ return result
+
+ def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use
+ """Cleans up after pruning with |fuzz_target_obj|."""
+ fuzz_target_obj.free_disk_if_needed()
+
+
+class CoverageTargetRunner(BaseFuzzTargetRunner):
+ """Runner that runs the 'coverage' command."""
+
+ @property
+ def quit_on_bug_found(self):
+ raise NotImplementedError('Not implemented for CoverageTargetRunner.')
+
+ def get_fuzz_targets(self):
+ """Returns fuzz targets in out directory."""
+ # We only want fuzz targets from the root because during the coverage build,
+ # a lot of the image's filesystem is copied into /out for the purpose of
+ # generating coverage reports.
+    # TODO(metzman): Figure out if top_level_only should be the only behavior
+ # for this function.
+ return utils.get_fuzz_targets(self.workspace.out, top_level_only=True)
+
+ def run_fuzz_targets(self):
+ """Generates a coverage report. Always returns False since it never finds
+ any bugs."""
+ generate_coverage_report.generate_coverage_report(
+ self.fuzz_target_paths, self.workspace, self.clusterfuzz_deployment,
+ self.config)
+ return False
+
+ def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use
+ """Fuzzes with |fuzz_target_obj| and returns the result."""
+ raise NotImplementedError('Not implemented for CoverageTargetRunner.')
+
+ def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use
+ """Cleans up after running |fuzz_target_obj|."""
+ raise NotImplementedError('Not implemented for CoverageTargetRunner.')
+
+
class CiFuzzTargetRunner(BaseFuzzTargetRunner):
"""Runner for fuzz targets used in CI (patch-fuzzing) context."""
@@ -169,6 +227,13 @@ class CiFuzzTargetRunner(BaseFuzzTargetRunner):
def quit_on_bug_found(self):
return True
+ def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use
+ """Cleans up after running |fuzz_target_obj|."""
+ fuzz_target_obj.free_disk_if_needed()
+
+ def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use
+ return fuzz_target_obj.fuzz()
+
class BatchFuzzTargetRunner(BaseFuzzTargetRunner):
"""Runner for fuzz targets used in batch fuzzing context."""
@@ -177,14 +242,42 @@ class BatchFuzzTargetRunner(BaseFuzzTargetRunner):
def quit_on_bug_found(self):
return False
+ def run_fuzz_target(self, fuzz_target_obj):
+ """Fuzzes with |fuzz_target_obj| and returns the result."""
+ result = fuzz_target_obj.fuzz()
+ logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path))
+ self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name,
+ result.corpus_path)
+ return result
+
+ def cleanup_after_fuzz_target_run(self, fuzz_target_obj):
+ """Cleans up after running |fuzz_target_obj|."""
+ # This must be done after we upload the corpus, otherwise it will be deleted
+ # before we get a chance to upload it. We can't delete the fuzz target
+ # because it is needed when we upload the build.
+ fuzz_target_obj.free_disk_if_needed(delete_fuzz_target=False)
+
+ def run_fuzz_targets(self):
+ result = super().run_fuzz_targets()
+ self.clusterfuzz_deployment.upload_crashes()
+ return result
+
+
+_RUN_FUZZERS_MODE_RUNNER_MAPPING = {
+ 'batch': BatchFuzzTargetRunner,
+ 'coverage': CoverageTargetRunner,
+ 'prune': PruneTargetRunner,
+ 'ci': CiFuzzTargetRunner,
+}
+
def get_fuzz_target_runner(config):
"""Returns a fuzz target runner object based on the run_fuzzers_mode of
|config|."""
- logging.info('RUN_FUZZERS_MODE is: %s', config.run_fuzzers_mode)
- if config.run_fuzzers_mode == 'batch':
- return BatchFuzzTargetRunner(config)
- return CiFuzzTargetRunner(config)
+ runner = _RUN_FUZZERS_MODE_RUNNER_MAPPING[config.run_fuzzers_mode](config)
+ logging.info('RUN_FUZZERS_MODE is: %s. Runner: %s.', config.run_fuzzers_mode,
+ runner)
+ return runner
def run_fuzzers(config): # pylint: disable=too-many-locals
diff --git a/infra/cifuzz/run_fuzzers_entrypoint.py b/infra/cifuzz/run_fuzzers_entrypoint.py
index 46e208dc0..adfd1a960 100644
--- a/infra/cifuzz/run_fuzzers_entrypoint.py
+++ b/infra/cifuzz/run_fuzzers_entrypoint.py
@@ -33,19 +33,49 @@ def delete_unneeded_docker_images(config):
if not config.low_disk_space:
return
logging.info('Deleting builder docker images to save disk space.')
- project_image = docker.get_project_image_name(config.project_name)
+ project_image = docker.get_project_image_name(config.oss_fuzz_project_name)
images = [
project_image,
- docker.BASE_RUNNER_TAG,
- docker.MSAN_LIBS_BUILDER_TAG,
+ docker.BASE_BUILDER_TAG,
+ docker.BASE_BUILDER_TAG + ':xenial',
+ docker.BASE_BUILDER_TAG + '-go',
+ docker.BASE_BUILDER_TAG + '-jvm',
+ docker.BASE_BUILDER_TAG + '-python',
+ docker.BASE_BUILDER_TAG + '-rust',
+ docker.BASE_BUILDER_TAG + '-swift',
]
docker.delete_images(images)
+def run_fuzzers_entrypoint():
+ """This is the entrypoint for the run_fuzzers github action.
+ This action can be added to any OSS-Fuzz project's workflow that uses
+ Github."""
+ config = config_utils.RunFuzzersConfig()
+ # The default return code when an error occurs.
+ returncode = 1
+ if config.dry_run:
+ # Sets the default return code on error to success.
+ returncode = 0
+
+ delete_unneeded_docker_images(config)
+ # Run the specified project's fuzzers from the build.
+ result = run_fuzzers.run_fuzzers(config)
+ if result == run_fuzzers.RunFuzzersResult.ERROR:
+ logging.error('Error occurred while running in workspace %s.',
+ config.workspace)
+ return returncode
+ if result == run_fuzzers.RunFuzzersResult.BUG_FOUND:
+ logging.info('Bug found.')
+ if not config.dry_run:
+ # Return 2 when a bug was found by a fuzzer causing the CI to fail.
+ return 2
+ return 0
+
+
def main():
- """Runs OSS-Fuzz project's fuzzers for CI tools.
+ """Runs project's fuzzers for CI tools.
This is the entrypoint for the run_fuzzers github action.
- This action can be added to any OSS-Fuzz project's workflow that uses Github.
NOTE: libFuzzer binaries must be located in the ${GITHUB_WORKSPACE}/out
directory in order for this action to be used. This action will only fuzz the
@@ -65,32 +95,9 @@ def main():
SANITIZER: The sanitizer to use when running fuzzers.
Returns:
- 0 on success or 1 on failure.
+ 0 on success or nonzero on failure.
"""
- config = config_utils.RunFuzzersConfig()
- # The default return code when an error occurs.
- returncode = 1
- if config.dry_run:
- # Sets the default return code on error to success.
- returncode = 0
-
- if not config.workspace:
- logging.error('This script needs to be run within Github actions.')
- return returncode
-
- delete_unneeded_docker_images(config)
- # Run the specified project's fuzzers from the build.
- result = run_fuzzers.run_fuzzers(config)
- if result == run_fuzzers.RunFuzzersResult.ERROR:
- logging.error('Error occurred while running in workspace %s.',
- config.workspace)
- return returncode
- if result == run_fuzzers.RunFuzzersResult.BUG_FOUND:
- logging.info('Bug found.')
- if not config.dry_run:
- # Return 2 when a bug was found by a fuzzer causing the CI to fail.
- return 2
- return 0
+ return run_fuzzers_entrypoint()
if __name__ == '__main__':
diff --git a/infra/cifuzz/run_fuzzers_test.py b/infra/cifuzz/run_fuzzers_test.py
index b2659903c..db442b188 100644
--- a/infra/cifuzz/run_fuzzers_test.py
+++ b/infra/cifuzz/run_fuzzers_test.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for running fuzzers."""
+import json
import os
import sys
import shutil
@@ -22,7 +23,7 @@ from unittest import mock
import parameterized
from pyfakefs import fake_filesystem_unittest
-import config_utils
+import build_fuzzers
import fuzz_target
import run_fuzzers
@@ -30,6 +31,7 @@ import run_fuzzers
INFRA_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(INFRA_DIR)
+import helper
import test_helpers
# NOTE: This integration test relies on
@@ -49,22 +51,6 @@ UNDEFINED_FUZZER = 'curl_fuzzer_undefined'
FUZZ_SECONDS = 10
-def _create_config(**kwargs):
- """Creates a config object and then sets every attribute that is a key in
- |kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
- attribute of Config."""
- with mock.patch('os.path.basename', return_value=None), mock.patch(
- 'config_utils.get_project_src_path',
- return_value=None), mock.patch('config_utils._is_dry_run',
- return_value=True):
- config = config_utils.RunFuzzersConfig()
-
- for key, value in kwargs.items():
- assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
- setattr(config, key, value)
- return config
-
-
class RunFuzzerIntegrationTestMixin: # pylint: disable=too-few-public-methods,invalid-name
"""Mixin for integration test classes that runbuild_fuzzers on builds of a
specific sanitizer."""
@@ -72,39 +58,43 @@ class RunFuzzerIntegrationTestMixin: # pylint: disable=too-few-public-methods,i
FUZZER_DIR = None
FUZZER = None
+ def setUp(self):
+ """Patch the environ so that we can execute runner scripts."""
+ test_helpers.patch_environ(self, runner=True)
+
def _test_run_with_sanitizer(self, fuzzer_dir, sanitizer):
"""Calls run_fuzzers on fuzzer_dir and |sanitizer| and asserts
the run succeeded and that no bug was found."""
with test_helpers.temp_dir_copy(fuzzer_dir) as fuzzer_dir_copy:
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=fuzzer_dir_copy,
- project_name='curl',
- sanitizer=sanitizer)
+ config = test_helpers.create_run_config(fuzz_seconds=FUZZ_SECONDS,
+ workspace=fuzzer_dir_copy,
+ oss_fuzz_project_name='curl',
+ sanitizer=sanitizer)
result = run_fuzzers.run_fuzzers(config)
self.assertEqual(result, run_fuzzers.RunFuzzersResult.NO_BUG_FOUND)
+@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
+ 'INTEGRATION_TESTS=1 not set')
class RunMemoryFuzzerIntegrationTest(RunFuzzerIntegrationTestMixin,
unittest.TestCase):
"""Integration test for build_fuzzers with an MSAN build."""
FUZZER_DIR = MEMORY_FUZZER_DIR
FUZZER = MEMORY_FUZZER
- @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
- 'INTEGRATION_TESTS=1 not set')
def test_run_with_memory_sanitizer(self):
"""Tests run_fuzzers with a valid MSAN build."""
self._test_run_with_sanitizer(self.FUZZER_DIR, 'memory')
+@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
+ 'INTEGRATION_TESTS=1 not set')
class RunUndefinedFuzzerIntegrationTest(RunFuzzerIntegrationTestMixin,
unittest.TestCase):
"""Integration test for build_fuzzers with an UBSAN build."""
FUZZER_DIR = UNDEFINED_FUZZER_DIR
FUZZER = UNDEFINED_FUZZER
- @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
- 'INTEGRATION_TESTS=1 not set')
def test_run_with_undefined_sanitizer(self):
"""Tests run_fuzzers with a valid UBSAN build."""
self._test_run_with_sanitizer(self.FUZZER_DIR, 'undefined')
@@ -114,19 +104,22 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
"""Tests BaseFuzzTargetRunner."""
def _create_runner(self, **kwargs): # pylint: disable=no-self-use
- defaults = {'fuzz_seconds': FUZZ_SECONDS, 'project_name': EXAMPLE_PROJECT}
+ defaults = {
+ 'fuzz_seconds': FUZZ_SECONDS,
+ 'oss_fuzz_project_name': EXAMPLE_PROJECT
+ }
for default_key, default_value in defaults.items():
if default_key not in kwargs:
kwargs[default_key] = default_value
- config = _create_config(**kwargs)
+ config = test_helpers.create_run_config(**kwargs)
return run_fuzzers.BaseFuzzTargetRunner(config)
def _test_initialize_fail(self, expected_error_args, **create_runner_kwargs):
- with mock.patch('logging.error') as mocked_error:
+ with mock.patch('logging.error') as mock_error:
runner = self._create_runner(**create_runner_kwargs)
self.assertFalse(runner.initialize())
- mocked_error.assert_called_with(*expected_error_args)
+ mock_error.assert_called_with(*expected_error_args)
@parameterized.parameterized.expand([(0,), (None,), (-1,)])
def test_initialize_invalid_fuzz_seconds(self, fuzz_seconds):
@@ -134,10 +127,10 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
expected_error_args = ('Fuzz_seconds argument must be greater than 1, '
'but was: %s.', fuzz_seconds)
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
+ out_path = os.path.join(tmp_dir, 'build-out')
os.mkdir(out_path)
- with mock.patch('utils.get_fuzz_targets') as mocked_get_fuzz_targets:
- mocked_get_fuzz_targets.return_value = [
+ with mock.patch('utils.get_fuzz_targets') as mock_get_fuzz_targets:
+ mock_get_fuzz_targets.return_value = [
os.path.join(out_path, 'fuzz_target')
]
self._test_initialize_fail(expected_error_args,
@@ -147,16 +140,17 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
def test_initialize_no_out_dir(self):
"""Tests initialize fails with no out dir."""
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
+ out_path = os.path.join(tmp_dir, 'build-out')
expected_error_args = ('Out directory: %s does not exist.', out_path)
self._test_initialize_fail(expected_error_args, workspace=tmp_dir)
def test_initialize_nonempty_artifacts(self):
"""Tests initialize with a file artifacts path."""
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
+ out_path = os.path.join(tmp_dir, 'build-out')
os.mkdir(out_path)
- artifacts_path = os.path.join(out_path, 'artifacts')
+ os.makedirs(os.path.join(tmp_dir, 'out'))
+ artifacts_path = os.path.join(tmp_dir, 'out', 'artifacts')
with open(artifacts_path, 'w') as artifacts_handle:
artifacts_handle.write('fake')
expected_error_args = (
@@ -167,8 +161,9 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
def test_initialize_bad_artifacts(self):
"""Tests initialize with a non-empty artifacts path."""
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
- artifacts_path = os.path.join(out_path, 'artifacts')
+ out_path = os.path.join(tmp_dir, 'build-out')
+ os.mkdir(out_path)
+ artifacts_path = os.path.join(tmp_dir, 'out', 'artifacts')
os.makedirs(artifacts_path)
artifact_path = os.path.join(artifacts_path, 'artifact')
with open(artifact_path, 'w') as artifact_handle:
@@ -180,37 +175,37 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
@mock.patch('utils.get_fuzz_targets')
@mock.patch('logging.error')
- def test_initialize_empty_artifacts(self, mocked_log_error,
- mocked_get_fuzz_targets):
+ def test_initialize_empty_artifacts(self, mock_log_error,
+ mock_get_fuzz_targets):
"""Tests initialize with an empty artifacts dir."""
- mocked_get_fuzz_targets.return_value = ['fuzz-target']
+ mock_get_fuzz_targets.return_value = ['fuzz-target']
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
- artifacts_path = os.path.join(out_path, 'artifacts')
+ out_path = os.path.join(tmp_dir, 'build-out')
+ os.mkdir(out_path)
+ artifacts_path = os.path.join(tmp_dir, 'out', 'artifacts')
os.makedirs(artifacts_path)
runner = self._create_runner(workspace=tmp_dir)
self.assertTrue(runner.initialize())
- mocked_log_error.assert_not_called()
+ mock_log_error.assert_not_called()
self.assertTrue(os.path.isdir(artifacts_path))
@mock.patch('utils.get_fuzz_targets')
@mock.patch('logging.error')
- def test_initialize_no_artifacts(self, mocked_log_error,
- mocked_get_fuzz_targets):
+ def test_initialize_no_artifacts(self, mock_log_error, mock_get_fuzz_targets):
"""Tests initialize with no artifacts dir (the expected setting)."""
- mocked_get_fuzz_targets.return_value = ['fuzz-target']
+ mock_get_fuzz_targets.return_value = ['fuzz-target']
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
- os.makedirs(out_path)
+ out_path = os.path.join(tmp_dir, 'build-out')
+ os.mkdir(out_path)
runner = self._create_runner(workspace=tmp_dir)
self.assertTrue(runner.initialize())
- mocked_log_error.assert_not_called()
- self.assertTrue(os.path.isdir(os.path.join(out_path, 'artifacts')))
+ mock_log_error.assert_not_called()
+ self.assertTrue(os.path.isdir(os.path.join(tmp_dir, 'out', 'artifacts')))
def test_initialize_no_fuzz_targets(self):
"""Tests initialize with no fuzz targets."""
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
+ out_path = os.path.join(tmp_dir, 'build-out')
os.makedirs(out_path)
expected_error_args = ('No fuzz targets were found in out directory: %s.',
out_path)
@@ -218,18 +213,21 @@ class BaseFuzzTargetRunnerTest(unittest.TestCase):
def test_get_fuzz_target_artifact(self):
"""Tests that get_fuzz_target_artifact works as intended."""
- runner = self._create_runner()
- artifacts_dir = 'artifacts-dir'
- runner.artifacts_dir = artifacts_dir
- artifact_name = 'artifact-name'
- target = mock.MagicMock()
- target_name = 'target_name'
- target.target_name = target_name
- fuzz_target_artifact = runner.get_fuzz_target_artifact(
- target, artifact_name)
- expected_fuzz_target_artifact = (
- 'artifacts-dir/target_name-address-artifact-name')
- self.assertEqual(fuzz_target_artifact, expected_fuzz_target_artifact)
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ runner = self._create_runner(workspace=tmp_dir)
+ crashes_dir = 'crashes-dir'
+ runner.crashes_dir = crashes_dir
+ artifact_name = 'artifact-name'
+ target = mock.MagicMock()
+ target_name = 'target_name'
+ target.target_name = target_name
+
+ fuzz_target_artifact = runner.get_fuzz_target_artifact(
+ target, artifact_name)
+ expected_fuzz_target_artifact = os.path.join(
+ tmp_dir, 'out', 'artifacts', 'target_name-address-artifact-name')
+
+ self.assertEqual(fuzz_target_artifact, expected_fuzz_target_artifact)
class CiFuzzTargetRunnerTest(fake_filesystem_unittest.TestCase):
@@ -241,91 +239,176 @@ class CiFuzzTargetRunnerTest(fake_filesystem_unittest.TestCase):
@mock.patch('utils.get_fuzz_targets')
@mock.patch('run_fuzzers.CiFuzzTargetRunner.run_fuzz_target')
@mock.patch('run_fuzzers.CiFuzzTargetRunner.create_fuzz_target_obj')
- def test_run_fuzz_targets_quits(self, mocked_create_fuzz_target_obj,
- mocked_run_fuzz_target,
- mocked_get_fuzz_targets):
+ def test_run_fuzz_targets_quits(self, mock_create_fuzz_target_obj,
+ mock_run_fuzz_target, mock_get_fuzz_targets):
"""Tests that run_fuzz_targets quits on the first crash it finds."""
workspace = 'workspace'
- out_path = os.path.join(workspace, 'out')
+ out_path = os.path.join(workspace, 'build-out')
self.fs.create_dir(out_path)
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=workspace,
- project_name=EXAMPLE_PROJECT)
+ config = test_helpers.create_run_config(
+ fuzz_seconds=FUZZ_SECONDS,
+ workspace=workspace,
+ oss_fuzz_project_name=EXAMPLE_PROJECT)
runner = run_fuzzers.CiFuzzTargetRunner(config)
- mocked_get_fuzz_targets.return_value = ['target1', 'target2']
+ mock_get_fuzz_targets.return_value = ['target1', 'target2']
runner.initialize()
testcase = os.path.join(workspace, 'testcase')
self.fs.create_file(testcase)
- stacktrace = b'stacktrace'
- mocked_run_fuzz_target.return_value = fuzz_target.FuzzResult(
- testcase, stacktrace)
+ stacktrace = 'stacktrace'
+ corpus_dir = 'corpus'
+ self.fs.create_dir(corpus_dir)
+ mock_run_fuzz_target.return_value = fuzz_target.FuzzResult(
+ testcase, stacktrace, corpus_dir)
magic_mock = mock.MagicMock()
magic_mock.target_name = 'target1'
- mocked_create_fuzz_target_obj.return_value = magic_mock
+ mock_create_fuzz_target_obj.return_value = magic_mock
self.assertTrue(runner.run_fuzz_targets())
- self.assertIn('target1-address-testcase', os.listdir(runner.artifacts_dir))
- self.assertEqual(mocked_run_fuzz_target.call_count, 1)
+ self.assertIn('target1-address-testcase',
+ os.listdir(runner.workspace.artifacts))
+ self.assertEqual(mock_run_fuzz_target.call_count, 1)
class BatchFuzzTargetRunnerTest(fake_filesystem_unittest.TestCase):
- """Tests that CiFuzzTargetRunner works as intended."""
+  """Tests that BatchFuzzTargetRunner works as intended."""
+ WORKSPACE = 'workspace'
+ STACKTRACE = 'stacktrace'
+ CORPUS_DIR = 'corpus'
def setUp(self):
self.setUpPyfakefs()
-
- @mock.patch('utils.get_fuzz_targets')
+ out_dir = os.path.join(self.WORKSPACE, 'build-out')
+ self.fs.create_dir(out_dir)
+ self.testcase1 = os.path.join(out_dir, 'testcase-aaa')
+ self.fs.create_file(self.testcase1)
+ self.testcase2 = os.path.join(out_dir, 'testcase-bbb')
+ self.fs.create_file(self.testcase2)
+ self.config = test_helpers.create_run_config(fuzz_seconds=FUZZ_SECONDS,
+ workspace=self.WORKSPACE,
+ is_github=True)
+
+ @mock.patch('utils.get_fuzz_targets', return_value=['target1', 'target2'])
+ @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build',
+ return_value=True)
@mock.patch('run_fuzzers.BatchFuzzTargetRunner.run_fuzz_target')
@mock.patch('run_fuzzers.BatchFuzzTargetRunner.create_fuzz_target_obj')
- def test_run_fuzz_targets_quits(self, mocked_create_fuzz_target_obj,
- mocked_run_fuzz_target,
- mocked_get_fuzz_targets):
+ def test_run_fuzz_targets_quits(self, mock_create_fuzz_target_obj,
+ mock_run_fuzz_target, _, __):
"""Tests that run_fuzz_targets doesn't quit on the first crash it finds."""
- workspace = 'workspace'
- out_path = os.path.join(workspace, 'out')
- self.fs.create_dir(out_path)
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=workspace,
- project_name=EXAMPLE_PROJECT)
- runner = run_fuzzers.BatchFuzzTargetRunner(config)
-
- mocked_get_fuzz_targets.return_value = ['target1', 'target2']
+ runner = run_fuzzers.BatchFuzzTargetRunner(self.config)
runner.initialize()
- testcase1 = os.path.join(workspace, 'testcase-aaa')
- testcase2 = os.path.join(workspace, 'testcase-bbb')
- self.fs.create_file(testcase1)
- self.fs.create_file(testcase2)
- stacktrace = b'stacktrace'
+
call_count = 0
- def mock_run_fuzz_target(_):
+ def mock_run_fuzz_target_impl(_):
nonlocal call_count
if call_count == 0:
- testcase = testcase1
+ testcase = self.testcase1
elif call_count == 1:
- testcase = testcase2
+ testcase = self.testcase2
assert call_count != 2
call_count += 1
- return fuzz_target.FuzzResult(testcase, stacktrace)
+ if not os.path.exists(self.CORPUS_DIR):
+ self.fs.create_dir(self.CORPUS_DIR)
+ return fuzz_target.FuzzResult(testcase, self.STACKTRACE, self.CORPUS_DIR)
- mocked_run_fuzz_target.side_effect = mock_run_fuzz_target
+ mock_run_fuzz_target.side_effect = mock_run_fuzz_target_impl
magic_mock = mock.MagicMock()
magic_mock.target_name = 'target1'
- mocked_create_fuzz_target_obj.return_value = magic_mock
+ mock_create_fuzz_target_obj.return_value = magic_mock
self.assertTrue(runner.run_fuzz_targets())
- self.assertIn('target1-address-testcase-aaa',
- os.listdir(runner.artifacts_dir))
- self.assertEqual(mocked_run_fuzz_target.call_count, 2)
+ self.assertEqual(mock_run_fuzz_target.call_count, 2)
+
+ @mock.patch('run_fuzzers.BaseFuzzTargetRunner.run_fuzz_targets',
+ return_value=False)
+ @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_crashes')
+ def test_run_fuzz_targets_upload_crashes_and_builds(self, mock_upload_crashes,
+ _):
+ """Tests that run_fuzz_targets uploads crashes and builds correctly."""
+ runner = run_fuzzers.BatchFuzzTargetRunner(self.config)
+ # TODO(metzman): Don't rely on this failing gracefully.
+ runner.initialize()
+
+ self.assertFalse(runner.run_fuzz_targets())
+ self.assertEqual(mock_upload_crashes.call_count, 1)
+@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
+ 'INTEGRATION_TESTS=1 not set')
+class CoverageReportIntegrationTest(unittest.TestCase):
+ """Integration tests for coverage reports."""
+ SANITIZER = 'coverage'
+
+ def setUp(self):
+ test_helpers.patch_environ(self, runner=True)
+
+ @mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
+ def test_coverage_report(self, _):
+ """Tests generation of coverage reports end-to-end, from building to
+ generation."""
+
+ with test_helpers.docker_temp_dir() as temp_dir:
+ shared = os.path.join(temp_dir, 'shared')
+ os.mkdir(shared)
+ copy_command = ('cp -r /opt/code_coverage /shared && '
+ 'cp $(which llvm-profdata) /shared && '
+ 'cp $(which llvm-cov) /shared')
+ assert helper.docker_run([
+ '-v', f'{shared}:/shared', 'gcr.io/oss-fuzz-base/base-runner', 'bash',
+ '-c', copy_command
+ ])
+
+ os.environ['CODE_COVERAGE_SRC'] = os.path.join(shared, 'code_coverage')
+ os.environ['PATH'] += os.pathsep + shared
+ # Do coverage build.
+ build_config = test_helpers.create_build_config(
+ oss_fuzz_project_name=EXAMPLE_PROJECT,
+ project_repo_name='oss-fuzz',
+ workspace=temp_dir,
+ commit_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523',
+ base_commit='da0746452433dc18bae699e355a9821285d863c8',
+ sanitizer=self.SANITIZER,
+ is_github=True,
+ # Needed for test not to fail because of permissions issues.
+ bad_build_check=False)
+ self.assertTrue(build_fuzzers.build_fuzzers(build_config))
+
+ # TODO(metzman): Get rid of this here and make 'compile' do this.
+ chmod_command = ('chmod -R +r /out && '
+ 'find /out -type d -exec chmod +x {} +')
+
+ assert helper.docker_run([
+ '-v', f'{os.path.join(temp_dir, "build-out")}:/out',
+ 'gcr.io/oss-fuzz-base/base-builder', 'bash', '-c', chmod_command
+ ])
+
+ # Generate report.
+ run_config = test_helpers.create_run_config(fuzz_seconds=FUZZ_SECONDS,
+ workspace=temp_dir,
+ sanitizer=self.SANITIZER,
+ run_fuzzers_mode='coverage',
+ is_github=True)
+ result = run_fuzzers.run_fuzzers(run_config)
+ self.assertEqual(result, run_fuzzers.RunFuzzersResult.NO_BUG_FOUND)
+ expected_summary_path = os.path.join(
+ TEST_DATA_PATH, 'example_coverage_report_summary.json')
+ with open(expected_summary_path) as file_handle:
+ expected_summary = json.loads(file_handle.read())
+ actual_summary_path = os.path.join(temp_dir, 'cifuzz-coverage',
+ 'report', 'linux', 'summary.json')
+ with open(actual_summary_path) as file_handle:
+ actual_summary = json.loads(file_handle.read())
+ self.assertEqual(expected_summary, actual_summary)
+
+
+@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
+ 'INTEGRATION_TESTS=1 not set')
class RunAddressFuzzersIntegrationTest(RunFuzzerIntegrationTestMixin,
unittest.TestCase):
"""Integration tests for build_fuzzers with an ASAN build."""
BUILD_DIR_NAME = 'cifuzz-latest-build'
- @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
- 'INTEGRATION_TESTS=1 not set')
def test_new_bug_found(self):
"""Tests run_fuzzers with a valid ASAN build."""
# Set the first return value to True, then the second to False to
@@ -336,46 +419,61 @@ class RunAddressFuzzersIntegrationTest(RunFuzzerIntegrationTestMixin,
with tempfile.TemporaryDirectory() as tmp_dir:
workspace = os.path.join(tmp_dir, 'workspace')
shutil.copytree(TEST_DATA_PATH, workspace)
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=workspace,
- project_name=EXAMPLE_PROJECT)
+ config = test_helpers.create_run_config(
+ fuzz_seconds=FUZZ_SECONDS,
+ workspace=workspace,
+ oss_fuzz_project_name=EXAMPLE_PROJECT)
result = run_fuzzers.run_fuzzers(config)
self.assertEqual(result, run_fuzzers.RunFuzzersResult.BUG_FOUND)
- build_dir = os.path.join(workspace, 'out', self.BUILD_DIR_NAME)
- self.assertNotEqual(0, len(os.listdir(build_dir)))
- @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
- 'INTEGRATION_TESTS=1 not set')
@mock.patch('fuzz_target.FuzzTarget.is_reproducible',
side_effect=[True, True])
def test_old_bug_found(self, _):
"""Tests run_fuzzers with a bug found in OSS-Fuzz before."""
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=TEST_DATA_PATH,
- project_name=EXAMPLE_PROJECT)
with tempfile.TemporaryDirectory() as tmp_dir:
workspace = os.path.join(tmp_dir, 'workspace')
shutil.copytree(TEST_DATA_PATH, workspace)
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=TEST_DATA_PATH,
- project_name=EXAMPLE_PROJECT)
+ config = test_helpers.create_run_config(
+ fuzz_seconds=FUZZ_SECONDS,
+ workspace=workspace,
+ oss_fuzz_project_name=EXAMPLE_PROJECT)
result = run_fuzzers.run_fuzzers(config)
self.assertEqual(result, run_fuzzers.RunFuzzersResult.NO_BUG_FOUND)
- build_dir = os.path.join(TEST_DATA_PATH, 'out', self.BUILD_DIR_NAME)
- self.assertTrue(os.path.exists(build_dir))
- self.assertNotEqual(0, len(os.listdir(build_dir)))
def test_invalid_build(self):
"""Tests run_fuzzers with an invalid ASAN build."""
with tempfile.TemporaryDirectory() as tmp_dir:
- out_path = os.path.join(tmp_dir, 'out')
+ out_path = os.path.join(tmp_dir, 'build-out')
os.mkdir(out_path)
- config = _create_config(fuzz_seconds=FUZZ_SECONDS,
- workspace=tmp_dir,
- project_name=EXAMPLE_PROJECT)
+ config = test_helpers.create_run_config(
+ fuzz_seconds=FUZZ_SECONDS,
+ workspace=tmp_dir,
+ oss_fuzz_project_name=EXAMPLE_PROJECT)
result = run_fuzzers.run_fuzzers(config)
self.assertEqual(result, run_fuzzers.RunFuzzersResult.ERROR)
+class GetFuzzTargetRunnerTest(unittest.TestCase):
+  """Tests for get_fuzz_target_runner."""
+
+ @parameterized.parameterized.expand([
+ ('batch', run_fuzzers.BatchFuzzTargetRunner),
+ ('ci', run_fuzzers.CiFuzzTargetRunner),
+ ('coverage', run_fuzzers.CoverageTargetRunner)
+ ])
+ def test_get_fuzz_target_runner(self, run_fuzzers_mode,
+ fuzz_target_runner_cls):
+ """Tests that get_fuzz_target_runner returns the correct runner based on the
+ specified run_fuzzers_mode."""
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ run_config = test_helpers.create_run_config(
+ fuzz_seconds=FUZZ_SECONDS,
+ workspace=tmp_dir,
+ oss_fuzz_project_name='example',
+ run_fuzzers_mode=run_fuzzers_mode)
+ runner = run_fuzzers.get_fuzz_target_runner(run_config)
+ self.assertTrue(isinstance(runner, fuzz_target_runner_cls))
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/infra/cifuzz/stack_parser.py b/infra/cifuzz/stack_parser.py
index 69c44bc2e..b53f875fe 100644
--- a/infra/cifuzz/stack_parser.py
+++ b/infra/cifuzz/stack_parser.py
@@ -18,30 +18,30 @@ import logging
# From clusterfuzz: src/python/crash_analysis/crash_analyzer.py
# Used to get the beginning of the stacktrace.
STACKTRACE_TOOL_MARKERS = [
- b'AddressSanitizer',
- b'ASAN:',
- b'CFI: Most likely a control flow integrity violation;',
- b'ERROR: libFuzzer',
- b'KASAN:',
- b'LeakSanitizer',
- b'MemorySanitizer',
- b'ThreadSanitizer',
- b'UndefinedBehaviorSanitizer',
- b'UndefinedSanitizer',
+ 'AddressSanitizer',
+ 'ASAN:',
+ 'CFI: Most likely a control flow integrity violation;',
+ 'ERROR: libFuzzer',
+ 'KASAN:',
+ 'LeakSanitizer',
+ 'MemorySanitizer',
+ 'ThreadSanitizer',
+ 'UndefinedBehaviorSanitizer',
+ 'UndefinedSanitizer',
]
# From clusterfuzz: src/python/crash_analysis/crash_analyzer.py
# Used to get the end of the stacktrace.
STACKTRACE_END_MARKERS = [
- b'ABORTING',
- b'END MEMORY TOOL REPORT',
- b'End of process memory map.',
- b'END_KASAN_OUTPUT',
- b'SUMMARY:',
- b'Shadow byte and word',
- b'[end of stack trace]',
- b'\nExiting',
- b'minidump has been written',
+ 'ABORTING',
+ 'END MEMORY TOOL REPORT',
+ 'End of process memory map.',
+ 'END_KASAN_OUTPUT',
+ 'SUMMARY:',
+ 'Shadow byte and word',
+ '[end of stack trace]',
+ '\nExiting',
+ 'minidump has been written',
]
@@ -82,5 +82,5 @@ def parse_fuzzer_output(fuzzer_output, parsed_output_file_path):
summary_str = fuzzer_output[begin_stack:end_stack]
# Write sections of fuzzer output to specific files.
- with open(parsed_output_file_path, 'ab') as summary_handle:
+ with open(parsed_output_file_path, 'a') as summary_handle:
summary_handle.write(summary_str)
diff --git a/infra/cifuzz/stack_parser_test.py b/infra/cifuzz/stack_parser_test.py
index faf601fd5..5a631b427 100644
--- a/infra/cifuzz/stack_parser_test.py
+++ b/infra/cifuzz/stack_parser_test.py
@@ -46,12 +46,12 @@ class ParseOutputTest(fake_filesystem_unittest.TestCase):
# Read the fuzzer output from disk.
fuzzer_output_path = os.path.join(TEST_DATA_PATH, fuzzer_output_file)
self.fs.add_real_file(fuzzer_output_path)
- with open(fuzzer_output_path, 'rb') as fuzzer_output_handle:
+ with open(fuzzer_output_path, 'r') as fuzzer_output_handle:
fuzzer_output = fuzzer_output_handle.read()
bug_summary_path = '/bug-summary.txt'
- with mock.patch('logging.info') as mocked_info:
+ with mock.patch('logging.info') as mock_info:
stack_parser.parse_fuzzer_output(fuzzer_output, bug_summary_path)
- mocked_info.assert_not_called()
+ mock_info.assert_not_called()
with open(bug_summary_path) as bug_summary_handle:
bug_summary = bug_summary_handle.read()
@@ -67,10 +67,10 @@ class ParseOutputTest(fake_filesystem_unittest.TestCase):
def test_parse_invalid_output(self):
"""Checks that no files are created when an invalid input was given."""
artifact_path = '/bug-summary.txt'
- with mock.patch('logging.error') as mocked_error:
- stack_parser.parse_fuzzer_output(b'not a valid output_string',
+ with mock.patch('logging.error') as mock_error:
+ stack_parser.parse_fuzzer_output('not a valid output_string',
artifact_path)
- assert mocked_error.call_count
+ assert mock_error.call_count
self.assertFalse(os.path.exists(artifact_path))
diff --git a/infra/cifuzz/test_data/out/example_crash_fuzzer b/infra/cifuzz/test_data/build-out/example_crash_fuzzer
index 704800dda..704800dda 100755
--- a/infra/cifuzz/test_data/out/example_crash_fuzzer
+++ b/infra/cifuzz/test_data/build-out/example_crash_fuzzer
Binary files differ
diff --git a/infra/cifuzz/test_data/out/example_nocrash_fuzzer b/infra/cifuzz/test_data/build-out/example_nocrash_fuzzer
index e4ff86042..e4ff86042 100755
--- a/infra/cifuzz/test_data/out/example_nocrash_fuzzer
+++ b/infra/cifuzz/test_data/build-out/example_nocrash_fuzzer
Binary files differ
diff --git a/infra/cifuzz/test_data/example_coverage_report_summary.json b/infra/cifuzz/test_data/example_coverage_report_summary.json
new file mode 100644
index 000000000..0004a1b57
--- /dev/null
+++ b/infra/cifuzz/test_data/example_coverage_report_summary.json
@@ -0,0 +1 @@
+{"data": [{"files": [{"filename": "/src/my-git-repo/projects/example/my-api-repo/do_stuff_fuzzer.cpp", "summary": {"branches": {"count": 0, "covered": 0, "notcovered": 0, "percent": 0}, "functions": {"count": 1, "covered": 0, "percent": 0}, "instantiations": {"count": 1, "covered": 0, "percent": 0}, "lines": {"count": 5, "covered": 0, "percent": 0}, "regions": {"count": 1, "covered": 0, "notcovered": 1, "percent": 0}}}, {"filename": "/src/my-git-repo/projects/example/my-api-repo/my_api.cpp", "summary": {"branches": {"count": 10, "covered": 0, "notcovered": 10, "percent": 0}, "functions": {"count": 1, "covered": 0, "percent": 0}, "instantiations": {"count": 1, "covered": 0, "percent": 0}, "lines": {"count": 15, "covered": 0, "percent": 0}, "regions": {"count": 11, "covered": 0, "notcovered": 11, "percent": 0}}}], "totals": {"branches": {"count": 10, "covered": 0, "notcovered": 10, "percent": 0}, "functions": {"count": 2, "covered": 0, "percent": 0}, "instantiations": {"count": 2, "covered": 0, "percent": 0}, "lines": {"count": 20, "covered": 0, "percent": 0}, "regions": {"count": 12, "covered": 0, "notcovered": 12, "percent": 0}}}], "type": "llvm.coverage.json.export", "version": "2.0.1"} \ No newline at end of file
diff --git a/infra/cifuzz/test_data/external-project/oss-fuzz/Dockerfile b/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile
index e9dc33031..e24553825 100644
--- a/infra/cifuzz/test_data/external-project/oss-fuzz/Dockerfile
+++ b/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile
@@ -18,5 +18,5 @@ FROM gcr.io/oss-fuzz-base/base-builder
RUN apt-get update && apt-get install -y make
COPY . $SRC/external-project
-WORKDIR external-project
-COPY oss-fuzz/build.sh $SRC/
+WORKDIR $SRC/external-project
+COPY .clusterfuzzlite/build.sh $SRC/
diff --git a/infra/cifuzz/test_data/external-project/oss-fuzz/build.sh b/infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh
index 2c52ef90f..2c52ef90f 100644
--- a/infra/cifuzz/test_data/external-project/oss-fuzz/build.sh
+++ b/infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh
diff --git a/infra/cifuzz/test_data/memory/out/curl_fuzzer_memory b/infra/cifuzz/test_data/memory/build-out/curl_fuzzer_memory
index c602ce970..c602ce970 100755
--- a/infra/cifuzz/test_data/memory/out/curl_fuzzer_memory
+++ b/infra/cifuzz/test_data/memory/build-out/curl_fuzzer_memory
Binary files differ
diff --git a/infra/cifuzz/test_data/undefined/out/curl_fuzzer_undefined b/infra/cifuzz/test_data/undefined/build-out/curl_fuzzer_undefined
index 504cab108..504cab108 100755
--- a/infra/cifuzz/test_data/undefined/out/curl_fuzzer_undefined
+++ b/infra/cifuzz/test_data/undefined/build-out/curl_fuzzer_undefined
Binary files differ
diff --git a/infra/cifuzz/test_helpers.py b/infra/cifuzz/test_helpers.py
new file mode 100644
index 000000000..85b5a8a67
--- /dev/null
+++ b/infra/cifuzz/test_helpers.py
@@ -0,0 +1,116 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains convenient helpers for writing tests."""
+
+import contextlib
+import os
+import sys
+import shutil
+import tempfile
+from unittest import mock
+
+import config_utils
+import docker
+import workspace_utils
+
+INFRA_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+# pylint: disable=wrong-import-position,import-error
+sys.path.append(INFRA_DIR)
+
+import helper
+
+
+@mock.patch('config_utils._is_dry_run', return_value=True)
+@mock.patch('config_utils.GenericCiEnvironment.project_src_path',
+ return_value=None)
+@mock.patch('os.path.basename', return_value=None)
+def _create_config(config_cls, _, __, ___, **kwargs):
+ """Creates a config object from |config_cls| and then sets every attribute
+ that is a key in |kwargs| to the corresponding value. Asserts that each key in
+ |kwargs| is an attribute of config."""
+ with mock.patch('config_utils.BaseConfig.validate', return_value=True):
+ config = config_cls()
+ for key, value in kwargs.items():
+ assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
+ setattr(config, key, value)
+
+ return config
+
+
+def create_build_config(**kwargs):
+ """Wrapper around _create_config for build configs."""
+ return _create_config(config_utils.BuildFuzzersConfig, **kwargs)
+
+
+def create_run_config(**kwargs):
+ """Wrapper around _create_config for run configs."""
+ return _create_config(config_utils.RunFuzzersConfig, **kwargs)
+
+
+def create_workspace(workspace_path='/workspace'):
+ """Returns a workspace located at |workspace_path| ('/workspace' by
+ default)."""
+ config = create_run_config(workspace=workspace_path)
+ return workspace_utils.Workspace(config)
+
+
+def patch_environ(testcase_obj, env=None, empty=False, runner=False):
+ """Patch environment. |testcase_obj| is the unittest.TestCase that contains
+ tests. |env|, if specified, is a dictionary of environment variables to start
+ from. If |empty| is True then the new patched environment will be empty. If
+ |runner| is True then the necessary environment variables will be set to run
+ the scripts from base-runner."""
+ if env is None:
+ env = {}
+
+ patcher = mock.patch.dict(os.environ, env)
+ testcase_obj.addCleanup(patcher.stop)
+ patcher.start()
+ if empty:
+ for key in os.environ.copy():
+ del os.environ[key]
+
+ if runner:
+    # Add the scripts for base-runner to the path since they won't be in
+ # /usr/local/bin on host machines during testing.
+ base_runner_dir = os.path.join(INFRA_DIR, 'base-images', 'base-runner')
+ os.environ['PATH'] = (os.environ.get('PATH', '') + os.pathsep +
+ base_runner_dir)
+ if 'GOPATH' not in os.environ:
+ # A GOPATH must be set or else the coverage script fails, even for getting
+ # the coverage of non-Go programs.
+ os.environ['GOPATH'] = '/root/go'
+
+
+@contextlib.contextmanager
+def temp_dir_copy(directory):
+ """Context manager that yields a temporary copy of |directory|."""
+ with tempfile.TemporaryDirectory() as temp_dir:
+ temp_copy_path = os.path.join(temp_dir, os.path.basename(directory))
+ shutil.copytree(directory, temp_copy_path)
+ yield temp_copy_path
+
+
+@contextlib.contextmanager
+def docker_temp_dir():
+  """Returns a temporary directory that is useful for use with docker. On
+ cleanup this contextmanager uses docker to delete the directory's contents so
+ that if anything is owned by root it can be deleted (which
+ tempfile.TemporaryDirectory() cannot do) by non-root users."""
+ with tempfile.TemporaryDirectory() as temp_dir:
+ yield temp_dir
+ helper.docker_run([
+ '-v', f'{temp_dir}:/temp_dir', '-t', docker.BASE_BUILDER_TAG,
+ '/bin/bash', '-c', 'rm -rf /temp_dir/*'
+ ])
diff --git a/infra/cifuzz/workspace_utils.py b/infra/cifuzz/workspace_utils.py
new file mode 100644
index 000000000..ed296bc2b
--- /dev/null
+++ b/infra/cifuzz/workspace_utils.py
@@ -0,0 +1,75 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for representing the workspace directory which CIFuzz uses."""
+
+import os
+
+
+class Workspace:
+ """Class representing the workspace directory."""
+
+ def __init__(self, config):
+ self.workspace = config.workspace
+
+ def initialize_dir(self, directory): # pylint: disable=no-self-use
+ """Creates directory if it doesn't already exist, otherwise does nothing."""
+ os.makedirs(directory, exist_ok=True)
+
+ @property
+ def repo_storage(self):
+ """The parent directory for repo storage."""
+ return os.path.join(self.workspace, 'storage')
+
+ @property
+ def out(self):
+ """The out directory used for storing the fuzzer build built by
+ build_fuzzers."""
+ # Don't use 'out' because it needs to be used by artifacts.
+ return os.path.join(self.workspace, 'build-out')
+
+ @property
+ def work(self):
+ """The directory used as the work directory for the fuzzer build/run."""
+ return os.path.join(self.workspace, 'work')
+
+ @property
+ def artifacts(self):
+ """The directory used to store artifacts for download by CI-system users."""
+ # This is hardcoded by a lot of clients, so we need to use this.
+ return os.path.join(self.workspace, 'out', 'artifacts')
+
+ @property
+ def clusterfuzz_build(self):
+ """The directory where builds from ClusterFuzz are stored."""
+ return os.path.join(self.workspace, 'cifuzz-prev-build')
+
+ @property
+ def clusterfuzz_coverage(self):
+    """The directory where coverage builds from ClusterFuzz are stored."""
+ return os.path.join(self.workspace, 'cifuzz-prev-coverage')
+
+ @property
+ def coverage_report(self):
+ """The directory where coverage reports generated by cifuzz are put."""
+ return os.path.join(self.workspace, 'cifuzz-coverage')
+
+ @property
+ def corpora(self):
+ """The directory where corpora from ClusterFuzz are stored."""
+ return os.path.join(self.workspace, 'cifuzz-corpus')
+
+ @property
+ def pruned_corpora(self):
+ """The directory where pruned corpora are stored."""
+ return os.path.join(self.workspace, 'cifuzz-pruned-corpus')
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py b/infra/constants.py
index 9d200af6f..a323a4368 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py
+++ b/infra/constants.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# Copyright 2017 Google Inc.
+# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,24 +13,26 @@
# limitations under the License.
#
################################################################################
+"""Constants for OSS-Fuzz."""
-import os
-import shutil
+DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH = '.clusterfuzzlite'
-import package
-import wrapper_utils
-
-
-class Package(package.Package):
- """libgcrypt20 package."""
-
- def __init__(self, apt_version):
- super(Package, self).__init__('libgcrypt20', apt_version)
-
- def PreBuild(self, source_directory, env, custom_bin_dir):
- configure_wrapper = (
- '#!/bin/bash\n'
- '/usr/bin/dh_auto_configure "$@" --disable-asm')
-
- wrapper_utils.InstallWrapper(
- custom_bin_dir, 'dh_auto_configure', configure_wrapper)
+DEFAULT_LANGUAGE = 'c++'
+DEFAULT_SANITIZER = 'address'
+DEFAULT_ARCHITECTURE = 'x86_64'
+DEFAULT_ENGINE = 'libfuzzer'
+LANGUAGES = [
+ 'c',
+ 'c++',
+ 'go',
+ 'jvm',
+ 'python',
+ 'rust',
+ 'swift',
+]
+LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'jvm', 'rust', 'swift']
+SANITIZERS = [
+ 'address', 'none', 'memory', 'undefined', 'dataflow', 'thread', 'coverage'
+]
+ARCHITECTURES = ['i386', 'x86_64']
+ENGINES = ['libfuzzer', 'afl', 'honggfuzz', 'dataflow', 'none']
diff --git a/infra/helper.py b/infra/helper.py
index e24df4ded..805f39a99 100755
--- a/infra/helper.py
+++ b/infra/helper.py
@@ -22,6 +22,7 @@ from multiprocessing.dummy import Pool as ThreadPool
import argparse
import datetime
import errno
+import logging
import os
import pipes
import re
@@ -29,25 +30,31 @@ import subprocess
import sys
import templates
+import constants
+
OSS_FUZZ_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
BUILD_DIR = os.path.join(OSS_FUZZ_DIR, 'build')
-BASE_IMAGES = [
- 'gcr.io/oss-fuzz-base/base-image',
- 'gcr.io/oss-fuzz-base/base-clang',
- 'gcr.io/oss-fuzz-base/base-builder',
- 'gcr.io/oss-fuzz-base/base-runner',
- 'gcr.io/oss-fuzz-base/base-runner-debug',
- 'gcr.io/oss-fuzz-base/base-sanitizer-libs-builder',
- 'gcr.io/oss-fuzz-base/msan-libs-builder',
-]
+BASE_RUNNER_IMAGE = 'gcr.io/oss-fuzz-base/base-runner'
+
+BASE_IMAGES = {
+ 'generic': [
+ 'gcr.io/oss-fuzz-base/base-image',
+ 'gcr.io/oss-fuzz-base/base-clang',
+ 'gcr.io/oss-fuzz-base/base-builder',
+ BASE_RUNNER_IMAGE,
+ 'gcr.io/oss-fuzz-base/base-runner-debug',
+ ],
+ 'go': ['gcr.io/oss-fuzz-base/base-builder-go'],
+ 'jvm': ['gcr.io/oss-fuzz-base/base-builder-jvm'],
+ 'python': ['gcr.io/oss-fuzz-base/base-builder-python'],
+ 'rust': ['gcr.io/oss-fuzz-base/base-builder-rust'],
+ 'swift': ['gcr.io/oss-fuzz-base/base-builder-swift'],
+}
VALID_PROJECT_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_-]+$')
MAX_PROJECT_NAME_LENGTH = 26
-if sys.version_info[0] >= 3:
- raw_input = input # pylint: disable=invalid-name
-
CORPUS_URL_FORMAT = (
'gs://{project_name}-corpus.clusterfuzz-external.appspot.com/libFuzzer/'
'{fuzz_target}/')
@@ -55,60 +62,158 @@ CORPUS_BACKUP_URL_FORMAT = (
'gs://{project_name}-backup.clusterfuzz-external.appspot.com/corpus/'
'libFuzzer/{fuzz_target}/')
+LANGUAGE_REGEX = re.compile(r'[^\s]+')
PROJECT_LANGUAGE_REGEX = re.compile(r'\s*language\s*:\s*([^\s]+)')
-# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'rust']
+WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+
+LANGUAGES_WITH_BUILDER_IMAGES = {'go', 'jvm', 'python', 'rust', 'swift'}
+
+if sys.version_info[0] >= 3:
+ raw_input = input # pylint: disable=invalid-name
# pylint: disable=too-many-lines
+class Project:
+ """Class representing a project that is in OSS-Fuzz or an external project
+ (ClusterFuzzLite user)."""
+
+ def __init__(
+ self,
+ project_name_or_path,
+ is_external=False,
+ build_integration_path=constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH):
+ self.is_external = is_external
+ if self.is_external:
+ self.path = os.path.abspath(project_name_or_path)
+ self.name = os.path.basename(self.path)
+ self.build_integration_path = os.path.join(self.path,
+ build_integration_path)
+ else:
+ self.name = project_name_or_path
+ self.path = os.path.join(OSS_FUZZ_DIR, 'projects', self.name)
+ self.build_integration_path = self.path
+
+ @property
+ def dockerfile_path(self):
+ """Returns path to the project Dockerfile."""
+ return os.path.join(self.build_integration_path, 'Dockerfile')
+
+ @property
+ def language(self):
+ """Returns project language."""
+ if self.is_external:
+ # TODO(metzman): Handle this properly.
+ return constants.DEFAULT_LANGUAGE
+
+ project_yaml_path = os.path.join(self.path, 'project.yaml')
+ with open(project_yaml_path) as file_handle:
+ content = file_handle.read()
+ for line in content.splitlines():
+ match = PROJECT_LANGUAGE_REGEX.match(line)
+ if match:
+ return match.group(1)
+
+ logging.warning('Language not specified in project.yaml.')
+ return None
+
+ @property
+ def out(self):
+ """Returns the out dir for the project. Creates it if needed."""
+ return _get_out_dir(self.name)
+
+ @property
+ def work(self):
+    """Returns the work dir for the project. Creates it if needed."""
+ return _get_project_build_subdir(self.name, 'work')
+
+ @property
+ def corpus(self):
+    """Returns the corpus dir for the project. Creates it if needed."""
+ return _get_project_build_subdir(self.name, 'corpus')
+
+
def main(): # pylint: disable=too-many-branches,too-many-return-statements
- """Get subcommand from program arguments and do it."""
+  """Gets subcommand from program arguments and does it. Returns 0 on success, 1
+ on error."""
+ logging.basicConfig(level=logging.INFO)
+
+ parser = get_parser()
+ args = parse_args(parser)
+
+ # Note: this has to happen after parse_args above as parse_args needs to know
+ # the original CWD for external projects.
os.chdir(OSS_FUZZ_DIR)
if not os.path.exists(BUILD_DIR):
os.mkdir(BUILD_DIR)
- args = parse_args()
-
# We have different default values for `sanitizer` depending on the `engine`.
# Some commands do not have `sanitizer` argument, so `hasattr` is necessary.
if hasattr(args, 'sanitizer') and not args.sanitizer:
if args.engine == 'dataflow':
args.sanitizer = 'dataflow'
else:
- args.sanitizer = 'address'
+ args.sanitizer = constants.DEFAULT_SANITIZER
if args.command == 'generate':
- return generate(args)
- if args.command == 'build_image':
- return build_image(args)
- if args.command == 'build_fuzzers':
- return build_fuzzers(args)
- if args.command == 'check_build':
- return check_build(args)
- if args.command == 'download_corpora':
- return download_corpora(args)
- if args.command == 'run_fuzzer':
- return run_fuzzer(args)
- if args.command == 'coverage':
- return coverage(args)
- if args.command == 'reproduce':
- return reproduce(args)
- if args.command == 'shell':
- return shell(args)
- if args.command == 'pull_images':
- return pull_images(args)
-
- return 0
-
-
-def parse_args(args=None):
- """Parses args using argparser and returns parsed args."""
+ result = generate(args)
+ elif args.command == 'build_image':
+ result = build_image(args)
+ elif args.command == 'build_fuzzers':
+ result = build_fuzzers(args)
+ elif args.command == 'check_build':
+ result = check_build(args)
+ elif args.command == 'download_corpora':
+ result = download_corpora(args)
+ elif args.command == 'run_fuzzer':
+ result = run_fuzzer(args)
+ elif args.command == 'coverage':
+ result = coverage(args)
+ elif args.command == 'reproduce':
+ result = reproduce(args)
+ elif args.command == 'shell':
+ result = shell(args)
+ elif args.command == 'pull_images':
+ result = pull_images()
+ else:
+ # Print help string if no arguments provided.
+ parser.print_help()
+ result = False
+ return bool_to_retcode(result)
+
+
+def bool_to_retcode(boolean):
+  """Returns 0 if |boolean| is truthy; 0 is the standard return code for a
+ successful process execution. Returns 1 otherwise, indicating the process
+ failed."""
+ return 0 if boolean else 1
+
+
+def parse_args(parser, args=None):
+ """Parses |args| using |parser| and returns parsed args. Also changes
+ |args.build_integration_path| to have correct default behavior."""
# Use default argument None for args so that in production, argparse does its
# normal behavior, but unittesting is easier.
- parser = get_parser()
- return parser.parse_args(args)
+ parsed_args = parser.parse_args(args)
+ project = getattr(parsed_args, 'project', None)
+ if not project:
+ return parsed_args
+
+ # Use hacky method for extracting attributes so that ShellTest works.
+ # TODO(metzman): Fix this.
+ is_external = getattr(parsed_args, 'external', False)
+ parsed_args.project = Project(parsed_args.project, is_external)
+ return parsed_args
+
+
+def _add_external_project_args(parser):
+ parser.add_argument(
+ '--external',
+ help='Is project external?',
+ default=False,
+ action='store_true',
+ )
def get_parser(): # pylint: disable=too-many-statements
@@ -118,17 +223,28 @@ def get_parser(): # pylint: disable=too-many-statements
generate_parser = subparsers.add_parser(
'generate', help='Generate files for new project.')
- generate_parser.add_argument('project_name')
+ generate_parser.add_argument('project')
+ generate_parser.add_argument(
+ '--language',
+ default=constants.DEFAULT_LANGUAGE,
+ choices=['c', 'c++', 'rust', 'go', 'jvm', 'swift', 'python'],
+ help='Project language.')
+ _add_external_project_args(generate_parser)
build_image_parser = subparsers.add_parser('build_image',
help='Build an image.')
- build_image_parser.add_argument('project_name')
+ build_image_parser.add_argument('project')
build_image_parser.add_argument('--pull',
action='store_true',
help='Pull latest base image.')
+ build_image_parser.add_argument('--cache',
+ action='store_true',
+ default=False,
+ help='Use docker cache when building image.')
build_image_parser.add_argument('--no-pull',
action='store_true',
help='Do not pull latest base image.')
+ _add_external_project_args(build_image_parser)
build_fuzzers_parser = subparsers.add_parser(
'build_fuzzers', help='Build fuzzers for a project.')
@@ -136,10 +252,15 @@ def get_parser(): # pylint: disable=too-many-statements
_add_engine_args(build_fuzzers_parser)
_add_sanitizer_args(build_fuzzers_parser)
_add_environment_args(build_fuzzers_parser)
- build_fuzzers_parser.add_argument('project_name')
+ _add_external_project_args(build_fuzzers_parser)
+ build_fuzzers_parser.add_argument('project')
build_fuzzers_parser.add_argument('source_path',
help='path of local source',
nargs='?')
+ build_fuzzers_parser.add_argument('--mount_path',
+ dest='mount_path',
+ help='path to mount local source in '
+ '(defaults to WORKDIR)')
build_fuzzers_parser.add_argument('--clean',
dest='clean',
action='store_true',
@@ -154,26 +275,26 @@ def get_parser(): # pylint: disable=too-many-statements
check_build_parser = subparsers.add_parser(
'check_build', help='Checks that fuzzers execute without errors.')
_add_architecture_args(check_build_parser)
- _add_engine_args(
- check_build_parser,
- choices=['libfuzzer', 'afl', 'honggfuzz', 'dataflow', 'none'])
- _add_sanitizer_args(
- check_build_parser,
- choices=['address', 'memory', 'undefined', 'dataflow', 'thread'])
+ _add_engine_args(check_build_parser, choices=constants.ENGINES)
+ _add_sanitizer_args(check_build_parser, choices=constants.SANITIZERS)
_add_environment_args(check_build_parser)
- check_build_parser.add_argument('project_name', help='name of the project')
+ check_build_parser.add_argument('project',
+ help='name of the project or path (external)')
check_build_parser.add_argument('fuzzer_name',
help='name of the fuzzer',
nargs='?')
+ _add_external_project_args(check_build_parser)
run_fuzzer_parser = subparsers.add_parser(
'run_fuzzer', help='Run a fuzzer in the emulated fuzzing environment.')
_add_engine_args(run_fuzzer_parser)
_add_sanitizer_args(run_fuzzer_parser)
_add_environment_args(run_fuzzer_parser)
+ _add_external_project_args(run_fuzzer_parser)
run_fuzzer_parser.add_argument(
'--corpus-dir', help='directory to store corpus for the fuzz target')
- run_fuzzer_parser.add_argument('project_name', help='name of the project')
+ run_fuzzer_parser.add_argument('project',
+ help='name of the project or path (external)')
run_fuzzer_parser.add_argument('fuzzer_name', help='name of the fuzzer')
run_fuzzer_parser.add_argument('fuzzer_args',
help='arguments to pass to the fuzzer',
@@ -197,35 +318,40 @@ def get_parser(): # pylint: disable=too-many-statements
coverage_parser.add_argument('--corpus-dir',
help='specify location of corpus'
' to be used (requires --fuzz-target argument)')
- coverage_parser.add_argument('project_name', help='name of the project')
+ coverage_parser.add_argument('project',
+ help='name of the project or path (external)')
coverage_parser.add_argument('extra_args',
help='additional arguments to '
'pass to llvm-cov utility.',
nargs='*')
+ _add_external_project_args(coverage_parser)
download_corpora_parser = subparsers.add_parser(
'download_corpora', help='Download all corpora for a project.')
download_corpora_parser.add_argument('--fuzz-target',
help='specify name of a fuzz target')
- download_corpora_parser.add_argument('project_name',
- help='name of the project')
+ download_corpora_parser.add_argument(
+ 'project', help='name of the project or path (external)')
reproduce_parser = subparsers.add_parser('reproduce',
help='Reproduce a crash.')
reproduce_parser.add_argument('--valgrind',
action='store_true',
help='run with valgrind')
- reproduce_parser.add_argument('project_name', help='name of the project')
+ reproduce_parser.add_argument('project',
+ help='name of the project or path (external)')
reproduce_parser.add_argument('fuzzer_name', help='name of the fuzzer')
reproduce_parser.add_argument('testcase_path', help='path of local testcase')
reproduce_parser.add_argument('fuzzer_args',
help='arguments to pass to the fuzzer',
nargs=argparse.REMAINDER)
_add_environment_args(reproduce_parser)
+ _add_external_project_args(reproduce_parser)
shell_parser = subparsers.add_parser(
'shell', help='Run /bin/bash within the builder container.')
- shell_parser.add_argument('project_name', help='name of the project')
+ shell_parser.add_argument('project',
+ help='name of the project or path (external)')
shell_parser.add_argument('source_path',
help='path of local source',
nargs='?')
@@ -233,6 +359,7 @@ def get_parser(): # pylint: disable=too-many-statements
_add_engine_args(shell_parser)
_add_sanitizer_args(shell_parser)
_add_environment_args(shell_parser)
+ _add_external_project_args(shell_parser)
subparsers.add_parser('pull_images', help='Pull base images.')
return parser
@@ -243,29 +370,33 @@ def is_base_image(image_name):
return os.path.exists(os.path.join('infra', 'base-images', image_name))
-def check_project_exists(project_name):
+def check_project_exists(project):
"""Checks if a project exists."""
- if not os.path.exists(_get_project_dir(project_name)):
- print(project_name, 'does not exist', file=sys.stderr)
- return False
+ if os.path.exists(project.path):
+ return True
- return True
+ if project.is_external:
+ descriptive_project_name = project.path
+ else:
+ descriptive_project_name = project.name
+ logging.error('"%s" does not exist.', descriptive_project_name)
+ return False
-def _check_fuzzer_exists(project_name, fuzzer_name):
+
+def _check_fuzzer_exists(project, fuzzer_name):
"""Checks if a fuzzer exists."""
command = ['docker', 'run', '--rm']
- command.extend(['-v', '%s:/out' % _get_output_dir(project_name)])
- command.append('ubuntu:16.04')
+ command.extend(['-v', '%s:/out' % project.out])
+ command.append(BASE_RUNNER_IMAGE)
command.extend(['/bin/bash', '-c', 'test -f /out/%s' % fuzzer_name])
try:
subprocess.check_call(command)
except subprocess.CalledProcessError:
- print(fuzzer_name,
- 'does not seem to exist. Please run build_fuzzers first.',
- file=sys.stderr)
+ logging.error('%s does not seem to exist. Please run build_fuzzers first.',
+ fuzzer_name)
return False
return True
@@ -281,76 +412,44 @@ def _get_command_string(command):
return ' '.join(pipes.quote(part) for part in command)
-def _get_project_dir(project_name):
- """Returns path to the project."""
- return os.path.join(OSS_FUZZ_DIR, 'projects', project_name)
-
-
-def get_dockerfile_path(project_name):
- """Returns path to the project Dockerfile."""
- return os.path.join(_get_project_dir(project_name), 'Dockerfile')
-
-
-def _get_corpus_dir(project_name=''):
- """Creates and returns path to /corpus directory for the given project (if
- specified)."""
- directory = os.path.join(BUILD_DIR, 'corpus', project_name)
+def _get_project_build_subdir(project, subdir_name):
+ """Creates the |subdir_name| subdirectory of the |project| subdirectory in
+ |BUILD_DIR| and returns its path."""
+ directory = os.path.join(BUILD_DIR, subdir_name, project)
if not os.path.exists(directory):
os.makedirs(directory)
return directory
-def _get_output_dir(project_name=''):
+def _get_out_dir(project=''):
"""Creates and returns path to /out directory for the given project (if
specified)."""
- directory = os.path.join(BUILD_DIR, 'out', project_name)
- if not os.path.exists(directory):
- os.makedirs(directory)
+ return _get_project_build_subdir(project, 'out')
- return directory
+def _add_architecture_args(parser, choices=None):
+ """Adds common architecture args."""
+ if choices is None:
+ choices = constants.ARCHITECTURES
+ parser.add_argument('--architecture',
+ default=constants.DEFAULT_ARCHITECTURE,
+ choices=choices)
-def _get_work_dir(project_name=''):
- """Creates and returns path to /work directory for the given project (if
- specified)."""
- directory = os.path.join(BUILD_DIR, 'work', project_name)
- if not os.path.exists(directory):
- os.makedirs(directory)
- return directory
+def _add_engine_args(parser, choices=None):
+ """Adds common engine args."""
+ if choices is None:
+ choices = constants.ENGINES
+ parser.add_argument('--engine',
+ default=constants.DEFAULT_ENGINE,
+ choices=choices)
-def _get_project_language(project_name):
- """Returns project language."""
- project_yaml_path = os.path.join(OSS_FUZZ_DIR, 'projects', project_name,
- 'project.yaml')
- with open(project_yaml_path) as file_handle:
- content = file_handle.read()
- for line in content.splitlines():
- match = PROJECT_LANGUAGE_REGEX.match(line)
- if match:
- return match.group(1)
-
- return None
-
-
-def _add_architecture_args(parser, choices=('x86_64', 'i386')):
- """Add common architecture args."""
- parser.add_argument('--architecture', default='x86_64', choices=choices)
-
-
-def _add_engine_args(parser,
- choices=('libfuzzer', 'afl', 'honggfuzz', 'dataflow',
- 'none')):
- """Add common engine args."""
- parser.add_argument('--engine', default='libfuzzer', choices=choices)
-
-
-def _add_sanitizer_args(parser,
- choices=('address', 'memory', 'undefined', 'coverage',
- 'dataflow', 'thread')):
- """Add common sanitizer args."""
+def _add_sanitizer_args(parser, choices=None):
+ """Adds common sanitizer args."""
+ if choices is None:
+ choices = constants.SANITIZERS
parser.add_argument(
'--sanitizer',
default=None,
@@ -359,47 +458,50 @@ def _add_sanitizer_args(parser,
def _add_environment_args(parser):
- """Add common environment args."""
+ """Adds common environment args."""
parser.add_argument('-e',
action='append',
help="set environment variable e.g. VAR=value")
-def build_image_impl(image_name, no_cache=False, pull=False):
- """Build image."""
+def build_image_impl(project, cache=True, pull=False):
+ """Builds image."""
+ image_name = project.name
- proj_is_base_image = is_base_image(image_name)
- if proj_is_base_image:
+ if is_base_image(image_name):
image_project = 'oss-fuzz-base'
- dockerfile_dir = os.path.join('infra', 'base-images', image_name)
+ docker_build_dir = os.path.join(OSS_FUZZ_DIR, 'infra', 'base-images',
+ image_name)
+ dockerfile_path = os.path.join(docker_build_dir, 'Dockerfile')
else:
- image_project = 'oss-fuzz'
- if not check_project_exists(image_name):
+ if not check_project_exists(project):
return False
+ dockerfile_path = project.dockerfile_path
+ docker_build_dir = project.path
+ image_project = 'oss-fuzz'
- dockerfile_dir = os.path.join('projects', image_name)
+ if pull and not pull_images(project.language):
+ return False
build_args = []
- if no_cache:
+ if not cache:
build_args.append('--no-cache')
build_args += [
- '-t', 'gcr.io/%s/%s' % (image_project, image_name), dockerfile_dir
+ '-t',
+ 'gcr.io/%s/%s' % (image_project, image_name), '--file', dockerfile_path
]
-
- return docker_build(build_args, pull=pull)
+ build_args.append(docker_build_dir)
+ return docker_build(build_args)
def _env_to_docker_args(env_list):
- """Turn envirnoment variable list into docker arguments."""
+  """Turns environment variable list into docker arguments."""
return sum([['-e', v] for v in env_list], [])
-WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
-
-
def workdir_from_lines(lines, default='/src'):
- """Get the WORKDIR from the given lines."""
+ """Gets the WORKDIR from the given lines."""
for line in reversed(lines): # reversed to get last WORKDIR.
match = re.match(WORKDIR_REGEX, line)
if match:
@@ -414,18 +516,16 @@ def workdir_from_lines(lines, default='/src'):
return default
-def _workdir_from_dockerfile(project_name):
- """Parse WORKDIR from the Dockerfile for the given project."""
- dockerfile_path = get_dockerfile_path(project_name)
-
- with open(dockerfile_path) as file_handle:
+def _workdir_from_dockerfile(project):
+ """Parses WORKDIR from the Dockerfile for the given project."""
+ with open(project.dockerfile_path) as file_handle:
lines = file_handle.readlines()
- return workdir_from_lines(lines, default=os.path.join('/src', project_name))
+ return workdir_from_lines(lines, default=os.path.join('/src', project.name))
def docker_run(run_args, print_output=True):
- """Call `docker run`."""
+ """Calls `docker run`."""
command = ['docker', 'run', '--rm', '--privileged']
# Support environments with a TTY.
@@ -434,32 +534,29 @@ def docker_run(run_args, print_output=True):
command.extend(run_args)
- print('Running:', _get_command_string(command))
+ logging.info('Running: %s.', _get_command_string(command))
stdout = None
if not print_output:
stdout = open(os.devnull, 'w')
try:
subprocess.check_call(command, stdout=stdout, stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as error:
- return error.returncode
+ except subprocess.CalledProcessError:
+ return False
- return 0
+ return True
-def docker_build(build_args, pull=False):
- """Call `docker build`."""
+def docker_build(build_args):
+ """Calls `docker build`."""
command = ['docker', 'build']
- if pull:
- command.append('--pull')
-
command.extend(build_args)
- print('Running:', _get_command_string(command))
+ logging.info('Running: %s.', _get_command_string(command))
try:
subprocess.check_call(command)
except subprocess.CalledProcessError:
- print('docker build failed.', file=sys.stderr)
+ logging.error('Docker build failed.')
return False
return True
@@ -468,22 +565,22 @@ def docker_build(build_args, pull=False):
def docker_pull(image):
"""Call `docker pull`."""
command = ['docker', 'pull', image]
- print('Running:', _get_command_string(command))
+  logging.info('Running: %s.', _get_command_string(command))
try:
subprocess.check_call(command)
except subprocess.CalledProcessError:
- print('docker pull failed.', file=sys.stderr)
+ logging.error('Docker pull failed.')
return False
return True
def build_image(args):
- """Build docker image."""
+ """Builds docker image."""
if args.pull and args.no_pull:
- print('Incompatible arguments --pull and --no-pull.')
- return 1
+ logging.error('Incompatible arguments --pull and --no-pull.')
+ return False
if args.pull:
pull = True
@@ -494,89 +591,74 @@ def build_image(args):
pull = y_or_n.lower() == 'y'
if pull:
- print('Pulling latest base images...')
+ logging.info('Pulling latest base images...')
else:
- print('Using cached base images...')
+ logging.info('Using cached base images...')
# If build_image is called explicitly, don't use cache.
- if build_image_impl(args.project_name, no_cache=True, pull=pull):
- return 0
+ if build_image_impl(args.project, cache=args.cache, pull=pull):
+ return True
- return 1
+ return False
def build_fuzzers_impl( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
- project_name,
+ project,
clean,
engine,
sanitizer,
architecture,
env_to_add,
source_path,
- no_cache=False,
- mount_location=None):
- """Build fuzzers."""
- if not build_image_impl(project_name, no_cache=no_cache):
- return 1
-
- project_out_dir = _get_output_dir(project_name)
- project_work_dir = _get_work_dir(project_name)
- project_language = _get_project_language(project_name)
- if not project_language:
- print('WARNING: language not specified in project.yaml. Build may fail.')
+ mount_path=None):
+ """Builds fuzzers."""
+ if not build_image_impl(project):
+ return False
if clean:
- print('Cleaning existing build artifacts.')
+ logging.info('Cleaning existing build artifacts.')
# Clean old and possibly conflicting artifacts in project's out directory.
docker_run([
'-v',
- '%s:/out' % project_out_dir, '-t',
- 'gcr.io/oss-fuzz/%s' % project_name, '/bin/bash', '-c', 'rm -rf /out/*'
+ '%s:/out' % project.out, '-t',
+ 'gcr.io/oss-fuzz/%s' % project.name, '/bin/bash', '-c', 'rm -rf /out/*'
])
docker_run([
'-v',
- '%s:/work' % project_work_dir, '-t',
- 'gcr.io/oss-fuzz/%s' % project_name, '/bin/bash', '-c', 'rm -rf /work/*'
+ '%s:/work' % project.work, '-t',
+ 'gcr.io/oss-fuzz/%s' % project.name, '/bin/bash', '-c', 'rm -rf /work/*'
])
else:
- print('Keeping existing build artifacts as-is (if any).')
+ logging.info('Keeping existing build artifacts as-is (if any).')
env = [
'FUZZING_ENGINE=' + engine,
'SANITIZER=' + sanitizer,
'ARCHITECTURE=' + architecture,
]
- if project_language:
- env.append('FUZZING_LANGUAGE=' + project_language)
+ _add_oss_fuzz_ci_if_needed(env)
+
+ if project.language:
+ env.append('FUZZING_LANGUAGE=' + project.language)
if env_to_add:
env += env_to_add
- # Copy instrumented libraries.
- if sanitizer == 'memory':
- docker_run([
- '-v',
- '%s:/work' % project_work_dir, 'gcr.io/oss-fuzz-base/msan-libs-builder',
- 'bash', '-c', 'cp -r /msan /work'
- ])
- env.append('MSAN_LIBS_PATH=' + '/work/msan')
-
command = ['--cap-add', 'SYS_PTRACE'] + _env_to_docker_args(env)
if source_path:
- workdir = _workdir_from_dockerfile(project_name)
- if mount_location:
+ workdir = _workdir_from_dockerfile(project)
+ if mount_path:
command += [
'-v',
- '%s:%s' % (_get_absolute_path(source_path), mount_location),
+ '%s:%s' % (_get_absolute_path(source_path), mount_path),
]
else:
if workdir == '/src':
- print('Cannot use local checkout with "WORKDIR: /src".',
- file=sys.stderr)
- return 1
+ logging.error('Cannot use local checkout with "WORKDIR: /src".')
+ return False
command += [
'-v',
@@ -585,50 +667,52 @@ def build_fuzzers_impl( # pylint: disable=too-many-arguments,too-many-locals,to
command += [
'-v',
- '%s:/out' % project_out_dir, '-v',
- '%s:/work' % project_work_dir, '-t',
- 'gcr.io/oss-fuzz/%s' % project_name
+ '%s:/out' % project.out, '-v',
+ '%s:/work' % project.work, '-t',
+ 'gcr.io/oss-fuzz/%s' % project.name
]
- result_code = docker_run(command)
- if result_code:
- print('Building fuzzers failed.', file=sys.stderr)
- return result_code
-
- # Patch MSan builds to use instrumented shared libraries.
- if sanitizer == 'memory':
- docker_run([
- '-v',
- '%s:/out' % project_out_dir, '-v',
- '%s:/work' % project_work_dir
- ] + _env_to_docker_args(env) + [
- 'gcr.io/oss-fuzz-base/base-sanitizer-libs-builder', 'patch_build.py',
- '/out'
- ])
+ result = docker_run(command)
+ if not result:
+ logging.error('Building fuzzers failed.')
+ return False
- return 0
+ return True
def build_fuzzers(args):
- """Build fuzzers."""
- return build_fuzzers_impl(args.project_name, args.clean, args.engine,
- args.sanitizer, args.architecture, args.e,
- args.source_path)
+ """Builds fuzzers."""
+ return build_fuzzers_impl(args.project,
+ args.clean,
+ args.engine,
+ args.sanitizer,
+ args.architecture,
+ args.e,
+ args.source_path,
+ mount_path=args.mount_path)
+
+
+def _add_oss_fuzz_ci_if_needed(env):
+ """Adds value of |OSS_FUZZ_CI| environment variable to |env| if it is set."""
+ oss_fuzz_ci = os.getenv('OSS_FUZZ_CI')
+ if oss_fuzz_ci:
+ env.append('OSS_FUZZ_CI=' + oss_fuzz_ci)
def check_build(args):
"""Checks that fuzzers in the container execute without errors."""
- if not check_project_exists(args.project_name):
- return 1
+ if not check_project_exists(args.project):
+ return False
if (args.fuzzer_name and
- not _check_fuzzer_exists(args.project_name, args.fuzzer_name)):
- return 1
+ not _check_fuzzer_exists(args.project, args.fuzzer_name)):
+ return False
- fuzzing_language = _get_project_language(args.project_name)
- if fuzzing_language is None:
- print('WARNING: language not specified in project.yaml. Defaulting to C++.')
- fuzzing_language = 'c++'
+ fuzzing_language = args.project.language
+ if not fuzzing_language:
+ fuzzing_language = constants.DEFAULT_LANGUAGE
+ logging.warning('Language not specified in project.yaml. Defaulting to %s.',
+ fuzzing_language)
env = [
'FUZZING_ENGINE=' + args.engine,
@@ -636,13 +720,12 @@ def check_build(args):
'ARCHITECTURE=' + args.architecture,
'FUZZING_LANGUAGE=' + fuzzing_language,
]
+ _add_oss_fuzz_ci_if_needed(env)
if args.e:
env += args.e
run_args = _env_to_docker_args(env) + [
- '-v',
- '%s:/out' % _get_output_dir(args.project_name), '-t',
- 'gcr.io/oss-fuzz-base/base-runner'
+ '-v', '%s:/out' % args.project.out, '-t', BASE_RUNNER_IMAGE
]
if args.fuzzer_name:
@@ -650,39 +733,45 @@ def check_build(args):
else:
run_args.append('test_all.py')
- exit_code = docker_run(run_args)
- if exit_code == 0:
- print('Check build passed.')
+ result = docker_run(run_args)
+ if result:
+ logging.info('Check build passed.')
else:
- print('Check build failed.')
+ logging.error('Check build failed.')
- return exit_code
+ return result
-def _get_fuzz_targets(project_name):
- """Return names of fuzz targest build in the project's /out directory."""
+def _get_fuzz_targets(project):
+  """Returns names of fuzz targets built in the project's /out directory."""
fuzz_targets = []
- for name in os.listdir(_get_output_dir(project_name)):
+ for name in os.listdir(project.out):
if name.startswith('afl-'):
continue
+ if name.startswith('jazzer_'):
+ continue
+ if name == 'llvm-symbolizer':
+ continue
- path = os.path.join(_get_output_dir(project_name), name)
- if os.path.isfile(path) and os.access(path, os.X_OK):
+ path = os.path.join(project.out, name)
+ # Python and JVM fuzz targets are only executable for the root user, so
+ # we can't use os.access.
+ if os.path.isfile(path) and (os.stat(path).st_mode & 0o111):
fuzz_targets.append(name)
return fuzz_targets
-def _get_latest_corpus(project_name, fuzz_target, base_corpus_dir):
- """Download the latest corpus for the given fuzz target."""
+def _get_latest_corpus(project, fuzz_target, base_corpus_dir):
+ """Downloads the latest corpus for the given fuzz target."""
corpus_dir = os.path.join(base_corpus_dir, fuzz_target)
if not os.path.exists(corpus_dir):
os.makedirs(corpus_dir)
- if not fuzz_target.startswith(project_name + '_'):
- fuzz_target = '%s_%s' % (project_name, fuzz_target)
+ if not fuzz_target.startswith(project.name + '_'):
+ fuzz_target = '%s_%s' % (project.name, fuzz_target)
- corpus_backup_url = CORPUS_BACKUP_URL_FORMAT.format(project_name=project_name,
+ corpus_backup_url = CORPUS_BACKUP_URL_FORMAT.format(project_name=project.name,
fuzz_target=fuzz_target)
command = ['gsutil', 'ls', corpus_backup_url]
@@ -693,8 +782,7 @@ def _get_latest_corpus(project_name, fuzz_target, base_corpus_dir):
# Some fuzz targets (e.g. new ones) may not have corpus yet, just skip those.
if corpus_listing.returncode:
- print('WARNING: corpus for {0} not found:\n'.format(fuzz_target),
- file=sys.stderr)
+ logging.warning('Corpus for %s not found:\n', fuzz_target)
return
if output:
@@ -708,80 +796,73 @@ def _get_latest_corpus(project_name, fuzz_target, base_corpus_dir):
os.remove(archive_path)
else:
# Sync the working corpus copy if a minimized backup is not available.
- corpus_url = CORPUS_URL_FORMAT.format(project_name=project_name,
+ corpus_url = CORPUS_URL_FORMAT.format(project_name=project.name,
fuzz_target=fuzz_target)
command = ['gsutil', '-m', '-q', 'rsync', '-R', corpus_url, corpus_dir]
subprocess.check_call(command)
def download_corpora(args):
- """Download most recent corpora from GCS for the given project."""
- if not check_project_exists(args.project_name):
- return 1
+ """Downloads most recent corpora from GCS for the given project."""
+ if not check_project_exists(args.project):
+ return False
try:
with open(os.devnull, 'w') as stdout:
subprocess.check_call(['gsutil', '--version'], stdout=stdout)
except OSError:
- print(
- 'ERROR: gsutil not found. Please install it from '
- 'https://cloud.google.com/storage/docs/gsutil_install',
- file=sys.stderr)
+ logging.error('gsutil not found. Please install it from '
+ 'https://cloud.google.com/storage/docs/gsutil_install')
return False
if args.fuzz_target:
fuzz_targets = [args.fuzz_target]
else:
- fuzz_targets = _get_fuzz_targets(args.project_name)
+ fuzz_targets = _get_fuzz_targets(args.project)
- corpus_dir = _get_corpus_dir(args.project_name)
- if not os.path.exists(corpus_dir):
- os.makedirs(corpus_dir)
+ corpus_dir = args.project.corpus
def _download_for_single_target(fuzz_target):
try:
- _get_latest_corpus(args.project_name, fuzz_target, corpus_dir)
+ _get_latest_corpus(args.project, fuzz_target, corpus_dir)
return True
except Exception as error: # pylint:disable=broad-except
- print('ERROR: corpus download for %s failed: %s' %
- (fuzz_target, str(error)),
- file=sys.stderr)
+ logging.error('Corpus download for %s failed: %s.', fuzz_target,
+ str(error))
return False
- print('Downloading corpora for %s project to %s' %
- (args.project_name, corpus_dir))
+ logging.info('Downloading corpora for %s project to %s.', args.project.name,
+ corpus_dir)
thread_pool = ThreadPool()
return all(thread_pool.map(_download_for_single_target, fuzz_targets))
def coverage(args):
- """Generate code coverage using clang source based code coverage."""
+ """Generates code coverage using clang source based code coverage."""
if args.corpus_dir and not args.fuzz_target:
- print(
- 'ERROR: --corpus-dir requires specifying a particular fuzz target '
- 'using --fuzz-target',
- file=sys.stderr)
- return 1
-
- if not check_project_exists(args.project_name):
- return 1
-
- project_language = _get_project_language(args.project_name)
- if project_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
- print(
- 'ERROR: Project is written in %s, coverage for it is not supported yet.'
- % project_language,
- file=sys.stderr)
- return 1
-
- if not args.no_corpus_download and not args.corpus_dir:
+ logging.error(
+ '--corpus-dir requires specifying a particular fuzz target using '
+ '--fuzz-target')
+ return False
+
+ if not check_project_exists(args.project):
+ return False
+
+ if args.project.language not in constants.LANGUAGES_WITH_COVERAGE_SUPPORT:
+ logging.error(
+ 'Project is written in %s, coverage for it is not supported yet.',
+ args.project.language)
+ return False
+
+ if (not args.no_corpus_download and not args.corpus_dir and
+ not args.project.is_external):
if not download_corpora(args):
- return 1
+ return False
env = [
'FUZZING_ENGINE=libfuzzer',
- 'FUZZING_LANGUAGE=%s' % project_language,
- 'PROJECT=%s' % args.project_name,
+ 'FUZZING_LANGUAGE=%s' % args.project.language,
+ 'PROJECT=%s' % args.project.name,
'SANITIZER=coverage',
'HTTP_PORT=%s' % args.port,
'COVERAGE_EXTRA_ARGS=%s' % ' '.join(args.extra_args),
@@ -797,41 +878,41 @@ def coverage(args):
if args.corpus_dir:
if not os.path.exists(args.corpus_dir):
- print('ERROR: the path provided in --corpus-dir argument does not exist',
- file=sys.stderr)
- return 1
+ logging.error('The path provided in --corpus-dir argument does not '
+ 'exist.')
+ return False
corpus_dir = os.path.realpath(args.corpus_dir)
run_args.extend(['-v', '%s:/corpus/%s' % (corpus_dir, args.fuzz_target)])
else:
- run_args.extend(['-v', '%s:/corpus' % _get_corpus_dir(args.project_name)])
+ run_args.extend(['-v', '%s:/corpus' % args.project.corpus])
run_args.extend([
'-v',
- '%s:/out' % _get_output_dir(args.project_name),
+ '%s:/out' % args.project.out,
'-t',
- 'gcr.io/oss-fuzz-base/base-runner',
+ BASE_RUNNER_IMAGE,
])
run_args.append('coverage')
if args.fuzz_target:
run_args.append(args.fuzz_target)
- exit_code = docker_run(run_args)
- if exit_code == 0:
- print('Successfully generated clang code coverage report.')
+ result = docker_run(run_args)
+ if result:
+ logging.info('Successfully generated clang code coverage report.')
else:
- print('Failed to generate clang code coverage report.')
+ logging.error('Failed to generate clang code coverage report.')
- return exit_code
+ return result
def run_fuzzer(args):
"""Runs a fuzzer in the container."""
- if not check_project_exists(args.project_name):
- return 1
+ if not check_project_exists(args.project):
+ return False
- if not _check_fuzzer_exists(args.project_name, args.fuzzer_name):
- return 1
+ if not _check_fuzzer_exists(args.project, args.fuzzer_name):
+ return False
env = [
'FUZZING_ENGINE=' + args.engine,
@@ -846,9 +927,8 @@ def run_fuzzer(args):
if args.corpus_dir:
if not os.path.exists(args.corpus_dir):
- print('ERROR: the path provided in --corpus-dir argument does not exist',
- file=sys.stderr)
- return 1
+      logging.error('The path provided in --corpus-dir argument does not exist.')
+ return False
corpus_dir = os.path.realpath(args.corpus_dir)
run_args.extend([
'-v',
@@ -858,9 +938,9 @@ def run_fuzzer(args):
run_args.extend([
'-v',
- '%s:/out' % _get_output_dir(args.project_name),
+ '%s:/out' % args.project.out,
'-t',
- 'gcr.io/oss-fuzz-base/base-runner',
+ BASE_RUNNER_IMAGE,
'run_fuzzer',
args.fuzzer_name,
] + args.fuzzer_args)
@@ -869,25 +949,25 @@ def run_fuzzer(args):
def reproduce(args):
- """Reproduce a specific test case from a specific project."""
- return reproduce_impl(args.project_name, args.fuzzer_name, args.valgrind,
- args.e, args.fuzzer_args, args.testcase_path)
+ """Reproduces a specific test case from a specific project."""
+ return reproduce_impl(args.project, args.fuzzer_name, args.valgrind, args.e,
+ args.fuzzer_args, args.testcase_path)
def reproduce_impl( # pylint: disable=too-many-arguments
- project_name,
+ project,
fuzzer_name,
valgrind,
env_to_add,
fuzzer_args,
testcase_path,
- runner=docker_run,
- err_result=1):
+ run_function=docker_run,
+ err_result=False):
"""Reproduces a testcase in the container."""
- if not check_project_exists(project_name):
+ if not check_project_exists(project):
return err_result
- if not _check_fuzzer_exists(project_name, fuzzer_name):
+ if not _check_fuzzer_exists(project, fuzzer_name):
return err_result
debugger = ''
@@ -906,7 +986,7 @@ def reproduce_impl( # pylint: disable=too-many-arguments
run_args = _env_to_docker_args(env) + [
'-v',
- '%s:/out' % _get_output_dir(project_name),
+ '%s:/out' % project.out,
'-v',
'%s:/testcase' % _get_absolute_path(testcase_path),
'-t',
@@ -916,55 +996,109 @@ def reproduce_impl( # pylint: disable=too-many-arguments
'-runs=100',
] + fuzzer_args
- return runner(run_args)
+ return run_function(run_args)
-def generate(args):
- """Generate empty project files."""
- if len(args.project_name) > MAX_PROJECT_NAME_LENGTH:
- print('Project name needs to be less than or equal to %d characters.' %
- MAX_PROJECT_NAME_LENGTH,
- file=sys.stderr)
- return 1
+def _validate_project_name(project_name):
+ """Validates |project_name| is a valid OSS-Fuzz project name."""
+ if len(project_name) > MAX_PROJECT_NAME_LENGTH:
+ logging.error(
+ 'Project name needs to be less than or equal to %d characters.',
+ MAX_PROJECT_NAME_LENGTH)
+ return False
- if not VALID_PROJECT_NAME_REGEX.match(args.project_name):
- print('Invalid project name.', file=sys.stderr)
- return 1
+ if not VALID_PROJECT_NAME_REGEX.match(project_name):
+ logging.info('Invalid project name: %s.', project_name)
+ return False
+
+ return True
- directory = os.path.join('projects', args.project_name)
+def _validate_language(language):
+ if not LANGUAGE_REGEX.match(language):
+ logging.error('Invalid project language %s.', language)
+ return False
+
+ return True
+
+
+def _create_build_integration_directory(directory):
+ """Returns True on successful creation of a build integration directory.
+ Suitable for OSS-Fuzz and external projects."""
try:
- os.mkdir(directory)
+ os.makedirs(directory)
except OSError as error:
if error.errno != errno.EEXIST:
raise
- print(directory, 'already exists.', file=sys.stderr)
- return 1
+ logging.error('%s already exists.', directory)
+ return False
+ return True
- print('Writing new files to', directory)
- template_args = {
- 'project_name': args.project_name,
- 'year': datetime.datetime.now().year
- }
- with open(os.path.join(directory, 'project.yaml'), 'w') as file_handle:
- file_handle.write(templates.PROJECT_YAML_TEMPLATE % template_args)
+def _template_project_file(filename, template, template_args, directory):
+ """Templates |template| using |template_args| and writes the result to
+ |directory|/|filename|. Sets the file to executable if |filename| is
+ build.sh."""
+ file_path = os.path.join(directory, filename)
+ with open(file_path, 'w') as file_handle:
+ file_handle.write(template % template_args)
+
+ if filename == 'build.sh':
+ os.chmod(file_path, 0o755)
+
+
+def generate(args):
+ """Generates empty project files."""
+ return _generate_impl(args.project, args.language)
+
- with open(os.path.join(directory, 'Dockerfile'), 'w') as file_handle:
- file_handle.write(templates.DOCKER_TEMPLATE % template_args)
+def _get_current_datetime():
+ """Returns this year. Needed for mocking."""
+ return datetime.datetime.now()
- build_sh_path = os.path.join(directory, 'build.sh')
- with open(build_sh_path, 'w') as file_handle:
- file_handle.write(templates.BUILD_TEMPLATE % template_args)
- os.chmod(build_sh_path, 0o755)
- return 0
+def _base_builder_from_language(language):
+ """Returns the base builder for the specified language."""
+ if language not in LANGUAGES_WITH_BUILDER_IMAGES:
+ return 'base-builder'
+ return 'base-builder-{language}'.format(language=language)
+
+
+def _generate_impl(project, language):
+ """Implementation of generate(). Useful for testing."""
+ if project.is_external:
+ # External project.
+ project_templates = templates.EXTERNAL_TEMPLATES
+ else:
+ # Internal project.
+ if not _validate_project_name(project.name):
+ return False
+ project_templates = templates.TEMPLATES
+
+ if not _validate_language(language):
+ return False
+
+ directory = project.build_integration_path
+ if not _create_build_integration_directory(directory):
+ return False
+
+ logging.info('Writing new files to: %s.', directory)
+
+ template_args = {
+ 'project_name': project.name,
+ 'base_builder': _base_builder_from_language(language),
+ 'language': language,
+ 'year': _get_current_datetime().year
+ }
+ for filename, template in project_templates.items():
+ _template_project_file(filename, template, template_args, directory)
+ return True
def shell(args):
"""Runs a shell within a docker image."""
- if not build_image_impl(args.project_name):
- return 1
+ if not build_image_impl(args.project):
+ return False
env = [
'FUZZING_ENGINE=' + args.engine,
@@ -972,18 +1106,18 @@ def shell(args):
'ARCHITECTURE=' + args.architecture,
]
- if args.project_name != 'base-runner-debug':
- env.append('FUZZING_LANGUAGE=' + _get_project_language(args.project_name))
+ if args.project.name != 'base-runner-debug':
+ env.append('FUZZING_LANGUAGE=' + args.project.language)
if args.e:
env += args.e
- if is_base_image(args.project_name):
+ if is_base_image(args.project.name):
image_project = 'oss-fuzz-base'
- out_dir = _get_output_dir()
+ out_dir = _get_out_dir()
else:
image_project = 'oss-fuzz'
- out_dir = _get_output_dir(args.project_name)
+ out_dir = args.project.out
run_args = _env_to_docker_args(env)
if args.source_path:
@@ -995,21 +1129,25 @@ def shell(args):
run_args.extend([
'-v',
'%s:/out' % out_dir, '-v',
- '%s:/work' % _get_work_dir(args.project_name), '-t',
- 'gcr.io/%s/%s' % (image_project, args.project_name), '/bin/bash'
+ '%s:/work' % args.project.work, '-t',
+ 'gcr.io/%s/%s' % (image_project, args.project.name), '/bin/bash'
])
docker_run(run_args)
- return 0
+ return True
-def pull_images(_):
- """Pull base images."""
- for base_image in BASE_IMAGES:
- if not docker_pull(base_image):
- return 1
+def pull_images(language=None):
+ """Pulls base images used to build projects in language lang (or all if lang
+ is None)."""
+ for base_image_lang, base_images in BASE_IMAGES.items():
+ if (language is None or base_image_lang == 'generic' or
+ base_image_lang == language):
+ for base_image in base_images:
+ if not docker_pull(base_image):
+ return False
- return 0
+ return True
if __name__ == '__main__':
diff --git a/infra/helper_test.py b/infra/helper_test.py
index d899a835b..951eba47a 100644
--- a/infra/helper_test.py
+++ b/infra/helper_test.py
@@ -13,23 +13,224 @@
# limitations under the License.
"""Tests for helper.py"""
+import datetime
+import os
+import tempfile
import unittest
from unittest import mock
+from pyfakefs import fake_filesystem_unittest
+
+import constants
import helper
+import templates
+
+# pylint: disable=no-self-use,protected-access
-class TestShell(unittest.TestCase):
+class ShellTest(unittest.TestCase):
"""Tests 'shell' command."""
@mock.patch('helper.docker_run')
@mock.patch('helper.build_image_impl')
- def test_base_runner_debug(self, mocked_build_image_impl, _):
+ def test_base_runner_debug(self, _, __):
"""Tests that shell base-runner-debug works as intended."""
image_name = 'base-runner-debug'
unparsed_args = ['shell', image_name]
- args = helper.parse_args(unparsed_args)
+ parser = helper.get_parser()
+ args = helper.parse_args(parser, unparsed_args)
args.sanitizer = 'address'
result = helper.shell(args)
- mocked_build_image_impl.assert_called_with(image_name)
- self.assertEqual(result, 0)
+ self.assertTrue(result)
+
+
+class BuildImageImplTest(unittest.TestCase):
+ """Tests for build_image_impl."""
+
+ @mock.patch('helper.docker_build')
+ def test_no_cache(self, mock_docker_build):
+ """Tests that cache=False is handled properly."""
+ image_name = 'base-image'
+ helper.build_image_impl(helper.Project(image_name), cache=False)
+ self.assertIn('--no-cache', mock_docker_build.call_args_list[0][0][0])
+
+ @mock.patch('helper.docker_build')
+ @mock.patch('helper.pull_images')
+ def test_pull(self, mock_pull_images, _):
+ """Tests that pull=True is handled properly."""
+ image_name = 'base-image'
+ project = helper.Project(image_name, is_external=True)
+ self.assertTrue(helper.build_image_impl(project, pull=True))
+ mock_pull_images.assert_called_with('c++')
+
+ @mock.patch('helper.docker_build')
+ def test_base_image(self, mock_docker_build):
+ """Tests that build_image_impl works as intended with a base-image."""
+ image_name = 'base-image'
+ self.assertTrue(helper.build_image_impl(helper.Project(image_name)))
+ build_dir = os.path.join(helper.OSS_FUZZ_DIR,
+ 'infra/base-images/base-image')
+ mock_docker_build.assert_called_with([
+ '-t', 'gcr.io/oss-fuzz-base/base-image', '--file',
+ os.path.join(build_dir, 'Dockerfile'), build_dir
+ ])
+
+ @mock.patch('helper.docker_build')
+ def test_oss_fuzz_project(self, mock_docker_build):
+ """Tests that build_image_impl works as intended with an OSS-Fuzz
+ project."""
+ project_name = 'example'
+ self.assertTrue(helper.build_image_impl(helper.Project(project_name)))
+ build_dir = os.path.join(helper.OSS_FUZZ_DIR, 'projects', project_name)
+ mock_docker_build.assert_called_with([
+ '-t', 'gcr.io/oss-fuzz/example', '--file',
+ os.path.join(build_dir, 'Dockerfile'), build_dir
+ ])
+
+ @mock.patch('helper.docker_build')
+ def test_external_project(self, mock_docker_build):
+ """Tests that build_image_impl works as intended with a non-OSS-Fuzz
+ project."""
+ with tempfile.TemporaryDirectory() as temp_dir:
+ project_src_path = os.path.join(temp_dir, 'example')
+ os.mkdir(project_src_path)
+ build_integration_path = 'build-integration'
+ project = helper.Project(project_src_path,
+ is_external=True,
+ build_integration_path=build_integration_path)
+ self.assertTrue(helper.build_image_impl(project))
+ mock_docker_build.assert_called_with([
+ '-t', 'gcr.io/oss-fuzz/example', '--file',
+ os.path.join(project_src_path, build_integration_path, 'Dockerfile'),
+ project_src_path
+ ])
+
+
+class GenerateImplTest(fake_filesystem_unittest.TestCase):
+ """Tests for _generate_impl."""
+ PROJECT_NAME = 'newfakeproject'
+ PROJECT_LANGUAGE = 'python'
+
+ def setUp(self):
+ self.setUpPyfakefs()
+ self.fs.add_real_directory(helper.OSS_FUZZ_DIR)
+
+ def _verify_templated_files(self, template_dict, directory, language):
+ template_args = {
+ 'project_name': self.PROJECT_NAME,
+ 'year': 2021,
+ 'base_builder': helper._base_builder_from_language(language),
+ 'language': language,
+ }
+ for filename, template in template_dict.items():
+ file_path = os.path.join(directory, filename)
+ with open(file_path, 'r') as file_handle:
+ contents = file_handle.read()
+ self.assertEqual(contents, template % template_args)
+
+ @mock.patch('helper._get_current_datetime',
+ return_value=datetime.datetime(year=2021, month=1, day=1))
+ def test_generate_oss_fuzz_project(self, _):
+ """Tests that the correct files are generated for an OSS-Fuzz project."""
+ helper._generate_impl(helper.Project(self.PROJECT_NAME),
+ self.PROJECT_LANGUAGE)
+ self._verify_templated_files(
+ templates.TEMPLATES,
+ os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
+ self.PROJECT_LANGUAGE)
+
+ def test_generate_external_project(self):
+ """Tests that the correct files are generated for a non-OSS-Fuzz project."""
+ build_integration_path = '/newfakeproject/build-integration'
+ helper._generate_impl(
+ helper.Project('/newfakeproject/',
+ is_external=True,
+ build_integration_path=build_integration_path),
+ self.PROJECT_LANGUAGE)
+ self._verify_templated_files(templates.EXTERNAL_TEMPLATES,
+ build_integration_path, self.PROJECT_LANGUAGE)
+
+ def test_generate_swift_project(self):
+ """Tests that the swift project uses the correct base image."""
+ helper._generate_impl(helper.Project(self.PROJECT_NAME), 'swift')
+ self._verify_templated_files(
+ templates.TEMPLATES,
+ os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
+ 'swift')
+
+
+class ProjectTest(fake_filesystem_unittest.TestCase):
+ """Tests for Project class."""
+
+ def setUp(self):
+ self.project_name = 'project'
+ self.internal_project = helper.Project(self.project_name)
+ self.external_project_path = os.path.join('/path', 'to', self.project_name)
+ self.external_project = helper.Project(self.external_project_path,
+ is_external=True)
+ self.setUpPyfakefs()
+
+ def test_init_external_project(self):
+ """Tests __init__ method for external projects."""
+ self.assertEqual(self.external_project.name, self.project_name)
+ self.assertEqual(self.external_project.path, self.external_project_path)
+ self.assertEqual(
+ self.external_project.build_integration_path,
+ os.path.join(self.external_project_path,
+ constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH))
+
+ def test_init_internal_project(self):
+ """Tests __init__ method for internal projects."""
+ self.assertEqual(self.internal_project.name, self.project_name)
+ path = os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name)
+ self.assertEqual(self.internal_project.path, path)
+ self.assertEqual(self.internal_project.build_integration_path, path)
+
+ def test_dockerfile_path_internal_project(self):
+ """Tests that dockerfile_path works as intended."""
+ self.assertEqual(
+ self.internal_project.dockerfile_path,
+ os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name,
+ 'Dockerfile'))
+
+ def test_dockerfile_path_external_project(self):
+ """Tests that dockerfile_path works as intended."""
+ self.assertEqual(
+ self.external_project.dockerfile_path,
+ os.path.join(self.external_project_path,
+ constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH,
+ 'Dockerfile'))
+
+ def test_out(self):
+ """Tests that out works as intended."""
+ out_dir = self.internal_project.out
+ self.assertEqual(
+ out_dir,
+ os.path.join(helper.OSS_FUZZ_DIR, 'build', 'out', self.project_name))
+ self.assertTrue(os.path.exists(out_dir))
+
+ def test_work(self):
+ """Tests that work works as intended."""
+ work_dir = self.internal_project.work
+ self.assertEqual(
+ work_dir,
+ os.path.join(helper.OSS_FUZZ_DIR, 'build', 'work', self.project_name))
+ self.assertTrue(os.path.exists(work_dir))
+
+ def test_corpus(self):
+ """Tests that corpus works as intended."""
+ corpus_dir = self.internal_project.corpus
+ self.assertEqual(
+ corpus_dir,
+ os.path.join(helper.OSS_FUZZ_DIR, 'build', 'corpus', self.project_name))
+ self.assertTrue(os.path.exists(corpus_dir))
+
+ def test_language_internal_project(self):
+ """Tests that language works as intended for an internal project."""
+ project_yaml_path = os.path.join(self.internal_project.path, 'project.yaml')
+ self.fs.create_file(project_yaml_path, contents='language: python')
+ self.assertEqual(self.internal_project.language, 'python')
+
+ def test_language_external_project(self):
+ """Tests that language works as intended for an external project."""
+ self.assertEqual(self.external_project.language, 'c++')
diff --git a/infra/presubmit.py b/infra/presubmit.py
index 90b4f90ac..6db1862be 100755
--- a/infra/presubmit.py
+++ b/infra/presubmit.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright 2020 Google LLC.
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
# limitations under the License.
#
################################################################################
-"""Check code for common issues before submitting."""
+"""Checks code for common issues before submitting."""
import argparse
import os
@@ -23,6 +23,8 @@ import sys
import unittest
import yaml
+import constants
+
_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -62,8 +64,8 @@ def _check_one_lib_fuzzing_engine(build_sh_file):
def check_lib_fuzzing_engine(paths):
- """Call _check_one_lib_fuzzing_engine on each path in |paths|. Return True if
- the result of every call is True."""
+ """Calls _check_one_lib_fuzzing_engine on each path in |paths|. Returns True
+ if the result of every call is True."""
return all([_check_one_lib_fuzzing_engine(path) for path in paths])
@@ -73,9 +75,9 @@ class ProjectYamlChecker:
# Sections in a project.yaml and the constant values that they are allowed
# to have.
SECTIONS_AND_CONSTANTS = {
- 'sanitizers': {'address', 'none', 'memory', 'undefined', 'dataflow'},
- 'architectures': {'i386', 'x86_64'},
- 'fuzzing_engines': {'afl', 'libfuzzer', 'honggfuzz', 'dataflow', 'none'},
+ 'sanitizers': constants.SANITIZERS,
+ 'architectures': constants.ARCHITECTURES,
+ 'fuzzing_engines': constants.ENGINES,
}
# Note: this list must be updated when we allow new sections.
@@ -100,15 +102,6 @@ class ProjectYamlChecker:
'view_restrictions',
]
- LANGUAGES_SUPPORTED = [
- 'c',
- 'c++',
- 'go',
- 'jvm',
- 'python',
- 'rust',
- ]
-
# Note that some projects like boost only have auto-ccs. However, forgetting
# primary contact is probably a mistake.
REQUIRED_SECTIONS = ['primary_contact', 'main_repo']
@@ -121,7 +114,7 @@ class ProjectYamlChecker:
self.success = True
def do_checks(self):
- """Do all project.yaml checks. Return True if they pass."""
+ """Does all project.yaml checks. Returns True if they pass."""
if self.is_disabled():
return True
@@ -131,23 +124,43 @@ class ProjectYamlChecker:
self.check_valid_section_names,
self.check_valid_emails,
self.check_valid_language,
+ self.check_dataflow,
]
for check_function in checks:
check_function()
return self.success
def is_disabled(self):
- """Is this project disabled."""
+ """Returns True if this project is disabled."""
return self.data.get('disabled', False)
def error(self, message):
- """Print an error message and set self.success to False."""
+ """Prints an error message and sets self.success to False."""
self.success = False
print('Error in {filename}: {message}'.format(filename=self.filename,
message=message))
+ def check_dataflow(self):
+ """Checks that if "dataflow" is specified in "fuzzing_engines", it is also
+ specified in "sanitizers", and that if specified in "sanitizers", it is also
+ specified in "fuzzing_engines". Returns True if this condition is met."""
+ engines = self.data.get('fuzzing_engines', [])
+ dfsan_engines = 'dataflow' in engines
+ sanitizers = self.data.get('sanitizers', [])
+ dfsan_sanitizers = 'dataflow' in sanitizers
+
+ if dfsan_engines and not dfsan_sanitizers:
+ self.error('"dataflow" only specified in "fuzzing_engines" must also be '
+ 'specified in "sanitizers" or in neither.')
+ return
+
+ if dfsan_sanitizers and not dfsan_engines:
+ self.error('"dataflow" only specified in "sanitizers" must also be '
+ 'specified in "fuzzing_engines" or in neither.')
+ return
+
def check_project_yaml_constants(self):
- """Check that certain sections only have certain constant values."""
+ """Returns True if certain sections only have certain constant values."""
for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
if section not in self.data:
continue
@@ -172,20 +185,20 @@ class ProjectYamlChecker:
self.error('Not allowed value in the project.yaml: ' + str(constant))
def check_valid_section_names(self):
- """Check that only valid sections are included."""
+ """Returns True if all section names are valid."""
for name in self.data:
if name not in self.VALID_SECTION_NAMES:
self.error('{name} is not a valid section name ({valid_names})'.format(
name=name, valid_names=self.VALID_SECTION_NAMES))
def check_required_sections(self):
- """Check that all required sections are present."""
+ """Returns True if all required sections are in |self.data|."""
for section in self.REQUIRED_SECTIONS:
if section not in self.data:
self.error(section + ' section is missing.')
def check_valid_emails(self):
- """Check that emails are valid looking."""
+ """Returns True if emails are valid looking."""
# Get email addresses.
email_addresses = []
primary_contact = self.data.get('primary_contact')
@@ -201,18 +214,18 @@ class ProjectYamlChecker:
self.error(email_address + ' is an invalid email address.')
def check_valid_language(self):
- """Check that the language is specified and valid."""
+ """Returns True if the language is specified and valid."""
language = self.data.get('language')
if not language:
self.error('Missing "language" attribute in project.yaml.')
- elif language not in self.LANGUAGES_SUPPORTED:
+ elif language not in constants.LANGUAGES:
self.error(
'"language: {language}" is not supported ({supported}).'.format(
- language=language, supported=self.LANGUAGES_SUPPORTED))
+ language=language, supported=constants.LANGUAGES))
def _check_one_project_yaml(project_yaml_filename):
- """Do checks on the project.yaml file."""
+ """Does checks on the project.yaml file. Returns True on success."""
if not _is_project_file(project_yaml_filename, 'project.yaml'):
return True
@@ -221,13 +234,13 @@ def _check_one_project_yaml(project_yaml_filename):
def check_project_yaml(paths):
- """Call _check_one_project_yaml on each path in |paths|. Return True if
- the result of every call is True."""
+ """Calls _check_one_project_yaml on each path in |paths|. Returns True if the
+ result of every call is True."""
return all([_check_one_project_yaml(path) for path in paths])
def do_checks(changed_files):
- """Run all presubmit checks return False if any fails."""
+ """Runs all presubmit checks. Returns False if any fails."""
checks = [
check_license, yapf, lint, check_project_yaml, check_lib_fuzzing_engine
]
@@ -245,6 +258,7 @@ _CHECK_LICENSE_EXTENSIONS = [
'.cc',
'.cpp',
'.css',
+ '.Dockerfile',
'.h',
'.htm',
'.html',
@@ -253,17 +267,21 @@ _CHECK_LICENSE_EXTENSIONS = [
'.py',
'.sh',
]
+THIRD_PARTY_DIR_NAME = 'third_party'
_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'
def check_license(paths):
- """Validate license header."""
+ """Validates license header."""
if not paths:
return True
success = True
for path in paths:
+ path_parts = str(path).split(os.sep)
+ if any(path_part == THIRD_PARTY_DIR_NAME for path_part in path_parts):
+ continue
filename = os.path.basename(path)
extension = os.path.splitext(path)[1]
if (filename not in _CHECK_LICENSE_FILENAMES and
@@ -279,7 +297,7 @@ def check_license(paths):
def bool_to_returncode(success):
- """Return 0 if |success|. Otherwise return 1."""
+ """Returns 0 if |success|. Otherwise returns 1."""
if success:
print('Success.')
return 0
@@ -294,7 +312,7 @@ def is_nonfuzzer_python(path):
def lint(_=None):
- """Run python's linter on infra. Return False if it fails linting."""
+ """Runs python's linter on infra. Returns False if it fails linting."""
command = ['python3', '-m', 'pylint', '-j', '0', 'infra']
returncode = subprocess.run(command, check=False).returncode
@@ -302,9 +320,9 @@ def lint(_=None):
def yapf(paths, validate=True):
- """Do yapf on |path| if it is Python file. Only validates format if
- |validate| otherwise, formats the file. Returns False if validation
- or formatting fails."""
+ """Does yapf on |path| if it is Python file. Only validates format if
+ |validate|. Otherwise, formats the file. Returns False if validation or
+ formatting fails."""
paths = [path for path in paths if is_nonfuzzer_python(path)]
if not paths:
return True
@@ -318,9 +336,9 @@ def yapf(paths, validate=True):
def get_changed_files():
- """Return a list of absolute paths of files changed in this git branch."""
+ """Returns a list of absolute paths of files changed in this git branch."""
branch_commit_hash = subprocess.check_output(
- ['git', 'merge-base', 'FETCH_HEAD', 'origin/HEAD']).strip().decode()
+ ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()
diff_commands = [
# Return list of modified files in the commits on this branch.
@@ -354,9 +372,9 @@ def run_build_tests():
def run_nonbuild_tests(parallel):
- """Run all tests but build tests. Do it in parallel if |parallel|. The reason
- why we exclude build tests is because they use an emulator that prevents them
- from being used in parallel."""
+ """Runs all tests but build tests. Does them in parallel if |parallel|. The
+ reason why we exclude build tests is because they use an emulator that
+ prevents them from being used in parallel."""
# We look for all project directories because otherwise pytest won't run tests
# that are not in valid modules (e.g. "base-images").
relevant_dirs = set()
@@ -369,21 +387,34 @@ def run_nonbuild_tests(parallel):
# pass directories to pytest.
command = [
'pytest',
- # Test errors with error: "ModuleNotFoundError: No module named 'apt'.
- '--ignore-glob=infra/base-images/base-sanitizer-libs-builder/*',
'--ignore-glob=infra/build/*',
]
if parallel:
command.extend(['-n', 'auto'])
command += list(relevant_dirs)
print('Running non-build tests.')
- return subprocess.run(command, check=False).returncode == 0
+ # TODO(metzman): Get rid of this once config_utils stops using it.
+ env = os.environ.copy()
+ env['CIFUZZ_TEST'] = '1'
+
+ return subprocess.run(command, check=False, env=env).returncode == 0
-def run_tests(_=None, parallel=False):
+
+def run_tests(_=None, parallel=False, build_tests=True, nonbuild_tests=True):
"""Runs all unit tests."""
- nonbuild_success = run_nonbuild_tests(parallel)
- build_success = run_build_tests()
+ build_success = True
+ nonbuild_success = True
+ if nonbuild_tests:
+ nonbuild_success = run_nonbuild_tests(parallel)
+ else:
+ print('Skipping nonbuild tests as specified.')
+
+ if build_tests:
+ build_success = run_build_tests()
+ else:
+ print('Skipping build tests as specified.')
+
return nonbuild_success and build_success
@@ -411,6 +442,17 @@ def main():
action='store_true',
help='Run tests in parallel.',
default=False)
+ parser.add_argument('-s',
+ '--skip-build-tests',
+ action='store_true',
+ help='Skip build tests which are slow and must run '
+ 'sequentially.',
+ default=False)
+ parser.add_argument('-n',
+ '--skip-nonbuild-tests',
+ action='store_true',
+ help='Only do build tests.',
+ default=False)
args = parser.parse_args()
if args.all_files:
@@ -434,7 +476,10 @@ def main():
return bool_to_returncode(success)
if args.command == 'infra-tests':
- success = run_tests(relevant_files, parallel=args.parallel)
+ success = run_tests(relevant_files,
+ parallel=args.parallel,
+ build_tests=(not args.skip_build_tests),
+ nonbuild_tests=(not args.skip_nonbuild_tests))
return bool_to_returncode(success)
# Do all the checks (but no tests).
diff --git a/infra/pytest.ini b/infra/pytest.ini
index d9bb3737e..2a10272e2 100644
--- a/infra/pytest.ini
+++ b/infra/pytest.ini
@@ -1,2 +1,3 @@
[pytest]
-python_files = *_test.py \ No newline at end of file
+python_files = *_test.py
+log_cli = true \ No newline at end of file
diff --git a/infra/repo_manager.py b/infra/repo_manager.py
index a0b97b3ef..07880d81a 100644
--- a/infra/repo_manager.py
+++ b/infra/repo_manager.py
@@ -135,7 +135,7 @@ class RepoManager:
check_result=True)
self.git(['remote', 'update'], check_result=True)
- def get_commit_list(self, newest_commit, oldest_commit=None):
+ def get_commit_list(self, newest_commit, oldest_commit=None, limit=None):
"""Gets the list of commits(inclusive) between the old and new commits.
Args:
@@ -162,7 +162,11 @@ class RepoManager:
else:
commit_range = newest_commit
- out, _, err_code = self.git(['rev-list', commit_range])
+ limit_args = []
+ if limit:
+ limit_args.append(f'--max-count={limit}')
+
+ out, _, err_code = self.git(['rev-list', commit_range] + limit_args)
commits = out.split('\n')
commits = [commit for commit in commits if commit]
if err_code or not commits:
diff --git a/infra/retry.py b/infra/retry.py
index 1a94180c6..1f6d54b8d 100644
--- a/infra/retry.py
+++ b/infra/retry.py
@@ -56,9 +56,9 @@ def wrap(retries,
"""Handle retry."""
if (exception is None or
isinstance(exception, exception_type)) and num_try < tries:
- logging.log('Retrying on %s failed with %s. Retrying again.',
- function_with_type,
- sys.exc_info()[1])
+ logging.info('Retrying on %s failed with %s. Retrying again.',
+ function_with_type,
+ sys.exc_info()[1])
sleep(get_delay(num_try, delay, backoff))
return True
diff --git a/infra/run_fuzzers.Dockerfile b/infra/run_fuzzers.Dockerfile
index b00bb12b9..8c8d7bb1b 100644
--- a/infra/run_fuzzers.Dockerfile
+++ b/infra/run_fuzzers.Dockerfile
@@ -13,7 +13,8 @@
# limitations under the License.
#
################################################################################
-# Docker image to run the CIFuzz action run_fuzzers in.
+# Docker image for running fuzzers on CIFuzz (the run_fuzzers action on GitHub
+# actions).
FROM gcr.io/oss-fuzz-base/cifuzz-base
@@ -22,5 +23,9 @@ FROM gcr.io/oss-fuzz-base/cifuzz-base
# just expand to '/opt/oss-fuzz'.
ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/run_fuzzers_entrypoint.py"]
+WORKDIR ${OSS_FUZZ_ROOT}/infra
+
# Copy infra source code.
-ADD . ${OSS_FUZZ_ROOT}/infra \ No newline at end of file
+ADD . ${OSS_FUZZ_ROOT}/infra
+
+RUN python3 -m pip install -r ${OSS_FUZZ_ROOT}/infra/cifuzz/requirements.txt
diff --git a/infra/templates.py b/infra/templates.py
index f16da924f..3db291453 100755
--- a/infra/templates.py
+++ b/infra/templates.py
@@ -17,7 +17,7 @@
PROJECT_YAML_TEMPLATE = """\
homepage: "<your_project_homepage>"
-language: <programming_language> # Example values: c, c++, go, rust.
+language: %(language)s
primary_contact: "<primary_contact_email>"
main_repo: "https://path/to/main/repo.git"
"""
@@ -39,13 +39,21 @@ DOCKER_TEMPLATE = """\
#
################################################################################
-FROM gcr.io/oss-fuzz-base/base-builder
+FROM gcr.io/oss-fuzz-base/%(base_builder)s
RUN apt-get update && apt-get install -y make autoconf automake libtool
RUN git clone --depth 1 <git_url> %(project_name)s # or use other version control
WORKDIR %(project_name)s
COPY build.sh $SRC/
"""
+EXTERNAL_DOCKER_TEMPLATE = """\
+FROM gcr.io/oss-fuzz-base/%(base_builder)s:v1
+RUN apt-get update && apt-get install -y make autoconf automake libtool
+COPY . $SRC/%(project_name)s
+WORKDIR %(project_name)s
+COPY .clusterfuzzlite/build.sh $SRC/
+"""
+
BUILD_TEMPLATE = """\
#!/bin/bash -eu
# Copyright %(year)d Google LLC
@@ -76,3 +84,30 @@ BUILD_TEMPLATE = """\
# /path/to/name_of_fuzzer.cc -o $OUT/name_of_fuzzer \\
# $LIB_FUZZING_ENGINE /path/to/library.a
"""
+
+EXTERNAL_BUILD_TEMPLATE = """\
+#!/bin/bash -eu
+
+# build project
+# e.g.
+# ./autogen.sh
+# ./configure
+# make -j$(nproc) all
+
+# build fuzzers
+# e.g.
+# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
+# /path/to/name_of_fuzzer.cc -o $OUT/name_of_fuzzer \\
+# $LIB_FUZZING_ENGINE /path/to/library.a
+"""
+
+TEMPLATES = {
+ 'build.sh': BUILD_TEMPLATE,
+ 'Dockerfile': DOCKER_TEMPLATE,
+ 'project.yaml': PROJECT_YAML_TEMPLATE
+}
+
+EXTERNAL_TEMPLATES = {
+ 'build.sh': EXTERNAL_BUILD_TEMPLATE,
+ 'Dockerfile': EXTERNAL_DOCKER_TEMPLATE
+}
diff --git a/infra/test_helpers.py b/infra/test_helpers.py
deleted file mode 100644
index be0b1b811..000000000
--- a/infra/test_helpers.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Contains convenient helpers for writing tests."""
-
-import contextlib
-import os
-import shutil
-import tempfile
-from unittest import mock
-
-
-def patch_environ(testcase_obj, env=None):
- """Patch environment."""
- if env is None:
- env = {}
-
- patcher = mock.patch.dict(os.environ, env)
- testcase_obj.addCleanup(patcher.stop)
- patcher.start()
-
-
-@contextlib.contextmanager
-def temp_dir_copy(directory):
- """Context manager that yields a temporary copy of |directory|."""
- with tempfile.TemporaryDirectory() as temp_dir:
- temp_copy_path = os.path.join(temp_dir, os.path.basename(directory))
- shutil.copytree(directory, temp_copy_path)
- yield temp_copy_path
diff --git a/infra/test_repos.py b/infra/test_repos.py
index fb12fbec5..389876864 100644
--- a/infra/test_repos.py
+++ b/infra/test_repos.py
@@ -27,7 +27,7 @@ import os
ExampleRepo = collections.namedtuple('ExampleRepo', [
'project_name', 'oss_repo_name', 'git_repo_name', 'image_location',
'git_url', 'new_commit', 'old_commit', 'intro_commit', 'fuzz_target',
- 'test_case_path'
+ 'testcase_path'
])
TEST_DIR_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)),
@@ -36,6 +36,8 @@ TEST_DIR_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)),
# WARNING: Tests are dependent upon the following repos existing and the
# specified commits existing.
# TODO(metzman): Fix this problem.
+# TODO(metzman): The testcases got deleted here because the test that used them
+# was skipped. Probably worth deleting the test.
TEST_REPOS = [
ExampleRepo(project_name='curl',
oss_repo_name='curl',
@@ -46,7 +48,7 @@ TEST_REPOS = [
new_commit='dda418266c99ceab368d723facb52069cbb9c8d5',
intro_commit='df26f5f9c36e19cd503c0e462e9f72ad37b84c82',
fuzz_target='curl_fuzzer_ftp',
- test_case_path=os.path.join(TEST_DIR_PATH, 'curl_test_data')),
+ testcase_path=os.path.join(TEST_DIR_PATH, 'curl_test_data')),
ExampleRepo(project_name='libarchive',
oss_repo_name='libarchive',
git_repo_name='libarchive',
@@ -56,8 +58,8 @@ TEST_REPOS = [
new_commit='458e49358f17ec58d65ab1c45cf299baaf3c98d1',
intro_commit='840266712006de5e737f8052db920dfea2be4260',
fuzz_target='libarchive_fuzzer',
- test_case_path=os.path.join(TEST_DIR_PATH,
- 'libarchive_test_data')),
+ testcase_path=os.path.join(TEST_DIR_PATH,
+ 'libarchive_test_data')),
ExampleRepo(project_name='gonids',
oss_repo_name='gonids',
git_repo_name='gonids',
@@ -67,7 +69,7 @@ TEST_REPOS = [
new_commit='',
intro_commit='',
fuzz_target='',
- test_case_path='')
+ testcase_path='')
]
INVALID_REPO = ExampleRepo(project_name='notaproj',
@@ -79,4 +81,4 @@ INVALID_REPO = ExampleRepo(project_name='notaproj',
new_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
intro_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
fuzz_target='NONEFUZZER',
- test_case_path='not/a/path')
+ testcase_path='not/a/path')
diff --git a/infra/triage-party/README.md b/infra/triage-party/README.md
new file mode 100644
index 000000000..2ab424148
--- /dev/null
+++ b/infra/triage-party/README.md
@@ -0,0 +1,13 @@
+# triage-party
+
+This folder contains the triage party config and deploy script for the oss-fuzz instance of [triage-party](https://github.com/google/triage-party).
+
+To make changes to triage party, you'll need to:
+1. Make changes to the [config](oss-fuzz.yaml)
+1. Deploy a new revision to Cloud Run via [deploy.sh](deploy.sh):
+
+```
+GITHUB_TOKEN_PATH=[path to file containing github token] DB_PASS=[CloudSQL database password] ./deploy.sh
+```
+
+Visit https://triage-party-pahypmb2lq-uc.a.run.app to join the party!
diff --git a/infra/triage-party/deploy.sh b/infra/triage-party/deploy.sh
new file mode 100755
index 000000000..cabdf43ab
--- /dev/null
+++ b/infra/triage-party/deploy.sh
@@ -0,0 +1,42 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+
+
+export PROJECT=oss-fuzz
+export IMAGE=gcr.io/oss-fuzz-base/triage-party
+export SERVICE_NAME=triage-party
+export CONFIG_FILE=config/examples/oss-fuzz.yaml
+
+
+# Copy triage-party into tmp dir, and copy config into correct spot
+readonly clean_repo=$(mktemp -d)
+git clone --depth 1 https://github.com/google/triage-party.git "${clean_repo}"
+cp ./oss-fuzz.yaml "${clean_repo}"/${CONFIG_FILE}
+cd "${clean_repo}"
+
+
+docker build -t "${IMAGE}" --build-arg "CFG=./${CONFIG_FILE}" .
+docker push "${IMAGE}" || exit 2
+
+readonly token="$(cat "${GITHUB_TOKEN_PATH}")"
+gcloud beta run deploy "${SERVICE_NAME}" \
+ --project "${PROJECT}" \
+ --image "${IMAGE}" \
+ --set-env-vars="GITHUB_TOKEN=${token},PERSIST_BACKEND=cloudsql,PERSIST_PATH=tp:${DB_PASS}@tcp(oss-fuzz/us-central1/triage-party)/tp" \
+ --allow-unauthenticated \
+ --region us-central1 \
+ --memory 384Mi \
+ --platform managed
diff --git a/infra/triage-party/oss-fuzz.yaml b/infra/triage-party/oss-fuzz.yaml
new file mode 100644
index 000000000..107e53e40
--- /dev/null
+++ b/infra/triage-party/oss-fuzz.yaml
@@ -0,0 +1,172 @@
+settings:
+ name: oss-fuzz
+ repos:
+ - https://github.com/google/oss-fuzz
+
+collections:
+ - id: Fuzzing Issues
+ name: Fuzzing Issues
+ dedup: true
+ description: >
+ Status of issues across oss-fuzz repos
+ rules:
+ # People who need a response
+ - fuzz-issue-updated-support
+ # fuzzing issues
+ - fuzz-bugs
+ - fuzz-priority
+ # Issues needing reprioritization
+ - fuzz-many-reactions
+ - fuzz-many-commenters
+ - fuzz-issue-zombies
+ # People with questions
+ - fuzz-issue-has-question
+ - id: PRs - fuzzing
+ name: OSS Fuzz PRs
+ description: >
+ Status of PRs in OSS-Fuzz
+ rules:
+ - prs-fuzz
+ - fuzz-pr-approved-stale
+ - fuzz-pr-unapproved-stale
+
+
+rules:
+ ### Pull requests
+
+ prs-fuzz:
+ name: "OSS Fuzz PRs"
+ type: pull_request
+ resolution: "Review requests or mark them as do-not-merge/work-in-progress"
+ filters:
+ - title: "!.*(WIP|wip).*"
+ - tag: "!(changes-requested|draft|approved)"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ # PR's needing closure
+ fuzz-pr-approved-stale:
+ name: "Pull requests: Approved and getting old"
+ resolution: "Merge PR"
+ type: pull_request
+ filters:
+ - label: "approved"
+ - updated: +2d
+ - responded: +1d
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ fuzz-pr-unapproved-stale:
+ name: "Pull Requests: Stale"
+ resolution: "Add comment and/or close PR"
+ type: pull_request
+ filters:
+ - created: +3d
+ - updated: +2d
+ - responded: +1d
+ - tag: "!draft"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ pr-approved-stale:
+ name: "Pull requests: Approved and getting old"
+ resolution: "Merge PR"
+ type: pull_request
+ filters:
+ - label: "approved"
+ - updated: +5d
+ - responded: +2d
+
+ pr-unapproved-stale:
+ name: "Pull Requests: Stale"
+ type: pull_request
+ resolution: "Add comment and/or close PR"
+ filters:
+ - created: +20d
+ - updated: +5d
+ - responded: +2d
+
+
+ ### Fuzzing Issues
+
+ fuzz-bugs:
+ name: "Fuzzing bugs that have not been commented on for 6 months"
+ resolution: "comment a status update"
+ type: issue
+ filters:
+ - label: "bug"
+ - responded: +180d
+ - tag: "!member-last"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+
+ fuzz-priority:
+ name: "Fuzzing priority issues that have not been commented on for 6 months"
+ resolution: "comment a status update"
+ type: issue
+ filters:
+ - label: "priority"
+ - responded: +180d
+ - tag: "!member-last"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+
+ fuzz-many-reactions:
+ name: "many reactions, low priority"
+ resolution: "Upgrade to priority"
+ filters:
+ - reactions: ">3"
+ - reactions-per-month: ">0.75"
+ - label: "!priority"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ fuzz-many-commenters:
+ name: "many commenters, low priority"
+ resolution: "Upgrade to priority"
+ filters:
+ - commenters: ">2"
+ - commenters-per-month: ">1.9"
+ - created: "+30d"
+ - label: "!priority"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ fuzz-issue-zombies:
+ name: "Screaming into the void"
+ resolution: "Reopen, or ask folks to open a new issue"
+ type: issue
+ filters:
+ - state: closed
+ - updated: -7d
+ - tag: recv
+ - comments-while-closed: ">1"
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ # People with questions
+ fuzz-issue-has-question:
+ name: "Overdue answers for a question"
+ resolution: "Add a comment"
+ type: issue
+ filters:
+ - tag: recv-q
+ - tag: "!member-last"
+ - tag: "!contributor-last"
+ - responded: +6d
+ repos:
+ - https://github.com/google/oss-fuzz
+
+ fuzz-issue-updated-support:
+ name: "Open support requests"
+ resolution: "Add a comment"
+ type: issue
+ filters:
+ - tag: recv
+ - tag: "!member-last"
+ - tag: "!contributor-last"
+ - responded: +6d
+ repos:
+ - https://github.com/google/oss-fuzz
diff --git a/infra/utils.py b/infra/utils.py
index fe5dd8730..f0b58a4da 100644
--- a/infra/utils.py
+++ b/infra/utils.py
@@ -17,6 +17,7 @@ import logging
import os
import posixpath
import re
+import shlex
import stat
import subprocess
import sys
@@ -25,7 +26,8 @@ import helper
ALLOWED_FUZZ_TARGET_EXTENSIONS = ['', '.exe']
FUZZ_TARGET_SEARCH_STRING = 'LLVMFuzzerTestOneInput'
-VALID_TARGET_NAME = re.compile(r'^[a-zA-Z0-9_-]+$')
+VALID_TARGET_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_-]+$')
+BLOCKLISTED_TARGET_NAME_REGEX = re.compile(r'^(jazzer_driver.*)$')
# Location of google cloud storage for latest OSS-Fuzz builds.
GCS_BASE_URL = 'https://storage.googleapis.com/'
@@ -38,16 +40,25 @@ def chdir_to_root():
os.chdir(helper.OSS_FUZZ_DIR)
-def execute(command, location=None, check_result=False):
- """ Runs a shell command in the specified directory location.
+def command_to_string(command):
+ """Returns the stringified version of |command|, a list representing a binary
+ to run and the arguments to pass to it, or a string representing a binary to
+ run."""
+ if isinstance(command, str):
+ return command
+ return shlex.join(command)
+
+
+def execute(command, env=None, location=None, check_result=False):
+ """Runs a shell command in the specified directory location.
Args:
command: The command as a list to be run.
- location: The directory the command is run in.
- check_result: Should an exception be thrown on failed command.
+ env: (optional) an environment to pass to Popen to run the command in.
+ location (optional): The directory to run command in.
+ check_result (optional): Should an exception be thrown on failure.
Returns:
- stdout, stderr, error code.
+ stdout, stderr, returncode.
Raises:
RuntimeError: running a command resulted in an error.
@@ -58,24 +69,27 @@ def execute(command, location=None, check_result=False):
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
- cwd=location)
+ cwd=location,
+ env=env)
out, err = process.communicate()
out = out.decode('utf-8', errors='ignore')
err = err.decode('utf-8', errors='ignore')
+
+ command_str = command_to_string(command)
if err:
- logging.debug('Stderr of command \'%s\' is %s.', ' '.join(command), err)
+ logging.debug('Stderr of command "%s" is: %s.', command_str, err)
if check_result and process.returncode:
- raise RuntimeError(
- 'Executing command \'{0}\' failed with error: {1}.'.format(
- ' '.join(command), err))
+ raise RuntimeError('Executing command "{0}" failed with error: {1}.'.format(
+ command_str, err))
return out, err, process.returncode
-def get_fuzz_targets(path):
- """Get list of fuzz targets in a directory.
+def get_fuzz_targets(path, top_level_only=False):
+ """Gets fuzz targets in a directory.
Args:
path: A path to search for fuzz targets in.
+ top_level_only: If True, only search |path|, do not recurse into subdirs.
Returns:
A list of paths to fuzzers or an empty list if None.
@@ -84,6 +98,9 @@ def get_fuzz_targets(path):
return []
fuzz_target_paths = []
for root, _, fuzzers in os.walk(path):
+ if top_level_only and path != root:
+ continue
+
for fuzzer in fuzzers:
file_path = os.path.join(root, fuzzer)
if is_fuzz_target_local(file_path):
@@ -112,11 +129,17 @@ def is_fuzz_target_local(file_path):
Copied from clusterfuzz src/python/bot/fuzzers/utils.py
with slight modifications.
"""
+ # pylint: disable=too-many-return-statements
filename, file_extension = os.path.splitext(os.path.basename(file_path))
- if not VALID_TARGET_NAME.match(filename):
+ if not VALID_TARGET_NAME_REGEX.match(filename):
# Check fuzz target has a valid name (without any special chars).
return False
+ if BLOCKLISTED_TARGET_NAME_REGEX.match(filename):
+ # Check fuzz target an explicitly disallowed name (e.g. binaries used for
+ # jazzer-based targets).
+ return False
+
if file_extension not in ALLOWED_FUZZ_TARGET_EXTENSIONS:
# Ignore files with disallowed extensions (to prevent opening e.g. .zips).
return False
@@ -135,7 +158,7 @@ def is_fuzz_target_local(file_path):
def binary_print(string):
- """Print that can print a binary string."""
+ """Prints string. Can print a binary string."""
if isinstance(string, bytes):
string += b'\n'
else:
diff --git a/infra/utils_test.py b/infra/utils_test.py
index aa6ec7ba7..9b7fbc903 100644
--- a/infra/utils_test.py
+++ b/infra/utils_test.py
@@ -24,7 +24,7 @@ import helper
EXAMPLE_PROJECT = 'example'
TEST_OUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'cifuzz', 'test_data', 'out')
+ 'cifuzz', 'test_data', 'build-out')
class IsFuzzTargetLocalTest(unittest.TestCase):
@@ -118,17 +118,33 @@ class BinaryPrintTest(unittest.TestCase):
def test_string(self): # pylint: disable=no-self-use
"""Tests that utils.binary_print can print a regular string."""
# Should execute without raising any exceptions.
- with mock.patch('sys.stdout.buffer.write') as mocked_write:
+ with mock.patch('sys.stdout.buffer.write') as mock_write:
utils.binary_print('hello')
- mocked_write.assert_called_with('hello\n')
+ mock_write.assert_called_with('hello\n')
@unittest.skip('Causes spurious failures because of side-effects.')
def test_binary_string(self): # pylint: disable=no-self-use
"""Tests that utils.binary_print can print a binary string."""
# Should execute without raising any exceptions.
- with mock.patch('sys.stdout.buffer.write') as mocked_write:
+ with mock.patch('sys.stdout.buffer.write') as mock_write:
utils.binary_print(b'hello')
- mocked_write.assert_called_with(b'hello\n')
+ mock_write.assert_called_with(b'hello\n')
+
+
+class CommandToStringTest(unittest.TestCase):
+ """Tests for command_to_string."""
+
+ def test_string(self):
+ """Tests that command_to_string returns the argument passed to it when it is
+ passed a string."""
+ command = 'command'
+ self.assertEqual(utils.command_to_string(command), command)
+
+ def test_list(self):
+ """Tests that command_to_string returns the correct string when it is passed
+ a list."""
+ command = ['command', 'arg1', 'arg2']
+ self.assertEqual(utils.command_to_string(command), 'command arg1 arg2')
if __name__ == '__main__':