aboutsummaryrefslogtreecommitdiff
path: root/infra/build/functions
diff options
context:
space:
mode:
Diffstat (limited to 'infra/build/functions')
-rw-r--r--infra/build/functions/base_images.py62
-rwxr-xr-x[-rw-r--r--]infra/build/functions/build_and_run_coverage.py194
-rw-r--r--infra/build/functions/build_and_run_coverage_test.py78
-rw-r--r--infra/build/functions/build_lib.py102
-rwxr-xr-x[-rw-r--r--]infra/build/functions/build_project.py725
-rw-r--r--infra/build/functions/build_project_test.py77
-rwxr-xr-xinfra/build/functions/deploy.sh12
-rw-r--r--infra/build/functions/expected_build_steps.json330
-rw-r--r--infra/build/functions/main.py5
-rw-r--r--infra/build/functions/project_sync.py12
-rw-r--r--infra/build/functions/project_sync_test.py5
-rw-r--r--infra/build/functions/request_build.py63
-rw-r--r--infra/build/functions/request_build_test.py61
-rw-r--r--infra/build/functions/request_coverage_build.py16
-rw-r--r--infra/build/functions/request_coverage_build_test.py90
-rw-r--r--infra/build/functions/test_data/expected_build_steps.json628
-rw-r--r--infra/build/functions/test_data/expected_coverage_build_steps.json (renamed from infra/build/functions/expected_coverage_build_steps.json)34
-rw-r--r--infra/build/functions/test_utils.py48
-rw-r--r--infra/build/functions/update_build_status.py24
-rw-r--r--infra/build/functions/update_build_status_test.py51
20 files changed, 1551 insertions, 1066 deletions
diff --git a/infra/build/functions/base_images.py b/infra/build/functions/base_images.py
index 8c9b2d85f..593323fc3 100644
--- a/infra/build/functions/base_images.py
+++ b/infra/build/functions/base_images.py
@@ -15,7 +15,6 @@
################################################################################
"""Cloud function to build base images on Google Cloud Builder."""
-import datetime
import logging
import google.auth
@@ -25,14 +24,17 @@ BASE_IMAGES = [
'base-image',
'base-clang',
'base-builder',
+ 'base-builder-go',
+ 'base-builder-jvm',
+ 'base-builder-python',
+ 'base-builder-rust',
+ 'base-builder-swift',
'base-runner',
'base-runner-debug',
]
BASE_PROJECT = 'oss-fuzz-base'
TAG_PREFIX = f'gcr.io/{BASE_PROJECT}/'
-
-BASE_SANITIZER_LIBS_IMAGE = TAG_PREFIX + 'base-sanitizer-libs-builder'
-MSAN_LIBS_IMAGE = TAG_PREFIX + 'msan-libs-builder'
+MAJOR_VERSION = 'v1'
def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
@@ -46,11 +48,14 @@ def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
}]
for base_image in images:
+ image = tag_prefix + base_image
steps.append({
'args': [
'build',
'-t',
- tag_prefix + base_image,
+ image,
+ '-t',
+ f'{image}:{MAJOR_VERSION}',
'.',
],
'dir': 'oss-fuzz/infra/base-images/' + base_image,
@@ -62,9 +67,8 @@ def _get_base_image_steps(images, tag_prefix=TAG_PREFIX):
def get_logs_url(build_id, project_id='oss-fuzz-base'):
"""Returns url that displays the build logs."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, project_id)
+ return ('https://console.developers.google.com/logs/viewer?'
+ f'resource=build%2Fbuild_id%2F{build_id}&project={project_id}')
# pylint: disable=no-member
@@ -77,7 +81,7 @@ def run_build(steps, images):
'options': {
'machineType': 'N1_HIGHCPU_32'
},
- 'images': images
+ 'images': images + [f'{image}:{MAJOR_VERSION}' for image in images]
}
cloudbuild = build('cloudbuild',
'v1',
@@ -99,43 +103,3 @@ def base_builder(event, context):
images = [tag_prefix + base_image for base_image in BASE_IMAGES]
run_build(steps, images)
-
-
-def _get_msan_steps(image):
- """Get build steps for msan-libs-builder."""
- timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M')
- upload_name = 'msan-libs-' + timestamp + '.zip'
-
- steps = _get_base_image_steps([
- 'base-sanitizer-libs-builder',
- 'msan-libs-builder',
- ])
- steps.extend([{
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'cd /msan && zip -r /workspace/libs.zip .',
- ],
- }, {
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/libs.zip',
- 'gs://oss-fuzz-msan-libs/' + upload_name,
- ],
- }])
- return steps
-
-
-def base_msan_builder(event, context):
- """Cloud function to build base images."""
- del event, context
- steps = _get_msan_steps(MSAN_LIBS_IMAGE)
- images = [
- BASE_SANITIZER_LIBS_IMAGE,
- MSAN_LIBS_IMAGE,
- ]
-
- run_build(steps, images)
diff --git a/infra/build/functions/build_and_run_coverage.py b/infra/build/functions/build_and_run_coverage.py
index cc2de5a32..1195776d9 100644..100755
--- a/infra/build/functions/build_and_run_coverage.py
+++ b/infra/build/functions/build_and_run_coverage.py
@@ -13,11 +13,11 @@
# limitations under the License.
#
################################################################################
-#!/usr/bin/python2
+#!/usr/bin/env python3
"""Starts and runs coverage build on Google Cloud Builder.
-Usage: build_and_run_coverage.py <project_dir>
+
+Usage: build_and_run_coverage.py <project>.
"""
-import datetime
import json
import logging
import os
@@ -27,119 +27,105 @@ import build_lib
import build_project
SANITIZER = 'coverage'
-CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
+FUZZING_ENGINE = 'libfuzzer'
+ARCHITECTURE = 'x86_64'
+
PLATFORM = 'linux'
-COVERAGE_BUILD_TAG = 'coverage'
+COVERAGE_BUILD_TYPE = 'coverage'
# Where code coverage reports need to be uploaded to.
COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
-# Link to the code coverage report in HTML format.
-HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
- '/{project}/reports/{date}/{platform}/index.html')
-
# This is needed for ClusterFuzz to pick up the most recent reports data.
-LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
- '/latest_report_info/{project}.json')
-LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
-# Link where to upload code coverage report files to.
-UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
+LATEST_REPORT_INFO_CONTENT_TYPE = 'application/json'
# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'rust']
+LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'c++', 'go', 'jvm', 'rust', 'swift']
+
+
+class Bucket: # pylint: disable=too-few-public-methods
+ """Class representing the coverage GCS bucket."""
+ def __init__(self, project, date, platform, testing):
+ self.coverage_bucket_name = 'oss-fuzz-coverage'
+ if testing:
+ self.coverage_bucket_name += '-testing'
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
- sys.exit(1)
+ self.date = date
+ self.project = project
+ self.html_report_url = (
+ f'{build_lib.GCS_URL_BASENAME}{self.coverage_bucket_name}/{project}'
+ f'/reports/{date}/{platform}/index.html')
+ self.latest_report_info_url = (f'/{COVERAGE_BUCKET_NAME}'
+ f'/latest_report_info/{project}.json')
+ def get_upload_url(self, upload_type):
+ """Returns an upload url for |upload_type|."""
+ return (f'gs://{self.coverage_bucket_name}/{self.project}'
+ f'/{upload_type}/{self.date}')
-# pylint: disable=too-many-locals
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
+
+def get_build_steps( # pylint: disable=too-many-locals, too-many-arguments
+ project_name, project_yaml_contents, dockerfile_lines, image_project,
+ base_images_project, config):
"""Returns build steps for project."""
- project_yaml = build_project.load_project_yaml(project_name,
- project_yaml_file,
- image_project)
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
+ project = build_project.Project(project_name, project_yaml_contents,
+ dockerfile_lines, image_project)
+ if project.disabled:
+ logging.info('Project "%s" is disabled.', project.name)
return []
- if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
+ if project.fuzzing_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
logging.info(
'Project "%s" is written in "%s", coverage is not supported yet.',
- project_name, project_yaml['language'])
+ project.name, project.fuzzing_language)
return []
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- report_date = datetime.datetime.now().strftime('%Y%m%d')
-
- build_steps = build_lib.project_image_steps(name, image, language)
+ report_date = build_project.get_datetime_now().strftime('%Y%m%d')
+ bucket = Bucket(project.name, report_date, PLATFORM, config.testing)
- env = CONFIGURATION[:]
- out = '/workspace/out/' + SANITIZER
- env.append('OUT=' + out)
- env.append('FUZZING_LANGUAGE=' + language)
+ build_steps = build_lib.project_image_steps(
+ project.name,
+ project.image,
+ project.fuzzing_language,
+ branch=config.branch,
+ test_image_suffix=config.test_image_suffix)
- workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n' + '*' * 80).format(name=name)
-
- # Compilation step.
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to make sure there are non instrumented binaries.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from the
- # Dockerfile). Container Builder overrides our workdir so we need
- # to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
- 'compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- download_corpora_steps = build_lib.download_corpora_steps(project_name)
+ build = build_project.Build('libfuzzer', 'coverage', 'x86_64')
+ env = build_project.get_env(project.fuzzing_language, build)
+ build_steps.append(
+ build_project.get_compile_step(project, build, env, config.parallel))
+ download_corpora_steps = build_lib.download_corpora_steps(
+ project.name, testing=config.testing)
if not download_corpora_steps:
- logging.info('Skipping code coverage build for %s.', project_name)
+ logging.info('Skipping code coverage build for %s.', project.name)
return []
build_steps.extend(download_corpora_steps)
failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
+ f'python infra/helper.py build_image {project.name}\n'
'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n'
- 'python infra/helper.py coverage {name}\n' +
- '*' * 80).format(name=name)
+ f'{project.name}\n'
+ f'python infra/helper.py coverage {project.name}\n' + '*' * 80)
# Unpack the corpus and run coverage script.
coverage_env = env + [
'HTTP_PORT=',
- 'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
+ f'COVERAGE_EXTRA_ARGS={project.coverage_extra_args.strip()}',
]
- if 'dataflow' in project_yaml['fuzzing_engines']:
+ if 'dataflow' in project.fuzzing_engines:
coverage_env.append('FULL_SUMMARY_PER_TARGET=1')
build_steps.append({
- 'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env': coverage_env,
+ 'name':
+ build_project.get_runner_image_name(base_images_project,
+ config.test_image_suffix),
+ 'env':
+ coverage_env,
'args': [
'bash', '-c',
('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
@@ -158,9 +144,7 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
})
# Upload the report.
- upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='reports',
- date=report_date)
+ upload_report_url = bucket.get_upload_url('reports')
# Delete the existing report as gsutil cannot overwrite it in a useful way due
# to the lack of `-T` option (it creates a subdir in the destination dir).
@@ -172,15 +156,14 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'report'),
+ os.path.join(build.out, 'report'),
upload_report_url,
],
})
# Upload the fuzzer stats. Delete the old ones just in case.
- upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='fuzzer_stats',
- date=report_date)
+ upload_fuzzer_stats_url = bucket.get_upload_url('fuzzer_stats')
+
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
build_steps.append({
'name':
@@ -189,15 +172,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'fuzzer_stats'),
+ os.path.join(build.out, 'fuzzer_stats'),
upload_fuzzer_stats_url,
],
})
# Upload the fuzzer logs. Delete the old ones just in case
- upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='logs',
- date=report_date)
+ upload_fuzzer_logs_url = bucket.get_upload_url('logs')
build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
build_steps.append({
'name':
@@ -206,15 +187,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
'-m',
'cp',
'-r',
- os.path.join(out, 'logs'),
+ os.path.join(build.out, 'logs'),
upload_fuzzer_logs_url,
],
})
# Upload srcmap.
- srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='srcmap',
- date=report_date)
+ srcmap_upload_url = bucket.get_upload_url('srcmap')
srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
build_steps.append({
'name': 'gcr.io/cloud-builders/gsutil',
@@ -227,15 +206,13 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
# Update the latest report information file for ClusterFuzz.
latest_report_info_url = build_lib.get_signed_url(
- LATEST_REPORT_INFO_URL.format(project=project_name),
+ bucket.latest_report_info_url,
content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
latest_report_info_body = json.dumps({
'fuzzer_stats_dir':
upload_fuzzer_stats_url,
'html_report_url':
- HTML_REPORT_URL_FORMAT.format(project=project_name,
- date=report_date,
- platform=PLATFORM),
+ bucket.html_report_url,
'report_date':
report_date,
'report_summary_path':
@@ -251,25 +228,10 @@ def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
def main():
"""Build and run coverage for projects."""
- if len(sys.argv) != 2:
- usage()
-
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- project_name = os.path.basename(project_dir)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
-
- with open(dockerfile_path) as docker_file:
- dockerfile_lines = docker_file.readlines()
-
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
-
- build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
+ return build_project.build_script_main(
+ 'Generates coverage report for project.', get_build_steps,
+ COVERAGE_BUILD_TYPE)
-if __name__ == "__main__":
- main()
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/infra/build/functions/build_and_run_coverage_test.py b/infra/build/functions/build_and_run_coverage_test.py
new file mode 100644
index 000000000..83ea39ecd
--- /dev/null
+++ b/infra/build/functions/build_and_run_coverage_test.py
@@ -0,0 +1,78 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Unit tests for build_and_run_coverage."""
+import json
+import os
+import sys
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+FUNCTIONS_DIR = os.path.dirname(__file__)
+sys.path.append(FUNCTIONS_DIR)
+# pylint: disable=wrong-import-position
+
+import build_and_run_coverage
+import build_project
+import test_utils
+
+# pylint: disable=no-member
+
+
+class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
+ """Unit tests for sync."""
+
+ def setUp(self):
+ self.maxDiff = None # pylint: disable=invalid-name
+ self.setUpPyfakefs()
+
+ @mock.patch('build_lib.get_signed_url', return_value='test_url')
+ @mock.patch('build_lib.download_corpora_steps',
+ return_value=[{
+ 'url': 'test_download'
+ }])
+ @mock.patch('build_project.get_datetime_now',
+ return_value=test_utils.FAKE_DATETIME)
+ def test_get_coverage_build_steps(self, mock_url, mock_corpora_steps,
+ mock_get_datetime_now):
+ """Test for get_build_steps."""
+ del mock_url, mock_corpora_steps, mock_get_datetime_now
+ project_yaml_contents = ('language: c++\n'
+ 'sanitizers:\n'
+ ' - address\n'
+ 'architectures:\n'
+ ' - x86_64\n')
+ self.fs.create_dir(test_utils.PROJECT_DIR)
+ test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
+
+ expected_build_steps_file_path = test_utils.get_test_data_file_path(
+ 'expected_coverage_build_steps.json')
+ self.fs.add_real_file(expected_build_steps_file_path)
+ with open(expected_build_steps_file_path) as expected_build_steps_file:
+ expected_coverage_build_steps = json.load(expected_build_steps_file)
+
+ config = build_project.Config(False, False, None, False)
+ project_yaml, dockerfile = build_project.get_project_data(
+ test_utils.PROJECT)
+ build_steps = build_and_run_coverage.get_build_steps(
+ test_utils.PROJECT, project_yaml, dockerfile, test_utils.IMAGE_PROJECT,
+ test_utils.BASE_IMAGES_PROJECT, config)
+ self.assertEqual(build_steps, expected_coverage_build_steps)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
index 007579ef9..292ef682f 100644
--- a/infra/build/functions/build_lib.py
+++ b/infra/build/functions/build_lib.py
@@ -83,11 +83,23 @@ def get_targets_list_url(bucket, project, sanitizer):
return url
-def _get_targets_list(project_name):
+def get_upload_bucket(engine, architecture, testing):
+ """Returns the upload bucket for |engine| and architecture. Returns the
+ testing bucket if |testing|."""
+ bucket = ENGINE_INFO[engine].upload_bucket
+ if architecture != 'x86_64':
+ bucket += '-' + architecture
+ if testing:
+ bucket += '-testing'
+ return bucket
+
+
+def _get_targets_list(project_name, testing):
"""Returns target list."""
- # libFuzzer ASan is the default configuration, get list of targets from it.
- url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
- project_name, 'address')
+ # libFuzzer ASan 'x86_84' is the default configuration, get list of targets
+ # from it.
+ bucket = get_upload_bucket('libfuzzer', 'x86_64', testing)
+ url = get_targets_list_url(bucket, project_name, 'address')
url = urlparse.urljoin(GCS_URL_BASENAME, url)
response = requests.get(url)
@@ -104,7 +116,7 @@ def _get_targets_list(project_name):
def get_signed_url(path, method='PUT', content_type=''):
"""Returns signed url."""
timestamp = int(time.time() + BUILD_TIMEOUT)
- blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
+ blob = f'{method}\n\n{content_type}\n{timestamp}\n{path}'
service_account_path = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
if service_account_path:
@@ -119,7 +131,7 @@ def get_signed_url(path, method='PUT', content_type=''):
credentials=credentials,
cache_discovery=False)
client_id = project + '@appspot.gserviceaccount.com'
- service_account = 'projects/-/serviceAccounts/{0}'.format(client_id)
+ service_account = f'projects/-/serviceAccounts/{client_id}'
response = iam.projects().serviceAccounts().signBlob(
name=service_account,
body={
@@ -133,14 +145,13 @@ def get_signed_url(path, method='PUT', content_type=''):
'Expires': timestamp,
'Signature': signature,
}
- return ('https://storage.googleapis.com{0}?'.format(path) +
- urlparse.urlencode(values))
+ return f'https://storage.googleapis.com{path}?{urlparse.urlencode(values)}'
-def download_corpora_steps(project_name):
+def download_corpora_steps(project_name, testing):
"""Returns GCB steps for downloading corpora backups for the given project.
"""
- fuzz_targets = _get_targets_list(project_name)
+ fuzz_targets = _get_targets_list(project_name, testing)
if not fuzz_targets:
sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
return None
@@ -206,15 +217,72 @@ def gsutil_rm_rf_step(url):
return step
-def project_image_steps(name, image, language):
+def get_pull_test_images_steps(test_image_suffix):
+ """Returns steps to pull testing versions of base-images and tag them so that
+ they are used in builds."""
+ images = [
+ 'gcr.io/oss-fuzz-base/base-builder',
+ 'gcr.io/oss-fuzz-base/base-builder-swift',
+ 'gcr.io/oss-fuzz-base/base-builder-jvm',
+ 'gcr.io/oss-fuzz-base/base-builder-go',
+ 'gcr.io/oss-fuzz-base/base-builder-python',
+ 'gcr.io/oss-fuzz-base/base-builder-rust',
+ ]
+ steps = []
+ for image in images:
+ test_image = image + '-' + test_image_suffix
+ steps.append({
+ 'name': 'gcr.io/cloud-builders/docker',
+ 'args': [
+ 'pull',
+ test_image,
+ ],
+ 'waitFor': '-' # Start this immediately, don't wait for previous step.
+ })
+
+ # This step is hacky but gives us great flexibility. OSS-Fuzz has hardcoded
+ # references to gcr.io/oss-fuzz-base/base-builder (in dockerfiles, for
+ # example) and gcr.io/oss-fuzz-base-runner (in this build code). But the
+ # testing versions of those images are called e.g.
+ # gcr.io/oss-fuzz-base/base-builder-testing and
+ # gcr.io/oss-fuzz-base/base-runner-testing. How can we get the build to use
+ # the testing images instead of the real ones? By doing this step: tagging
+ # the test image with the non-test version, so that the test version is used
+ # instead of pulling the real one.
+ steps.append({
+ 'name': 'gcr.io/cloud-builders/docker',
+ 'args': ['tag', test_image, image],
+ })
+ return steps
+
+
+def get_srcmap_step_id():
+ """Returns the id for the srcmap step."""
+ return 'srcmap'
+
+
+def project_image_steps(name,
+ image,
+ language,
+ branch=None,
+ test_image_suffix=None):
"""Returns GCB steps to build OSS-Fuzz project image."""
- steps = [{
+ clone_step = {
'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
+ 'clone', 'https://github.com/google/oss-fuzz.git', '--depth', '1'
],
'name': 'gcr.io/cloud-builders/git',
- }, {
+ }
+ if branch:
+ # Do this to support testing other branches.
+ clone_step['args'].extend(['--branch', branch])
+
+ steps = [clone_step]
+ if test_image_suffix:
+ steps.extend(get_pull_test_images_steps(test_image_suffix))
+
+ srcmap_step_id = get_srcmap_step_id()
+ steps += [{
'name': 'gcr.io/cloud-builders/docker',
'args': [
'build',
@@ -224,8 +292,7 @@ def project_image_steps(name, image, language):
],
'dir': 'oss-fuzz/projects/' + name,
}, {
- 'name':
- image,
+ 'name': image,
'args': [
'bash', '-c',
'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
@@ -234,6 +301,7 @@ def project_image_steps(name, image, language):
'OSSFUZZ_REVISION=$REVISION_ID',
'FUZZING_LANGUAGE=%s' % language,
],
+ 'id': srcmap_step_id
}]
return steps
diff --git a/infra/build/functions/build_project.py b/infra/build/functions/build_project.py
index 9115c85fd..bdc7985e1 100644..100755
--- a/infra/build/functions/build_project.py
+++ b/infra/build/functions/build_project.py
@@ -13,7 +13,7 @@
# limitations under the License.
#
################################################################################
-#!/usr/bin/python2
+#!/usr/bin/env python3
"""Starts project build on Google Cloud Builder.
Usage: build_project.py <project_dir>
@@ -21,37 +21,27 @@ Usage: build_project.py <project_dir>
from __future__ import print_function
+import argparse
+import collections
import datetime
import json
import logging
import os
+import posixpath
import re
import sys
+from googleapiclient.discovery import build as cloud_build
+import oauth2client.client
import six
import yaml
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
import build_lib
-FUZZING_BUILD_TAG = 'fuzzing'
+FUZZING_BUILD_TYPE = 'fuzzing'
GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
-CONFIGURATIONS = {
- 'sanitizer-address': ['SANITIZER=address'],
- 'sanitizer-dataflow': ['SANITIZER=dataflow'],
- 'sanitizer-memory': ['SANITIZER=memory'],
- 'sanitizer-undefined': ['SANITIZER=undefined'],
- 'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
- 'engine-afl': ['FUZZING_ENGINE=afl'],
- 'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
- 'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
- 'engine-none': ['FUZZING_ENGINE=none'],
-}
-
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
DEFAULT_SANITIZERS = ['address', 'undefined']
@@ -61,19 +51,100 @@ LATEST_VERSION_CONTENT_TYPE = 'text/plain'
QUEUE_TTL_SECONDS = 60 * 60 * 24 # 24 hours.
+PROJECTS_DIR = os.path.abspath(
+ os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
+ os.path.pardir, 'projects'))
+
+DEFAULT_GCB_OPTIONS = {'machineType': 'N1_HIGHCPU_32'}
+
+Config = collections.namedtuple(
+ 'Config', ['testing', 'test_image_suffix', 'branch', 'parallel'])
+
+WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- sys.exit(1)
+class Build: # pylint: disable=too-few-public-methods
+ """Class representing the configuration for a build."""
+ def __init__(self, fuzzing_engine, sanitizer, architecture):
+ self.fuzzing_engine = fuzzing_engine
+ self.sanitizer = sanitizer
+ self.architecture = architecture
+ self.targets_list_filename = build_lib.get_targets_list_filename(
+ self.sanitizer)
-def set_yaml_defaults(project_name, project_yaml, image_project):
- """Set project.yaml's default parameters."""
+ @property
+ def out(self):
+ """Returns the out directory for the build."""
+ return posixpath.join(
+ '/workspace/out/',
+ f'{self.fuzzing_engine}-{self.sanitizer}-{self.architecture}')
+
+
+def get_project_data(project_name):
+ """Returns a tuple containing the contents of the project.yaml and Dockerfile
+ of |project_name|. Raises a FileNotFoundError if there is no Dockerfile for
+ |project_name|."""
+ project_dir = os.path.join(PROJECTS_DIR, project_name)
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ try:
+ with open(dockerfile_path) as dockerfile:
+ dockerfile = dockerfile.read()
+ except FileNotFoundError:
+ logging.error('Project "%s" does not have a dockerfile.', project_name)
+ raise
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+ with open(project_yaml_path, 'r') as project_yaml_file_handle:
+ project_yaml_contents = project_yaml_file_handle.read()
+ return project_yaml_contents, dockerfile
+
+
+class Project: # pylint: disable=too-many-instance-attributes
+ """Class representing an OSS-Fuzz project."""
+
+ def __init__(self, name, project_yaml_contents, dockerfile, image_project):
+ project_yaml = yaml.safe_load(project_yaml_contents)
+ self.name = name
+ self.image_project = image_project
+ self.workdir = workdir_from_dockerfile(dockerfile)
+ set_yaml_defaults(project_yaml)
+ self._sanitizers = project_yaml['sanitizers']
+ self.disabled = project_yaml['disabled']
+ self.architectures = project_yaml['architectures']
+ self.fuzzing_engines = project_yaml['fuzzing_engines']
+ self.coverage_extra_args = project_yaml['coverage_extra_args']
+ self.labels = project_yaml['labels']
+ self.fuzzing_language = project_yaml['language']
+ self.run_tests = project_yaml['run_tests']
+
+ @property
+ def sanitizers(self):
+ """Returns processed sanitizers."""
+ assert isinstance(self._sanitizers, list)
+ processed_sanitizers = []
+ for sanitizer in self._sanitizers:
+ if isinstance(sanitizer, six.string_types):
+ processed_sanitizers.append(sanitizer)
+ elif isinstance(sanitizer, dict):
+ for key in sanitizer.keys():
+ processed_sanitizers.append(key)
+
+ return processed_sanitizers
+
+ @property
+ def image(self):
+ """Returns the docker image for the project."""
+ return f'gcr.io/{self.image_project}/{self.name}'
+
+
+def get_last_step_id(steps):
+ """Returns the id of the last step in |steps|."""
+ return steps[-1]['id']
+
+
+def set_yaml_defaults(project_yaml):
+ """Sets project.yaml's default parameters."""
project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image',
- 'gcr.io/{0}/{1}'.format(image_project, project_name))
project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
@@ -82,291 +153,310 @@ def set_yaml_defaults(project_name, project_yaml, image_project):
project_yaml.setdefault('labels', {})
-def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
+def is_supported_configuration(build):
"""Check if the given configuration is supported."""
- fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
- if architecture == 'i386' and sanitizer != 'address':
+ fuzzing_engine_info = build_lib.ENGINE_INFO[build.fuzzing_engine]
+ if build.architecture == 'i386' and build.sanitizer != 'address':
return False
- return (sanitizer in fuzzing_engine_info.supported_sanitizers and
- architecture in fuzzing_engine_info.supported_architectures)
-
+ return (build.sanitizer in fuzzing_engine_info.supported_sanitizers and
+ build.architecture in fuzzing_engine_info.supported_architectures)
-def get_sanitizers(project_yaml):
- """Retrieve sanitizers from project.yaml."""
- sanitizers = project_yaml['sanitizers']
- assert isinstance(sanitizers, list)
- processed_sanitizers = []
- for sanitizer in sanitizers:
- if isinstance(sanitizer, six.string_types):
- processed_sanitizers.append(sanitizer)
- elif isinstance(sanitizer, dict):
- for key in sanitizer.keys():
- processed_sanitizers.append(key)
-
- return processed_sanitizers
-
-
-def workdir_from_dockerfile(dockerfile_lines):
- """Parse WORKDIR from the Dockerfile."""
- workdir_regex = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+def workdir_from_dockerfile(dockerfile):
+ """Parses WORKDIR from the Dockerfile."""
+ dockerfile_lines = dockerfile.split('\n')
for line in dockerfile_lines:
- match = re.match(workdir_regex, line)
+ match = re.match(WORKDIR_REGEX, line)
if match:
# We need to escape '$' since they're used for subsitutions in Container
# Builer builds.
return match.group(1).replace('$', '$$')
- return None
+ return '/src'
-def load_project_yaml(project_name, project_yaml_file, image_project):
- """Loads project yaml and sets default values."""
- project_yaml = yaml.safe_load(project_yaml_file)
- set_yaml_defaults(project_name, project_yaml, image_project)
- return project_yaml
+def get_datetime_now():
+ """Returns datetime.datetime.now(). Used for mocking."""
+ return datetime.datetime.now()
-# pylint: disable=too-many-locals, too-many-statements, too-many-branches
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
- """Returns build steps for project."""
- project_yaml = load_project_yaml(project_name, project_yaml_file,
- image_project)
-
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
- return []
-
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- run_tests = project_yaml['run_tests']
- time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
-
- build_steps = build_lib.project_image_steps(name, image, language)
- # Copy over MSan instrumented libraries.
- build_steps.append({
- 'name': 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
+def get_env(fuzzing_language, build):
+ """Returns an environment for building. The environment is returned as a list
+ and is suitable for use as the "env" parameter in a GCB build step. The
+ environment variables are based on the values of |fuzzing_language| and
+   |build|."""
+ env_dict = {
+ 'FUZZING_LANGUAGE': fuzzing_language,
+ 'FUZZING_ENGINE': build.fuzzing_engine,
+ 'SANITIZER': build.sanitizer,
+ 'ARCHITECTURE': build.architecture,
+ # Set HOME so that it doesn't point to a persisted volume (see
+ # https://github.com/google/oss-fuzz/issues/6035).
+ 'HOME': '/root',
+ 'OUT': build.out,
+ }
+ return list(sorted([f'{key}={value}' for key, value in env_dict.items()]))
+
+
+def get_compile_step(project, build, env, parallel):
+  """Returns the GCB step for compiling |project|'s fuzzers using |env|. The type
+ of build is specified by |build|."""
+ failure_msg = (
+ '*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
+ f'python infra/helper.py build_image {project.name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} --architecture '
+ f'{build.architecture} {project.name}\n' + '*' * 80)
+ compile_step = {
+ 'name': project.image,
+ 'env': env,
'args': [
'bash',
'-c',
- 'cp -r /msan /workspace',
+ # Remove /out to make sure there are non instrumented binaries.
+ # `cd /src && cd {workdir}` (where {workdir} is parsed from the
+ # Dockerfile). Container Builder overrides our workdir so we need
+ # to add this step to set it back.
+ (f'rm -r /out && cd /src && cd {project.workdir} && '
+ f'mkdir -p {build.out} && compile || '
+ f'(echo "{failure_msg}" && false)'),
],
- })
+ 'id': get_id('compile', build),
+ }
+ if parallel:
+ maybe_add_parallel(compile_step, build_lib.get_srcmap_step_id(), parallel)
+ return compile_step
- for fuzzing_engine in project_yaml['fuzzing_engines']:
- for sanitizer in get_sanitizers(project_yaml):
- for architecture in project_yaml['architectures']:
- if not is_supported_configuration(fuzzing_engine, sanitizer,
- architecture):
- continue
- env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
- env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
- out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, time_stamp])
- latest_version_file = '-'.join(
- [name, sanitizer, LATEST_VERSION_FILENAME])
- zip_file = stamped_name + '.zip'
- stamped_srcmap_file = stamped_name + '.srcmap.json'
- bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
- if architecture != 'x86_64':
- bucket += '-' + architecture
-
- upload_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
- srcmap_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
- stamped_srcmap_file))
- latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
- bucket, name, latest_version_file)
- latest_version_url = build_lib.get_signed_url(
- latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
-
- targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
- targets_list_url = build_lib.get_signed_url(
- build_lib.get_targets_list_url(bucket, name, sanitizer))
-
- env.append('OUT=' + out)
- env.append('MSAN_LIBS_PATH=/workspace/msan')
- env.append('ARCHITECTURE=' + architecture)
- env.append('FUZZING_LANGUAGE=' + language)
-
- workdir = workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # compile
- {
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to break loudly when a build script
- # incorrectly uses /out instead of $OUT.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from
- # the Dockerfile). Container Builder overrides our workdir
- # so we need to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
- '&& compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
+def maybe_add_parallel(step, wait_for_id, parallel):
+ """Makes |step| run immediately after |wait_for_id| if |parallel|. Mutates
+ |step|."""
+ if not parallel:
+ return
+ step['waitFor'] = wait_for_id
- if sanitizer == 'memory':
- # Patch dynamic libraries to use instrumented ones.
- build_steps.append({
+
+def get_id(step_type, build):
+ """Returns a unique step id based on |step_type| and |build|. Useful for
+ parallelizing builds."""
+ return (f'{step_type}-{build.fuzzing_engine}-{build.sanitizer}'
+ f'-{build.architecture}')
+
+
+def get_build_steps( # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-arguments
+ project_name, project_yaml_contents, dockerfile, image_project,
+ base_images_project, config):
+ """Returns build steps for project."""
+
+ project = Project(project_name, project_yaml_contents, dockerfile,
+ image_project)
+
+ if project.disabled:
+ logging.info('Project "%s" is disabled.', project.name)
+ return []
+
+ timestamp = get_datetime_now().strftime('%Y%m%d%H%M')
+
+ build_steps = build_lib.project_image_steps(
+ project.name,
+ project.image,
+ project.fuzzing_language,
+ branch=config.branch,
+ test_image_suffix=config.test_image_suffix)
+
+ # Sort engines to make AFL first to test if libFuzzer has an advantage in
+ # finding bugs first since it is generally built first.
+ for fuzzing_engine in sorted(project.fuzzing_engines):
+ for sanitizer in project.sanitizers:
+ for architecture in project.architectures:
+ build = Build(fuzzing_engine, sanitizer, architecture)
+ if not is_supported_configuration(build):
+ continue
+
+ env = get_env(project.fuzzing_language, build)
+ compile_step = get_compile_step(project, build, env, config.parallel)
+ build_steps.append(compile_step)
+
+ if project.run_tests:
+ failure_msg = (
+ '*' * 80 + '\nBuild checks failed.\n'
+ 'To reproduce, run:\n'
+ f'python infra/helper.py build_image {project.name}\n'
+ 'python infra/helper.py build_fuzzers --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} '
+ f'--architecture {build.architecture} {project.name}\n'
+ 'python infra/helper.py check_build --sanitizer '
+ f'{build.sanitizer} --engine {build.fuzzing_engine} '
+ f'--architecture {build.architecture} {project.name}\n' +
+ '*' * 80)
+ # Test fuzz targets.
+ test_step = {
'name':
- 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
+ get_runner_image_name(base_images_project,
+ config.test_image_suffix),
+ 'env':
+ env,
'args': [
- 'bash',
- '-c',
- # TODO(ochang): Replace with just patch_build.py once
- # permission in image is fixed.
- 'python /usr/local/bin/patch_build.py {0}'.format(out),
+ 'bash', '-c',
+ f'test_all.py || (echo "{failure_msg}" && false)'
],
- })
-
- if run_tests:
- failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n'
- 'python infra/helper.py check_build --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # test binaries
- {
- 'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
- 'env':
- env,
- 'args': [
- 'bash', '-c',
- 'test_all.py || (echo "{0}" && false)'.format(failure_msg)
- ],
- })
-
- if project_yaml['labels']:
- # write target labels
+ 'id':
+ get_id('build-check', build)
+ }
+ maybe_add_parallel(test_step, get_last_step_id(build_steps),
+ config.parallel)
+ build_steps.append(test_step)
+
+ if project.labels:
+ # Write target labels.
build_steps.append({
'name':
- image,
+ project.image,
'env':
env,
'args': [
'/usr/local/bin/write_labels.py',
- json.dumps(project_yaml['labels']),
- out,
+ json.dumps(project.labels),
+ build.out,
],
})
- if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
- dataflow_steps = dataflow_post_build_steps(name, env,
- base_images_project)
+ if build.sanitizer == 'dataflow' and build.fuzzing_engine == 'dataflow':
+ dataflow_steps = dataflow_post_build_steps(project.name, env,
+ base_images_project,
+ config.testing,
+ config.test_image_suffix)
if dataflow_steps:
build_steps.extend(dataflow_steps)
else:
sys.stderr.write('Skipping dataflow post build steps.\n')
build_steps.extend([
- # generate targets list
+ # Generate targets list.
{
'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
+ get_runner_image_name(base_images_project,
+ config.test_image_suffix),
'env':
env,
'args': [
- 'bash',
- '-c',
- 'targets_list > /workspace/{0}'.format(
- targets_list_filename),
- ],
- },
- # zip binaries
- {
- 'name':
- image,
- 'args': [
'bash', '-c',
- 'cd {out} && zip -r {zip_file} *'.format(out=out,
- zip_file=zip_file)
- ],
- },
- # upload srcmap
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/srcmap.json',
- srcmap_url,
- ],
- },
- # upload binaries
- {
- 'name': 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- os.path.join(out, zip_file),
- upload_url,
- ],
- },
- # upload targets list
- {
- 'name':
- 'gcr.io/{0}/uploader'.format(base_images_project),
- 'args': [
- '/workspace/{0}'.format(targets_list_filename),
- targets_list_url,
- ],
- },
- # upload the latest.version file
- build_lib.http_upload_step(zip_file, latest_version_url,
- LATEST_VERSION_CONTENT_TYPE),
- # cleanup
- {
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'rm -r ' + out,
+ f'targets_list > /workspace/{build.targets_list_filename}'
],
- },
+ }
])
+ upload_steps = get_upload_steps(project, build, timestamp,
+ base_images_project, config.testing)
+ build_steps.extend(upload_steps)
return build_steps
-def dataflow_post_build_steps(project_name, env, base_images_project):
+def get_targets_list_upload_step(bucket, project, build, uploader_image):
+ """Returns the step to upload targets_list for |build| of |project| to
+ |bucket|."""
+ targets_list_url = build_lib.get_signed_url(
+ build_lib.get_targets_list_url(bucket, project.name, build.sanitizer))
+ return {
+ 'name': uploader_image,
+ 'args': [
+ f'/workspace/{build.targets_list_filename}',
+ targets_list_url,
+ ],
+ }
+
+
+def get_uploader_image(base_images_project):
+ """Returns the uploader base image in |base_images_project|."""
+ return f'gcr.io/{base_images_project}/uploader'
+
+
+def get_upload_steps(project, build, timestamp, base_images_project, testing):
+ """Returns the steps for uploading the fuzzer build specified by |project| and
+ |build|. Uses |timestamp| for naming the uploads. Uses |base_images_project|
+ and |testing| for determining which image to use for the upload."""
+ bucket = build_lib.get_upload_bucket(build.fuzzing_engine, build.architecture,
+ testing)
+ stamped_name = '-'.join([project.name, build.sanitizer, timestamp])
+ zip_file = stamped_name + '.zip'
+ upload_url = build_lib.get_signed_url(
+ build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name, zip_file))
+ stamped_srcmap_file = stamped_name + '.srcmap.json'
+ srcmap_url = build_lib.get_signed_url(
+ build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
+ stamped_srcmap_file))
+ latest_version_file = '-'.join(
+ [project.name, build.sanitizer, LATEST_VERSION_FILENAME])
+ latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
+ bucket, project.name, latest_version_file)
+ latest_version_url = build_lib.get_signed_url(
+ latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
+ uploader_image = get_uploader_image(base_images_project)
+
+ upload_steps = [
+ # Zip binaries.
+ {
+ 'name': project.image,
+ 'args': ['bash', '-c', f'cd {build.out} && zip -r {zip_file} *'],
+ },
+ # Upload srcmap.
+ {
+ 'name': uploader_image,
+ 'args': [
+ '/workspace/srcmap.json',
+ srcmap_url,
+ ],
+ },
+ # Upload binaries.
+ {
+ 'name': uploader_image,
+ 'args': [
+ os.path.join(build.out, zip_file),
+ upload_url,
+ ],
+ },
+ # Upload targets list.
+ get_targets_list_upload_step(bucket, project, build, uploader_image),
+ # Upload the latest.version file.
+ build_lib.http_upload_step(zip_file, latest_version_url,
+ LATEST_VERSION_CONTENT_TYPE),
+ # Cleanup.
+ get_cleanup_step(project, build),
+ ]
+ return upload_steps
+
+
+def get_cleanup_step(project, build):
+ """Returns the step for cleaning up after doing |build| of |project|."""
+ return {
+ 'name': project.image,
+ 'args': [
+ 'bash',
+ '-c',
+ 'rm -r ' + build.out,
+ ],
+ }
+
+
+def get_runner_image_name(base_images_project, test_image_suffix):
+ """Returns the runner image that should be used, based on
+ |base_images_project|. Returns the testing image if |test_image_suffix|."""
+ image = f'gcr.io/{base_images_project}/base-runner'
+ if test_image_suffix:
+ image += '-' + test_image_suffix
+ return image
+
+
+def dataflow_post_build_steps(project_name, env, base_images_project, testing,
+ test_image_suffix):
"""Appends dataflow post build steps."""
- steps = build_lib.download_corpora_steps(project_name)
+ steps = build_lib.download_corpora_steps(project_name, testing)
if not steps:
return None
steps.append({
'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
+ get_runner_image_name(base_images_project, test_image_suffix),
'env':
env + [
'COLLECT_DFT_TIMEOUT=2h',
@@ -387,63 +477,126 @@ def dataflow_post_build_steps(project_name, env, base_images_project):
return steps
-def get_logs_url(build_id, image_project='oss-fuzz'):
+def get_logs_url(build_id, cloud_project='oss-fuzz'):
"""Returns url where logs are displayed for the build."""
- url_format = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project={1}')
- return url_format.format(build_id, image_project)
+ return ('https://console.cloud.google.com/logs/viewer?'
+ f'resource=build%2Fbuild_id%2F{build_id}&project={cloud_project}')
+
+
+def get_gcb_url(build_id, cloud_project='oss-fuzz'):
+  """Returns url of the Cloud Build page for the build."""
+ return (f'https://console.cloud.google.com/cloud-build/builds/{build_id}'
+ f'?project={cloud_project}')
# pylint: disable=no-member
-def run_build(build_steps, project_name, tag):
- """Run the build for given steps on cloud build."""
+def run_build(oss_fuzz_project,
+ build_steps,
+ credentials,
+ build_type,
+ cloud_project='oss-fuzz'):
+ """Run the build for given steps on cloud build. |build_steps| are the steps
+  to run. |credentials| are used to authenticate to GCB and build in
+ |cloud_project|. |oss_fuzz_project| and |build_type| are used to tag the build
+ in GCB so the build can be queried for debugging purposes."""
options = {}
if 'GCB_OPTIONS' in os.environ:
options = yaml.safe_load(os.environ['GCB_OPTIONS'])
+ else:
+ options = DEFAULT_GCB_OPTIONS
+ tags = [oss_fuzz_project + '-' + build_type, build_type, oss_fuzz_project]
build_body = {
'steps': build_steps,
'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
'options': options,
'logsBucket': GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
+ 'tags': tags,
'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
}
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
+ cloudbuild = cloud_build('cloudbuild',
+ 'v1',
+ credentials=credentials,
+ cache_discovery=False)
+ build_info = cloudbuild.projects().builds().create(projectId=cloud_project,
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
- print('Logs:', get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-def main():
- """Build and run projects."""
- if len(sys.argv) != 2:
- usage()
+ logging.info('Build ID: %s', build_id)
+ logging.info('Logs: %s', get_logs_url(build_id, cloud_project))
+ logging.info('Cloud build page: %s', get_gcb_url(build_id, cloud_project))
+ return build_id
+
+
+def get_args(description):
+ """Parses command line arguments and returns them. Suitable for a build
+ script."""
+ parser = argparse.ArgumentParser(sys.argv[0], description=description)
+ parser.add_argument('projects', help='Projects.', nargs='+')
+ parser.add_argument('--testing',
+ action='store_true',
+ required=False,
+ default=False,
+ help='Upload to testing buckets.')
+ parser.add_argument('--test-image-suffix',
+ required=False,
+ default=None,
+ help='Use testing base-images.')
+ parser.add_argument('--branch',
+ required=False,
+ default=None,
+ help='Use specified OSS-Fuzz branch.')
+ parser.add_argument('--parallel',
+ action='store_true',
+ required=False,
+ default=False,
+ help='Do builds in parallel.')
+ return parser.parse_args()
+
+
+def build_script_main(script_description, get_build_steps_func, build_type):
+  """Gets arguments from command line using |script_description| as help string
+ description. Gets build_steps using |get_build_steps_func| and then runs those
+ steps on GCB, tagging the builds with |build_type|. Returns 0 on success, 1 on
+ failure."""
+ args = get_args(script_description)
+ logging.basicConfig(level=logging.INFO)
image_project = 'oss-fuzz'
base_images_project = 'oss-fuzz-base'
- project_dir = sys.argv[1].rstrip(os.path.sep)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- project_name = os.path.basename(project_dir)
- with open(dockerfile_path) as dockerfile:
- dockerfile_lines = dockerfile.readlines()
+ credentials = oauth2client.client.GoogleCredentials.get_application_default()
+ error = False
+ config = Config(args.testing, args.test_image_suffix, args.branch,
+ args.parallel)
+ for project_name in args.projects:
+ logging.info('Getting steps for: "%s".', project_name)
+ try:
+ project_yaml_contents, dockerfile_contents = get_project_data(
+ project_name)
+ except FileNotFoundError:
+ logging.error('Couldn\'t get project data. Skipping %s.', project_name)
+ error = True
+ continue
+
+ steps = get_build_steps_func(project_name, project_yaml_contents,
+ dockerfile_contents, image_project,
+ base_images_project, config)
+ if not steps:
+ logging.error('No steps. Skipping %s.', project_name)
+ error = True
+ continue
+
+ run_build(project_name, steps, credentials, build_type)
+ return 0 if not error else 1
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
- run_build(steps, project_name, FUZZING_BUILD_TAG)
+def main():
+ """Build and run projects."""
+ return build_script_main('Builds a project on GCB.', get_build_steps,
+ FUZZING_BUILD_TYPE)
if __name__ == '__main__':
- main()
+ sys.exit(main())
diff --git a/infra/build/functions/build_project_test.py b/infra/build/functions/build_project_test.py
new file mode 100644
index 000000000..43f6c1cfa
--- /dev/null
+++ b/infra/build/functions/build_project_test.py
@@ -0,0 +1,77 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Unit tests for build_project."""
+import json
+import os
+import sys
+import unittest
+from unittest import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+FUNCTIONS_DIR = os.path.dirname(__file__)
+sys.path.append(FUNCTIONS_DIR)
+# pylint: disable=wrong-import-position
+
+import build_project
+import test_utils
+
+# pylint: disable=no-member
+
+
+class TestRequestCoverageBuilds(fake_filesystem_unittest.TestCase):
+ """Unit tests for sync."""
+
+ def setUp(self):
+ self.maxDiff = None # pylint: disable=invalid-name
+ self.setUpPyfakefs()
+
+ @mock.patch('build_lib.get_signed_url', return_value='test_url')
+ @mock.patch('build_project.get_datetime_now',
+ return_value=test_utils.FAKE_DATETIME)
+ def test_get_build_steps(self, mock_url, mock_get_datetime_now):
+ """Test for get_build_steps."""
+ del mock_url, mock_get_datetime_now
+ project_yaml_contents = ('language: c++\n'
+ 'sanitizers:\n'
+ ' - address\n'
+ ' - memory\n'
+ ' - undefined\n'
+ 'architectures:\n'
+ ' - x86_64\n'
+ ' - i386\n')
+ self.fs.create_dir(test_utils.PROJECT_DIR)
+ test_utils.create_project_data(test_utils.PROJECT, project_yaml_contents)
+
+ expected_build_steps_file_path = test_utils.get_test_data_file_path(
+ 'expected_build_steps.json')
+ self.fs.add_real_file(expected_build_steps_file_path)
+ with open(expected_build_steps_file_path) as expected_build_steps_file:
+ expected_build_steps = json.load(expected_build_steps_file)
+
+ config = build_project.Config(False, False, None, False)
+ project_yaml, dockerfile = build_project.get_project_data(
+ test_utils.PROJECT)
+ build_steps = build_project.get_build_steps(test_utils.PROJECT,
+ project_yaml, dockerfile,
+ test_utils.IMAGE_PROJECT,
+ test_utils.BASE_IMAGES_PROJECT,
+ config)
+ self.assertEqual(build_steps, expected_build_steps)
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
diff --git a/infra/build/functions/deploy.sh b/infra/build/functions/deploy.sh
index ea094e3b2..3edf6ee17 100755
--- a/infra/build/functions/deploy.sh
+++ b/infra/build/functions/deploy.sh
@@ -80,9 +80,10 @@ function deploy_cloud_function {
--runtime python38 \
--project $project \
--timeout 540 \
- --region us-central1 \
- --set-env-vars GCP_PROJECT=$project,FUNCTION_REGION=us-central1 \
- --max-instances 1
+ --region us-central1 \
+ --set-env-vars GCP_PROJECT=$project,FUNCTION_REGION=us-central1 \
+ --max-instances 1 \
+ --memory 2048MB
}
if [ $# == 1 ]; then
@@ -135,11 +136,6 @@ deploy_cloud_function base-image-build \
$BASE_IMAGE_JOB_TOPIC \
$PROJECT_ID
-deploy_cloud_function base-msan-build \
- build_msan \
- $BASE_IMAGE_JOB_TOPIC \
- $PROJECT_ID
-
deploy_cloud_function request-build \
build_project \
$BUILD_JOB_TOPIC \
diff --git a/infra/build/functions/expected_build_steps.json b/infra/build/functions/expected_build_steps.json
deleted file mode 100644
index da9c63654..000000000
--- a/infra/build/functions/expected_build_steps.json
+++ /dev/null
@@ -1,330 +0,0 @@
-[
- {
- "args": [
- "clone",
- "https://github.com/google/oss-fuzz.git"
- ],
- "name": "gcr.io/cloud-builders/git"
- },
- {
- "name": "gcr.io/cloud-builders/docker",
- "args": [
- "build",
- "-t",
- "gcr.io/oss-fuzz/test-project",
- "."
- ],
- "dir": "oss-fuzz/projects/test-project"
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json"
- ],
- "env": [
- "OSSFUZZ_REVISION=$REVISION_ID",
- "FUZZING_LANGUAGE=c++"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/msan-libs-builder",
- "args": [
- "bash",
- "-c",
- "cp -r /msan /workspace"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=libfuzzer",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=afl",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/address && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/base-runner",
- "env": [
- "FUZZING_ENGINE=honggfuzz",
- "SANITIZER=address",
- "OUT=/workspace/out/address",
- "MSAN_LIBS_PATH=/workspace/msan",
- "ARCHITECTURE=x86_64",
- "FUZZING_LANGUAGE=c++"
- ],
- "args": [
- "bash",
- "-c",
- "targets_list > /workspace/targets.list.address"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "cd /workspace/out/address && zip -r test-project-address-202001010000.zip *"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/srcmap.json",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/out/address/test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz-base/uploader",
- "args": [
- "/workspace/targets.list.address",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/cloud-builders/curl",
- "args": [
- "-H",
- "Content-Type: text/plain",
- "-X",
- "PUT",
- "-d",
- "test-project-address-202001010000.zip",
- "test_url"
- ]
- },
- {
- "name": "gcr.io/oss-fuzz/test-project",
- "args": [
- "bash",
- "-c",
- "rm -r /workspace/out/address"
- ]
- }
-]
diff --git a/infra/build/functions/main.py b/infra/build/functions/main.py
index 1bfd35818..c34dc1329 100644
--- a/infra/build/functions/main.py
+++ b/infra/build/functions/main.py
@@ -45,8 +45,3 @@ def coverage_build(event, context):
def builds_status(event, context):
"""Entry point for builds status cloud function."""
update_build_status.update_status(event, context)
-
-
-def build_msan(event, context):
- """Entry point for base msan builder."""
- base_images.base_msan_builder(event, context)
diff --git a/infra/build/functions/project_sync.py b/infra/build/functions/project_sync.py
index debdbbd9a..7b30cae2d 100644
--- a/infra/build/functions/project_sync.py
+++ b/infra/build/functions/project_sync.py
@@ -94,8 +94,8 @@ def update_scheduler(cloud_scheduler_client, project, schedule, tag):
def delete_project(cloud_scheduler_client, project):
"""Delete the given project."""
logging.info('Deleting project %s', project.name)
- for tag in (build_project.FUZZING_BUILD_TAG,
- build_and_run_coverage.COVERAGE_BUILD_TAG):
+ for tag in (build_project.FUZZING_BUILD_TYPE,
+ build_and_run_coverage.COVERAGE_BUILD_TYPE):
try:
delete_scheduler(cloud_scheduler_client, project.name, tag)
except exceptions.NotFound:
@@ -124,9 +124,9 @@ def sync_projects(cloud_scheduler_client, projects):
try:
create_scheduler(cloud_scheduler_client, project_name,
projects[project_name].schedule,
- build_project.FUZZING_BUILD_TAG, FUZZING_BUILD_TOPIC)
+ build_project.FUZZING_BUILD_TYPE, FUZZING_BUILD_TOPIC)
create_scheduler(cloud_scheduler_client, project_name, COVERAGE_SCHEDULE,
- build_and_run_coverage.COVERAGE_BUILD_TAG,
+ build_and_run_coverage.COVERAGE_BUILD_TYPE,
COVERAGE_BUILD_TOPIC)
project_metadata = projects[project_name]
Project(name=project_name,
@@ -149,7 +149,7 @@ def sync_projects(cloud_scheduler_client, projects):
logging.info('Schedule changed.')
update_scheduler(cloud_scheduler_client, project,
projects[project.name].schedule,
- build_project.FUZZING_BUILD_TAG)
+ build_project.FUZZING_BUILD_TYPE)
project.schedule = project_metadata.schedule
project_changed = True
except exceptions.GoogleAPICallError as error:
@@ -232,7 +232,7 @@ def get_github_creds():
def sync(event, context):
"""Sync projects with cloud datastore."""
- del event, context #unused
+ del event, context # Unused.
with ndb.Client().context():
git_creds = get_github_creds()
diff --git a/infra/build/functions/project_sync_test.py b/infra/build/functions/project_sync_test.py
index f90733810..ad1330eaf 100644
--- a/infra/build/functions/project_sync_test.py
+++ b/infra/build/functions/project_sync_test.py
@@ -71,7 +71,7 @@ class CloudSchedulerClient:
# pylint: disable=no-self-use
def location_path(self, project_id, location_id):
"""Return project path."""
- return 'projects/{}/location/{}'.format(project_id, location_id)
+ return f'projects/{project_id}/location/{location_id}'
def create_job(self, parent, job):
"""Simulate create job."""
@@ -81,8 +81,7 @@ class CloudSchedulerClient:
# pylint: disable=no-self-use
def job_path(self, project_id, location_id, name):
"""Return job path."""
- return 'projects/{}/location/{}/jobs/{}'.format(project_id, location_id,
- name)
+ return f'projects/{project_id}/location/{location_id}/jobs/{name}'
def delete_job(self, name):
"""Simulate delete jobs."""
diff --git a/infra/build/functions/request_build.py b/infra/build/functions/request_build.py
index 6f0ab62a3..543bafb33 100644
--- a/infra/build/functions/request_build.py
+++ b/infra/build/functions/request_build.py
@@ -15,13 +15,10 @@
################################################################################
"""Cloud function to request builds."""
import base64
-import logging
import google.auth
-from googleapiclient.discovery import build
from google.cloud import ndb
-import build_lib
import build_project
from datastore_entities import BuildsHistory
from datastore_entities import Project
@@ -55,46 +52,33 @@ def get_project_data(project_name):
project = query.get()
if not project:
raise RuntimeError(
- 'Project {0} not available in cloud datastore'.format(project_name))
- project_yaml_contents = project.project_yaml_contents
- dockerfile_lines = project.dockerfile_contents.split('\n')
+ f'Project {project_name} not available in cloud datastore')
- return (project_yaml_contents, dockerfile_lines)
+ return project.project_yaml_contents, project.dockerfile_contents
+
+
+def get_empty_config():
+ """Returns an empty build config."""
+ return build_project.Config(False, None, None, False)
def get_build_steps(project_name, image_project, base_images_project):
"""Retrieve build steps."""
+ # TODO(metzman): Figure out if we need this.
project_yaml_contents, dockerfile_lines = get_project_data(project_name)
+ build_config = get_empty_config()
return build_project.get_build_steps(project_name, project_yaml_contents,
dockerfile_lines, image_project,
- base_images_project)
+ base_images_project, build_config)
-# pylint: disable=no-member
-def run_build(project_name, image_project, build_steps, credentials, tag):
- """Execute build on cloud build."""
- build_body = {
- 'steps': build_steps,
- 'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
- 'options': {
- 'machineType': 'N1_HIGHCPU_32'
- },
- 'logsBucket': build_project.GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
- 'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
- }
-
- cloudbuild = build('cloudbuild',
- 'v1',
- credentials=credentials,
- cache_discovery=False)
- build_info = cloudbuild.projects().builds().create(projectId=image_project,
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- update_build_history(project_name, build_id, tag)
- logging.info('Build ID: %s', build_id)
- logging.info('Logs: %s', build_project.get_logs_url(build_id, image_project))
+def run_build(oss_fuzz_project, build_steps, credentials, build_type,
+ cloud_project):
+ """Execute build on cloud build. Wrapper around build_project.py that also
+ updates the db."""
+ build_id = build_project.run_build(oss_fuzz_project, build_steps, credentials,
+ build_type, cloud_project)
+ update_build_history(oss_fuzz_project, build_id, build_type)
# pylint: disable=no-member
@@ -107,9 +91,14 @@ def request_build(event, context):
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
- credentials, image_project = google.auth.default()
- build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
+ credentials, cloud_project = google.auth.default()
+ build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
- run_build(project_name, image_project, build_steps, credentials,
- build_project.FUZZING_BUILD_TAG)
+ run_build(
+ project_name,
+ build_steps,
+ credentials,
+ build_project.FUZZING_BUILD_TYPE,
+ cloud_project=cloud_project,
+ )
diff --git a/infra/build/functions/request_build_test.py b/infra/build/functions/request_build_test.py
index 22a4a1056..1eb1d8efc 100644
--- a/infra/build/functions/request_build_test.py
+++ b/infra/build/functions/request_build_test.py
@@ -14,23 +14,17 @@
#
################################################################################
"""Unit tests for Cloud Function request builds which builds projects."""
-import json
-import datetime
import os
import sys
import unittest
-from unittest import mock
from google.cloud import ndb
sys.path.append(os.path.dirname(__file__))
# pylint: disable=wrong-import-position
-from datastore_entities import BuildsHistory
-from datastore_entities import Project
-from request_build import get_build_steps
-from request_build import get_project_data
-from request_build import update_build_history
+import datastore_entities
+import request_build
import test_utils
# pylint: disable=no-member
@@ -48,65 +42,42 @@ class TestRequestBuilds(unittest.TestCase):
def setUp(self):
test_utils.reset_ds_emulator()
-
- @mock.patch('build_lib.get_signed_url', return_value='test_url')
- @mock.patch('datetime.datetime')
- def test_get_build_steps(self, mocked_url, mocked_time):
- """Test for get_build_steps."""
- del mocked_url, mocked_time
- datetime.datetime = test_utils.SpoofedDatetime
- project_yaml_contents = ('language: c++\n'
- 'sanitizers:\n'
- ' - address\n'
- 'architectures:\n'
- ' - x86_64\n')
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- testcase_path = os.path.join(os.path.dirname(__file__),
- 'expected_build_steps.json')
- with open(testcase_path) as testcase_file:
- expected_build_steps = json.load(testcase_file)
-
- with ndb.Client().context():
- Project(name='test-project',
- project_yaml_contents=project_yaml_contents,
- dockerfile_contents='test line').put()
- build_steps = get_build_steps('test-project', image_project,
- base_images_project)
- self.assertEqual(build_steps, expected_build_steps)
+ self.maxDiff = None # pylint: disable=invalid-name
def test_get_build_steps_no_project(self):
"""Test for when project isn't available in datastore."""
with ndb.Client().context():
- self.assertRaises(RuntimeError, get_build_steps, 'test-project',
- 'oss-fuzz', 'oss-fuzz-base')
+ self.assertRaises(RuntimeError, request_build.get_build_steps,
+ 'test-project', 'oss-fuzz', 'oss-fuzz-base')
def test_build_history(self):
"""Testing build history."""
with ndb.Client().context():
- BuildsHistory(id='test-project-fuzzing',
- build_tag='fuzzing',
- project='test-project',
- build_ids=[str(i) for i in range(1, 65)]).put()
- update_build_history('test-project', '65', 'fuzzing')
+ datastore_entities.BuildsHistory(id='test-project-fuzzing',
+ build_tag='fuzzing',
+ project='test-project',
+ build_ids=[str(i) for i in range(1, 65)
+ ]).put()
+ request_build.update_build_history('test-project', '65', 'fuzzing')
expected_build_ids = [str(i) for i in range(2, 66)]
- self.assertEqual(BuildsHistory.query().get().build_ids,
+ self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_build_history_no_existing_project(self):
"""Testing build history when build history object is missing."""
with ndb.Client().context():
- update_build_history('test-project', '1', 'fuzzing')
+ request_build.update_build_history('test-project', '1', 'fuzzing')
expected_build_ids = ['1']
- self.assertEqual(BuildsHistory.query().get().build_ids,
+ self.assertEqual(datastore_entities.BuildsHistory.query().get().build_ids,
expected_build_ids)
def test_get_project_data(self):
"""Testing get project data."""
with ndb.Client().context():
- self.assertRaises(RuntimeError, get_project_data, 'test-project')
+ self.assertRaises(RuntimeError, request_build.get_project_data,
+ 'test-project')
@classmethod
def tearDownClass(cls):
diff --git a/infra/build/functions/request_coverage_build.py b/infra/build/functions/request_coverage_build.py
index 1b4ac0e47..a3890cb32 100644
--- a/infra/build/functions/request_coverage_build.py
+++ b/infra/build/functions/request_coverage_build.py
@@ -27,27 +27,31 @@ BASE_PROJECT = 'oss-fuzz-base'
def get_build_steps(project_name, image_project, base_images_project):
"""Retrieve build steps."""
+ build_config = request_build.get_empty_config()
project_yaml_contents, dockerfile_lines = request_build.get_project_data(
project_name)
return build_and_run_coverage.get_build_steps(project_name,
project_yaml_contents,
dockerfile_lines, image_project,
- base_images_project)
+ base_images_project,
+ build_config)
def request_coverage_build(event, context):
"""Entry point for coverage build cloud function."""
- del context #unused
+ del context # Unused.
if 'data' in event:
project_name = base64.b64decode(event['data']).decode('utf-8')
else:
raise RuntimeError('Project name missing from payload')
with ndb.Client().context():
- credentials, image_project = google.auth.default()
- build_steps = get_build_steps(project_name, image_project, BASE_PROJECT)
+ credentials, cloud_project = google.auth.default()
+ build_steps = get_build_steps(project_name, cloud_project, BASE_PROJECT)
if not build_steps:
return
- request_build.run_build(project_name, image_project, build_steps,
+ request_build.run_build(project_name,
+ build_steps,
credentials,
- build_and_run_coverage.COVERAGE_BUILD_TAG)
+ build_and_run_coverage.COVERAGE_BUILD_TYPE,
+ cloud_project=cloud_project)
diff --git a/infra/build/functions/request_coverage_build_test.py b/infra/build/functions/request_coverage_build_test.py
deleted file mode 100644
index 1327e36a0..000000000
--- a/infra/build/functions/request_coverage_build_test.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2020 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-"""Unit tests for Cloud Function that builds coverage reports."""
-import json
-import datetime
-import os
-import sys
-import unittest
-from unittest import mock
-
-from google.cloud import ndb
-
-sys.path.append(os.path.dirname(__file__))
-# pylint: disable=wrong-import-position
-
-from datastore_entities import Project
-from build_and_run_coverage import get_build_steps
-import test_utils
-
-# pylint: disable=no-member
-
-
-class TestRequestCoverageBuilds(unittest.TestCase):
- """Unit tests for sync."""
-
- @classmethod
- def setUpClass(cls):
- cls.ds_emulator = test_utils.start_datastore_emulator()
- test_utils.wait_for_emulator_ready(cls.ds_emulator, 'datastore',
- test_utils.DATASTORE_READY_INDICATOR)
- test_utils.set_gcp_environment()
-
- def setUp(self):
- test_utils.reset_ds_emulator()
-
- @mock.patch('build_lib.get_signed_url', return_value='test_url')
- @mock.patch('build_lib.download_corpora_steps',
- return_value=[{
- 'url': 'test_download'
- }])
- @mock.patch('datetime.datetime')
- def test_get_coverage_build_steps(self, mocked_url, mocked_corpora_steps,
- mocked_time):
- """Test for get_build_steps."""
- del mocked_url, mocked_corpora_steps, mocked_time
- datetime.datetime = test_utils.SpoofedDatetime
- project_yaml_contents = ('language: c++\n'
- 'sanitizers:\n'
- ' - address\n'
- 'architectures:\n'
- ' - x86_64\n')
- dockerfile_contents = 'test line'
- image_project = 'oss-fuzz'
- base_images_project = 'oss-fuzz-base'
- testcase_path = os.path.join(os.path.dirname(__file__),
- 'expected_coverage_build_steps.json')
- with open(testcase_path) as testcase_file:
- expected_coverage_build_steps = json.load(testcase_file)
-
- with ndb.Client().context():
- Project(name='test-project',
- project_yaml_contents=project_yaml_contents,
- dockerfile_contents=dockerfile_contents).put()
-
- dockerfile_lines = dockerfile_contents.split('\n')
- build_steps = get_build_steps('test-project', project_yaml_contents,
- dockerfile_lines, image_project,
- base_images_project)
- self.assertEqual(build_steps, expected_coverage_build_steps)
-
- @classmethod
- def tearDownClass(cls):
- test_utils.cleanup_emulator(cls.ds_emulator)
-
-
-if __name__ == '__main__':
- unittest.main(exit=False)
diff --git a/infra/build/functions/test_data/expected_build_steps.json b/infra/build/functions/test_data/expected_build_steps.json
new file mode 100644
index 000000000..f0e39832b
--- /dev/null
+++ b/infra/build/functions/test_data/expected_build_steps.json
@@ -0,0 +1,628 @@
+[
+ {
+ "args": [
+ "clone",
+ "https://github.com/google/oss-fuzz.git",
+ "--depth",
+ "1"
+ ],
+ "name": "gcr.io/cloud-builders/git"
+ },
+ {
+ "name": "gcr.io/cloud-builders/docker",
+ "args": [
+ "build",
+ "-t",
+ "gcr.io/oss-fuzz/test-project",
+ "."
+ ],
+ "dir": "oss-fuzz/projects/test-project"
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json"
+ ],
+ "env": [
+ "OSSFUZZ_REVISION=$REVISION_ID",
+ "FUZZING_LANGUAGE=c++"
+ ],
+ "id": "srcmap"
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/afl-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-afl-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine afl --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine afl --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-afl-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=afl",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/afl-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/afl-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/afl-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/afl-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/honggfuzz-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-honggfuzz-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine honggfuzz --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine honggfuzz --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-honggfuzz-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=honggfuzz",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/honggfuzz-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/honggfuzz-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/honggfuzz-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/honggfuzz-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-address-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-x86_64",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-address-x86_64 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-address-x86_64/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-address-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-address-i386 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-address-i386"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer address --engine libfuzzer --architecture i386 test-project\npython infra/helper.py check_build --sanitizer address --engine libfuzzer --architecture i386 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-address-i386"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=i386",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-address-i386",
+ "SANITIZER=address"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.address"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-address-i386 && zip -r test-project-address-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-address-i386/test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.address",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-address-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-address-i386"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-memory-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-memory-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer memory --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-memory-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-memory-x86_64",
+ "SANITIZER=memory"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.memory"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-memory-x86_64 && zip -r test-project-memory-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-memory-x86_64/test-project-memory-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.memory",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-memory-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-memory-x86_64"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-undefined-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-undefined-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "test_all.py || (echo \"********************************************************************************\nBuild checks failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\npython infra/helper.py check_build --sanitizer undefined --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "build-check-libfuzzer-undefined-x86_64"
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/base-runner",
+ "env": [
+ "ARCHITECTURE=x86_64",
+ "FUZZING_ENGINE=libfuzzer",
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-undefined-x86_64",
+ "SANITIZER=undefined"
+ ],
+ "args": [
+ "bash",
+ "-c",
+ "targets_list > /workspace/targets.list.undefined"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "cd /workspace/out/libfuzzer-undefined-x86_64 && zip -r test-project-undefined-202001010000.zip *"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/srcmap.json",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/out/libfuzzer-undefined-x86_64/test-project-undefined-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz-base/uploader",
+ "args": [
+ "/workspace/targets.list.undefined",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/cloud-builders/curl",
+ "args": [
+ "-H",
+ "Content-Type: text/plain",
+ "-X",
+ "PUT",
+ "-d",
+ "test-project-undefined-202001010000.zip",
+ "test_url"
+ ]
+ },
+ {
+ "name": "gcr.io/oss-fuzz/test-project",
+ "args": [
+ "bash",
+ "-c",
+ "rm -r /workspace/out/libfuzzer-undefined-x86_64"
+ ]
+ }
+]
diff --git a/infra/build/functions/expected_coverage_build_steps.json b/infra/build/functions/test_data/expected_coverage_build_steps.json
index 19b1d5b81..2af48f58d 100644
--- a/infra/build/functions/expected_coverage_build_steps.json
+++ b/infra/build/functions/test_data/expected_coverage_build_steps.json
@@ -2,7 +2,9 @@
{
"args": [
"clone",
- "https://github.com/google/oss-fuzz.git"
+ "https://github.com/google/oss-fuzz.git",
+ "--depth",
+ "1"
],
"name": "gcr.io/cloud-builders/git"
},
@@ -26,21 +28,25 @@
"env": [
"OSSFUZZ_REVISION=$REVISION_ID",
"FUZZING_LANGUAGE=c++"
- ]
+ ],
+ "id": "srcmap"
},
{
"name": "gcr.io/oss-fuzz/test-project",
"env": [
+ "ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
- "SANITIZER=coverage",
- "OUT=/workspace/out/coverage",
- "FUZZING_LANGUAGE=c++"
+ "FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-coverage-x86_64",
+ "SANITIZER=coverage"
],
"args": [
"bash",
"-c",
- "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/coverage && compile || (echo \"********************************************************************************\nCoverage build failed.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage test-project\n********************************************************************************\" && false)"
- ]
+ "rm -r /out && cd /src && cd /src && mkdir -p /workspace/out/libfuzzer-coverage-x86_64 && compile || (echo \"********************************************************************************\nFailed to build.\nTo reproduce, run:\npython infra/helper.py build_image test-project\npython infra/helper.py build_fuzzers --sanitizer coverage --engine libfuzzer --architecture x86_64 test-project\n********************************************************************************\" && false)"
+ ],
+ "id": "compile-libfuzzer-coverage-x86_64"
},
{
"url": "test_download"
@@ -48,10 +54,12 @@
{
"name": "gcr.io/oss-fuzz-base/base-runner",
"env": [
+ "ARCHITECTURE=x86_64",
"FUZZING_ENGINE=libfuzzer",
- "SANITIZER=coverage",
- "OUT=/workspace/out/coverage",
"FUZZING_LANGUAGE=c++",
+ "HOME=/root",
+ "OUT=/workspace/out/libfuzzer-coverage-x86_64",
+ "SANITIZER=coverage",
"HTTP_PORT=",
"COVERAGE_EXTRA_ARGS="
],
@@ -81,7 +89,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/report",
+ "/workspace/out/libfuzzer-coverage-x86_64/report",
"gs://oss-fuzz-coverage/test-project/reports/20200101"
]
},
@@ -99,7 +107,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/fuzzer_stats",
+ "/workspace/out/libfuzzer-coverage-x86_64/fuzzer_stats",
"gs://oss-fuzz-coverage/test-project/fuzzer_stats/20200101"
]
},
@@ -117,7 +125,7 @@
"-m",
"cp",
"-r",
- "/workspace/out/coverage/logs",
+ "/workspace/out/libfuzzer-coverage-x86_64/logs",
"gs://oss-fuzz-coverage/test-project/logs/20200101"
]
},
@@ -141,4 +149,4 @@
"test_url"
]
}
-] \ No newline at end of file
+]
diff --git a/infra/build/functions/test_utils.py b/infra/build/functions/test_utils.py
index 9aac8eac8..a093bcfa0 100644
--- a/infra/build/functions/test_utils.py
+++ b/infra/build/functions/test_utils.py
@@ -24,16 +24,31 @@ import requests
DATASTORE_READY_INDICATOR = b'is now running'
DATASTORE_EMULATOR_PORT = 8432
EMULATOR_TIMEOUT = 20
-TEST_PROJECT_ID = 'test-project'
+FUNCTIONS_DIR = os.path.dirname(__file__)
+OSS_FUZZ_DIR = os.path.dirname(os.path.dirname(os.path.dirname(FUNCTIONS_DIR)))
+PROJECTS_DIR = os.path.join(OSS_FUZZ_DIR, 'projects')
-# pylint: disable=arguments-differ
-class SpoofedDatetime(datetime.datetime):
- """Mocking Datetime class for now() function."""
+FAKE_DATETIME = datetime.datetime(2020, 1, 1, 0, 0, 0)
+IMAGE_PROJECT = 'oss-fuzz'
+BASE_IMAGES_PROJECT = 'oss-fuzz-base'
+PROJECT = 'test-project'
+PROJECT_DIR = os.path.join(PROJECTS_DIR, PROJECT)
- @classmethod
- def now(cls):
- return datetime.datetime(2020, 1, 1, 0, 0, 0)
+
+def create_project_data(project,
+ project_yaml_contents,
+ dockerfile_contents='test line'):
+ """Creates a project.yaml with |project_yaml_contents| and a Dockerfile with
+ |dockerfile_contents| for |project|."""
+ project_dir = os.path.join(PROJECTS_DIR, project)
+ project_yaml_path = os.path.join(project_dir, 'project.yaml')
+ with open(project_yaml_path, 'w') as project_yaml_handle:
+ project_yaml_handle.write(project_yaml_contents)
+
+ dockerfile_path = os.path.join(project_dir, 'Dockerfile')
+ with open(dockerfile_path, 'w') as dockerfile_handle:
+ dockerfile_handle.write(dockerfile_contents)
def start_datastore_emulator():
@@ -46,7 +61,7 @@ def start_datastore_emulator():
'start',
'--consistency=1.0',
'--host-port=localhost:' + str(DATASTORE_EMULATOR_PORT),
- '--project=' + TEST_PROJECT_ID,
+ '--project=' + PROJECT,
'--no-store-on-disk',
],
stdout=subprocess.PIPE,
@@ -76,15 +91,13 @@ def wait_for_emulator_ready(proc,
thread.daemon = True
thread.start()
if not ready_event.wait(timeout):
- raise RuntimeError(
- '{} emulator did not get ready in time.'.format(emulator))
+ raise RuntimeError(f'{emulator} emulator did not get ready in time.')
return thread
def reset_ds_emulator():
"""Reset ds emulator/clean all entities."""
- req = requests.post(
- 'http://localhost:{}/reset'.format(DATASTORE_EMULATOR_PORT))
+ req = requests.post(f'http://localhost:{DATASTORE_EMULATOR_PORT}/reset')
req.raise_for_status()
@@ -98,7 +111,12 @@ def set_gcp_environment():
"""Set environment variables for simulating in google cloud platform."""
os.environ['DATASTORE_EMULATOR_HOST'] = 'localhost:' + str(
DATASTORE_EMULATOR_PORT)
- os.environ['GOOGLE_CLOUD_PROJECT'] = TEST_PROJECT_ID
- os.environ['DATASTORE_DATASET'] = TEST_PROJECT_ID
- os.environ['GCP_PROJECT'] = TEST_PROJECT_ID
+ os.environ['GOOGLE_CLOUD_PROJECT'] = PROJECT
+ os.environ['DATASTORE_DATASET'] = PROJECT
+ os.environ['GCP_PROJECT'] = PROJECT
os.environ['FUNCTION_REGION'] = 'us-central1'
+
+
+def get_test_data_file_path(filename):
+ """Returns the path to a test data file with name |filename|."""
+ return os.path.join(os.path.dirname(__file__), 'test_data', filename)
diff --git a/infra/build/functions/update_build_status.py b/infra/build/functions/update_build_status.py
index af65a41ab..927216628 100644
--- a/infra/build/functions/update_build_status.py
+++ b/infra/build/functions/update_build_status.py
@@ -145,8 +145,8 @@ def get_build_history(build_ids):
}
if not upload_log(build_id):
- log_name = 'log-{0}'.format(build_id)
- raise MissingBuildLogError('Missing build log file {0}'.format(log_name))
+ log_name = f'log-{build_id}'
+ raise MissingBuildLogError(f'Missing build log file {log_name}')
history.append({
'build_id': build_id,
@@ -203,19 +203,15 @@ def update_build_badges(project, last_build_successful,
if not last_build_successful:
badge = 'failing'
- print("[badge] {}: {}".format(project, badge))
+ print(f'[badge] {project}: {badge}')
for extension in BADGE_IMAGE_TYPES:
- badge_name = '{badge}.{extension}'.format(badge=badge, extension=extension)
+ badge_name = f'{badge}.{extension}'
# Copy blob from badge_images/badge_name to badges/project/
- blob_name = '{badge_dir}/{badge_name}'.format(badge_dir=BADGE_DIR,
- badge_name=badge_name)
+ blob_name = f'{BADGE_DIR}/{badge_name}'
- destination_blob_name = '{badge_dir}/{project_name}.{extension}'.format(
- badge_dir=DESTINATION_BADGE_DIR,
- project_name=project,
- extension=extension)
+ destination_blob_name = f'{DESTINATION_BADGE_DIR}/{project}.{extension}'
status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
badge_blob = status_bucket.blob(blob_name)
@@ -228,12 +224,12 @@ def upload_log(build_id):
"""Upload log file to GCS."""
status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
gcb_bucket = get_storage_client().get_bucket(build_project.GCB_LOGS_BUCKET)
- log_name = 'log-{0}.txt'.format(build_id)
+ log_name = f'log-{build_id}.txt'
log = gcb_bucket.blob(log_name)
dest_log = status_bucket.blob(log_name)
if not log.exists():
- print('Failed to find build log {0}'.format(log_name), file=sys.stderr)
+ print('Failed to find build log', log_name, file=sys.stderr)
return False
if dest_log.exists():
@@ -258,10 +254,10 @@ def update_status(event, context):
return
if status_type == 'fuzzing':
- tag = build_project.FUZZING_BUILD_TAG
+ tag = build_project.FUZZING_BUILD_TYPE
status_filename = FUZZING_STATUS_FILENAME
elif status_type == 'coverage':
- tag = build_and_run_coverage.COVERAGE_BUILD_TAG
+ tag = build_and_run_coverage.COVERAGE_BUILD_TYPE
status_filename = COVERAGE_STATUS_FILENAME
else:
raise RuntimeError('Invalid build status type ' + status_type)
diff --git a/infra/build/functions/update_build_status_test.py b/infra/build/functions/update_build_status_test.py
index 6784fac2d..24a32f676 100644
--- a/infra/build/functions/update_build_status_test.py
+++ b/infra/build/functions/update_build_status_test.py
@@ -56,14 +56,14 @@ class MockGetBuild:
class TestGetBuildHistory(unittest.TestCase):
"""Unit tests for get_build_history."""
- def test_get_build_history(self, mocked_upload_log, mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history(self, mock_upload_log, mock_cloud_build,
+ mock_google_auth):
"""Test for get_build_steps."""
- del mocked_cloud_build, mocked_google_auth
- mocked_upload_log.return_value = True
+ del mock_cloud_build, mock_google_auth
+ mock_upload_log.return_value = True
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'SUCCESS'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
expected_projects = {
'history': [{
@@ -79,27 +79,26 @@ class TestGetBuildHistory(unittest.TestCase):
self.assertDictEqual(update_build_status.get_build_history(['1']),
expected_projects)
- def test_get_build_history_missing_log(self, mocked_upload_log,
- mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history_missing_log(self, mock_upload_log,
+ mock_cloud_build, mock_google_auth):
"""Test for missing build log file."""
- del mocked_cloud_build, mocked_google_auth
+ del mock_cloud_build, mock_google_auth
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'SUCCESS'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
- mocked_upload_log.return_value = False
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
+ mock_upload_log.return_value = False
self.assertRaises(update_build_status.MissingBuildLogError,
update_build_status.get_build_history, ['1'])
- def test_get_build_history_no_last_success(self, mocked_upload_log,
- mocked_cloud_build,
- mocked_google_auth):
+ def test_get_build_history_no_last_success(self, mock_upload_log,
+ mock_cloud_build,
+ mock_google_auth):
"""Test when there is no last successful build."""
- del mocked_cloud_build, mocked_google_auth
+ del mock_cloud_build, mock_google_auth
builds = [{'build_id': '1', 'finishTime': 'test_time', 'status': 'FAILURE'}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
- mocked_upload_log.return_value = True
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
+ mock_upload_log.return_value = True
expected_projects = {
'history': [{
@@ -229,12 +228,12 @@ class TestUpdateBuildStatus(unittest.TestCase):
@mock.patch('google.auth.default', return_value=['temp', 'temp'])
@mock.patch('update_build_status.build', return_value='cloudbuild')
@mock.patch('update_build_status.upload_log')
- def test_update_build_status(self, mocked_upload_log, mocked_cloud_build,
- mocked_google_auth):
+ def test_update_build_status(self, mock_upload_log, mock_cloud_build,
+ mock_google_auth):
"""Testing update build status as a whole."""
- del self, mocked_cloud_build, mocked_google_auth
+ del self, mock_cloud_build, mock_google_auth
update_build_status.upload_status = MagicMock()
- mocked_upload_log.return_value = True
+ mock_upload_log.return_value = True
status_filename = 'status.json'
with ndb.Client().context():
BuildsHistory(id='test-project-1-fuzzing',
@@ -264,8 +263,8 @@ class TestUpdateBuildStatus(unittest.TestCase):
'build_id': '3',
'status': 'WORKING'
}]
- mocked_get_build = MockGetBuild(builds)
- update_build_status.get_build = mocked_get_build.get_build
+ mock_get_build = MockGetBuild(builds)
+ update_build_status.get_build = mock_get_build.get_build
expected_data = {
'projects': [{