aboutsummaryrefslogtreecommitdiff
path: root/infra/build/functions/build_project.py
diff options
context:
space:
mode:
Diffstat (limited to 'infra/build/functions/build_project.py')
-rwxr-xr-x[-rw-r--r--]infra/build/functions/build_project.py725
1 files changed, 439 insertions, 286 deletions
diff --git a/infra/build/functions/build_project.py b/infra/build/functions/build_project.py
index 9115c85fd..bdc7985e1 100644..100755
--- a/infra/build/functions/build_project.py
+++ b/infra/build/functions/build_project.py
@@ -13,7 +13,7 @@
# limitations under the License.
#
################################################################################
-#!/usr/bin/python2
+#!/usr/bin/env python3
"""Starts project build on Google Cloud Builder.
Usage: build_project.py <project_dir>
@@ -21,37 +21,27 @@ Usage: build_project.py <project_dir>
from __future__ import print_function
+import argparse
+import collections
import datetime
import json
import logging
import os
+import posixpath
import re
import sys
+from googleapiclient.discovery import build as cloud_build
+import oauth2client.client
import six
import yaml
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
import build_lib
-FUZZING_BUILD_TAG = 'fuzzing'
+FUZZING_BUILD_TYPE = 'fuzzing'
GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
-CONFIGURATIONS = {
- 'sanitizer-address': ['SANITIZER=address'],
- 'sanitizer-dataflow': ['SANITIZER=dataflow'],
- 'sanitizer-memory': ['SANITIZER=memory'],
- 'sanitizer-undefined': ['SANITIZER=undefined'],
- 'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
- 'engine-afl': ['FUZZING_ENGINE=afl'],
- 'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
- 'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
- 'engine-none': ['FUZZING_ENGINE=none'],
-}
-
DEFAULT_ARCHITECTURES = ['x86_64']
DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
DEFAULT_SANITIZERS = ['address', 'undefined']
@@ -61,19 +51,100 @@ LATEST_VERSION_CONTENT_TYPE = 'text/plain'
QUEUE_TTL_SECONDS = 60 * 60 * 24 # 24 hours.
+PROJECTS_DIR = os.path.abspath(
+ os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
+ os.path.pardir, 'projects'))
+
+DEFAULT_GCB_OPTIONS = {'machineType': 'N1_HIGHCPU_32'}
+
+Config = collections.namedtuple(
+ 'Config', ['testing', 'test_image_suffix', 'branch', 'parallel'])
+
+WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
+
-def usage():
- """Exit with code 1 and display syntax to use this file."""
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- sys.exit(1)
class Build:  # pylint: disable=too-few-public-methods
  """A single build configuration: one engine/sanitizer/architecture combo."""

  def __init__(self, fuzzing_engine, sanitizer, architecture):
    self.fuzzing_engine = fuzzing_engine
    self.sanitizer = sanitizer
    self.architecture = architecture
    self.targets_list_filename = build_lib.get_targets_list_filename(
        self.sanitizer)

  @property
  def out(self):
    """Returns the out directory for the build."""
    subdir = '-'.join([self.fuzzing_engine, self.sanitizer, self.architecture])
    return posixpath.join('/workspace/out/', subdir)
+
+
def get_project_data(project_name):
  """Returns a (project_yaml_contents, dockerfile_contents) tuple for
  |project_name|. Raises FileNotFoundError if the project has no
  Dockerfile."""
  project_dir = os.path.join(PROJECTS_DIR, project_name)
  try:
    with open(os.path.join(project_dir, 'Dockerfile')) as file_handle:
      dockerfile_contents = file_handle.read()
  except FileNotFoundError:
    logging.error('Project "%s" does not have a dockerfile.', project_name)
    raise
  with open(os.path.join(project_dir, 'project.yaml'), 'r') as file_handle:
    project_yaml_contents = file_handle.read()
  return project_yaml_contents, dockerfile_contents
+
+
class Project:  # pylint: disable=too-many-instance-attributes
  """Class representing an OSS-Fuzz project.

  Attributes are populated from the parsed project.yaml (after defaults are
  applied by set_yaml_defaults) and from the project's Dockerfile.
  """

  def __init__(self, name, project_yaml_contents, dockerfile, image_project):
    project_yaml = yaml.safe_load(project_yaml_contents)
    self.name = name
    self.image_project = image_project
    # WORKDIR parsed from the Dockerfile; '/src' when none is declared.
    self.workdir = workdir_from_dockerfile(dockerfile)
    set_yaml_defaults(project_yaml)
    self._sanitizers = project_yaml['sanitizers']
    self.disabled = project_yaml['disabled']
    self.architectures = project_yaml['architectures']
    self.fuzzing_engines = project_yaml['fuzzing_engines']
    self.coverage_extra_args = project_yaml['coverage_extra_args']
    self.labels = project_yaml['labels']
    self.fuzzing_language = project_yaml['language']
    self.run_tests = project_yaml['run_tests']

  @property
  def sanitizers(self):
    """Returns processed sanitizers.

    project.yaml entries are either plain strings ('address') or single-key
    dicts with per-sanitizer options; only the names are returned here.
    """
    assert isinstance(self._sanitizers, list)
    processed_sanitizers = []
    for sanitizer in self._sanitizers:
      # The file's shebang is python3, so plain `str` replaces the old
      # Python 2 compatible `six.string_types` check.
      if isinstance(sanitizer, str):
        processed_sanitizers.append(sanitizer)
      elif isinstance(sanitizer, dict):
        processed_sanitizers.extend(sanitizer.keys())

    return processed_sanitizers

  @property
  def image(self):
    """Returns the docker image for the project."""
    return f'gcr.io/{self.image_project}/{self.name}'
+
+
def get_last_step_id(steps):
  """Returns the id of the last step in |steps|."""
  last_step = steps[-1]
  return last_step['id']
+
+
+def set_yaml_defaults(project_yaml):
+ """Sets project.yaml's default parameters."""
project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image',
- 'gcr.io/{0}/{1}'.format(image_project, project_name))
project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
@@ -82,291 +153,310 @@ def set_yaml_defaults(project_name, project_yaml, image_project):
project_yaml.setdefault('labels', {})
def is_supported_configuration(build):
  """Returns True when |build|'s engine supports its sanitizer and
  architecture. i386 is only supported with the address sanitizer."""
  # Look up engine info first so an unknown engine still raises KeyError.
  engine_info = build_lib.ENGINE_INFO[build.fuzzing_engine]
  if build.architecture == 'i386' and build.sanitizer != 'address':
    return False
  sanitizer_ok = build.sanitizer in engine_info.supported_sanitizers
  architecture_ok = build.architecture in engine_info.supported_architectures
  return sanitizer_ok and architecture_ok
-def get_sanitizers(project_yaml):
- """Retrieve sanitizers from project.yaml."""
- sanitizers = project_yaml['sanitizers']
- assert isinstance(sanitizers, list)
- processed_sanitizers = []
- for sanitizer in sanitizers:
- if isinstance(sanitizer, six.string_types):
- processed_sanitizers.append(sanitizer)
- elif isinstance(sanitizer, dict):
- for key in sanitizer.keys():
- processed_sanitizers.append(key)
-
- return processed_sanitizers
-
-
def workdir_from_dockerfile(dockerfile):
  """Parses the WORKDIR directive from |dockerfile| contents. Returns '/src'
  when no WORKDIR directive is present."""
  for line in dockerfile.split('\n'):
    match = WORKDIR_REGEX.match(line)
    if not match:
      continue
    # Escape '$' since it is used for substitutions in Container Builder
    # builds.
    return match.group(1).replace('$', '$$')
  # NOTE(review): only the first WORKDIR directive is honored — presumably
  # project Dockerfiles declare at most one; confirm before relying on this.
  return '/src'
-def load_project_yaml(project_name, project_yaml_file, image_project):
- """Loads project yaml and sets default values."""
- project_yaml = yaml.safe_load(project_yaml_file)
- set_yaml_defaults(project_name, project_yaml, image_project)
- return project_yaml
def get_datetime_now():
  """Returns the current datetime. Wrapped in a function so tests can mock
  it."""
  return datetime.datetime.now()
-# pylint: disable=too-many-locals, too-many-statements, too-many-branches
-def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project):
- """Returns build steps for project."""
- project_yaml = load_project_yaml(project_name, project_yaml_file,
- image_project)
-
- if project_yaml['disabled']:
- logging.info('Project "%s" is disabled.', project_name)
- return []
-
- name = project_yaml['name']
- image = project_yaml['image']
- language = project_yaml['language']
- run_tests = project_yaml['run_tests']
- time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')
-
- build_steps = build_lib.project_image_steps(name, image, language)
- # Copy over MSan instrumented libraries.
- build_steps.append({
- 'name': 'gcr.io/{0}/msan-libs-builder'.format(base_images_project),
def get_env(fuzzing_language, build):
  """Returns the build environment as a sorted list of 'KEY=value' strings,
  suitable as the "env" field of a GCB build step. Values are derived from
  |fuzzing_language| and |build|."""
  env_dict = {
      'FUZZING_LANGUAGE': fuzzing_language,
      'FUZZING_ENGINE': build.fuzzing_engine,
      'SANITIZER': build.sanitizer,
      'ARCHITECTURE': build.architecture,
      # Set HOME so that it doesn't point to a persisted volume (see
      # https://github.com/google/oss-fuzz/issues/6035).
      'HOME': '/root',
      'OUT': build.out,
  }
  return sorted(f'{key}={value}' for key, value in env_dict.items())
+
+
def get_compile_step(project, build, env, parallel):
  """Returns the GCB step for compiling |project|'s fuzzers using |env|. The
  type of build is specified by |build|. When |parallel|, the step waits only
  on the srcmap step instead of all prior steps."""
  failure_msg = (
      '*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
      f'python infra/helper.py build_image {project.name}\n'
      'python infra/helper.py build_fuzzers --sanitizer '
      f'{build.sanitizer} --engine {build.fuzzing_engine} --architecture '
      f'{build.architecture} {project.name}\n' + '*' * 80)
  compile_step = {
      'name': project.image,
      'env': env,
      'args': [
          'bash',
          '-c',
          # Remove /out to make sure there are non instrumented binaries.
          # `cd /src && cd {workdir}` (where {workdir} is parsed from the
          # Dockerfile). Container Builder overrides our workdir so we need
          # to add this step to set it back.
          (f'rm -r /out && cd /src && cd {project.workdir} && '
           f'mkdir -p {build.out} && compile || '
           f'(echo "{failure_msg}" && false)'),
      ],
      'id': get_id('compile', build),
  }
  # maybe_add_parallel is already a no-op when |parallel| is falsy, so the
  # previous `if parallel:` guard around this call was redundant.
  maybe_add_parallel(compile_step, build_lib.get_srcmap_step_id(), parallel)
  return compile_step
- for fuzzing_engine in project_yaml['fuzzing_engines']:
- for sanitizer in get_sanitizers(project_yaml):
- for architecture in project_yaml['architectures']:
- if not is_supported_configuration(fuzzing_engine, sanitizer,
- architecture):
- continue
- env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
- env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
- out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, time_stamp])
- latest_version_file = '-'.join(
- [name, sanitizer, LATEST_VERSION_FILENAME])
- zip_file = stamped_name + '.zip'
- stamped_srcmap_file = stamped_name + '.srcmap.json'
- bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
- if architecture != 'x86_64':
- bucket += '-' + architecture
-
- upload_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
- srcmap_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
- stamped_srcmap_file))
- latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
- bucket, name, latest_version_file)
- latest_version_url = build_lib.get_signed_url(
- latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)
-
- targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
- targets_list_url = build_lib.get_signed_url(
- build_lib.get_targets_list_url(bucket, name, sanitizer))
-
- env.append('OUT=' + out)
- env.append('MSAN_LIBS_PATH=/workspace/msan')
- env.append('ARCHITECTURE=' + architecture)
- env.append('FUZZING_LANGUAGE=' + language)
-
- workdir = workdir_from_dockerfile(dockerfile_lines)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # compile
- {
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to break loudly when a build script
- # incorrectly uses /out instead of $OUT.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from
- # the Dockerfile). Container Builder overrides our workdir
- # so we need to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
- '&& compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
def maybe_add_parallel(step, wait_for_id, parallel):
  """Mutates |step| so it runs immediately after |wait_for_id| when |parallel|
  is truthy; otherwise leaves |step| untouched."""
  if parallel:
    step['waitFor'] = wait_for_id
- if sanitizer == 'memory':
- # Patch dynamic libraries to use instrumented ones.
- build_steps.append({
+
def get_id(step_type, build):
  """Returns a unique step id based on |step_type| and |build|. Useful for
  parallelizing builds."""
  parts = [
      step_type, build.fuzzing_engine, build.sanitizer, build.architecture
  ]
  return '-'.join(parts)
+
+
def get_build_steps(  # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-arguments
    project_name, project_yaml_contents, dockerfile, image_project,
    base_images_project, config):
  """Returns build steps for project.

  Args:
    project_name: name of the OSS-Fuzz project.
    project_yaml_contents: raw contents of the project's project.yaml.
    dockerfile: raw contents of the project's Dockerfile.
    image_project: cloud project hosting the per-project builder images.
    base_images_project: cloud project hosting base-runner/uploader images.
    config: a Config tuple (testing, test_image_suffix, branch, parallel).

  Returns:
    A list of GCB step dicts, or [] when the project is disabled.
  """

  project = Project(project_name, project_yaml_contents, dockerfile,
                    image_project)

  if project.disabled:
    logging.info('Project "%s" is disabled.', project.name)
    return []

  # Baked into uploaded artifact names by get_upload_steps.
  timestamp = get_datetime_now().strftime('%Y%m%d%H%M')

  build_steps = build_lib.project_image_steps(
      project.name,
      project.image,
      project.fuzzing_language,
      branch=config.branch,
      test_image_suffix=config.test_image_suffix)

  # Sort engines to make AFL first to test if libFuzzer has an advantage in
  # finding bugs first since it is generally built first.
  for fuzzing_engine in sorted(project.fuzzing_engines):
    for sanitizer in project.sanitizers:
      for architecture in project.architectures:
        build = Build(fuzzing_engine, sanitizer, architecture)
        if not is_supported_configuration(build):
          continue

        env = get_env(project.fuzzing_language, build)
        compile_step = get_compile_step(project, build, env, config.parallel)
        build_steps.append(compile_step)

        if project.run_tests:
          failure_msg = (
              '*' * 80 + '\nBuild checks failed.\n'
              'To reproduce, run:\n'
              f'python infra/helper.py build_image {project.name}\n'
              'python infra/helper.py build_fuzzers --sanitizer '
              f'{build.sanitizer} --engine {build.fuzzing_engine} '
              f'--architecture {build.architecture} {project.name}\n'
              'python infra/helper.py check_build --sanitizer '
              f'{build.sanitizer} --engine {build.fuzzing_engine} '
              f'--architecture {build.architecture} {project.name}\n' +
              '*' * 80)
          # Test fuzz targets.
          test_step = {
              'name':
                  get_runner_image_name(base_images_project,
                                        config.test_image_suffix),
              'env':
                  env,
              'args': [
                  'bash', '-c',
                  f'test_all.py || (echo "{failure_msg}" && false)'
              ],
              'id':
                  get_id('build-check', build)
          }
          # In parallel mode, only wait on this configuration's compile step
          # (the step appended last) rather than every prior step.
          maybe_add_parallel(test_step, get_last_step_id(build_steps),
                             config.parallel)
          build_steps.append(test_step)

        if project.labels:
          # Write target labels.
          build_steps.append({
              'name':
                  project.image,
              'env':
                  env,
              'args': [
                  '/usr/local/bin/write_labels.py',
                  json.dumps(project.labels),
                  build.out,
              ],
          })

        if build.sanitizer == 'dataflow' and build.fuzzing_engine == 'dataflow':
          dataflow_steps = dataflow_post_build_steps(project.name, env,
                                                     base_images_project,
                                                     config.testing,
                                                     config.test_image_suffix)
          if dataflow_steps:
            build_steps.extend(dataflow_steps)
          else:
            # Best-effort: corpora download steps may be unavailable.
            sys.stderr.write('Skipping dataflow post build steps.\n')

        build_steps.extend([
            # Generate targets list.
            {
                'name':
                    get_runner_image_name(base_images_project,
                                          config.test_image_suffix),
                'env':
                    env,
                'args': [
                    'bash', '-c',
                    f'targets_list > /workspace/{build.targets_list_filename}'
                ],
            }
        ])
        upload_steps = get_upload_steps(project, build, timestamp,
                                        base_images_project, config.testing)
        build_steps.extend(upload_steps)

  return build_steps
-def dataflow_post_build_steps(project_name, env, base_images_project):
def get_targets_list_upload_step(bucket, project, build, uploader_image):
  """Returns the step to upload targets_list for |build| of |project| to
  |bucket|."""
  signed_url = build_lib.get_signed_url(
      build_lib.get_targets_list_url(bucket, project.name, build.sanitizer))
  step = {
      'name': uploader_image,
      'args': [
          f'/workspace/{build.targets_list_filename}',
          signed_url,
      ],
  }
  return step
+
+
def get_uploader_image(base_images_project):
  """Returns the uploader base image in |base_images_project|."""
  return 'gcr.io/' + base_images_project + '/uploader'
+
+
def get_upload_steps(project, build, timestamp, base_images_project, testing):
  """Returns the steps that zip and upload the fuzzer build described by
  |project| and |build|. |timestamp| stamps the uploaded file names;
  |base_images_project| and |testing| select the uploader image and bucket."""
  bucket = build_lib.get_upload_bucket(build.fuzzing_engine, build.architecture,
                                       testing)
  uploader_image = get_uploader_image(base_images_project)

  # Names of everything this build uploads.
  stamped_name = '-'.join([project.name, build.sanitizer, timestamp])
  zip_file = stamped_name + '.zip'
  upload_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name, zip_file))
  stamped_srcmap_file = stamped_name + '.srcmap.json'
  srcmap_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
                                             stamped_srcmap_file))
  latest_version_file = '-'.join(
      [project.name, build.sanitizer, LATEST_VERSION_FILENAME])
  latest_version_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
                                             latest_version_file),
      content_type=LATEST_VERSION_CONTENT_TYPE)

  return [
      # Zip binaries.
      {
          'name': project.image,
          'args': ['bash', '-c', f'cd {build.out} && zip -r {zip_file} *'],
      },
      # Upload srcmap.
      {
          'name': uploader_image,
          'args': [
              '/workspace/srcmap.json',
              srcmap_url,
          ],
      },
      # Upload binaries.
      {
          'name': uploader_image,
          'args': [
              os.path.join(build.out, zip_file),
              upload_url,
          ],
      },
      # Upload targets list.
      get_targets_list_upload_step(bucket, project, build, uploader_image),
      # Upload the latest.version file.
      build_lib.http_upload_step(zip_file, latest_version_url,
                                 LATEST_VERSION_CONTENT_TYPE),
      # Cleanup.
      get_cleanup_step(project, build),
  ]
+
+
def get_cleanup_step(project, build):
  """Returns the step that removes |build|'s out directory after doing |build|
  of |project|."""
  return {
      'name': project.image,
      'args': ['bash', '-c', 'rm -r ' + build.out],
  }
+
+
def get_runner_image_name(base_images_project, test_image_suffix):
  """Returns the base-runner image in |base_images_project|, using the testing
  variant when |test_image_suffix| is set."""
  name = f'gcr.io/{base_images_project}/base-runner'
  if not test_image_suffix:
    return name
  return f'{name}-{test_image_suffix}'
+
+
+def dataflow_post_build_steps(project_name, env, base_images_project, testing,
+ test_image_suffix):
"""Appends dataflow post build steps."""
- steps = build_lib.download_corpora_steps(project_name)
+ steps = build_lib.download_corpora_steps(project_name, testing)
if not steps:
return None
steps.append({
'name':
- 'gcr.io/{0}/base-runner'.format(base_images_project),
+ get_runner_image_name(base_images_project, test_image_suffix),
'env':
env + [
'COLLECT_DFT_TIMEOUT=2h',
@@ -387,63 +477,126 @@ def dataflow_post_build_steps(project_name, env, base_images_project):
return steps
def get_logs_url(build_id, cloud_project='oss-fuzz'):
  """Returns the Cloud Logging viewer url for |build_id| in |cloud_project|."""
  base = 'https://console.cloud.google.com/logs/viewer?'
  return base + f'resource=build%2Fbuild_id%2F{build_id}&project={cloud_project}'
+
+
def get_gcb_url(build_id, cloud_project='oss-fuzz'):
  """Returns the Cloud Build console page url for |build_id| in
  |cloud_project|."""
  return ('https://console.cloud.google.com/cloud-build/builds/'
          f'{build_id}?project={cloud_project}')
# pylint: disable=no-member
def run_build(oss_fuzz_project,
              build_steps,
              credentials,
              build_type,
              cloud_project='oss-fuzz'):
  """Run the build for given steps on cloud build. |build_steps| are the steps
  to run. |credentials| are used to authenticate to GCB and build in
  |cloud_project|. |oss_fuzz_project| and |build_type| are used to tag the build
  in GCB so the build can be queried for debugging purposes. Returns the GCB
  build id."""
  # The previous `options = {}` initializer was dead: both branches below
  # assign options unconditionally.
  if 'GCB_OPTIONS' in os.environ:
    options = yaml.safe_load(os.environ['GCB_OPTIONS'])
  else:
    options = DEFAULT_GCB_OPTIONS

  tags = [oss_fuzz_project + '-' + build_type, build_type, oss_fuzz_project]
  build_body = {
      'steps': build_steps,
      'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
      'options': options,
      'logsBucket': GCB_LOGS_BUCKET,
      'tags': tags,
      'queueTtl': str(QUEUE_TTL_SECONDS) + 's',
  }

  cloudbuild = cloud_build('cloudbuild',
                           'v1',
                           credentials=credentials,
                           cache_discovery=False)
  build_info = cloudbuild.projects().builds().create(projectId=cloud_project,
                                                     body=build_body).execute()
  build_id = build_info['metadata']['build']['id']

  logging.info('Build ID: %s', build_id)
  logging.info('Logs: %s', get_logs_url(build_id, cloud_project))
  logging.info('Cloud build page: %s', get_gcb_url(build_id, cloud_project))
  return build_id
+
+
def get_args(description):
  """Parses and returns command line arguments for a build script, using
  |description| as the parser's help description."""
  parser = argparse.ArgumentParser(sys.argv[0], description=description)
  parser.add_argument('projects', help='Projects.', nargs='+')
  # Optional flags, declared in a table to keep their definitions uniform.
  optional_flags = [
      ('--testing',
       dict(action='store_true',
            required=False,
            default=False,
            help='Upload to testing buckets.')),
      ('--test-image-suffix',
       dict(required=False, default=None, help='Use testing base-images.')),
      ('--branch',
       dict(required=False, default=None, help='Use specified OSS-Fuzz branch.')),
      ('--parallel',
       dict(action='store_true',
            required=False,
            default=False,
            help='Do builds in parallel.')),
  ]
  for flag, kwargs in optional_flags:
    parser.add_argument(flag, **kwargs)
  return parser.parse_args()
+
+
def build_script_main(script_description, get_build_steps_func, build_type):
  """Gets arguments from command line using |script_description| as helpstring
  description. Gets build_steps using |get_build_steps_func| and then runs those
  steps on GCB, tagging the builds with |build_type|. Returns 0 on success, 1 on
  failure."""
  args = get_args(script_description)
  logging.basicConfig(level=logging.INFO)
  image_project = 'oss-fuzz'
  base_images_project = 'oss-fuzz-base'
  # Application-default credentials are shared by every build started below.
  credentials = oauth2client.client.GoogleCredentials.get_application_default()
  error = False
  config = Config(args.testing, args.test_image_suffix, args.branch,
                  args.parallel)
  for project_name in args.projects:
    logging.info('Getting steps for: "%s".', project_name)
    try:
      project_yaml_contents, dockerfile_contents = get_project_data(
          project_name)
    except FileNotFoundError:
      # Missing Dockerfile: record the failure but keep building the rest.
      logging.error('Couldn\'t get project data. Skipping %s.', project_name)
      error = True
      continue

    steps = get_build_steps_func(project_name, project_yaml_contents,
                                 dockerfile_contents, image_project,
                                 base_images_project, config)
    if not steps:
      # get_build_steps_func returns [] for disabled projects.
      logging.error('No steps. Skipping %s.', project_name)
      error = True
      continue

    run_build(project_name, steps, credentials, build_type)
  return 0 if not error else 1
- with open(project_yaml_path) as project_yaml_file:
- steps = get_build_steps(project_name, project_yaml_file, dockerfile_lines,
- image_project, base_images_project)
- run_build(steps, project_name, FUZZING_BUILD_TAG)
def main():
  """Script entry point: builds and runs the requested projects on GCB."""
  return build_script_main('Builds a project on GCB.', get_build_steps,
                           FUZZING_BUILD_TYPE)
# Propagate main()'s return value (0 success / 1 failure) as the exit code.
if __name__ == '__main__':
  sys.exit(main())