Diffstat (limited to 'infra/gcb')
-rw-r--r--  infra/gcb/.gitignore | 1
-rw-r--r--  infra/gcb/badge_images/building.png | bin 3020 -> 0 bytes
-rw-r--r--  infra/gcb/badge_images/building.svg | 1
-rw-r--r--  infra/gcb/badge_images/coverage_failing.png | bin 4160 -> 0 bytes
-rw-r--r--  infra/gcb/badge_images/coverage_failing.svg | 1
-rw-r--r--  infra/gcb/badge_images/failing.png | bin 3465 -> 0 bytes
-rw-r--r--  infra/gcb/badge_images/failing.svg | 1
-rw-r--r--  infra/gcb/build_and_run_coverage.py | 282
-rwxr-xr-x  infra/gcb/build_base_images.py | 79
-rw-r--r--  infra/gcb/build_lib.py | 134
-rwxr-xr-x  infra/gcb/build_msan_libs.py | 66
-rw-r--r--  infra/gcb/build_project.py | 405
-rwxr-xr-x  infra/gcb/builds_status.py | 243
-rwxr-xr-x  infra/gcb/cancel.py | 40
-rw-r--r--  infra/gcb/jenkins_config/base_job.xml | 62
-rw-r--r--  infra/gcb/jenkins_config/coverage_job.xml | 60
-rw-r--r--  infra/gcb/requirements.txt | 35
-rwxr-xr-x  infra/gcb/sync.py | 106
-rw-r--r--  infra/gcb/templates/bower.json | 20
-rwxr-xr-x  infra/gcb/templates/deploy.sh | 3
-rw-r--r--  infra/gcb/templates/index.html | 28
-rw-r--r--  infra/gcb/templates/manifest.json | 6
-rw-r--r--  infra/gcb/templates/polymer.json | 7
-rw-r--r--  infra/gcb/templates/src/build-status/build-status.html | 223
-rwxr-xr-x  infra/gcb/wait_for_build.py | 68
25 files changed, 0 insertions, 1871 deletions
diff --git a/infra/gcb/.gitignore b/infra/gcb/.gitignore
deleted file mode 100644
index 4d3dae74c..000000000
--- a/infra/gcb/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-default/
diff --git a/infra/gcb/badge_images/building.png b/infra/gcb/badge_images/building.png
deleted file mode 100644
index 9e7e73447..000000000
--- a/infra/gcb/badge_images/building.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/building.svg b/infra/gcb/badge_images/building.svg
deleted file mode 100644
index 19f28d7fb..000000000
--- a/infra/gcb/badge_images/building.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="104" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="104" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#4c1" d="M55 0h49v20H55z"/><path fill="url(#b)" d="M0 0h104v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="785" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="390">fuzzing</text><text x="785" y="140" transform="scale(.1)" textLength="390">fuzzing</text></g> </svg>
\ No newline at end of file
diff --git a/infra/gcb/badge_images/coverage_failing.png b/infra/gcb/badge_images/coverage_failing.png
deleted file mode 100644
index 85abe2352..000000000
--- a/infra/gcb/badge_images/coverage_failing.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/coverage_failing.svg b/infra/gcb/badge_images/coverage_failing.svg
deleted file mode 100644
index dc7b72e39..000000000
--- a/infra/gcb/badge_images/coverage_failing.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="152" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="152" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#dfb317" d="M55 0h97v20H55z"/><path fill="url(#b)" d="M0 0h152v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="1025" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="870">coverage failing</text><text x="1025" y="140" transform="scale(.1)" textLength="870">coverage failing</text></g> </svg>
\ No newline at end of file
diff --git a/infra/gcb/badge_images/failing.png b/infra/gcb/badge_images/failing.png
deleted file mode 100644
index 0d2bb4701..000000000
--- a/infra/gcb/badge_images/failing.png
+++ /dev/null
Binary files differ
diff --git a/infra/gcb/badge_images/failing.svg b/infra/gcb/badge_images/failing.svg
deleted file mode 100644
index ed0f8621e..000000000
--- a/infra/gcb/badge_images/failing.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="a"><rect width="128" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#a)"><path fill="#555" d="M0 0h55v20H0z"/><path fill="#e05d44" d="M55 0h73v20H55z"/><path fill="url(#b)" d="M0 0h128v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"> <text x="285" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="285" y="140" transform="scale(.1)" textLength="450">oss-fuzz</text><text x="905" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="630">build failing</text><text x="905" y="140" transform="scale(.1)" textLength="630">build failing</text></g> </svg>
\ No newline at end of file
diff --git a/infra/gcb/build_and_run_coverage.py b/infra/gcb/build_and_run_coverage.py
deleted file mode 100644
index b94fe3558..000000000
--- a/infra/gcb/build_and_run_coverage.py
+++ /dev/null
@@ -1,282 +0,0 @@
-#!/usr/bin/python2
-"""Starts and runs coverage build on Google Cloud Builder.
-
-Usage: build_and_run_coverage.py <project_dir>
-"""
-
-import datetime
-import json
-import os
-import requests
-import sys
-import urlparse
-
-import build_lib
-import build_project
-
-SANITIZER = 'coverage'
-CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
-PLATFORM = 'linux'
-
-COVERAGE_BUILD_TAG = 'coverage'
-
-# Where code coverage reports need to be uploaded to.
-COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'
-
-# Link to the code coverage report in HTML format.
-HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
- '/{project}/reports/{date}/{platform}/index.html')
-
-# This is needed for ClusterFuzz to pick up the most recent reports data.
-LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
- '/latest_report_info/{project}.json')
-
-# Link where to upload code coverage report files to.
-UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'
-
-# Languages from project.yaml that have code coverage support.
-LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'cpp']
-
-
-def skip_build(message):
- """Exit with 0 code not to mark code coverage job as failed."""
- sys.stderr.write('%s\n' % message)
-
- # Since the script should print build_id, print '0' as a special value.
- print '0'
- exit(0)
-
-
-def usage():
- sys.stderr.write("Usage: " + sys.argv[0] + " <project_dir>\n")
- exit(1)
-
-
-def get_build_steps(project_dir):
- project_name = os.path.basename(project_dir)
- project_yaml = build_project.load_project_yaml(project_dir)
- if project_yaml['disabled']:
- skip_build('Project "%s" is disabled.' % project_name)
-
- build_script_path = os.path.join(project_dir, 'build.sh')
- if os.path.exists(build_script_path):
- with open(build_script_path) as fh:
- if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
- skip_build(('Project "{project_name}" is written in "{language}", '
- 'coverage is not supported yet.').format(
- project_name=project_name,
- language=project_yaml['language']))
-
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- name = project_yaml['name']
- image = project_yaml['image']
- report_date = datetime.datetime.now().strftime('%Y%m%d')
-
- build_steps = [
- {
- 'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
- ],
- 'name': 'gcr.io/cloud-builders/git',
- },
- {
- 'name': 'gcr.io/cloud-builders/docker',
- 'args': [
- 'build',
- '-t',
- image,
- '.',
- ],
- 'dir': 'oss-fuzz/projects/' + name,
- },
- {
- 'name': image,
- 'args': [
- 'bash', '-c',
- 'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
- ],
- 'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
- },
- ]
-
- env = CONFIGURATION[:]
- out = '/workspace/out/' + SANITIZER
- env.append('OUT=' + out)
-
- workdir = build_project.workdir_from_dockerfile(dockerfile_path)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n' + '*' * 80).format(name=name)
-
- # Compilation step.
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to make sure there are non instrumented binaries.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from the
- # Dockerfile). Container Builder overrides our workdir so we need
- # to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
- 'compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- download_corpora_step = build_lib.download_corpora_step(project_name)
- if not download_corpora_step:
- skip_build("Skipping code coverage build for %s.\n" % project_name)
-
- build_steps.append(download_corpora_step)
-
- failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer coverage '
- '{name}\n'
- 'python infra/helper.py coverage {name}\n' +
- '*' * 80).format(name=name)
-
- # Unpack the corpus and run coverage script.
- build_steps.append({
- 'name':
- 'gcr.io/oss-fuzz-base/base-runner',
- 'env':
- env + [
- 'HTTP_PORT=',
- 'COVERAGE_EXTRA_ARGS=%s' %
- project_yaml['coverage_extra_args'].strip()
- ],
- 'args': [
- 'bash', '-c',
- ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
- 'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
- 'This usually means that corpus backup for a particular fuzz '
- 'target does not exist. If a fuzz target was added in the last '
- '24 hours, please wait one more day. Otherwise, something is '
- 'wrong with the fuzz target or the infrastructure, and corpus '
- 'pruning task does not finish successfully." && exit 1'
- '); done && coverage || (echo "' + failure_msg + '" && false)')
- ],
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- })
-
- # Upload the report.
- upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='reports',
- date=report_date)
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'report'),
- upload_report_url,
- ],
- })
-
- # Upload the fuzzer stats.
- upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='fuzzer_stats',
- date=report_date)
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'fuzzer_stats'),
- upload_fuzzer_stats_url,
- ],
- })
-
- # Upload the fuzzer logs.
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- '-m',
- 'cp',
- '-r',
- os.path.join(out, 'logs'),
- UPLOAD_URL_FORMAT.format(project=project_name,
- type='logs',
- date=report_date),
- ],
- })
-
- # Upload srcmap.
- srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
- type='srcmap',
- date=report_date)
- srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
- build_steps.append({
- 'name': 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/srcmap.json',
- srcmap_upload_url,
- ],
- })
-
- # Update the latest report information file for ClusterFuzz.
- latest_report_info_url = build_lib.get_signed_url(
- LATEST_REPORT_INFO_URL.format(project=project_name),
- method='PUT',
- content_type='application/json')
- latest_report_info_body = json.dumps({
- 'fuzzer_stats_dir':
- upload_fuzzer_stats_url,
- 'html_report_url':
- HTML_REPORT_URL_FORMAT.format(project=project_name,
- date=report_date,
- platform=PLATFORM),
- 'report_date':
- report_date,
- 'report_summary_path':
- os.path.join(upload_report_url, PLATFORM, 'summary.json'),
- })
-
- build_steps.append({
- 'name':
- 'gcr.io/cloud-builders/curl',
- 'args': [
- '-H',
- 'Content-Type: application/json',
- '-X',
- 'PUT',
- '-d',
- latest_report_info_body,
- latest_report_info_url,
- ],
- })
- return build_steps
-
-
-def main():
- if len(sys.argv) != 2:
- usage()
-
- project_dir = sys.argv[1].rstrip(os.path.sep)
- project_name = os.path.basename(project_dir)
- steps = get_build_steps(project_dir)
- build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)
-
-
-if __name__ == "__main__":
- main()
diff --git a/infra/gcb/build_base_images.py b/infra/gcb/build_base_images.py
deleted file mode 100755
index 3c2bace2e..000000000
--- a/infra/gcb/build_base_images.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/python2
-"""Build base images on Google Cloud Builder.
-
-Usage: build_base_images.py
-"""
-
-import os
-import sys
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-BASE_IMAGES = [
- 'base-image',
- 'base-clang',
- 'base-builder',
- 'base-runner',
- 'base-runner-debug',
- 'base-msan-builder',
-]
-
-TAG_PREFIX = 'gcr.io/oss-fuzz-base/'
-
-
-def get_steps(images):
- steps = [{
- 'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
- ],
- 'name': 'gcr.io/cloud-builders/git',
- }]
-
- for base_image in images:
- steps.append({
- 'args': [
- 'build',
- '-t',
- TAG_PREFIX + base_image,
- '.',
- ],
- 'dir': 'oss-fuzz/infra/base-images/' + base_image,
- 'name': 'gcr.io/cloud-builders/docker',
- })
-
- return steps
-
-
-def get_logs_url(build_id):
- URL_FORMAT = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project=oss-fuzz-base')
- return URL_FORMAT.format(build_id)
-
-
-def main():
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- build_body = {
- 'steps': get_steps(BASE_IMAGES),
- 'timeout': str(4 * 3600) + 's',
- 'options': options,
- 'images': [TAG_PREFIX + base_image for base_image in BASE_IMAGES],
- }
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- build_info = cloudbuild.projects().builds().create(
- projectId='oss-fuzz-base', body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print >> sys.stderr, 'Logs:', get_logs_url(build_id)
- print build_id
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/build_lib.py b/infra/gcb/build_lib.py
deleted file mode 100644
index d3508730c..000000000
--- a/infra/gcb/build_lib.py
+++ /dev/null
@@ -1,134 +0,0 @@
-"""Utility module for Google Cloud Build scripts."""
-import base64
-import collections
-import os
-import requests
-import sys
-import time
-import urllib
-import urlparse
-
-from oauth2client.service_account import ServiceAccountCredentials
-
-BUILD_TIMEOUT = 12 * 60 * 60
-
-# Needed for reading public target.list.* files.
-GCS_URL_BASENAME = 'https://storage.googleapis.com/'
-
-GCS_UPLOAD_URL_FORMAT = '/{0}/{1}/{2}'
-
-# Where corpus backups can be downloaded from.
-CORPUS_BACKUP_URL = ('/{project}-backup.clusterfuzz-external.appspot.com/'
- 'corpus/libFuzzer/{fuzzer}/latest.zip')
-
-# Cloud Builder has a limit of 100 build steps and 100 arguments for each step.
-CORPUS_DOWNLOAD_BATCH_SIZE = 100
-
-TARGETS_LIST_BASENAME = 'targets.list'
-
-EngineInfo = collections.namedtuple(
- 'EngineInfo',
- ['upload_bucket', 'supported_sanitizers', 'supported_architectures'])
-
-ENGINE_INFO = {
- 'libfuzzer':
- EngineInfo(upload_bucket='clusterfuzz-builds',
- supported_sanitizers=['address', 'memory', 'undefined'],
- supported_architectures=['x86_64', 'i386']),
- 'afl':
- EngineInfo(upload_bucket='clusterfuzz-builds-afl',
- supported_sanitizers=['address'],
- supported_architectures=['x86_64']),
- 'honggfuzz':
- EngineInfo(upload_bucket='clusterfuzz-builds-honggfuzz',
- supported_sanitizers=['address', 'memory', 'undefined'],
- supported_architectures=['x86_64']),
- 'dataflow':
- EngineInfo(upload_bucket='clusterfuzz-builds-dataflow',
- supported_sanitizers=['dataflow'],
- supported_architectures=['x86_64']),
- 'none':
- EngineInfo(upload_bucket='clusterfuzz-builds-no-engine',
- supported_sanitizers=['address'],
- supported_architectures=['x86_64']),
-}
-
-
-def get_targets_list_filename(sanitizer):
- return TARGETS_LIST_BASENAME + '.' + sanitizer
-
-
-def get_targets_list_url(bucket, project, sanitizer):
- filename = get_targets_list_filename(sanitizer)
- url = GCS_UPLOAD_URL_FORMAT.format(bucket, project, filename)
- return url
-
-
-def _get_targets_list(project_name):
- # libFuzzer ASan is the default configuration, get list of targets from it.
- url = get_targets_list_url(ENGINE_INFO['libfuzzer'].upload_bucket,
- project_name, 'address')
-
- url = urlparse.urljoin(GCS_URL_BASENAME, url)
- response = requests.get(url)
- if not response.status_code == 200:
- sys.stderr.write('Failed to get list of targets from "%s".\n' % url)
- sys.stderr.write('Status code: %d \t\tText:\n%s\n' %
- (response.status_code, response.text))
- return None
-
- return response.text.split()
-
-
-def get_signed_url(path, method='PUT', content_type=''):
- timestamp = int(time.time() + BUILD_TIMEOUT)
- blob = '{0}\n\n{1}\n{2}\n{3}'.format(method, content_type, timestamp, path)
-
- creds = ServiceAccountCredentials.from_json_keyfile_name(
- os.environ['GOOGLE_APPLICATION_CREDENTIALS'])
- client_id = creds.service_account_email
- signature = base64.b64encode(creds.sign_blob(blob)[1])
- values = {
- 'GoogleAccessId': client_id,
- 'Expires': timestamp,
- 'Signature': signature,
- }
-
- return ('https://storage.googleapis.com{0}?'.format(path) +
- urllib.urlencode(values))
-
-
-def download_corpora_step(project_name):
- """Returns a GCB step for downloading corpora backups for the given project.
- """
- fuzz_targets = _get_targets_list(project_name)
- if not fuzz_targets:
- sys.stderr.write('No fuzz targets found for project "%s".\n' % project_name)
- return None
-
- # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
- for i in range(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
- download_corpus_args = []
- for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
- qualified_name = binary_name
- qualified_name_prefix = '%s_' % project_name
- if not binary_name.startswith(qualified_name_prefix):
- qualified_name = qualified_name_prefix + binary_name
-
- url = get_signed_url(CORPUS_BACKUP_URL.format(project=project_name,
- fuzzer=qualified_name),
- method='GET')
-
- corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
- download_corpus_args.append('%s %s' % (corpus_archive_path, url))
-
- step = {
- 'name': 'gcr.io/oss-fuzz-base/base-runner',
- 'entrypoint': 'download_corpus',
- 'args': download_corpus_args,
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- }
- return step
diff --git a/infra/gcb/build_msan_libs.py b/infra/gcb/build_msan_libs.py
deleted file mode 100755
index 2c87b15a3..000000000
--- a/infra/gcb/build_msan_libs.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/python2
-"""Build base images on Google Cloud Builder.
-
-Usage: build_base_images.py
-"""
-
-import datetime
-import os
-import yaml
-import sys
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-import build_base_images
-
-
-def main():
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- image = 'gcr.io/oss-fuzz-base/msan-builder'
- steps = build_base_images.get_steps(['base-msan-builder', 'msan-builder'])
- ts = datetime.datetime.utcnow().strftime('%Y%m%d%H%M')
- upload_name = 'msan-libs-' + ts + '.zip'
-
- steps.extend([{
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'cd /msan && zip -r /workspace/libs.zip .',
- ],
- }, {
- 'name':
- 'gcr.io/cloud-builders/gsutil',
- 'args': [
- 'cp',
- '/workspace/libs.zip',
- 'gs://oss-fuzz-msan-libs/' + upload_name,
- ],
- }])
-
- build_body = {
- 'steps': steps,
- 'timeout': str(6 * 3600) + 's',
- 'options': options,
- 'images': [
- 'gcr.io/oss-fuzz-base/base-msan-builder',
- image,
- ],
- }
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- build_info = cloudbuild.projects().builds().create(
- projectId='oss-fuzz-base', body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print >> sys.stderr, 'Logs:', build_base_images.get_logs_url(build_id)
- print build_id
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/build_project.py b/infra/gcb/build_project.py
deleted file mode 100644
index f45b0996a..000000000
--- a/infra/gcb/build_project.py
+++ /dev/null
@@ -1,405 +0,0 @@
-#!/usr/bin/python2
-"""Starts project build on Google Cloud Builder.
-
-Usage: build_project.py <project_dir>
-"""
-
-from __future__ import print_function
-
-import datetime
-import json
-import os
-import re
-import sys
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-import build_lib
-
-FUZZING_BUILD_TAG = 'fuzzing'
-
-GCB_LOGS_BUCKET = 'oss-fuzz-gcb-logs'
-
-CONFIGURATIONS = {
- 'sanitizer-address': ['SANITIZER=address'],
- 'sanitizer-dataflow': ['SANITIZER=dataflow'],
- 'sanitizer-memory': ['SANITIZER=memory'],
- 'sanitizer-undefined': ['SANITIZER=undefined'],
- 'engine-libfuzzer': ['FUZZING_ENGINE=libfuzzer'],
- 'engine-afl': ['FUZZING_ENGINE=afl'],
- 'engine-honggfuzz': ['FUZZING_ENGINE=honggfuzz'],
- 'engine-dataflow': ['FUZZING_ENGINE=dataflow'],
- 'engine-none': ['FUZZING_ENGINE=none'],
-}
-
-DEFAULT_ARCHITECTURES = ['x86_64']
-DEFAULT_ENGINES = ['libfuzzer', 'afl', 'honggfuzz']
-DEFAULT_SANITIZERS = ['address', 'undefined']
-
-
-def usage():
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
- exit(1)
-
-
-def load_project_yaml(project_dir):
- project_name = os.path.basename(project_dir)
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- with open(project_yaml_path) as f:
- project_yaml = yaml.safe_load(f)
- project_yaml.setdefault('disabled', False)
- project_yaml.setdefault('name', project_name)
- project_yaml.setdefault('image', 'gcr.io/oss-fuzz/' + project_name)
- project_yaml.setdefault('architectures', DEFAULT_ARCHITECTURES)
- project_yaml.setdefault('sanitizers', DEFAULT_SANITIZERS)
- project_yaml.setdefault('fuzzing_engines', DEFAULT_ENGINES)
- project_yaml.setdefault('run_tests', True)
- project_yaml.setdefault('coverage_extra_args', '')
- project_yaml.setdefault('labels', {})
- project_yaml.setdefault('language', 'cpp')
- return project_yaml
-
-
-def is_supported_configuration(fuzzing_engine, sanitizer, architecture):
- fuzzing_engine_info = build_lib.ENGINE_INFO[fuzzing_engine]
- if architecture == 'i386' and sanitizer != 'address':
- return False
- return (sanitizer in fuzzing_engine_info.supported_sanitizers and
- architecture in fuzzing_engine_info.supported_architectures)
-
-
-def get_sanitizers(project_yaml):
- sanitizers = project_yaml['sanitizers']
- assert isinstance(sanitizers, list)
-
- processed_sanitizers = []
- for sanitizer in sanitizers:
- if isinstance(sanitizer, basestring):
- processed_sanitizers.append(sanitizer)
- elif isinstance(sanitizer, dict):
- for key in sanitizer.iterkeys():
- processed_sanitizers.append(key)
-
- return processed_sanitizers
-
-
-def workdir_from_dockerfile(dockerfile):
- """Parse WORKDIR from the Dockerfile."""
- WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
-
- with open(dockerfile) as f:
- lines = f.readlines()
-
- for line in lines:
- match = re.match(WORKDIR_REGEX, line)
- if match:
- # We need to escape '$' since they're used for subsitutions in Container
- # Builer builds.
- return match.group(1).replace('$', '$$')
-
- return None
-
-
-def get_build_steps(project_dir):
- project_yaml = load_project_yaml(project_dir)
- dockerfile_path = os.path.join(project_dir, 'Dockerfile')
- name = project_yaml['name']
- image = project_yaml['image']
- run_tests = project_yaml['run_tests']
-
- ts = datetime.datetime.now().strftime('%Y%m%d%H%M')
-
- build_steps = [
- {
- 'args': [
- 'clone',
- 'https://github.com/google/oss-fuzz.git',
- ],
- 'name': 'gcr.io/cloud-builders/git',
- },
- {
- 'name': 'gcr.io/cloud-builders/docker',
- 'args': [
- 'build',
- '-t',
- image,
- '.',
- ],
- 'dir': 'oss-fuzz/projects/' + name,
- },
- {
- 'name': image,
- 'args': [
- 'bash', '-c',
- 'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
- ],
- 'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
- },
- {
- 'name': 'gcr.io/oss-fuzz-base/msan-builder',
- 'args': [
- 'bash',
- '-c',
- 'cp -r /msan /workspace',
- ],
- },
- ]
-
- for fuzzing_engine in project_yaml['fuzzing_engines']:
- for sanitizer in get_sanitizers(project_yaml):
- for architecture in project_yaml['architectures']:
- if not is_supported_configuration(fuzzing_engine, sanitizer,
- architecture):
- continue
-
- env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
- env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
- out = '/workspace/out/' + sanitizer
- stamped_name = '-'.join([name, sanitizer, ts])
- zip_file = stamped_name + '.zip'
- stamped_srcmap_file = stamped_name + '.srcmap.json'
- bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
- if architecture != 'x86_64':
- bucket += '-' + architecture
- upload_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
- srcmap_url = build_lib.get_signed_url(
- build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
- stamped_srcmap_file))
-
- targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
- targets_list_url = build_lib.get_signed_url(
- build_lib.get_targets_list_url(bucket, name, sanitizer))
-
- env.append('OUT=' + out)
- env.append('MSAN_LIBS_PATH=/workspace/msan')
- env.append('ARCHITECTURE=' + architecture)
-
- workdir = workdir_from_dockerfile(dockerfile_path)
- if not workdir:
- workdir = '/src'
-
- failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # compile
- {
- 'name':
- image,
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- # Remove /out to break loudly when a build script
- # incorrectly uses /out instead of $OUT.
- # `cd /src && cd {workdir}` (where {workdir} is parsed from
- # the Dockerfile). Container Builder overrides our workdir
- # so we need to add this step to set it back.
- ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
- 'compile || (echo "{failure_msg}" && false)'
- ).format(workdir=workdir, out=out, failure_msg=failure_msg),
- ],
- })
-
- if sanitizer == 'memory':
- # Patch dynamic libraries to use instrumented ones.
- build_steps.append({
- 'name':
- 'gcr.io/oss-fuzz-base/msan-builder',
- 'args': [
- 'bash',
- '-c',
- # TODO(ochang): Replace with just patch_build.py once
- # permission in image is fixed.
- 'python /usr/local/bin/patch_build.py {0}'.format(out),
- ],
- })
-
- if run_tests:
- failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
- 'To reproduce, run:\n'
- 'python infra/helper.py build_image {name}\n'
- 'python infra/helper.py build_fuzzers --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n'
- 'python infra/helper.py check_build --sanitizer '
- '{sanitizer} --engine {engine} --architecture '
- '{architecture} {name}\n' + '*' * 80).format(
- name=name,
- sanitizer=sanitizer,
- engine=fuzzing_engine,
- architecture=architecture)
-
- build_steps.append(
- # test binaries
- {
- 'name':
- 'gcr.io/oss-fuzz-base/base-runner',
- 'env':
- env,
- 'args': [
- 'bash', '-c',
- 'test_all || (echo "{0}" && false)'.format(failure_msg)
- ],
- })
-
- if project_yaml['labels']:
- # write target labels
- build_steps.append({
- 'name':
- image,
- 'env':
- env,
- 'args': [
- '/usr/local/bin/write_labels.py',
- json.dumps(project_yaml['labels']),
- out,
- ],
- })
-
- if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
- dataflow_steps = dataflow_post_build_steps(name, env)
- if dataflow_steps:
- build_steps.extend(dataflow_steps)
- else:
- sys.stderr.write('Skipping dataflow post build steps.\n')
-
- build_steps.extend([
- # generate targets list
- {
- 'name':
- 'gcr.io/oss-fuzz-base/base-runner',
- 'env':
- env,
- 'args': [
- 'bash',
- '-c',
- 'targets_list > /workspace/{0}'.format(
- targets_list_filename),
- ],
- },
- # zip binaries
- {
- 'name':
- image,
- 'args': [
- 'bash', '-c',
- 'cd {out} && zip -r {zip_file} *'.format(out=out,
- zip_file=zip_file)
- ],
- },
- # upload srcmap
- {
- 'name': 'gcr.io/oss-fuzz-base/uploader',
- 'args': [
- '/workspace/srcmap.json',
- srcmap_url,
- ],
- },
- # upload binaries
- {
- 'name': 'gcr.io/oss-fuzz-base/uploader',
- 'args': [
- os.path.join(out, zip_file),
- upload_url,
- ],
- },
- # upload targets list
- {
- 'name':
- 'gcr.io/oss-fuzz-base/uploader',
- 'args': [
- '/workspace/{0}'.format(targets_list_filename),
- targets_list_url,
- ],
- },
- # cleanup
- {
- 'name': image,
- 'args': [
- 'bash',
- '-c',
- 'rm -r ' + out,
- ],
- },
- ])
-
- return build_steps
-
-
-def dataflow_post_build_steps(project_name, env):
- steps = []
- download_corpora_step = build_lib.download_corpora_step(project_name)
- if not download_corpora_step:
- return None
-
- steps = [download_corpora_step]
- steps.append({
- 'name': 'gcr.io/oss-fuzz-base/base-runner',
- 'env': env,
- 'args': [
- 'bash', '-c',
- ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
- 'collect_dft || (echo "DFT collection failed." && false)')
- ],
- 'volumes': [{
- 'name': 'corpus',
- 'path': '/corpus'
- }],
- })
- return steps
-
-
-def get_logs_url(build_id):
- URL_FORMAT = ('https://console.developers.google.com/logs/viewer?'
- 'resource=build%2Fbuild_id%2F{0}&project=oss-fuzz')
- return URL_FORMAT.format(build_id)
-
-
-def run_build(build_steps, project_name, tag):
- options = {}
- if 'GCB_OPTIONS' in os.environ:
- options = yaml.safe_load(os.environ['GCB_OPTIONS'])
-
- build_body = {
- 'steps': build_steps,
- 'timeout': str(build_lib.BUILD_TIMEOUT) + 's',
- 'options': options,
- 'logsBucket': GCB_LOGS_BUCKET,
- 'tags': [project_name + '-' + tag,],
- }
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- build_info = cloudbuild.projects().builds().create(projectId='oss-fuzz',
- body=build_body).execute()
- build_id = build_info['metadata']['build']['id']
-
- print('Logs:', get_logs_url(build_id), file=sys.stderr)
- print(build_id)
-
-
-def main():
- if len(sys.argv) != 2:
- usage()
-
- project_dir = sys.argv[1].rstrip(os.path.sep)
- steps = get_build_steps(project_dir)
-
- project_name = os.path.basename(project_dir)
- run_build(steps, project_name, FUZZING_BUILD_TAG)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/builds_status.py b/infra/gcb/builds_status.py
deleted file mode 100755
index 5352d36f5..000000000
--- a/infra/gcb/builds_status.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python2
-
-import datetime
-import os
-import sys
-import json
-import tempfile
-import time
-
-import dateutil.parser
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build as gcb_build
-from google.cloud import storage
-
-import build_and_run_coverage
-import build_project
-
-STATUS_BUCKET = 'oss-fuzz-build-logs'
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-BADGE_DIR = 'badges'
-RETRY_COUNT = 3
-RETRY_WAIT = 5
-MAX_BUILD_RESULTS = 2000
-BUILDS_PAGE_SIZE = 256
-BADGE_IMAGE_TYPES = {'svg': 'image/svg+xml', 'png': 'image/png'}
-
-_client = None
-
-
-def _get_storage_client():
- """Return storage client."""
- global _client
- if not _client:
- _client = storage.Client()
-
- return _client
-
-
-def usage():
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <projects_dir>\n')
- exit(1)
-
-
-def scan_project_names(projects_dir):
- projects = []
- for root, dirs, files in os.walk(projects_dir):
- for f in files:
- if f == 'Dockerfile':
- projects.append(os.path.basename(root))
- return sorted(projects)
-
-
-def upload_status(successes, failures, status_filename):
- """Upload main status page."""
- data = {
- 'projects': failures + successes,
- 'failures': failures,
- 'successes': successes,
- 'last_updated': datetime.datetime.utcnow().ctime()
- }
-
- bucket = _get_storage_client().get_bucket(STATUS_BUCKET)
- blob = bucket.blob(status_filename)
- blob.cache_control = 'no-cache'
- blob.upload_from_string(json.dumps(data), content_type='application/json')
-
-
-def is_build_successful(build):
- return build['status'] == 'SUCCESS'
-
-
-def find_last_build(builds, project, build_tag_suffix):
- DELAY_MINUTES = 40
- tag = project + '-' + build_tag_suffix
-
- builds = builds.get(tag)
- if not builds:
- print >> sys.stderr, 'Failed to find builds with tag', tag
- return None
-
- for build in builds:
- if build['status'] == 'WORKING':
- continue
-
- if tag not in build['tags']:
- continue
-
- if not 'finishTime' in build:
- continue
-
- finish_time = dateutil.parser.parse(build['finishTime'], ignoretz=True)
- if (datetime.datetime.utcnow() - finish_time >=
- datetime.timedelta(minutes=DELAY_MINUTES)):
- status_bucket = _get_storage_client().get_bucket(STATUS_BUCKET)
- gcb_bucket = _get_storage_client().get_bucket(
- build_project.GCB_LOGS_BUCKET)
- log_name = 'log-{0}.txt'.format(build['id'])
- log = gcb_bucket.blob(log_name)
- dest_log = status_bucket.blob(log_name)
-
- with tempfile.NamedTemporaryFile() as f:
- log.download_to_filename(f.name)
- dest_log.upload_from_filename(f.name, content_type='text/plain')
-
- return build
-
- return None
-
-
-def execute_with_retries(request):
- for i in xrange(RETRY_COUNT + 1):
- try:
- return request.execute()
- except Exception as e:
- print('request failed with {0}, retrying...'.format(str(e)))
- if i < RETRY_COUNT:
- time.sleep(RETRY_WAIT)
- continue
-
- raise
-
-
-def get_builds(cloudbuild):
- """Get a batch of the latest builds (up to MAX_BUILD_RESULTS), grouped by
- tag."""
- ungrouped_builds = []
- next_page_token = None
-
- while True:
- page_size = min(BUILDS_PAGE_SIZE, MAX_BUILD_RESULTS - len(ungrouped_builds))
- response = execute_with_retries(cloudbuild.projects().builds().list(
- projectId='oss-fuzz', pageSize=page_size, pageToken=next_page_token))
-
- if not 'builds' in response:
- print >> sys.stderr, 'Invalid response from builds list:', response
- return None
-
- ungrouped_builds.extend(response['builds'])
- if len(ungrouped_builds) >= MAX_BUILD_RESULTS:
- break
-
- next_page_token = response.get('nextPageToken')
-
- builds = {}
- for build in ungrouped_builds:
- for tag in build['tags']:
- builds.setdefault(tag, []).append(build)
-
- return builds
-
-
-def update_build_status(builds, projects, build_tag_suffix, status_filename):
- successes = []
- failures = []
-
- for project in projects:
- print project
-
- last_build = find_last_build(builds, project, build_tag_suffix)
- if not last_build:
- print >> sys.stderr, 'Failed to get build for', project
- continue
-
- print last_build['startTime'], last_build['status'], last_build['id']
- if is_build_successful(last_build):
- successes.append({
- 'name': project,
- 'build_id': last_build['id'],
- 'finish_time': last_build['finishTime'],
- 'success': True,
- })
- else:
- failures.append({
- 'name': project,
- 'build_id': last_build['id'],
- 'finish_time': last_build['finishTime'],
- 'success': False,
- })
-
- upload_status(successes, failures, status_filename)
-
-
-def update_build_badges(builds, projects, build_tag, coverage_tag):
- for project in projects:
- last_build = find_last_build(builds, project, build_tag)
- last_coverage_build = find_last_build(builds, project, coverage_tag)
- if not last_build or not last_coverage_build:
- continue
-
- badge = 'building'
- if not is_build_successful(last_coverage_build):
- badge = 'coverage_failing'
- if not is_build_successful(last_build):
- badge = 'failing'
-
- print("[badge] {}: {}".format(project, badge))
-
- for extension, mime_type in BADGE_IMAGE_TYPES.items():
- badge_name = '{badge}.{extension}'.format(
- badge=badge, extension=extension)
- # Retrieve the image relative to this script's location
- badge_file = os.path.join(SCRIPT_DIR, 'badge_images', badge_name)
-
- # The uploaded blob name should look like `badges/project.png`
- blob_name = '{badge_dir}/{project_name}.{extension}'.format(
- badge_dir=BADGE_DIR, project_name=project, extension=extension)
-
- status_bucket = _get_storage_client().get_bucket(STATUS_BUCKET)
- badge_blob = status_bucket.blob(blob_name)
- badge_blob.upload_from_filename(badge_file, content_type=mime_type)
-
-
-def main():
- if len(sys.argv) != 2:
- usage()
-
- projects_dir = sys.argv[1]
- projects = scan_project_names(projects_dir)
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = gcb_build('cloudbuild', 'v1', credentials=credentials)
-
- builds = get_builds(cloudbuild)
- update_build_status(
- builds,
- projects,
- build_project.FUZZING_BUILD_TAG,
- status_filename='status.json')
- update_build_status(
- builds,
- projects,
- build_and_run_coverage.COVERAGE_BUILD_TAG,
- status_filename='status-coverage.json')
-
- update_build_badges(
- builds,
- projects,
- build_tag=build_project.FUZZING_BUILD_TAG,
- coverage_tag=build_and_run_coverage.COVERAGE_BUILD_TAG)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/cancel.py b/infra/gcb/cancel.py
deleted file mode 100755
index 8393a5144..000000000
--- a/infra/gcb/cancel.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/python2
-"""Cancels project build on Google Cloud Builder.
-
-Usage: cancel.py <build_id>
-"""
-
-import base64
-import collections
-import datetime
-import os
-import subprocess
-import sys
-import time
-import urllib
-import yaml
-
-from oauth2client.client import GoogleCredentials
-from googleapiclient.discovery import build
-
-
-def usage():
- sys.stderr.write('Usage: ' + sys.argv[0] + ' <build_id>\n')
- exit(1)
-
-
-def main():
- if len(sys.argv) != 2:
- usage()
-
- build_id = sys.argv[1]
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
- print cloudbuild.projects().builds().cancel(projectId='oss-fuzz',
- id=build_id,
- body={}).execute()
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/jenkins_config/base_job.xml b/infra/gcb/jenkins_config/base_job.xml
deleted file mode 100644
index fa90aa474..000000000
--- a/infra/gcb/jenkins_config/base_job.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version='1.0' encoding='UTF-8'?>
-<project>
- <actions/>
- <description></description>
- <keepDependencies>false</keepDependencies>
- <properties/>
- <scm class="hudson.plugins.git.GitSCM" plugin="git@3.1.0">
- <configVersion>2</configVersion>
- <userRemoteConfigs>
- <hudson.plugins.git.UserRemoteConfig>
- <url>https://github.com/google/oss-fuzz.git</url>
- </hudson.plugins.git.UserRemoteConfig>
- </userRemoteConfigs>
- <branches>
- <hudson.plugins.git.BranchSpec>
- <name>*/master</name>
- </hudson.plugins.git.BranchSpec>
- </branches>
- <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
- <submoduleCfg class="list"/>
- <extensions>
- <hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- <relativeTargetDir>oss-fuzz</relativeTargetDir>
- </hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- </extensions>
- </scm>
- <canRoam>true</canRoam>
- <disabled>false</disabled>
- <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
- <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
- <triggers>
- <jenkins.triggers.ReverseBuildTrigger>
- <spec/>
- <upstreamProjects>infra/base-images</upstreamProjects>
- <threshold>
- <name>SUCCESS</name>
- <ordinal>0</ordinal>
- <color>BLUE</color>
- <completeBuild>true</completeBuild>
- </threshold>
- </jenkins.triggers.ReverseBuildTrigger>
- </triggers>
- <concurrentBuild>false</concurrentBuild>
- <builders>
- <hudson.tasks.Shell>
- <command>#!/bin/bash -eux
-
-virtualenv ENV
-set +o nounset
-. ENV/bin/activate
-set -o nounset
-
-cd $WORKSPACE/oss-fuzz/infra/gcb
-pip install -r requirements.txt
-build_id=$(python build_project.py $WORKSPACE/oss-fuzz/$JOB_NAME)
-python wait_for_build.py $build_id
-</command>
- </hudson.tasks.Shell>
- </builders>
- <publishers/>
- <buildWrappers/>
-</project>
diff --git a/infra/gcb/jenkins_config/coverage_job.xml b/infra/gcb/jenkins_config/coverage_job.xml
deleted file mode 100644
index be5cb8296..000000000
--- a/infra/gcb/jenkins_config/coverage_job.xml
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version='1.0' encoding='UTF-8'?>
-<project>
- <actions/>
- <description></description>
- <keepDependencies>false</keepDependencies>
- <properties/>
- <scm class="hudson.plugins.git.GitSCM" plugin="git@3.1.0">
- <configVersion>2</configVersion>
- <userRemoteConfigs>
- <hudson.plugins.git.UserRemoteConfig>
- <url>https://github.com/google/oss-fuzz.git</url>
- </hudson.plugins.git.UserRemoteConfig>
- </userRemoteConfigs>
- <branches>
- <hudson.plugins.git.BranchSpec>
- <name>*/master</name>
- </hudson.plugins.git.BranchSpec>
- </branches>
- <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
- <submoduleCfg class="list"/>
- <extensions>
- <hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- <relativeTargetDir>oss-fuzz</relativeTargetDir>
- </hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
- </extensions>
- </scm>
- <canRoam>true</canRoam>
- <disabled>false</disabled>
- <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
- <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
- <triggers>
- <hudson.triggers.TimerTrigger>
- <spec>H 6 * * *</spec>
- </hudson.triggers.TimerTrigger>
- </triggers>
- <concurrentBuild>false</concurrentBuild>
- <builders>
- <hudson.tasks.Shell>
- <command>#!/bin/bash -eux
-
-virtualenv ENV
-set +o nounset
-. ENV/bin/activate
-set -o nounset
-
-cd $WORKSPACE/oss-fuzz/infra/gcb
-pip install -r requirements.txt
-project_dir=$WORKSPACE/oss-fuzz/projects/$(basename $JOB_NAME)
-build_id=$(python build_and_run_coverage.py $project_dir)
-if [[ "$build_id" == "0" ]]; then
- echo "Intentionally skipping code coverage job."
-else
- python wait_for_build.py $build_id
-fi
-</command>
- </hudson.tasks.Shell>
- </builders>
- <publishers/>
- <buildWrappers/>
-</project>
diff --git a/infra/gcb/requirements.txt b/infra/gcb/requirements.txt
deleted file mode 100644
index af53d16ae..000000000
--- a/infra/gcb/requirements.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-cachetools==2.1.0
-certifi==2018.4.16
-chardet==3.0.4
-enum34==1.1.6
-futures==3.2.0
-google-api-core==1.2.0
-google-api-python-client==1.7.0
-google-auth==1.5.0
-google-auth-httplib2==0.0.3
-google-cloud-core==0.28.1
-google-cloud-logging==1.6.0
-google-cloud-pubsub==0.35.2
-google-cloud-storage==1.10.0
-google-resumable-media==0.3.1
-googleapis-common-protos==1.5.3
-grpc-google-iam-v1==0.11.4
-grpcio==1.12.0
-httplib2==0.11.3
-idna==2.6
-Jinja2==2.10.1
-MarkupSafe==1.0
-multi-key-dict==2.0.3
-oauth2client==4.1.2
-pbr==4.0.3
-protobuf==3.5.2.post1
-pyasn1==0.4.3
-pyasn1-modules==0.2.1
-python-dateutil==2.7.3
-python-jenkins==1.0.0
-pytz==2018.4
-PyYAML==5.1
-requests==2.21.0
-rsa==3.4.2
-six==1.11.0
-uritemplate==3.0.0
diff --git a/infra/gcb/sync.py b/infra/gcb/sync.py
deleted file mode 100755
index 9004678d9..000000000
--- a/infra/gcb/sync.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-"""Script to sync CF and Jenkins jobs."""
-
-import json
-import os
-import re
-import sys
-import yaml
-
-import jenkins
-
-JENKINS_SERVER = ('localhost', 8080)
-
-JOB_TEMPLATES = [
- {'prefix': 'projects/', 'config': 'base_job.xml'},
- {'prefix': 'coverage/', 'config': 'coverage_job.xml'},
-]
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-OSSFUZZ_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
-
-VALID_PROJECT_NAME = re.compile(r'^[a-zA-Z0-9_-]+$')
-
-
-def main():
- # Connect to jenkins server.
- jenkins_login = get_jenkins_login()
- server = jenkins.Jenkins(
- 'http://%s:%d' % JENKINS_SERVER,
- username=jenkins_login[0],
- password=jenkins_login[1])
-
- for project in get_projects():
- print 'syncing configs for', project
- try:
- # Create/update jenkins build job.
- sync_jenkins_job(server, project)
-
- except Exception as e:
- print >> sys.stderr, 'Failed to setup job with exception', e
-
-
-def _has_dockerfile(project_dir):
- """Whether or not the project has a Dockerfile."""
- if os.path.exists(os.path.join(project_dir, 'Dockerfile')):
- return True
-
- project_yaml_path = os.path.join(project_dir, 'project.yaml')
- if not os.path.exists(project_yaml_path):
- return False
-
- with open(project_yaml_path) as f:
- project_info = yaml.safe_load(f)
-
- return 'dockerfile' in project_info
-
-
-def get_projects():
- """Return list of projects for oss-fuzz."""
- projects = []
- projects_dir = os.path.join(OSSFUZZ_DIR, 'projects')
- for name in os.listdir(projects_dir):
- full_path = os.path.join(projects_dir, name)
- if not os.path.isdir(full_path) or not _has_dockerfile(full_path):
- continue
-
- if not VALID_PROJECT_NAME.match(name):
- print >> sys.stderr, 'Invalid project name:', name
- continue
-
- projects.append(name)
-
- if not projects:
- print >> sys.stderr, 'No projects found.'
-
- return projects
-
-
-def get_jenkins_login():
- """Returns (username, password) for jenkins."""
- username = os.getenv('JENKINS_USER')
- password = os.getenv('JENKINS_PASS')
-
- return username, password
-
-
-def sync_jenkins_job(server, project):
- """Sync the config with jenkins."""
- project_yaml = os.path.join(OSSFUZZ_DIR, 'projects', project, 'project.yaml')
- with open(project_yaml, 'r') as f:
- project_json_string = json.dumps(json.dumps(yaml.safe_load(f)))
-
- for job in JOB_TEMPLATES:
- job_name = job['prefix'] + project
- with open(os.path.join(SCRIPT_DIR, 'jenkins_config', job['config'])) as f:
- job_config_xml = f.read()
-
- if server.job_exists(job_name):
- server.reconfig_job(job_name, job_config_xml)
- else:
- server.create_job(job_name, job_config_xml)
- server.build_job(job_name)
-
-
-if __name__ == '__main__':
- main()
diff --git a/infra/gcb/templates/bower.json b/infra/gcb/templates/bower.json
deleted file mode 100644
index b6b6d765c..000000000
--- a/infra/gcb/templates/bower.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "name": "build-status",
- "main": "index.html",
- "dependencies": {
- "polymer": "Polymer/polymer#^2.0.0-rc.3",
- "paper-item": "PolymerElements/paper-item#2.0-preview",
- "app-layout": "PolymerElements/app-layout#2.0-preview",
- "paper-card": "PolymerElements/paper-card#2.0-preview",
- "paper-tabs": "PolymerElements/paper-tabs#2.0-preview",
- "iron-icons": "PolymerElements/iron-icons#2.0-preview",
- "iron-ajax": "PolymerElements/iron-ajax#2.0-preview",
- "iron-flex-layout": "PolymerElements/iron-flex-layout#2.0-preview",
- "paper-icon-button": "PolymerElements/paper-icon-button#2.0-preview",
- "app-route": "PolymerElements/app-route#2.0-preview"
- },
- "devDependencies": {
- "web-component-tester": "^6.0.0-prerelease.5",
- "webcomponentsjs": "webcomponents/webcomponentsjs#^1.1.0"
- }
-}
diff --git a/infra/gcb/templates/deploy.sh b/infra/gcb/templates/deploy.sh
deleted file mode 100755
index 36aa87163..000000000
--- a/infra/gcb/templates/deploy.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-gsutil -h "Cache-Control:no-cache,max-age=0" -m cp -r bower_components index.html src manifest.json gs://oss-fuzz-build-logs
diff --git a/infra/gcb/templates/index.html b/infra/gcb/templates/index.html
deleted file mode 100644
index 36dd15b31..000000000
--- a/infra/gcb/templates/index.html
+++ /dev/null
@@ -1,28 +0,0 @@
-<!doctype html>
-<html lang="en">
- <head>
- <meta charset="utf-8">
- <meta name="viewport" content="width=device-width, minimum-scale=1, initial-scale=1, user-scalable=yes">
- <title>OSS-Fuzz build status</title>
- <meta name="description" content="OSS-Fuzz build status">
-
- <!-- See https://goo.gl/OOhYW5 -->
- <link rel="manifest" href="/manifest.json">
-
- <script src="/bower_components/webcomponentsjs/webcomponents-loader.js"></script>
-
-
- <link rel="import" href="/src/build-status/build-status.html">
-
- <style>
- body {
- font-family: 'Roboto', 'Noto', sans-serif;
- background: #f1f1f1;
- margin: 0;
- }
- </style>
- </head>
- <body>
- <build-status></build-status>
- </body>
-</html>
diff --git a/infra/gcb/templates/manifest.json b/infra/gcb/templates/manifest.json
deleted file mode 100644
index c2d45e4f6..000000000
--- a/infra/gcb/templates/manifest.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "build-status",
- "short_name": "build-status",
- "start_url": "/",
- "display": "standalone"
-}
diff --git a/infra/gcb/templates/polymer.json b/infra/gcb/templates/polymer.json
deleted file mode 100644
index 2bd10b64a..000000000
--- a/infra/gcb/templates/polymer.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "lint": {
- "rules": [
- "polymer-2"
- ]
- }
-}
diff --git a/infra/gcb/templates/src/build-status/build-status.html b/infra/gcb/templates/src/build-status/build-status.html
deleted file mode 100644
index 35ee99a03..000000000
--- a/infra/gcb/templates/src/build-status/build-status.html
+++ /dev/null
@@ -1,223 +0,0 @@
-<link rel="import" href="../../bower_components/polymer/polymer-element.html">
-<link rel="import" href="../../bower_components/app-layout/app-drawer-layout/app-drawer-layout.html">
-<link rel="import" href="../../bower_components/app-layout/app-drawer/app-drawer.html">
-<link rel="import" href="../../bower_components/app-layout/app-scroll-effects/app-scroll-effects.html">
-<link rel="import" href="../../bower_components/app-layout/app-header/app-header.html">
-<link rel="import" href="../../bower_components/app-layout/app-header-layout/app-header-layout.html">
-<link rel="import" href="../../bower_components/app-layout/app-toolbar/app-toolbar.html">
-<link rel="import" href="../../bower_components/paper-item/paper-item.html">
-<link rel="import" href="../../bower_components/paper-item/paper-item-body.html">
-<link rel="import" href="../../bower_components/paper-card/paper-card.html">
-<link rel="import" href="../../bower_components/paper-tabs/paper-tabs.html">
-<link rel="import" href="../../bower_components/paper-icon-button/paper-icon-button.html">
-<link rel="import" href="../../bower_components/iron-icons/iron-icons.html">
-<link rel="import" href="../../bower_components/iron-ajax/iron-ajax.html">
-<link rel="import" href="../../bower_components/iron-flex-layout/iron-flex-layout-classes.html">
-<link rel="import" href="../../bower_components/polymer/lib/elements/dom-if.html">
-<link rel="import" href="../../bower_components/polymer/lib/elements/dom-repeat.html">
-<link rel="import" href="../../bower_components/app-route/app-location.html">
-<link rel="import" href="../../bower_components/app-route/app-route.html">
-
-<dom-module id="build-status">
- <template>
- <app-location route="{{route}}" use-hash-as-path></app-location>
- <app-route route="{{route}}"
- pattern=":project_name"
- data="{{routeData}}">
- </app-route>
- <style is="custom-style" include="iron-flex iron-flex-alignment">
- <style>
- .paper-item-link {
- color: inherit;
- text-decoration: none;
- }
-
- app-header {
- background-color: #2ba4ad;
- color: #fff;
- }
-
- paper-card {
- margin: 0.5em;
- }
-
- paper-item {
- cursor: pointer;
- }
-
- paper-tabs {
- -webkit-font-smoothing: antialiased;
- width: 100%;
- margin-bottom: 1px;
- height: 40px;
- }
-
- :host {
- display: block;
- }
-
- .icon-error {
- color: #e83030;
- margin-right: 0.2em;
- }
-
- .projects {
- min-width: 10em;
- }
-
- .log {
- width: 80%;
- display: inline;
- }
-
- pre {
- white-space: pre-wrap;
- }
- </style>
- <app-header reveals>
- <app-toolbar>
- <div main-title>OSS-Fuzz build status</div>
- <div><small>(Updated every 30 minutes)</small></div>
- </app-toolbar>
- </app-header>
- <div class="layout horizontal">
- <paper-card class="projects">
- <div class="card-tabs">
- <paper-tabs selected="fuzzing" id="build_type" attr-for-selected="type" on-click="onChanged">
- <paper-tab type="fuzzing">Fuzzing Builds</paper-tab>
- <paper-tab type="coverage">Coverage Builds</paper-tab>
- </paper-tabs>
- </div>
- <div class="card-content">
- <template is="dom-repeat" items="[[status.projects]]" as="project">
- <paper-item on-tap="onTap">
- <paper-item-body two-line>
- <div>
- <template is="dom-if" if="[[!project.success]]">
- <iron-icon class="icon-error" icon="icons:error"></iron-icon>
- </template>
- [[project.name]]
- </div>
- <div secondary title$="Last built [[toLocalDate(project.finish_time)]]">
- Last built [[toLocalDate(project.finish_time)]]
- </div>
- </paper-item-body>
- </paper-item>
- </template>
- </div>
- </paper-card>
- <paper-card class="log">
- <div class="card-content">
- <template is="dom-if" if="[[showMessage(loading_log, log)]]">
- Select a project to see logs.
- </template>
- <template is="dom-if" if="[[loading_log]]">
- Loading...
- </template>
- <template is="dom-if" if="[[showLog(log)]]">
- <a href="/log-[[build_id]].txt" tabindex="-1">
- <iron-icon icon="icons:link"></iron-iron>
- </a>
- </template>
- <pre>[[log]]</pre>
- </div>
- </paper-card>
- </div>
- <iron-ajax id="status_fuzzing" auto handle-as="json" url="/status.json" on-response="onResponseForFuzzing"></iron-ajax>
- <iron-ajax id="status_coverage" auto handle-as="json" url="/status-coverage.json" on-response="onResponseForCoverage"></iron-ajax>
- <iron-ajax id="logxhr" auto handle-as="text" on-response="onLogResponse"></iron-ajax>
- </template>
-
- <script>
- /** @polymerElement */
- class BuildStatus extends Polymer.Element {
- static get is() { return 'build-status'; }
- static get properties() {
- return {
- log: {
- type: String,
- value: ''
- },
- loading_log: {
- type: Boolean,
- value: false
- }
- };
- }
- static get observers() {
- return [
- '_routeChanged(route.*)'
- ];
- }
-
- _routeChanged() {
- if (!this.status || !this.routeData.project_name) {
- // If our status json is loaded and there is a project_name specified
- // in the URL, we can proceed to load that project's log.
- return;
- }
- var project = this.getProjectByName(this.routeData.project_name);
-
- this.$.logxhr.url = "/log-" + project.build_id + ".txt";
- this.build_id = project.build_id;
- this.log = '';
- this.loading_log = true;
- }
-
- getProjectByName(project_name) {
- return this.status.projects.find(p => p.name === project_name);
- }
-
- onResponseForFuzzing(e) {
- this.status_fuzzing = e.detail.response;
- if (!this.status) {
- // Show status of the fuzzing builds by default.
- this.status = this.status_fuzzing;
- // Manually invoke a _routeChanged call, in order to load the log for
- // someone going directly to a project's URL.
- this._routeChanged();
- }
- }
-
- onResponseForCoverage(e) {
- this.status_coverage = e.detail.response;
- }
-
- onLogResponse(e) {
- this.log = e.detail.response;
- this.loading_log = false;
- }
-
- onTap(e) {
- // Change the route, this should auto-magically update the url in the
- // browser and invoke the _routeChanged method.
- this.set('route.path', e.model.project.name);
- }
-
- onChanged(e) {
- if (this.$.build_type.selected == 'coverage') {
- this.status = this.status_coverage
- } else {
- this.status = this.status_fuzzing
- }
- }
-
- showMessage(loading_log, log) {
- return !loading_log && log === '';
- }
-
- showLog(log) {
- return log !== '';
- }
-
- toLocalDate(str) {
- let date = new Date(str);
- let ds = date.toString();
- let timezone = ds.substring(ds.indexOf("("));
- return date.toLocaleString() + " " + timezone;
- }
- };
-
- window.customElements.define(BuildStatus.is, BuildStatus);
- </script>
-</dom-module>
diff --git a/infra/gcb/wait_for_build.py b/infra/gcb/wait_for_build.py
deleted file mode 100755
index ec2f89ee4..000000000
--- a/infra/gcb/wait_for_build.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/python2
-"""Waits for project build on Google Cloud Builder.
-
-Usage: wait_for_build.py <build_id>
-"""
-
-import argparse
-import sys
-import time
-import datetime
-
-from googleapiclient.discovery import build
-from oauth2client.client import GoogleCredentials
-
-POLL_INTERVAL = 15
-cloudbuild = None
-
-
-def get_build(build_id, cloudbuild, project):
- return cloudbuild.projects().builds().get(
- projectId=project, id=build_id).execute()
-
-
-def wait_for_build(build_id, project):
- DONE_STATUSES = [
- 'SUCCESS',
- 'FAILURE',
- 'INTERNAL_ERROR',
- 'CANCELLED',
- 'TIMEOUT',
- ]
-
- status = None
- while True:
- build_info = get_build(build_id, cloudbuild, project)
-
- current_status = build_info['status']
- if current_status != status:
- print datetime.datetime.now(), current_status
- sys.stdout.flush()
- status = current_status
- if status in DONE_STATUSES:
- return status == 'SUCCESS'
-
- time.sleep(POLL_INTERVAL)
-
-
-def main():
- global cloudbuild
-
- parser = argparse.ArgumentParser(description='Wait for build to complete')
- parser.add_argument(
- '-p', '--project', help='Cloud Project', default='oss-fuzz')
- parser.add_argument('build_id', help='The Container Builder build ID.')
-
- args = parser.parse_args()
-
- credentials = GoogleCredentials.get_application_default()
- cloudbuild = build('cloudbuild', 'v1', credentials=credentials)
-
- success = wait_for_build(args.build_id, args.project)
-
- if not success:
- sys.exit(1)
-
-
-if __name__ == '__main__':
- main()