aboutsummaryrefslogtreecommitdiff
path: root/infra/build
diff options
context:
space:
mode:
authorkabeer27 <32016558+kabeer27@users.noreply.github.com>2020-07-22 23:23:31 -0700
committerGitHub <noreply@github.com>2020-07-23 16:23:31 +1000
commit9d2381a6216ef25b3607b88fbc16c679c338e099 (patch)
tree6cae59ce83868d7b51f82edbb1fce48081ca824d /infra/build
parentbdb0b339d045edd49846e7b691d85e84ff861337 (diff)
downloadoss-fuzz-9d2381a6216ef25b3607b88fbc16c679c338e099.tar.gz
Initial integration of builds_status (#4175)
Diffstat (limited to 'infra/build')
l---------infra/build/functions/builds_status.py1
-rw-r--r--infra/build/functions/datastore_entities.py8
-rwxr-xr-xinfra/build/functions/deploy.sh18
-rw-r--r--infra/build/functions/main.py6
-rw-r--r--infra/build/functions/request_build.py24
-rw-r--r--infra/build/functions/request_build_test.py30
-rw-r--r--infra/build/functions/requirements.txt2
-rw-r--r--infra/build/functions/update_build_status.py143
8 files changed, 231 insertions, 1 deletions
diff --git a/infra/build/functions/builds_status.py b/infra/build/functions/builds_status.py
new file mode 120000
index 000000000..a773a7e5a
--- /dev/null
+++ b/infra/build/functions/builds_status.py
@@ -0,0 +1 @@
+../../gcb/builds_status.py \ No newline at end of file
diff --git a/infra/build/functions/datastore_entities.py b/infra/build/functions/datastore_entities.py
index f8389112a..d2ebb647c 100644
--- a/infra/build/functions/datastore_entities.py
+++ b/infra/build/functions/datastore_entities.py
@@ -30,3 +30,11 @@ class Project(ndb.Model):
class GitAuth(ndb.Model):
"""Represents Github access token entity."""
access_token = ndb.StringProperty()
+
+
# pylint: disable=too-few-public-methods
class BuildsHistory(ndb.Model):
  """Datastore entity holding recent build ids for one project/tag pair.

  One entity exists per (project, build tag suffix) combination; its key id
  is derived from the project name and the tag suffix.
  """
  # Tag suffix distinguishing the build type (e.g. fuzzing vs. coverage).
  build_tag_suffix = ndb.StringProperty()
  # Name of the OSS-Fuzz project these builds belong to.
  project = ndb.StringProperty()
  # Cloud Build ids, oldest first (appended by update_build_history).
  build_ids = ndb.StringProperty(repeated=True)
diff --git a/infra/build/functions/deploy.sh b/infra/build/functions/deploy.sh
index d9ec203fc..08810bb83 100755
--- a/infra/build/functions/deploy.sh
+++ b/infra/build/functions/deploy.sh
@@ -31,6 +31,11 @@ SYNC_SCHEDULER_JOB=sync-scheduler
SYNC_JOB_SCHEDULE="*/30 * * * *"
SYNC_MESSAGE="Start Sync"
+UPDATE_BUILD_JOB_TOPIC=builds-status
+UPDATE_BUILD_SCHEDULER_JOB=builds-status-scheduler
+UPDATE_BUILD_JOB_SCHEDULE="*/30 * * * *"
+UPDATE_BUILD_MESSAGE="Update build statuses"
+
function deploy_pubsub_topic {
topic=$1
@@ -91,6 +96,7 @@ deploy_pubsub_topic $BUILD_JOB_TOPIC $PROJECT_ID
deploy_pubsub_topic $SYNC_JOB_TOPIC $PROJECT_ID
deploy_pubsub_topic $BASE_IMAGE_JOB_TOPIC $BASE_PROJECT_ID
deploy_pubsub_topic $COVERAGE_BUILD_JOB_TOPIC $PROJECT_ID
+deploy_pubsub_topic $UPDATE_BUILD_JOB_TOPIC $PROJECT_ID
deploy_scheduler $SYNC_SCHEDULER_JOB \
"$SYNC_JOB_SCHEDULE" \
@@ -110,6 +116,13 @@ deploy_scheduler $COVERAGE_BUILD_SCHEDULER_JOB \
"$COVERAGE_BUILD_MESSAGE" \
$PROJECT_ID
+deploy_scheduler $UPDATE_BUILD_SCHEDULER_JOB \
+ "$UPDATE_BUILD_JOB_SCHEDULE" \
+ $UPDATE_BUILD_JOB_TOPIC \
+ "$UPDATE_BUILD_MESSAGE" \
+ $PROJECT_ID
+
+
deploy_cloud_function sync \
sync \
$SYNC_JOB_TOPIC \
@@ -129,3 +142,8 @@ deploy_cloud_function request-coverage-build \
coverage_build \
$COVERAGE_BUILD_JOB_TOPIC \
$PROJECT_ID
+
+deploy_cloud_function update-builds \
+ builds_status \
+ $UPDATE_BUILD_JOB_TOPIC \
+ $PROJECT_ID
diff --git a/infra/build/functions/main.py b/infra/build/functions/main.py
index dace1f8bc..c34dc1329 100644
--- a/infra/build/functions/main.py
+++ b/infra/build/functions/main.py
@@ -19,6 +19,7 @@ import base_images
import project_sync
import request_build
import request_coverage_build
+import update_build_status
def build_project(event, context):
@@ -39,3 +40,8 @@ def build_base_images(event, context):
def coverage_build(event, context):
"""Entry point for cloud function to build coverage reports."""
request_coverage_build.request_coverage_build(event, context)
+
+
def builds_status(event, context):
  """Cloud Function entry point: refresh build statuses and badges."""
  update_build_status.update_status(event, context)
diff --git a/infra/build/functions/request_build.py b/infra/build/functions/request_build.py
index 44b2fcf18..c50c6e3d5 100644
--- a/infra/build/functions/request_build.py
+++ b/infra/build/functions/request_build.py
@@ -23,9 +23,29 @@ from google.cloud import ndb
import build_lib
import build_project
+from datastore_entities import BuildsHistory
from datastore_entities import Project
BASE_PROJECT = 'oss-fuzz-base'
+MAX_BUILD_HISTORY_LENGTH = 64
+
+
def update_build_history(project_name, build_id, build_tag_suffix):
  """Record a newly requested build in the project's build history.

  Keeps at most MAX_BUILD_HISTORY_LENGTH ids per (project, tag suffix)
  pair, discarding the oldest entry when the limit is reached.

  Args:
    project_name: name of the OSS-Fuzz project.
    build_id: Cloud Build id of the build that was just requested.
    build_tag_suffix: tag suffix distinguishing the build type.
  """
  # Bug fix: the entity id must match the lookup key. Previously the
  # lookup used `project_name + build_tag_suffix` while the entity was
  # created with an extra '-' separator, so the lookup never found the
  # entity it had created and the history was overwritten every build.
  key_id = project_name + build_tag_suffix
  project = ndb.Key(BuildsHistory, key_id).get()

  if not project:
    project = BuildsHistory(id=key_id,
                            build_tag_suffix=build_tag_suffix,
                            project=project_name,
                            build_ids=[])

  # Drop the oldest build id once the history is full.
  if len(project.build_ids) >= MAX_BUILD_HISTORY_LENGTH:
    project.build_ids.pop(0)

  project.build_ids.append(build_id)
  project.put()
def get_project_data(project_name):
@@ -33,7 +53,7 @@ def get_project_data(project_name):
with ndb.Client().context():
query = Project.query(Project.name == project_name)
project = query.get()
- if project is None:
+ if not project:
raise RuntimeError(
'Project {0} not available in cloud datastore'.format(project_name))
project_yaml_contents = project.project_yaml_contents
@@ -61,6 +81,7 @@ def run_build(project_name, image_project, build_steps, credentials, tag):
'options': {
'machineType': 'N1_HIGHCPU_32'
},
+ 'logsBucket': build_project.GCB_LOGS_BUCKET,
'tags': [project_name + tag,],
}
@@ -72,6 +93,7 @@ def run_build(project_name, image_project, build_steps, credentials, tag):
body=build_body).execute()
build_id = build_info['metadata']['build']['id']
+ update_build_history(project_name, build_id, tag)
logging.info('Build ID: %s', build_id)
logging.info('Logs: %s', build_project.get_logs_url(build_id, image_project))
diff --git a/infra/build/functions/request_build_test.py b/infra/build/functions/request_build_test.py
index 2af82eb58..57ff3fe4c 100644
--- a/infra/build/functions/request_build_test.py
+++ b/infra/build/functions/request_build_test.py
@@ -22,8 +22,11 @@ from unittest import mock
from google.cloud import ndb
+from datastore_entities import BuildsHistory
from datastore_entities import Project
from request_build import get_build_steps
+from request_build import get_project_data
+from request_build import update_build_history
import test_utils
@@ -73,6 +76,33 @@ class TestRequestBuilds(unittest.TestCase):
self.assertRaises(RuntimeError, get_build_steps, 'test-project',
'oss-fuzz', 'oss-fuzz-base')
def test_build_history(self):
  """A full history drops its oldest id when a new build is recorded."""
  with ndb.Client().context():
    # Pre-populate a history already at MAX_BUILD_HISTORY_LENGTH (64).
    BuildsHistory(id='test-project-fuzzing',
                  build_tag_suffix='fuzzing',
                  project='test-project',
                  build_ids=[str(i) for i in range(1, 65)]).put()
    update_build_history('test-project', '65', '-fuzzing')

    # '1' was evicted and '65' appended.
    self.assertEqual(BuildsHistory.query().get().build_ids,
                     [str(i) for i in range(2, 66)])
+
def test_build_history_no_existing_project(self):
  """A fresh history entity is created when none exists yet."""
  with ndb.Client().context():
    update_build_history('test-project', '1', 'fuzzing')

    self.assertEqual(BuildsHistory.query().get().build_ids, ['1'])
+
def test_get_project_data(self):
  """get_project_data raises when the project is absent from datastore."""
  with ndb.Client().context():
    self.assertRaises(RuntimeError, get_project_data, 'test-project')
+
@classmethod
def tearDownClass(cls):
test_utils.cleanup_emulator(cls.ds_emulator)
diff --git a/infra/build/functions/requirements.txt b/infra/build/functions/requirements.txt
index d623f8f5b..1e5f51097 100644
--- a/infra/build/functions/requirements.txt
+++ b/infra/build/functions/requirements.txt
@@ -20,6 +20,8 @@ grpcio==1.29.0
google-auth==1.18.0
google-cloud-ndb==1.3.0
google-cloud-scheduler==1.3.0
+google-cloud-storage==1.29.0
google-api-core==1.21.0
google-api-python-client==1.9.3
oauth2client==4.1.3
+python-dateutil==2.8.1
diff --git a/infra/build/functions/update_build_status.py b/infra/build/functions/update_build_status.py
new file mode 100644
index 000000000..490efab9c
--- /dev/null
+++ b/infra/build/functions/update_build_status.py
@@ -0,0 +1,143 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Cloud function to request builds."""
+import logging
+
+import google.auth
+from googleapiclient.discovery import build
+from google.cloud import ndb
+
+import build_and_run_coverage
+import build_project
+import builds_status
+from datastore_entities import BuildsHistory
+from datastore_entities import Project
+
+BADGE_DIR = 'badge_images'
+DESTINATION_BADGE_DIR = 'badges'
+
+
class MissingBuildLogError(Exception):
  """Raised when a finished build's log file is absent from cloud storage."""
+
+
# pylint: disable=no-member
def get_last_build(build_ids):
  """Return the most recent finished build among build_ids, or None.

  Walks the history from newest to oldest and skips builds that are still
  running. As a side effect, the returned build's log is uploaded via
  builds_status.upload_log.

  Raises:
    MissingBuildLogError: the finished build has no log file in storage.
  """
  credentials, image_project = google.auth.default()
  cloudbuild = build('cloudbuild',
                     'v1',
                     credentials=credentials,
                     cache_discovery=False)

  for build_id in reversed(build_ids):
    project_build = cloudbuild.projects().builds().get(
        projectId=image_project, id=build_id).execute()
    # A build still in progress has no final status yet; keep looking.
    if project_build['status'] == 'WORKING':
      continue

    if builds_status.upload_log(build_id):
      return project_build

    log_name = 'log-{0}'.format(build_id)
    raise MissingBuildLogError('Missing build log file {0}'.format(log_name))

  return None
+
+
def update_build_status(build_tag_suffix, status_filename):
  """Compute and upload last-build statuses for all projects with a tag.

  For every BuildsHistory entity matching build_tag_suffix, looks up the
  last finished build and records whether it succeeded, then uploads the
  aggregated lists to status_filename via builds_status.upload_status.

  Args:
    build_tag_suffix: tag suffix selecting which build histories to scan.
    status_filename: destination file name for the uploaded status report.

  Returns:
    Dict mapping project name to True/False (last build succeeded or not).
  """
  statuses = {}
  successes = []
  failures = []

  for project_build in BuildsHistory.query(
      BuildsHistory.build_tag_suffix == build_tag_suffix):
    last_build = get_last_build(project_build.build_ids)
    if not last_build:
      logging.error('Failed to get last build for project %s',
                    project_build.project)
      continue

    # Build one entry and route it; the success and failure records were
    # previously duplicated except for the 'success' flag.
    success = last_build['status'] == 'SUCCESS'
    statuses[project_build.project] = success
    entry = {
        'name': project_build.project,
        'build_id': last_build['id'],
        'finish_time': last_build['finishTime'],
        'success': success,
    }
    if success:
      successes.append(entry)
    else:
      failures.append(entry)

  builds_status.upload_status(successes, failures, status_filename)
  return statuses
+
+
def update_build_badges(project, last_build_successful,
                        last_coverage_build_successful):
  """Upload status badges for the given project.

  Chooses 'building', 'coverage_failing', or 'failing' (fuzzing failure
  takes precedence over coverage failure) and copies the matching badge
  image into the per-project badges directory for each image type.

  Args:
    project: project name used in the destination blob path.
    last_build_successful: whether the last fuzzing build succeeded.
    last_coverage_build_successful: whether the last coverage build succeeded.
  """
  badge = 'building'
  if not last_coverage_build_successful:
    badge = 'coverage_failing'
  if not last_build_successful:
    badge = 'failing'

  # Use the module logger instead of print so badge updates appear in the
  # Cloud Function logs alongside the other messages in this file.
  logging.info('[badge] %s: %s', project, badge)

  # The bucket handle is loop-invariant; fetch it once, not per extension.
  status_bucket = builds_status.get_storage_client().get_bucket(
      builds_status.STATUS_BUCKET)

  for extension in builds_status.BADGE_IMAGE_TYPES:
    badge_name = '{badge}.{extension}'.format(badge=badge, extension=extension)

    # Copy blob from badge_images/badge_name to badges/project/
    blob_name = '{badge_dir}/{badge_name}'.format(badge_dir=BADGE_DIR,
                                                  badge_name=badge_name)

    destination_blob_name = '{badge_dir}/{project_name}.{extension}'.format(
        badge_dir=DESTINATION_BADGE_DIR,
        project_name=project,
        extension=extension)

    badge_blob = status_bucket.blob(blob_name)
    status_bucket.copy_blob(badge_blob,
                            status_bucket,
                            new_name=destination_blob_name)
+
+
# pylint: disable=no-member
def update_status(event, context):
  """Entry point for cloud function to update build statuses and badges."""
  del event, context  # Unused.

  with ndb.Client().context():
    fuzzing_statuses = update_build_status(build_project.FUZZING_BUILD_TAG,
                                           status_filename='status.json')
    coverage_statuses = update_build_status(
        build_and_run_coverage.COVERAGE_BUILD_TAG,
        status_filename='status-coverage.json')

    for project in Project.query():
      name = project.name
      # Only refresh badges for projects with both kinds of build history.
      if name in fuzzing_statuses and name in coverage_statuses:
        update_build_badges(name, fuzzing_statuses[name],
                            coverage_statuses[name])