summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--cbuildbot/stages/report_stages.py2
-rwxr-xr-xcbuildbot/stages/report_stages_unittest.py1
-rw-r--r--cbuildbot/validation_pool.py3
-rw-r--r--lib/cidb.py57
-rwxr-xr-xlib/cidb_integration_test.py141
5 files changed, 101 insertions, 103 deletions
diff --git a/cbuildbot/stages/report_stages.py b/cbuildbot/stages/report_stages.py
index ab7881e1e..c3ba9178d 100644
--- a/cbuildbot/stages/report_stages.py
+++ b/cbuildbot/stages/report_stages.py
@@ -96,8 +96,6 @@ class BuildStartStage(generic_stages.BuilderStage):
build_number=d['build-number'],
build_config=d['bot-config'],
bot_hostname=d['bot-hostname'],
- start_time=cros_build_lib.ParseUserDateTimeFormat(
- d['time']['start']),
master_build_id=d['master_build_id'])
self._run.attrs.metadata.UpdateWithDict({'build_id': build_id,
'db_type': db_type})
diff --git a/cbuildbot/stages/report_stages_unittest.py b/cbuildbot/stages/report_stages_unittest.py
index 953e69794..28da0258f 100755
--- a/cbuildbot/stages/report_stages_unittest.py
+++ b/cbuildbot/stages/report_stages_unittest.py
@@ -55,7 +55,6 @@ class BuildStartStageTest(generic_stages_unittest.AbstractStageTest):
build_number=1234321,
builder_name=mox.IgnoreArg(),
master_build_id=None,
- start_time=mox.IgnoreArg(),
waterfall='chromiumos').AndReturn(31337)
mox.Replay(self.mock_cidb)
self.RunStage()
diff --git a/cbuildbot/validation_pool.py b/cbuildbot/validation_pool.py
index b86a43a55..b3564f181 100644
--- a/cbuildbot/validation_pool.py
+++ b/cbuildbot/validation_pool.py
@@ -2274,9 +2274,6 @@ class ValidationPool(object):
db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
if db:
build_id = self._metadata.GetValue('build_id')
- # NOTE(akeshet): If timestamp is None, then the database timestamp
- # will differ slightly from the metadata timestamp, but I don't
- # think that matters.
db.InsertCLActions(build_id,
[metadata_lib.GetCLActionTuple(change, action, timestamp)])
diff --git a/lib/cidb.py b/lib/cidb.py
index 54bf86f46..709bf1bc7 100644
--- a/lib/cidb.py
+++ b/lib/cidb.py
@@ -4,7 +4,6 @@
"""Continuous Integration Database Library."""
-import datetime
import glob
import logging
import os
@@ -19,8 +18,6 @@ except ImportError:
'Unable to import sqlalchemy. Please install this package by running '
'`sudo apt-get install python-sqlalchemy` or similar.')
-import time
-
from chromite.cbuildbot import constants
from chromite.lib import retry_util
@@ -254,19 +251,18 @@ class SchemaVersionedMySQLConnection(object):
self._meta.reflect(bind=self._GetEngine())
def _Insert(self, table, values):
- """Create and execute an INSERT query.
+ """Create and execute a one-row INSERT query.
Args:
table: Table name to insert to.
- values: Dictionary of column values to insert. Or, list of
- value dictionaries to insert multiple rows.
+ values: Dictionary of column values to insert.
Returns:
- Integer primary key of the last inserted row.
+ Integer primary key of the inserted row.
"""
self._ReflectToMetadata()
- ins = self._meta.tables[table].insert()
- r = self._Execute(ins, values)
+ ins = self._meta.tables[table].insert().values(values)
+ r = self._Execute(ins)
return r.inserted_primary_key[0]
def _InsertMany(self, table, values):
@@ -279,9 +275,16 @@ class SchemaVersionedMySQLConnection(object):
Returns:
The number of inserted rows.
"""
+ # sqlalchemy 0.7 and prior has a bug in which it does not always
+ # correctly unpack a list of rows to multi-insert if the list contains
+ # only one item.
+ if len(values) == 1:
+ self._Insert(table, values[0])
+ return 1
+
self._ReflectToMetadata()
- ins = self._meta.tables[table].insert()
- r = self._Execute(ins, values)
+ ins = self._meta.tables[table].insert().values(values)
+ r = self._Execute(ins)
return r.rowcount
def _GetPrimaryKey(self, table):
@@ -330,8 +333,9 @@ class SchemaVersionedMySQLConnection(object):
"""
self._ReflectToMetadata()
primary_key = self._GetPrimaryKey(table)
- upd = self._meta.tables[table].update().where(primary_key==row_id)
- r = self._Execute(upd, values)
+ upd = self._meta.tables[table].update().where(primary_key==row_id
+ ).values(values)
+ r = self._Execute(upd)
return r.rowcount
def _UpdateWhere(self, table, where, values):
@@ -459,8 +463,7 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
@minimum_schema(2)
def InsertBuild(self, builder_name, waterfall, build_number,
- build_config, bot_hostname, start_time=None,
- master_build_id=None):
+ build_config, bot_hostname, master_build_id=None):
"""Insert a build row.
Args:
@@ -469,13 +472,8 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
build_number: buildbot build number.
build_config: cbuildbot config of build
bot_hostname: hostname of bot running the build
- start_time: (Optional) Unix timestamp of build start time. If None,
- current time will be used.
master_build_id: (Optional) primary key of master build to this build.
"""
- start_time = start_time or time.time()
- dt = datetime.datetime.fromtimestamp(start_time)
-
return self._Insert('buildTable', {'builder_name': builder_name,
'buildbot_generation':
constants.BUILDBOT_GENERATION,
@@ -483,7 +481,8 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
'build_number': build_number,
'build_config' : build_config,
'bot_hostname': bot_hostname,
- 'start_time' : dt,
+ 'start_time' :
+ sqlalchemy.func.current_timestamp(),
'master_build_id' : master_build_id}
)
@@ -511,7 +510,6 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
change_number = cl_action[0]['gerrit_number']
patch_number = cl_action[0]['patch_number']
action = cl_action[1]
- timestamp = cl_action[2]
reason = cl_action[3]
values.append({
'build_id' : build_id,
@@ -519,7 +517,6 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
'change_number': change_number,
'patch_number' : patch_number,
'action' : action,
- 'timestamp' : datetime.datetime.fromtimestamp(timestamp),
'reason' : reason})
return self._InsertMany('clActionTable', values)
@@ -635,8 +632,7 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
@minimum_schema(2)
- def FinishBuild(self, build_id, finish_time=None, status=None,
- status_pickle=None):
+ def FinishBuild(self, build_id, status=None, status_pickle=None):
"""Update the given build row, marking it as finished.
This should be called once per build, as the last update to the build.
@@ -644,19 +640,14 @@ class CIDBConnection(SchemaVersionedMySQLConnection):
Args:
build_id: id of row to update.
- finish_time: Unix timestamp of build finish time. If None, current time
- will be used.
status: Final build status, one of
manifest_version.BuilderStatus.COMPLETED_STATUSES.
status_pickle: Pickled manifest_version.BuilderStatus.
"""
self._ReflectToMetadata()
- finish_time = finish_time or time.time()
- dt = datetime.datetime.fromtimestamp(finish_time)
-
- # TODO(akeshet) atomically update the final field of metadata to
- # True
- self._Update('buildTable', build_id, {'finish_time' : dt,
+ # The current timestamp is evaluated on the database, not locally.
+ current_timestamp = sqlalchemy.func.current_timestamp()
+ self._Update('buildTable', build_id, {'finish_time' : current_timestamp,
'status' : status,
'status_pickle' : status_pickle,
'final' : True})
diff --git a/lib/cidb_integration_test.py b/lib/cidb_integration_test.py
index 524700f50..b7113b371 100755
--- a/lib/cidb_integration_test.py
+++ b/lib/cidb_integration_test.py
@@ -14,7 +14,6 @@ to the above test instance.
# pylint: disable-msg= W0212
-import datetime
import glob
import logging
import os
@@ -96,6 +95,29 @@ class CIDBMigrationsTest(CIDBIntegrationTest):
for i in range(1, max_version+1):
db.ApplySchemaMigrations(i)
+ def testActions(self):
+ """Test that InsertCLActions accepts 0-, 1-, and multi-item lists."""
+ db = self._PrepareFreshDatabase()
+ build_id = db.InsertBuild('my builder', 'chromiumos', 12, 'my config',
+ 'my bot hostname')
+
+ a1 = metadata_lib.GetCLActionTuple(
+ metadata_lib.GerritPatchTuple(1, 1, True),
+ constants.CL_ACTION_PICKED_UP)
+ a2 = metadata_lib.GetCLActionTuple(
+ metadata_lib.GerritPatchTuple(1, 1, True),
+ constants.CL_ACTION_PICKED_UP)
+ a3 = metadata_lib.GetCLActionTuple(
+ metadata_lib.GerritPatchTuple(1, 1, True),
+ constants.CL_ACTION_PICKED_UP)
+
+ db.InsertCLActions(build_id, [])
+ db.InsertCLActions(build_id, [a1])
+ db.InsertCLActions(build_id, [a2, a3])
+
+ action_count = db._GetEngine().execute('select count(*) from clActionTable'
+ ).fetchall()[0][0]
+ self.assertEqual(action_count, 3)
class CIDBAPITest(CIDBIntegrationTest):
"""Tests of the CIDB API."""
@@ -136,59 +158,12 @@ def GetTestDataSeries(test_data_path):
class DataSeries0Test(CIDBIntegrationTest):
"""Simulate a set of 630 master/slave CQ builds."""
- # TODO(akeshet): Once our prod and debug databases are migrated
- # to schema 11, this test of the migration can be removed.
- def testCQWithSchema8(self):
- """Run the CQ test with schema version 8, then migrate to 11."""
- # Run the CQ test at schema version 8
- self._PrepareFreshDatabase(8)
+ def testCQWithSchema11(self):
+ """Run the CQ test with schema version 11."""
+ # Run the CQ test at schema version 11
+ self._PrepareFreshDatabase(11)
self._runCQTest()
- # Now migrate to schema version 11, and run sanity checks.
- root_db = cidb.CIDBConnection(TEST_DB_CRED_ROOT)
- root_db.ApplySchemaMigrations(11)
-
- readonly_db = cidb.CIDBConnection(TEST_DB_CRED_READONLY)
-
- # last_updated column should be 0 for all rows.
- num_0_last_updated = readonly_db._GetEngine().execute(
- 'select count(*) from buildTable where last_updated = 0'
- ).fetchall()[0][0]
- self.assertEqual(num_0_last_updated, 630)
-
- self._start_and_finish_time_checks(readonly_db)
- self._cl_action_checks(readonly_db)
-
- def testCQWithSchema9(self):
- """Run the CQ test with schema version 9."""
- # Run the CQ test at schema version 8
- self._PrepareFreshDatabase(9)
- self._runCQTest()
-
- readonly_db = cidb.CIDBConnection(TEST_DB_CRED_READONLY)
-
- # We should have a diversity of last_updated times. Since the timestamp
- # resolution is only 1 second, and we have lots of parallelism in the test,
- # we won't have a distring last_updated time per row. But we will have at
- # least 100 distinct last_updated times.
- distinct_last_updated = readonly_db._GetEngine().execute(
- 'select count(distinct last_updated) from buildTable').fetchall()[0][0]
- self.assertTrue(distinct_last_updated > 100)
-
- ids_by_last_updated = readonly_db._GetEngine().execute(
- 'select id from buildTable order by last_updated').fetchall()
-
- ids_by_last_updated = [id_tuple[0] for id_tuple in ids_by_last_updated]
-
- # Build #1 should have been last updated before build # 200.
- self.assertLess(ids_by_last_updated.index(1),
- ids_by_last_updated.index(200))
-
- # However, build #1 (which was a master build) should have been last updated
- # AFTER build #2 which was its slave.
- self.assertGreater(ids_by_last_updated.index(1),
- ids_by_last_updated.index(2))
-
def _runCQTest(self):
"""Simulate a set of 630 master/slave CQ builds.
@@ -231,6 +206,34 @@ class DataSeries0Test(CIDBIntegrationTest):
self.assertEqual(len(readonly_db.GetSlaveStatuses(1)), 29)
self.assertEqual(len(readonly_db.GetSlaveStatuses(2)), 0)
+ self._start_and_finish_time_checks(readonly_db)
+ self._cl_action_checks(readonly_db)
+ self._last_updated_time_checks(readonly_db)
+
+ def _last_updated_time_checks(self, db):
+ """Sanity checks on the last_updated column."""
+ # We should have a diversity of last_updated times. Since the timestamp
+ # resolution is only 1 second, and we have lots of parallelism in the test,
+ # we won't have a distinct last_updated time per row. But we will have at
+ # least 100 distinct last_updated times.
+ distinct_last_updated = db._GetEngine().execute(
+ 'select count(distinct last_updated) from buildTable').fetchall()[0][0]
+ self.assertTrue(distinct_last_updated > 100)
+
+ ids_by_last_updated = db._GetEngine().execute(
+ 'select id from buildTable order by last_updated').fetchall()
+
+ ids_by_last_updated = [id_tuple[0] for id_tuple in ids_by_last_updated]
+
+ # Build #1 should have been last updated before build # 200.
+ self.assertLess(ids_by_last_updated.index(1),
+ ids_by_last_updated.index(200))
+
+ # However, build #1 (which was a master build) should have been last updated
+ # AFTER build #2 which was its slave.
+ self.assertGreater(ids_by_last_updated.index(1),
+ ids_by_last_updated.index(2))
+
def _cl_action_checks(self, db):
"""Sanity checks that correct cl actions were recorded."""
submitted_cl_count = db._GetEngine().execute(
@@ -255,10 +258,15 @@ class DataSeries0Test(CIDBIntegrationTest):
'select max(finish_time) from buildTable').fetchall()[0][0]
min_fin_time = db._GetEngine().execute(
'select min(finish_time) from buildTable').fetchall()[0][0]
- self.assertEqual(max_start_time, datetime.datetime(2014, 7, 7, 12, 49, 44))
- self.assertEqual(min_start_time, datetime.datetime(2014, 7, 4, 16, 14, 28))
- self.assertEqual(max_fin_time, datetime.datetime(2014, 7, 7, 14, 51, 38))
- self.assertEqual(min_fin_time, datetime.datetime(2014, 7, 4, 16, 33, 10))
+ self.assertGreater(max_start_time, min_start_time)
+ self.assertGreater(max_fin_time, min_fin_time)
+
+ # For all builds, finish_time should equal last_updated.
+ mismatching_times = db._GetEngine().execute(
+ 'select count(*) from buildTable where finish_time != last_updated'
+ ).fetchall()[0][0]
+ self.assertEqual(mismatching_times, 0)
+
def simulate_builds(self, db, metadatas):
"""Simulate a serires of Commit Queue master and slave builds.
@@ -323,7 +331,7 @@ class DataSeries1Test(CIDBIntegrationTest):
# Migrate db to specified version. As new schema versions are added,
# migrations to later version can be applied after the test builds are
# simulated, to test that db contents are correctly migrated.
- self._PrepareFreshDatabase(8)
+ self._PrepareFreshDatabase(11)
bot_db = cidb.CIDBConnection(TEST_DB_CRED_BOT)
@@ -350,6 +358,12 @@ class DataSeries1Test(CIDBIntegrationTest):
).fetchall()[0][0]
self.assertEqual(main_firmware_versions, 29)
+ # For all builds, finish_time should equal last_updated.
+ mismatching_times = bot_db._GetEngine().execute(
+ 'select count(*) from buildTable where finish_time != last_updated'
+ ).fetchall()[0][0]
+ self.assertEqual(mismatching_times, 0)
+
def _simulate_canary(self, db, metadata, master_build_id=None):
"""Helper method to simulate an individual canary build.
@@ -375,6 +389,11 @@ class DataSeries1Test(CIDBIntegrationTest):
db.UpdateBoardPerBuildMetadata(build_id, board, bm)
db.UpdateMetadata(build_id, metadata)
+
+ status = metadata_dict['status']['status']
+ status = _TranslateStatus(status)
+ db.FinishBuild(build_id, status)
+
return build_id
@@ -399,15 +418,11 @@ def _SimulateBuildStart(db, metadata, master_build_id=None):
# build was on.
waterfall = 'chromeos'
- start_time = cros_build_lib.ParseUserDateTimeFormat(
- metadata_dict['time']['start'])
-
build_id = db.InsertBuild(metadata_dict['builder-name'],
waterfall,
metadata_dict['build-number'],
metadata_dict['bot-config'],
metadata_dict['bot-hostname'],
- start_time,
master_build_id)
return build_id
@@ -443,13 +458,11 @@ def _SimulateCQBuildFinish(db, metadata, build_id):
db.UpdateMetadata(build_id, metadata)
- finish_time = cros_build_lib.ParseUserDateTimeFormat(
- metadata_dict['time']['finish'])
status = metadata_dict['status']['status']
status = _TranslateStatus(status)
- db.FinishBuild(build_id, finish_time, status)
+ db.FinishBuild(build_id, status)
# TODO(akeshet): Allow command line args to specify alternate CIDB instance