author     Shawn Nematbakhsh <shawnn@chromium.org>   2014-03-05 20:59:37 +0000
committer  Shawn Nematbakhsh <shawnn@chromium.org>   2014-03-05 21:03:49 +0000
commit     ee5cccbf01e35e92b15ff8525c9d7abafa85826a (patch)
tree       7a324ae87f68608a1dc5123e2d24fd7290281200
parent     e9a2dbc1cfb906c6705715b0c906f16c76d79530 (diff)
download   chromite-ee5cccbf01e35e92b15ff8525c9d7abafa85826a.tar.gz
Revert "Change ArchiveStage to optionally get upload list from a board overlay file."
This reverts commit 0a0a64089185292b81ac73e7fc172b16dc58d580.

Change-Id: I308bbef8b818f6b42d9bc7a84820f52ee6949c99
Reviewed-on: https://chromium-review.googlesource.com/188903
Reviewed-by: Shawn Nematbakhsh <shawnn@chromium.org>
Commit-Queue: Shawn Nematbakhsh <shawnn@chromium.org>
Tested-by: Shawn Nematbakhsh <shawnn@chromium.org>
-rw-r--r--   buildbot/cbuildbot_commands.py            72
-rwxr-xr-x   buildbot/cbuildbot_commands_unittest.py   32
-rw-r--r--   buildbot/cbuildbot_stages.py              77
-rw-r--r--   buildbot/portage_utilities.py             28
-rwxr-xr-x   buildbot/portage_utilities_unittest.py    12
-rw-r--r--   lib/cros_build_lib.py                     20
6 files changed, 28 insertions, 213 deletions
diff --git a/buildbot/cbuildbot_commands.py b/buildbot/cbuildbot_commands.py
index 5eba460fe..35901b557 100644
--- a/buildbot/cbuildbot_commands.py
+++ b/buildbot/cbuildbot_commands.py
@@ -1584,73 +1584,23 @@ def BuildImageZip(archive_dir, image_dir):
return filename
-def BuildStandaloneArchive(archive_dir, image_dir, artifact_info):
- """Create a compressed archive from the specified image information.
-
- The artifact info is derived from a JSON file in the board overlay. It
- should be in the following format:
- {
- "artifacts": [
- { artifact },
- { artifact },
- ...
- ]
- }
- Each artifact can contain the following keys:
- input - Required. A list of paths and globs that expands to
- the list of files to archive.
- output - the name of the archive to be created. If omitted,
- it will default to the first filename, stripped of
- extensions, plus the appropriate .tar.gz or other suffix.
- archive - "tar" or "zip". If omitted, files will be uploaded
- directly, without being archived together.
- compress - a value cros_build_lib.CompressionStrToType knows about. Only
- useful for tar. If omitted, an uncompressed tar will be created.
+def BuildStandaloneImageTarball(archive_dir, image_bin):
+ """Create a compressed tarball from the specified image.
Args:
archive_dir: Directory to store image zip.
- image_dir: Base path for all inputs.
- artifact_info: Extended archive configuration dictionary containing:
- - paths - required, list of files to archive.
- - output, archive & compress entries from the JSON file.
+ image_bin: Image to zip up.
Returns:
- The base name of the archive.
-
- Raises:
- A ValueError if the compression or archive values are unknown.
- A KeyError if a required field is missing from artifact_info.
+ The base name of the tarball.
"""
- if 'archive' not in artifact_info:
- # Nothing to do, just return the list as-is.
- return artifact_info['paths']
-
- inputs = artifact_info['paths']
- archive = artifact_info['archive']
- compress = artifact_info.get('compress')
- compress_type = cros_build_lib.CompressionStrToType(compress)
- if compress_type is None:
- raise ValueError('unknown compression type: %s' % compress)
-
- # If the output is fixed, use that. Otherwise, construct it
- # from the name of the first archived file, stripping extensions.
- filename = artifact_info.get(
- 'output', '%s.%s' % (os.path.splitext(inputs[0])[0], archive))
- if archive == 'tar':
- # Add the .compress extension if we don't have a fixed name.
- if 'output' not in artifact_info and compress:
- filename = "%s.%s" % (filename, compress)
- cros_build_lib.CreateTarball(
- os.path.join(archive_dir, filename), image_dir,
- inputs=inputs, compression=compress_type)
- elif archive == 'zip':
- cros_build_lib.RunCommand(
- ['zip', os.path.join(archive_dir, filename), '-r'] + inputs,
- cwd=image_dir, capture_output=True)
- else:
- raise ValueError('unknown archive type: %s' % archive)
-
- return [filename]
+ # Strip off the .bin from the filename.
+ image_dir, image_filename = os.path.split(image_bin)
+ filename = '%s.tar.xz' % os.path.splitext(image_filename)[0]
+ archive_filename = os.path.join(archive_dir, filename)
+ cros_build_lib.CreateTarball(archive_filename, image_dir,
+ inputs=[image_filename])
+ return filename
def BuildFirmwareArchive(buildroot, board, archive_dir):
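For reference, a minimal sketch of the two interfaces this hunk swaps, based only on the docstrings, tests, and call sites in this commit; the example artifact entry mirrors the removed docstring's format, and the image path is illustrative:

    import os

    # Shape of one entry the removed BuildStandaloneArchive consumed
    # (keys per its docstring and the unit tests below).
    example_artifact_info = {
        'paths': ['a.bin'],    # resolved input globs, relative to image_dir
        'output': 'a.tar.gz',  # optional fixed archive name
        'archive': 'tar',      # 'tar' or 'zip'; omit to upload files as-is
        'compress': 'gz',      # a string CompressionStrToType understood
    }

    # Naming rule of the restored BuildStandaloneImageTarball:
    # strip the image extension and append .tar.xz.
    def standalone_tarball_name(image_bin):
        image_filename = os.path.basename(image_bin)
        return '%s.tar.xz' % os.path.splitext(image_filename)[0]

    # standalone_tarball_name('/build/images/foo.bin') -> 'foo.tar.xz'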
diff --git a/buildbot/cbuildbot_commands_unittest.py b/buildbot/cbuildbot_commands_unittest.py
index 6ea851d45..7021018d6 100755
--- a/buildbot/cbuildbot_commands_unittest.py
+++ b/buildbot/cbuildbot_commands_unittest.py
@@ -553,38 +553,6 @@ class UnmockedTests(cros_test_lib.TempDirTestCase):
# TODO(build): Use assertIn w/python-2.7.
self.assertTrue('>%s</a>' % f in html)
- def testArchiveGeneration(self):
- """Verifies BuildStandaloneImageArchive produces correct archives"""
- image_dir = os.path.join(self.tempdir, 'inputs')
- archive_dir = os.path.join(self.tempdir, 'outputs')
- files = ('a.bin', 'aa', 'b b b', 'c', 'dalsdkjfasdlkf',)
- osutils.SafeMakedirs(image_dir)
- osutils.SafeMakedirs(archive_dir)
- for f in files:
- osutils.Touch(os.path.join(image_dir, f))
-
- # Check specifying tar functionality.
- artifact = {'paths': ['a.bin'], 'output': 'a.tar.gz', 'archive': 'tar',
- 'compress':'gz'}
- path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
- self.assertEquals(path, ['a.tar.gz'])
- cros_test_lib.VerifyTarball(os.path.join(archive_dir, path[0]),
- ['a.bin'])
-
- # Check multiple input files.
- artifact = {'paths': ['a.bin', 'aa'], 'output': 'aa.tar.gz',
- 'archive': 'tar', 'compress': 'gz'}
- path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
- self.assertEquals(path, ['aa.tar.gz'])
- cros_test_lib.VerifyTarball(os.path.join(archive_dir, path[0]),
- ['a.bin', 'aa'])
-
- # Check zip functionality.
- artifact = {'paths': ['a.bin'], 'archive': 'zip'}
- path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
- self.assertEquals(path, ['a.zip'])
- self.assertExists(os.path.join(archive_dir, path[0]))
-
if __name__ == '__main__':
cros_test_lib.main()
diff --git a/buildbot/cbuildbot_stages.py b/buildbot/cbuildbot_stages.py
index e882fbd2a..ba7b6790e 100644
--- a/buildbot/cbuildbot_stages.py
+++ b/buildbot/cbuildbot_stages.py
@@ -8,7 +8,6 @@ import contextlib
import datetime
import functools
import glob
-import itertools
import json
import logging
import math
@@ -3307,7 +3306,6 @@ class ArchiveStage(ArchivingStage):
self._upload_queue = multiprocessing.Queue()
self._push_image_status_queue = multiprocessing.Queue()
self._wait_for_channel_signing = multiprocessing.Queue()
- self.artifacts = []
def WaitForRecoveryImage(self):
"""Wait until artifacts needed by SignerTest stage are created.
@@ -3410,51 +3408,6 @@ class ArchiveStage(ArchivingStage):
extra_env=extra_env)
self._upload_queue.put([constants.DELTA_SYSROOT_TAR])
- def LoadArtifactsList(self, board, image_dir):
- """Load the list of artifacts to upload for this board.
-
- It attempts to load a JSON file, scripts/artifacts.json, from the
- overlay directories for this board. This file specifies the artifacts
- to generate, if it can't be found, it will use a default set that
- uploads every .bin file as a .tar.xz file except for
- chromiumos_qemu_image.bin.
-
- See BuildStandaloneArchive in cbuildbot_commands.py for format docs.
- """
- custom_artifacts_file = portage_utilities.ReadOverlayFile(
- 'scripts/artifacts.json', board=board)
- if custom_artifacts_file is None:
- artifacts = []
- for image_file in glob.glob(os.path.join(image_dir, '*.bin')):
- basename = os.path.basename(image_file)
- if basename != constants.VM_IMAGE_BIN:
- info = {'input': [basename], 'archive': 'tar', 'compress': 'xz'}
- artifacts.append(info)
- else:
- artifacts = json.loads(custom_artifacts_file)['artifacts']
-
- for artifact in artifacts:
- # Resolve the (possible) globs in the input list, and store
- # the actual set of files to use in 'paths'
- paths = []
- for s in artifact['input']:
- glob_paths = glob.glob(os.path.join(image_dir, s))
- if not glob_paths:
- logging.warning('No artifacts generated for input: %s', s)
- else:
- for path in glob_paths:
- paths.append(os.path.relpath(path, image_dir))
- artifact['paths'] = paths
- self.artifacts = artifacts
-
- def IsArchivedFile(self, filename):
- """Return True if filename is the name of a file being archived."""
- for artifact in self.artifacts:
- for path in itertools.chain(artifact['paths'], artifact['input']):
- if os.path.basename(path) == filename:
- return True
- return False
-
def PerformStage(self):
buildroot = self._build_root
config = self._run.config
@@ -3479,8 +3432,8 @@ class ArchiveStage(ArchivingStage):
# \- BuildAndArchiveAllImages
# (builds recovery image first, then launches functions below)
# \- BuildAndArchiveFactoryImages
- # \- ArchiveStandaloneArtifacts
- # \- ArchiveStandaloneArtifact
+ # \- ArchiveStandaloneTarballs
+ # \- ArchiveStandaloneTarball
# \- ArchiveZipFiles
# \- ArchiveHWQual
# \- PushImage (blocks on BuildAndArchiveAllImages)
@@ -3526,18 +3479,19 @@ class ArchiveStage(ArchivingStage):
self._run.attrs.release_tag)
self._release_upload_queue.put([filename])
- def ArchiveStandaloneArtifact(artifact_info):
- """Build and upload a single archive."""
- if artifact_info['paths']:
- for path in commands.BuildStandaloneArchive(archive_path, image_dir,
- artifact_info):
- self._release_upload_queue.put([path])
+ def ArchiveStandaloneTarball(image_file):
+ """Build and upload a single tarball."""
+ self._release_upload_queue.put([commands.BuildStandaloneImageTarball(
+ archive_path, image_file)])
- def ArchiveStandaloneArtifacts():
- """Build and upload standalone archives for each image."""
+ def ArchiveStandaloneTarballs():
+ """Build and upload standalone tarballs for each image."""
if config['upload_standalone_images']:
- parallel.RunTasksInProcessPool(ArchiveStandaloneArtifact,
- [[x] for x in self.artifacts])
+ inputs = []
+ for image_file in glob.glob(os.path.join(image_dir, '*.bin')):
+ if os.path.basename(image_file) != 'chromiumos_qemu_image.bin':
+ inputs.append([image_file])
+ parallel.RunTasksInProcessPool(ArchiveStandaloneTarball, inputs)
def ArchiveZipFiles():
"""Build and archive zip files.
@@ -3588,18 +3542,17 @@ class ArchiveStage(ArchivingStage):
# Generate the recovery image. To conserve loop devices, we try to only
# run one instance of build_image at a time. TODO(davidjames): Move the
# image generation out of the archive stage.
- self.LoadArtifactsList(self._current_board, image_dir)
# For recovery image to be generated correctly, BuildRecoveryImage must
# run before BuildAndArchiveFactoryImages.
- if self.IsArchivedFile(constants.BASE_IMAGE_BIN):
+ if 'base' in config['images']:
commands.BuildRecoveryImage(buildroot, board, image_dir, extra_env)
self._recovery_image_status_queue.put(True)
if config['images']:
parallel.RunParallelSteps([BuildAndArchiveFactoryImages,
ArchiveHWQual,
- ArchiveStandaloneArtifacts,
+ ArchiveStandaloneTarballs,
ArchiveZipFiles])
def ArchiveImageScripts():
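The net effect of the stage changes is that standalone uploads go back to a fixed rule instead of an overlay-supplied artifact list. A condensed sketch of the restored selection, assuming image_dir holds the built images (the qemu image name comes from the hunk above):

    import glob
    import os

    def standalone_tarball_inputs(image_dir):
        # Every .bin image except the qemu image, as the restored code picks.
        inputs = []
        for image_file in glob.glob(os.path.join(image_dir, '*.bin')):
            if os.path.basename(image_file) != 'chromiumos_qemu_image.bin':
                inputs.append([image_file])
        return inputs

    # Each entry is a one-element argument list for ArchiveStandaloneTarball.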
diff --git a/buildbot/portage_utilities.py b/buildbot/portage_utilities.py
index c48d382cb..59a0321eb 100644
--- a/buildbot/portage_utilities.py
+++ b/buildbot/portage_utilities.py
@@ -81,12 +81,12 @@ def FindOverlays(overlay_type, board=None, buildroot=constants.SOURCE_ROOT):
"""Return the list of overlays to use for a given buildbot.
Args:
+ board: Board to look at.
+ buildroot: Source root to find overlays.
overlay_type: A string describing which overlays you want.
'private': Just the private overlays.
'public': Just the public overlays.
'both': Both the public and private overlays.
- board: Board to look at.
- buildroot: Source root to find overlays.
"""
overlays = _ListOverlays(board=board, buildroot=buildroot)
private_prefix = _PRIVATE_PREFIX % dict(buildroot=buildroot)
@@ -101,30 +101,6 @@ def FindOverlays(overlay_type, board=None, buildroot=constants.SOURCE_ROOT):
return []
-def ReadOverlayFile(filename, overlay_type='both', board=None,
- buildroot=constants.SOURCE_ROOT):
- """Attempt to open a file in the overlay directories.
-
- Args:
- filename: Path to open inside the overlay.
- overlay_type: A string describing which overlays you want.
- 'private': Just the private overlays.
- 'public': Just the public overlays.
- 'both': Both the public and private overlays.
- board: Board to look at.
- buildroot: Source root to find overlays.
-
- Returns:
- The contents of the file, or None if no files could be opened.
- """
- for overlay in FindOverlays(overlay_type, board, buildroot):
- try:
- return osutils.ReadFile(os.path.join(overlay, filename))
- except IOError as e:
- if e.errno != os.errno.ENOENT:
- raise
-
-
class MissingOverlayException(Exception):
"""This exception indicates that a needed overlay is missing."""
diff --git a/buildbot/portage_utilities_unittest.py b/buildbot/portage_utilities_unittest.py
index f202e690f..186d7bc5f 100755
--- a/buildbot/portage_utilities_unittest.py
+++ b/buildbot/portage_utilities_unittest.py
@@ -513,18 +513,6 @@ class FindOverlaysTest(cros_test_lib.MoxTestCase):
self.assertEqual(self.overlays[self.MARIO][self.PUBLIC][:-1],
self.overlays[self.FAKE][self.PUBLIC])
- def testReadOverlayFile(self):
- """Verify that the boards are examined in the right order"""
- overlays = self.overlays[self.MARIO][self.PUBLIC]
- self.mox.StubOutWithMock(osutils, 'ReadFile')
- for overlay in overlays:
- osutils.ReadFile(os.path.join(overlay, 'test')).AndRaise(
- IOError(os.errno.ENOENT, 'ENOENT'))
- self.mox.ReplayAll()
- portage_utilities.ReadOverlayFile('test', self.PUBLIC, self.MARIO,
- constants.SOURCE_ROOT)
- self.mox.VerifyAll()
-
class BuildEBuildDictionaryTest(cros_test_lib.MoxTestCase):
"""Tests of the EBuild Dictionary."""
diff --git a/lib/cros_build_lib.py b/lib/cros_build_lib.py
index 4e9386d26..df6e53362 100644
--- a/lib/cros_build_lib.py
+++ b/lib/cros_build_lib.py
@@ -801,26 +801,6 @@ def FindCompressor(compression, chroot=None):
return std
-def CompressionStrToType(s):
- """Convert a compression string type to a constant.
-
- Args:
- s: string to check
-
- Returns:
- A constant, or None if the compression type is unknown.
- """
- _COMP_STR = {
- 'gz': COMP_GZIP,
- 'bz2': COMP_BZIP2,
- 'xz': COMP_XZ,
- }
- if s:
- return _COMP_STR.get(s)
- else:
- return COMP_NONE
-
-
def CreateTarball(target, cwd, sudo=False, compression=COMP_XZ, chroot=None,
inputs=None, extra_args=None, **kwargs):
"""Create a tarball. Executes 'tar' on the commandline.