summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--buildbot/builderstage.py5
-rw-r--r--buildbot/cbuildbot_commands.py42
-rwxr-xr-xbuildbot/cbuildbot_config.py6
-rwxr-xr-xbuildbot/cbuildbot_config_unittest.py21
-rw-r--r--buildbot/cbuildbot_results.py36
-rwxr-xr-xbuildbot/cbuildbot_run_unittest.py3
-rw-r--r--buildbot/lab_status.py2
-rwxr-xr-xbuildbot/lab_status_unittest.py1
-rwxr-xr-xbuildbot/lkgm_manager.py30
-rw-r--r--buildbot/manifest_version.py25
-rw-r--r--buildbot/portage_utilities.py9
-rwxr-xr-xbuildbot/repository.py4
-rw-r--r--buildbot/trybot_patch_pool.py6
-rw-r--r--buildbot/validation_pool.py33
-rw-r--r--lib/binpkg.py3
-rw-r--r--lib/cgroups.py2
-rw-r--r--lib/chrome_util.py1
-rw-r--r--lib/commandline.py2
-rw-r--r--lib/cros_build_lib.py26
-rw-r--r--lib/cros_test_lib.py3
-rw-r--r--lib/gerrit.py5
-rw-r--r--lib/git.py66
-rwxr-xr-xlib/gob_util.py5
-rw-r--r--lib/gs.py24
-rw-r--r--lib/locking.py18
-rw-r--r--lib/namespaces.py1
-rw-r--r--lib/operation.py1
-rw-r--r--lib/osutils.py14
-rw-r--r--lib/parallel.py2
-rw-r--r--lib/partial_mock.py9
-rw-r--r--lib/patch.py14
-rw-r--r--lib/remote_access.py7
-rwxr-xr-xlib/rewrite_git_alternates.py6
-rw-r--r--lib/stats.py10
-rwxr-xr-xlib/timeout_util_unittest.py1
-rw-r--r--lib/toolchain.py8
-rw-r--r--licensing/licenses.py7
-rwxr-xr-xlicensing/process-pkg.py5
-rw-r--r--scripts/cbuildbot.py6
-rw-r--r--scripts/cros_generate_breakpad_symbols.py4
-rw-r--r--scripts/cros_list_modified_packages.py3
-rw-r--r--scripts/cros_mark_as_stable.py1
-rw-r--r--scripts/cros_mark_chrome_as_stable.py9
-rwxr-xr-xscripts/cros_mark_chrome_as_stable_unittest.py7
-rw-r--r--scripts/cros_merge_to_branch.py1
-rw-r--r--scripts/cros_portage_upgrade.py7
-rwxr-xr-xscripts/cros_portage_upgrade_unittest.py46
-rw-r--r--scripts/cros_setup_toolchains.py6
-rw-r--r--scripts/merge_package_status.py3
-rw-r--r--scripts/parallel_emerge.py5
-rwxr-xr-xscripts/update_manifest_remotes.py2
-rw-r--r--scripts/upload_prebuilts.py22
-rw-r--r--scripts/upload_symbols.py4
-rwxr-xr-xscripts/wrapper.py1
54 files changed, 369 insertions, 221 deletions
diff --git a/buildbot/builderstage.py b/buildbot/builderstage.py
index af2841696..f2c242257 100644
--- a/buildbot/builderstage.py
+++ b/buildbot/builderstage.py
@@ -122,6 +122,7 @@ class BuilderStage(object):
Args:
stage: Link to a specific |stage|, otherwise the general buildbot log
+
Returns:
The fully formed URL
"""
@@ -187,7 +188,8 @@ class BuilderStage(object):
def _GetPortageEnvVar(self, envvar, board):
"""Get a portage environment variable for the configuration's board.
- envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.
+ Args:
+ envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.
Returns:
The value of the environment variable, as a string. If no such variable
@@ -211,6 +213,7 @@ class BuilderStage(object):
build_config: A build config for a master builder.
configs: Option override of cbuildbot_config.config for the list
of build configs to look through for slaves.
+
Returns:
A list of build configs corresponding to the slaves for the master
represented by build_config.
diff --git a/buildbot/cbuildbot_commands.py b/buildbot/cbuildbot_commands.py
index 044da864f..68a83f6ae 100644
--- a/buildbot/cbuildbot_commands.py
+++ b/buildbot/cbuildbot_commands.py
@@ -137,6 +137,7 @@ def ValidateClobber(buildroot):
Args:
buildroot: buildroot that's potentially clobbered.
+
Returns:
True if the clobber is ok.
"""
@@ -526,7 +527,8 @@ def ArchiveTestResults(buildroot, test_results_dir, test_basename):
This must a subdir of /tmp.
test_basename: The basename of the tarball.
- Returns the path to the tarball.
+ Returns:
+ The path to the tarball.
"""
test_results_dir = test_results_dir.lstrip('/')
chroot = os.path.join(buildroot, 'chroot')
@@ -559,13 +561,13 @@ def ArchiveVMFiles(buildroot, test_results_dir, archive_path):
tar file for each of these files, so that each can be downloaded
independently.
- Arguments:
+ Args:
images_dir: Directory containing the VM disk images.
archive_path: Directory the tarballs should be written to.
- Returns the paths to the tarballs.
+ Returns:
+ The paths to the tarballs.
"""
-
images_dir = os.path.join(buildroot, 'chroot', test_results_dir.lstrip('/'))
images = []
for path, _, filenames in os.walk(images_dir):
@@ -1094,7 +1096,8 @@ def GenerateDebugTarball(buildroot, board, archive_path, gdb_symbols):
archive_dir: Directory where tarball should be stored.
gdb_symbols: Include *.debug files for debugging core files with gdb.
- Returns the filename of the created debug tarball.
+ Returns:
+ The filename of the created debug tarball.
"""
# Generate debug tarball. This needs to run as root because some of the
# symbols are only readable by root.
@@ -1277,7 +1280,8 @@ def BuildFactoryTestImage(buildroot, board, extra_env):
board: Board type that was built on this machine
extra_env: Flags to be added to the environment for the new process.
- Returns the basename of the symlink created for the image.
+ Returns:
+ The basename of the symlink created for the image.
"""
# We use build_attempt=2 here to ensure that this image uses a different
@@ -1303,7 +1307,8 @@ def BuildFactoryInstallImage(buildroot, board, extra_env):
board: Board type that was built on this machine
extra_env: Flags to be added to the environment for the new process.
- Returns the basename of the symlink created for the image.
+ Returns:
+ The basename of the symlink created for the image.
"""
# We use build_attempt=3 here to ensure that this image uses a different
@@ -1384,7 +1389,8 @@ def FindFilesWithPattern(pattern, target='./', cwd=os.curdir):
target: the target directory to search.
cwd: current working directory.
- Returns a list of paths of the matched files.
+ Returns:
+ A list of paths of the matched files.
"""
# Backup the current working directory before changing it
old_cwd = os.getcwd()
@@ -1452,7 +1458,8 @@ def BuildFullAutotestTarball(buildroot, board, tarball_dir):
board: Board type that was built on this machine.
tarball_dir: Location for storing autotest tarballs.
- Returns a tuple the path of the full autotest tarball.
+ Returns:
+ A tuple the path of the full autotest tarball.
"""
tarball = os.path.join(tarball_dir, 'autotest.tar.bz2')
@@ -1485,7 +1492,8 @@ def BuildImageZip(archive_dir, image_dir):
archive_dir: Directory to store image.zip.
image_dir: Directory to zip up.
- Returns the basename of the zipfile.
+ Returns:
+ The basename of the zipfile.
"""
filename = 'image.zip'
zipfile = os.path.join(archive_dir, filename)
@@ -1501,7 +1509,8 @@ def BuildStandaloneImageTarball(archive_dir, image_bin):
archive_dir: Directory to store image zip.
image_bin: Image to zip up.
- Returns the base name of the tarball.
+ Returns:
+ The base name of the tarball.
"""
# Strip off the .bin from the filename.
image_dir, image_filename = os.path.split(image_bin)
@@ -1520,8 +1529,9 @@ def BuildFirmwareArchive(buildroot, board, archive_dir):
board: Board name of build target.
archive_dir: Directory to store output file.
- Returns the basename of the archived file, or None if the target board does
- not have firmware from source.
+ Returns:
+ The basename of the archived file, or None if the target board does
+ not have firmware from source.
"""
patterns = ['*image*.bin', 'updater-*.sh', 'ec.bin', 'dts/*']
firmware_root = os.path.join(buildroot, 'chroot', 'build', board, 'firmware')
@@ -1547,7 +1557,8 @@ def BuildFactoryZip(buildroot, board, archive_dir, image_root):
archive_dir: Directory to store image.zip.
image_root: Directory containing factory_shim and factory_test symlinks.
- Returns the basename of the zipfile.
+ Returns:
+ The basename of the zipfile.
"""
filename = 'factory_image.zip'
@@ -1636,7 +1647,7 @@ def CreateTestRoot(build_root):
"""Returns a temporary directory for test results in chroot.
Returns:
- Returns the path inside the chroot rather than whole path.
+ The path inside the chroot rather than whole path.
"""
# Create test directory within tmp in chroot.
chroot = os.path.join(build_root, 'chroot')
@@ -1732,6 +1743,7 @@ def CheckPGOData(architectures, cpv):
Args:
architectures: Set of architectures we're going to build Chrome for.
cpv: The portage_utilities.CPV object for chromeos-chrome.
+
Returns:
True if PGO data is available; false otherwise.
"""
diff --git a/buildbot/cbuildbot_config.py b/buildbot/cbuildbot_config.py
index d5c94bbc9..7ef2aed68 100755
--- a/buildbot/cbuildbot_config.py
+++ b/buildbot/cbuildbot_config.py
@@ -60,6 +60,7 @@ def OverrideConfigForTrybot(build_config, options):
build_config: The build configuration dictionary to override.
The dictionary is not modified.
options: The options passed on the commandline.
+
Returns:
A build configuration dictionary with the overrides applied.
"""
@@ -1917,8 +1918,9 @@ def _GetDisplayPosition(config_name, type_order=CONFIG_TYPE_DUMP_ORDER):
type_order: A tuple/list of config types in the order they are to be
displayed.
- If config name does not contain any of the suffixes, returns the index
- position after the last element of suffix_order.
+ Returns:
+ If |config_name| does not contain any of the suffixes, returns the index
+ position after the last element of suffix_order.
"""
for index, config_type in enumerate(type_order):
if config_name.endswith('-' + config_type) or config_name == config_type:
diff --git a/buildbot/cbuildbot_config_unittest.py b/buildbot/cbuildbot_config_unittest.py
index 4eb2618a8..f5da6bcda 100755
--- a/buildbot/cbuildbot_config_unittest.py
+++ b/buildbot/cbuildbot_config_unittest.py
@@ -42,9 +42,8 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testConfigsKeysMismatch(self):
"""Verify that all configs contain exactly the default keys.
- This checks for mispelled keys, or keys that are somehow removed.
+ This checks for mispelled keys, or keys that are somehow removed.
"""
-
expected_keys = set(cbuildbot_config._default.keys())
for build_name, config in cbuildbot_config.config.iteritems():
config_keys = set(config.keys())
@@ -58,16 +57,16 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
(build_name, list(missing_keys))))
def testConfigsHaveName(self):
- """ Configs must have names set."""
+ """Configs must have names set."""
for build_name, config in cbuildbot_config.config.iteritems():
self.assertTrue(build_name == config['name'])
def testConfigUseflags(self):
- """ Useflags must be lists.
- Strings are interpreted as arrays of characters for this, which is not
- useful.
- """
+ """Useflags must be lists.
+ Strings are interpreted as arrays of characters for this, which is not
+ useful.
+ """
for build_name, config in cbuildbot_config.config.iteritems():
useflags = config.get('useflags')
if not useflags is None:
@@ -77,7 +76,6 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testBoards(self):
"""Verify 'boards' is explicitly set for every config."""
-
for build_name, config in cbuildbot_config.config.iteritems():
self.assertTrue(isinstance(config['boards'], (tuple, list)),
"Config %s doesn't have a list of boards." % build_name)
@@ -88,7 +86,6 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testOverlaySettings(self):
"""Verify overlays and push_overlays have legal values."""
-
for build_name, config in cbuildbot_config.config.iteritems():
overlays = config['overlays']
push_overlays = config['push_overlays']
@@ -132,7 +129,6 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testChromeRev(self):
"""Verify chrome_rev has an expected value"""
-
for build_name, config in cbuildbot_config.config.iteritems():
self.assertTrue(
config['chrome_rev'] in constants.VALID_CHROME_REVISIONS + [None],
@@ -146,7 +142,6 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testValidVMTestType(self):
"""Verify vm_tests has an expected value"""
-
for build_name, config in cbuildbot_config.config.iteritems():
self.assertTrue(
config['vm_tests'] in constants.VALID_VM_TEST_TYPES + [None],
@@ -169,7 +164,6 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testBuildToRun(self):
"""Verify we don't try to run tests without building them."""
-
for build_name, config in cbuildbot_config.config.iteritems():
self.assertFalse(
isinstance(config['useflags'], list) and
@@ -178,13 +172,12 @@ class CBuildBotTest(cros_test_lib.MoxTestCase):
def testARMNoVMTest(self):
"""Verify ARM builds don't get VMTests turned on by accident"""
-
for build_name, config in cbuildbot_config.config.iteritems():
if build_name.startswith('arm-') or config['arm']:
self.assertTrue(config['vm_tests'] is None,
"ARM builder %s can't run vm tests!" % build_name)
- #TODO: Add test for compare functionality
+ # TODO: Add test for compare functionality
def testJSONDumpLoadable(self):
"""Make sure config export functionality works."""
cwd = os.path.dirname(os.path.abspath(__file__))
diff --git a/buildbot/cbuildbot_results.py b/buildbot/cbuildbot_results.py
index bf93b0be8..f3052ee41 100644
--- a/buildbot/cbuildbot_results.py
+++ b/buildbot/cbuildbot_results.py
@@ -168,8 +168,8 @@ class _Results(object):
def PreviouslyCompletedRecord(self, name):
"""Check to see if this stage was previously completed.
- Returns:
- A boolean showing the stage was successful in the previous run.
+ Returns:
+ A boolean showing the stage was successful in the previous run.
"""
return self._previous.get(name)
@@ -198,18 +198,18 @@ class _Results(object):
def Record(self, name, result, description=None, prefix=None, time=0):
"""Store off an additional stage result.
- Args:
- name: The name of the stage (e.g. HWTest [bvt])
- result:
- Result should be one of:
- Results.SUCCESS if the stage was successful.
- Results.SKIPPED if the stage was skipped.
- Results.FORGIVEN if the stage had warnings.
- Otherwise, it should be the exception stage errored with.
- description:
- The textual backtrace of the exception, or None
- prefix: The prefix of the stage (e.g. HWTest). Defaults to
- the value of name.
+ Args:
+ name: The name of the stage (e.g. HWTest [bvt])
+ result:
+ Result should be one of:
+ Results.SUCCESS if the stage was successful.
+ Results.SKIPPED if the stage was skipped.
+ Results.FORGIVEN if the stage had warnings.
+ Otherwise, it should be the exception stage errored with.
+ description:
+ The textual backtrace of the exception, or None
+ prefix: The prefix of the stage (e.g. HWTest). Defaults to
+ the value of name.
"""
if prefix is None:
prefix = name
@@ -218,16 +218,16 @@ class _Results(object):
def Get(self):
"""Fetch stage results.
- Returns:
- A list with one entry per stage run with a result.
+ Returns:
+ A list with one entry per stage run with a result.
"""
return self._results_log
def GetPrevious(self):
"""Fetch stage results.
- Returns:
- A list of stages names that were completed in a previous run.
+ Returns:
+ A list of stages names that were completed in a previous run.
"""
return self._previous
diff --git a/buildbot/cbuildbot_run_unittest.py b/buildbot/cbuildbot_run_unittest.py
index 454275fd2..9cb860889 100755
--- a/buildbot/cbuildbot_run_unittest.py
+++ b/buildbot/cbuildbot_run_unittest.py
@@ -40,6 +40,7 @@ def _NewBuilderRun(options=None, config=None):
Args:
options: Specify options or default to DEFAULT_OPTIONS.
config: Specify build config or default to DEFAULT_CONFIG.
+
Returns:
BuilderRun object.
"""
@@ -54,6 +55,7 @@ def _NewChildBuilderRun(child_index, options=None, config=None):
Args:
options: Specify options or default to DEFAULT_OPTIONS.
config: Specify build config or default to DEFAULT_CONFIG.
+
Returns:
ChildBuilderRun object.
"""
@@ -158,6 +160,7 @@ class BuilderRunTest(cros_test_lib.TestCase):
method: A BuilderRun method to call.
options_dict: Extend default options with this.
config_dict: Extend default config with this.
+
Returns:
Result of calling the given method.
"""
diff --git a/buildbot/lab_status.py b/buildbot/lab_status.py
index eff9b3753..16d95cfb9 100644
--- a/buildbot/lab_status.py
+++ b/buildbot/lab_status.py
@@ -33,6 +33,7 @@ def GetLabStatus(max_attempts=5):
Args:
max_attempts: max attempts to hit the lab status url.
+
Returns:
a dict with keys 'lab_is_up' and 'message'. lab_is_up points
to a boolean and message points to a string.
@@ -74,6 +75,7 @@ def CheckLabStatus(board=None):
Args:
board: board name that we want to check the status of.
+
Raises:
LabIsDownException if the lab is not up.
BoardIsDisabledException if the desired board is currently disabled.
diff --git a/buildbot/lab_status_unittest.py b/buildbot/lab_status_unittest.py
index 1b65b9537..38ab0f5e0 100755
--- a/buildbot/lab_status_unittest.py
+++ b/buildbot/lab_status_unittest.py
@@ -34,6 +34,7 @@ class TestLabStatus(cros_test_lib.MockTestCase):
def _TestGetLabStatusHelper(self, lab_message, general_state, expected_return,
max_attempts=5, failed_attempts=0):
"""Tests whether we get correct lab status.
+
Args:
lab_message: A message describing lab status and
disabled boards, e.g. "Lab is Up [stumpy, kiev]"
diff --git a/buildbot/lkgm_manager.py b/buildbot/lkgm_manager.py
index 8b1133f6e..2e9158238 100755
--- a/buildbot/lkgm_manager.py
+++ b/buildbot/lkgm_manager.py
@@ -235,12 +235,13 @@ class LKGMManager(manifest_version.BuildSpecsManager):
retries=manifest_version.NUM_RETRIES):
"""Creates, syncs to, and returns the next candidate manifest.
- Args:
- validation_pool: Validation pool to apply to the manifest before
- publishing.
- retries: Number of retries for updating the status.
- Raises:
- GenerateBuildSpecException in case of failure to generate a buildspec
+ Args:
+ validation_pool: Validation pool to apply to the manifest before
+ publishing.
+ retries: Number of retries for updating the status.
+
+ Raises:
+ GenerateBuildSpecException in case of failure to generate a buildspec
"""
self.CheckoutSourceCode()
@@ -308,11 +309,13 @@ class LKGMManager(manifest_version.BuildSpecsManager):
This method sets up an LKGM manager and publishes a new manifest to the
manifest versions repo based on the passed in manifest but filtering
internal repositories and changes out of it.
+
Args:
manifest: A manifest that possibly contains private changes/projects. It
is named with the given version we want to create a new manifest from
i.e R20-1920.0.1-rc7.xml where R20-1920.0.1-rc7 is the version.
retries: Number of retries for updating the status.
+
Raises:
GenerateBuildSpecException in case of failure to check-in the new
manifest because of a git error or the manifest is already checked-in.
@@ -341,12 +344,15 @@ class LKGMManager(manifest_version.BuildSpecsManager):
def GetLatestCandidate(self):
"""Gets and syncs to the next candiate manifest.
- Args:
- retries: Number of retries for updating the status
- Returns:
- Local path to manifest to build or None in case of no need to build.
- Raises:
- GenerateBuildSpecException in case of failure to generate a buildspec
+
+ Args:
+ retries: Number of retries for updating the status
+
+ Returns:
+ Local path to manifest to build or None in case of no need to build.
+
+ Raises:
+ GenerateBuildSpecException in case of failure to generate a buildspec
"""
def _AttemptToGetLatestCandidate():
"""Attempts to acquire latest candidate using manifest repo."""
diff --git a/buildbot/manifest_version.py b/buildbot/manifest_version.py
index 8216685a2..72989862a 100644
--- a/buildbot/manifest_version.py
+++ b/buildbot/manifest_version.py
@@ -206,10 +206,12 @@ class VersionInfo(object):
def FindValue(self, key, line):
"""Given the key find the value from the line, if it finds key = value
+
Args:
key: key to look for
line: string to search
- returns:
+
+ Returns:
None: on a non match
value: for a matching key
"""
@@ -218,8 +220,9 @@ class VersionInfo(object):
def IncrementVersion(self):
"""Updates the version file by incrementing the patch component.
+
Args:
- message: Commit message to use when incrementing the version.
+ message: Commit message to use when incrementing the version.
dry_run: Git dry_run.
"""
if not self.incr_type or self.incr_type not in self.VALID_INCR_TYPES:
@@ -408,6 +411,7 @@ class BuildSpecsManager(object):
Args:
specs: List of specs.
+
Returns:
The latest spec if specs is non-empty.
None otherwise.
@@ -417,6 +421,7 @@ class BuildSpecsManager(object):
def _LatestSpecFromDir(self, version_info, directory):
"""Returns the latest buildspec that match '*.xml' in a directory.
+
Args:
directory: Directory of the buildspecs.
"""
@@ -566,8 +571,9 @@ class BuildSpecsManager(object):
def GetLocalManifest(self, version=None):
"""Return path to local copy of manifest given by version.
- Returns path of version. By default if version is not set, returns the path
- of the current version.
+ Returns:
+ Path of |version|. By default if version is not set, returns the path
+ of the current version.
"""
if not self.all_specs_dir:
raise BuildSpecsValueError('GetLocalManifest failed, BuildSpecsManager '
@@ -603,10 +609,11 @@ class BuildSpecsManager(object):
def GetNextBuildSpec(self, retries=NUM_RETRIES):
"""Returns a path to the next manifest to build.
- Args:
- retries: Number of retries for updating the status.
- Raises:
- GenerateBuildSpecException in case of failure to generate a buildspec
+ Args:
+ retries: Number of retries for updating the status.
+
+ Raises:
+ GenerateBuildSpecException in case of failure to generate a buildspec
"""
last_error = None
for index in range(0, retries + 1):
@@ -673,6 +680,7 @@ class BuildSpecsManager(object):
def UploadStatus(self, success, message=None):
"""Uploads the status of the build for the current build spec.
+
Args:
success: True for success, False for failure
message: Message accompanied with change in status.
@@ -711,6 +719,7 @@ class BuildSpecsManager(object):
def UpdateStatus(self, success, message=None, retries=NUM_RETRIES):
"""Updates the status of the build for the current build spec.
+
Args:
success: True for success, False for failure
message: Message accompanied with change in status.
diff --git a/buildbot/portage_utilities.py b/buildbot/portage_utilities.py
index 104e1b8c7..4d5803af4 100644
--- a/buildbot/portage_utilities.py
+++ b/buildbot/portage_utilities.py
@@ -118,6 +118,7 @@ def FindPrimaryOverlay(overlay_type, board, buildroot=constants.SOURCE_ROOT):
'public': Just the public overlays.
'both': Both the public and private overlays.
board: Board to look at.
+
Raises:
MissingOverlayException: No primary overlay found.
"""
@@ -245,6 +246,7 @@ class EBuild(object):
Args:
message: the commit string to write when committing to git.
overlay: directory in which to commit the changes.
+
Raises:
RunCommandError: Error occurred while committing.
"""
@@ -444,12 +446,13 @@ class EBuild(object):
it is written using the standard rev'ing logic. This file must be
opened and closed by the caller.
- Raises:
- OSError: Error occurred while creating a new ebuild.
- IOError: Error occurred while writing to the new revved ebuild file.
Returns:
If the revved package is different than the old ebuild, return the full
revved package name, including the version number. Otherwise, return None.
+
+ Raises:
+ OSError: Error occurred while creating a new ebuild.
+ IOError: Error occurred while writing to the new revved ebuild file.
"""
if self.is_stable:
diff --git a/buildbot/repository.py b/buildbot/repository.py
index 78a2990ab..6424450bf 100755
--- a/buildbot/repository.py
+++ b/buildbot/repository.py
@@ -43,6 +43,7 @@ def IsInternalRepoCheckout(root):
def CloneGitRepo(working_dir, repo_url, reference=None, bare=False,
mirror=False, depth=None):
"""Clone given git repo
+
Args:
working_dir: location where it should be cloned to
repo_url: git repo to clone
@@ -125,6 +126,7 @@ def ClearBuildRoot(buildroot, preserve_paths=()):
class RepoRepository(object):
""" A Class that encapsulates a repo repository.
+
Args:
repo_url: gitserver URL to fetch repo manifest from.
directory: local path where to checkout the repository.
@@ -402,6 +404,7 @@ class RepoRepository(object):
<manifest revision="1234">.
revisions: If True, then rewrite all branches/tags into a specific
sha1 revision. If False, don't.
+
Returns:
The manifest as a string.
"""
@@ -427,6 +430,7 @@ class RepoRepository(object):
Args:
other_manfiest: Second manifest file to compare against.
+
Returns:
True: If the manifests are different
False: If the manifests are same
diff --git a/buildbot/trybot_patch_pool.py b/buildbot/trybot_patch_pool.py
index 012d3fdd2..0a5bc121b 100644
--- a/buildbot/trybot_patch_pool.py
+++ b/buildbot/trybot_patch_pool.py
@@ -38,9 +38,9 @@ class TrybotPatchPool(object):
def Filter(self, **kwargs):
"""Returns a new pool with only patches that match constraints.
- Args:
- **kwargs: constraints in the form of attr=value. I.e.,
- project='chromiumos/chromite', tracking_branch='master'.
+ Args:
+ **kwargs: constraints in the form of attr=value. I.e.,
+ project='chromiumos/chromite', tracking_branch='master'.
"""
def AttributeFilter(patch):
for key in kwargs:
diff --git a/buildbot/validation_pool.py b/buildbot/validation_pool.py
index 52b7cf7e4..1d9ebe599 100644
--- a/buildbot/validation_pool.py
+++ b/buildbot/validation_pool.py
@@ -56,7 +56,8 @@ class TreeIsClosedException(Exception):
"""Raised when the tree is closed and we wanted to submit changes."""
def __init__(self, closed_or_throttled=False):
- """
+ """Initialization.
+
Args:
closed_or_throttled: True if the exception is being thrown on a
possibly 'throttled' tree. False if only
@@ -273,6 +274,7 @@ class HelperPool(object):
Args:
internal: If True, allow access to a GerritHelper for internal.
external: If True, allow access to a GerritHelper for external.
+
Returns:
An appropriately configured HelperPool instance.
"""
@@ -445,11 +447,12 @@ class PatchSeries(object):
then this function will lie and always return True to avoid the
admin-level access required of <=gerrit-2.1.
+ Returns:
+ True if the change's project has content merging enabled, False if not.
+
Raises:
AssertionError: If the gerrit helper requested is disallowed.
GerritException: If there is a failure in querying gerrit.
- Returns:
- True if the change's project has content merging enabled, False if not.
"""
if self.force_content_merging:
return True
@@ -520,6 +523,7 @@ class PatchSeries(object):
limit_to: If non-None, then this must be a mapping (preferably a
cros_patch.PatchCache for translation reasons) of which non-committed
changes are allowed to be used for a transaction.
+
Returns:
A sequence of cros_patch.GitRepoPatch instances (or derivatives) that
need to be resolved for this change to be mergable.
@@ -573,6 +577,7 @@ class PatchSeries(object):
for.
limit_to: If non-None, limit the allowed uncommitted patches to
what's in that container/mapping.
+
Returns:
A sequence of the necessary cros_patch.GitRepoPatch objects for
this transaction.
@@ -718,12 +723,12 @@ class PatchSeries(object):
def GetDepsForChange(self, change):
"""Look up the gerrit/paladin deps for a change
+ Returns:
+ A tuple of the change's GerritDependencies(), and PaladinDependencies()
+
Raises:
DependencyError: If we could not resolve a dependency.
GerritException or GOBError: If there is a failure in querying gerrit.
-
- Returns:
- A tuple of the change's GerritDependencies(), and PaladinDependencies()
"""
val = self._change_deps_cache.get(change)
if val is None:
@@ -803,6 +808,7 @@ class PatchSeries(object):
changes being inspected, and expand the changes if necessary.
Primarily this is of use for cbuildbot patching when dealing w/
uploaded/remote patches.
+
Returns:
A tuple of changes-applied, Exceptions for the changes that failed
against ToT, and Exceptions that failed inflight; These exceptions
@@ -971,6 +977,7 @@ class PatchSeries(object):
kwargs: See PatchSeries.__init__ for the various optional args;
not forced_manifest cannot be used here, and force_content_merging
defaults to True in this usage.
+
Returns:
A PatchSeries instance w/ a forced manifest.
"""
@@ -1092,6 +1099,7 @@ class ValidationFailedMessage(object):
Args:
changes: List of changes to examine.
+
Returns:
Set of changes that likely caused the failure.
"""
@@ -1304,6 +1312,7 @@ class ValidationPool(object):
builder_name: Builder name on buildbot dashboard.
build_number: Build number for this validation attempt.
stage: Link directly to a stage log, else use the general landing page.
+
Returns:
The fully formed URL
"""
@@ -1400,8 +1409,10 @@ class ValidationPool(object):
non_manifest_changes) to filter out unwanted patches.
throttled_ok: if |check_tree_open|, treat a throttled tree as open.
Default: True.
+
Returns:
ValidationPool object.
+
Raises:
TreeIsClosedException: if the tree is closed (or throttled, if not
|throttled_ok|).
@@ -1474,6 +1485,7 @@ class ValidationPool(object):
is_master: Boolean that indicates whether this is a pool for a master.
config or not.
dryrun: Don't submit anything to gerrit.
+
Returns:
ValidationPool object.
"""
@@ -1522,8 +1534,9 @@ class ValidationPool(object):
changes: List of GerritPatch objects.
manifest: The manifest to check projects/branches against.
- Returns tuple of
- relevant reviews in a manifest, relevant reviews not in the manifest.
+ Returns:
+ Tuple of (relevant reviews in a manifest,
+ relevant reviews not in the manifest).
"""
def IsCrosReview(change):
@@ -1623,9 +1636,9 @@ class ValidationPool(object):
This method applies changes in the order specified. It also respects
dependency order.
- Returns:
- True if we managed to apply any changes.
+ Returns:
+ True if we managed to apply any changes.
"""
patch_series = PatchSeries(self.build_root, helper_pool=self._helper_pool)
try:
diff --git a/lib/binpkg.py b/lib/binpkg.py
index e2cbe0994..283788368 100644
--- a/lib/binpkg.py
+++ b/lib/binpkg.py
@@ -94,7 +94,8 @@ class PackageIndex(object):
Args:
pkgfile: A python file object.
- Returns the dictionary of key-value pairs that was read from the file.
+ Returns:
+ The dictionary of key-value pairs that was read from the file.
"""
d = {}
for line in pkgfile:
diff --git a/lib/cgroups.py b/lib/cgroups.py
index 67d08d471..b2a9505ff 100644
--- a/lib/cgroups.py
+++ b/lib/cgroups.py
@@ -428,7 +428,7 @@ class Cgroup(object):
def RemoveGroup(self, name, strict=False):
"""Removes a nested cgroup of ours
- Args
+ Args:
name: the namespace to remove.
strict: if False, remove it if possible. If True, its an error if it
cannot be removed.
diff --git a/lib/chrome_util.py b/lib/chrome_util.py
index a6e953243..1b1b153e2 100644
--- a/lib/chrome_util.py
+++ b/lib/chrome_util.py
@@ -184,6 +184,7 @@ class Copier(object):
path: A Path instance that specifies what is to be copied.
strict: If set, enforce that all optional files are copied.
sloppy: If set, ignore when mandatory artifacts are missing.
+
Returns:
A list of the artifacts copied.
"""
diff --git a/lib/commandline.py b/lib/commandline.py
index 899496301..757376dcd 100644
--- a/lib/commandline.py
+++ b/lib/commandline.py
@@ -237,9 +237,11 @@ class BaseParser(object):
"""Method called to handle post opts/args setup.
This can be anything from logging setup to positional arg count validation.
+
Args:
opts: optparse.Values instance
args: position arguments unconsumed from parsing.
+
Returns:
(opts, args), w/ whatever modification done.
"""
diff --git a/lib/cros_build_lib.py b/lib/cros_build_lib.py
index f6875d9b7..196a03704 100644
--- a/lib/cros_build_lib.py
+++ b/lib/cros_build_lib.py
@@ -107,8 +107,10 @@ def SudoRunCommand(cmd, user='root', **kwargs):
kwargs: See RunCommand options, it's a direct pass thru to it.
Note that this supports a 'strict' keyword that defaults to True.
If set to False, it'll suppress strict sudo behavior.
+
Returns:
See RunCommand documentation.
+
Raises:
This function may immediately raise RunCommandError if we're operating
in a strict sudo context and the API is being misused.
@@ -300,6 +302,7 @@ def RunCommand(cmd, print_cmd=True, error_message=None, redirect_stdout=False,
Specified in seconds.
log_output: Log the command and its output automatically.
stdout_to_pipe: Redirect stdout to pipe.
+
Returns:
A CommandResult object.
@@ -630,8 +633,10 @@ def GenericRetry(handler, max_retry, functor, *args, **kwargs):
sleep: Optional keyword. Multiplier for how long to sleep between
retries; will delay (1*sleep) the first time, then (2*sleep),
continuing via attempt * sleep.
+
Returns:
Whatever functor(*args, **kwargs) returns.
+
Raises:
Exception: Whatever exceptions functor(*args, **kwargs) throws and
isn't suppressed is raised. Note that the first exception encountered
@@ -696,8 +701,10 @@ def RetryCommand(functor, max_retry, *args, **kwargs):
signal. By default, we retry on all non-negative exit codes.
args: Positional args passed to RunCommand; see RunCommand for specifics.
kwargs: Optional args passed to RunCommand; see RunCommand for specifics.
+
Returns:
A CommandResult object.
+
Raises:
Exception: Raises RunCommandError on error with optional error_message.
"""
@@ -729,8 +736,10 @@ def RunCommandWithRetries(max_retry, *args, **kwargs):
Args:
See RetryCommand and RunCommand; This is just a wrapper around it.
+
Returns:
A CommandResult object.
+
Raises:
Exception: Raises RunCommandError on error with optional error_message.
"""
@@ -804,8 +813,10 @@ def FindCompressor(compression, chroot=None):
Args:
compression: The type of compression desired.
chroot: Optional path to a chroot to search.
+
Returns:
Path to a compressor.
+
Raises:
ValueError: If compression is unknown.
"""
@@ -883,6 +894,7 @@ def BooleanPrompt(prompt="Do you want to continue?", default=True,
default: Boolean to return if the user just presses enter.
true_value: The text to display that represents a True returned.
false_value: The text to display that represents a False returned.
+
Returns:
True or False.
"""
@@ -928,8 +940,10 @@ def BooleanShellValue(sval, default, msg=None):
default: If we can't figure out if the value is true or false, use this.
msg: If |sval| is an unknown value, use |msg| to warn the user that we
could not decode the input. Otherwise, raise ValueError().
+
Returns:
The interpreted boolean value of |sval|.
+
Raises:
ValueError() if |sval| is an unknown value and |msg| is not set.
"""
@@ -1144,10 +1158,11 @@ def GetTargetChromiteApiVersion(buildroot, validate_version=True):
compatibility, and raises an ApiMismatchError when there is an
incompatibility.
+ Returns:
+ The version number in (major, minor) tuple.
+
Raises:
May raise an ApiMismatchError if validate_version is set.
-
- Returns the version number in (major, minor) tuple.
"""
try:
api = RunCommandCaptureOutput(
@@ -1217,11 +1232,11 @@ def load_module(name):
Args:
name: python dotted namespace path of the module to import
- Raises:
- FailedImport if importing fails
-
Returns:
imported module
+
+ Raises:
+ FailedImport if importing fails
"""
m = __import__(name)
# __import__('foo.bar') returns foo, so...
@@ -1392,6 +1407,7 @@ def UserDateTimeFormat(timeval=None):
Args:
timeval: Either a datetime object or a floating point time value as accepted
by gmtime()/localtime(). If None, the current time is used.
+
Returns:
A string format such as 'Wed, 20 Feb 2013 15:25:15 -0500 (EST)'
"""
diff --git a/lib/cros_test_lib.py b/lib/cros_test_lib.py
index 728fcb3d8..f1b84baff 100644
--- a/lib/cros_test_lib.py
+++ b/lib/cros_test_lib.py
@@ -366,6 +366,7 @@ class TruthTable(object):
Args:
inputs_index: Following must hold: 0 <= inputs_index < self.num_lines.
+
Returns:
Tuple of bools representing one line of inputs.
"""
@@ -388,6 +389,7 @@ class TruthTable(object):
Args:
inputs: Tuple of bools, length must be equal to self.dimension.
+
Returns:
bool value representing truth table output for given inputs.
"""
@@ -1614,6 +1616,7 @@ def FindTests(directory, module_namespace=''):
Args:
directory: The directory to scan for tests.
module_namespace: What namespace to prefix all found tests with.
+
Returns:
A list of python unittests in python namespace form.
"""
diff --git a/lib/gerrit.py b/lib/gerrit.py
index 34233793e..14d6a75f9 100644
--- a/lib/gerrit.py
+++ b/lib/gerrit.py
@@ -179,7 +179,8 @@ class GerritHelper(object):
kwargs: A dict of query parameters, as described here:
https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
- Returns: A list of python dicts or cros_patch.GerritChange.
+ Returns:
+ A list of python dicts or cros_patch.GerritChange.
"""
query_kwds = kwargs
if options:
@@ -248,6 +249,7 @@ class GerritHelper(object):
Args:
changes: A sequence of gerrit change numbers.
+
Returns:
A list of cros_patch.GerritPatch.
"""
@@ -414,6 +416,7 @@ def GetChangeRef(change_number, patchset=None):
patchset: If given it must either be an integer or '*'. When given,
the returned refspec is for that exact patchset. If '*' is given, it's
used for pulling down all patchsets for that change.
+
Returns:
A git refspec.
"""
diff --git a/lib/git.py b/lib/git.py
index 5fda770af..f77f6e376 100644
--- a/lib/git.py
+++ b/lib/git.py
@@ -469,6 +469,7 @@ class ManifestCheckout(Manifest):
search: If True, the path can point into the repo, and the root will
be found automatically. If False, the path *must* be the root, else
an OSError ENOENT will be thrown.
+
Raises:
OSError: if a failure occurs.
"""
@@ -511,6 +512,7 @@ class ManifestCheckout(Manifest):
Returns:
True if content merging is enabled.
+
Raises:
RunCommandError: If the branch can't be fetched due to network
conditions or if this was invoked against a <gerrit-2.2 server,
@@ -546,12 +548,12 @@ class ManifestCheckout(Manifest):
branch: The branch that the project is tracking.
strict: Raise AssertionError if a checkout cannot be found.
+ Returns:
+ A ProjectCheckout object.
+
Raises:
AssertionError if there is more than one checkout associated with the
given project/branch combination.
-
- Returns:
- A ProjectCheckout object.
"""
checkouts = self.FindCheckouts(project, branch)
if len(checkouts) < 1:
@@ -583,7 +585,8 @@ class ManifestCheckout(Manifest):
strict: If True, fail when no checkout is found.
Returns:
- None if no checkout is found, else the checkout."""
+ None if no checkout is found, else the checkout.
+ """
# Realpath everything sans the target to keep people happy about
# how symlinks are handled; exempt the final node since following
# through that is unlikely even remotely desired.
@@ -711,6 +714,7 @@ def _GitRepoIsContentMerging(git_repo, remote):
Returns:
True if content merging is enabled, False if not.
+
Raises:
RunCommandError: Thrown if fetching fails due to either the namespace
not existing, or a network error intervening.
@@ -753,6 +757,7 @@ def RunGit(git_repo, cmd, retry=True, **kwargs):
this would be ['remote', 'update'] for example.
retry: If set, retry on transient errors. Defaults to True.
kwargs: Any RunCommand or GenericRetry options/overrides to use.
+
Returns:
A CommandResult object.
"""
@@ -790,6 +795,7 @@ def MatchBranchName(git_repo, pattern, namespace=''):
git_repo: The git repository to operate upon.
pattern: The regexp to search with.
namespace: The namespace to restrict search to (e.g. 'refs/heads/').
+
Returns:
List of matching branch names (with |namespace| trimmed).
"""
@@ -809,8 +815,10 @@ def MatchSingleBranchName(*args, **kwargs):
Args:
See MatchBranchName for more details; all args are passed on.
+
Returns:
The branch name.
+
Raises:
raise AmbiguousBranchName if we did not match exactly one branch.
"""
@@ -1038,14 +1046,14 @@ def GitPush(git_repo, refspec, push_to, dryrun=False, force=False, retry=True):
def CreatePushBranch(branch, git_repo, sync=True, remote_push_branch=None):
"""Create a local branch for pushing changes inside a repo repository.
- Args:
- branch: Local branch to create.
- git_repo: Git repository to create the branch in.
- sync: Update remote before creating push branch.
- remote_push_branch: A tuple of the (remote, branch) to push to. i.e.,
- ('cros', 'master'). By default it tries to
- automatically determine which tracking branch to use
- (see GetTrackingBranch()).
+ Args:
+ branch: Local branch to create.
+ git_repo: Git repository to create the branch in.
+ sync: Update remote before creating push branch.
+ remote_push_branch: A tuple of the (remote, branch) to push to. i.e.,
+ ('cros', 'master'). By default it tries to
+ automatically determine which tracking branch to use
+ (see GetTrackingBranch()).
"""
if not remote_push_branch:
remote, push_branch = GetTrackingBranch(git_repo, for_push=True)
@@ -1062,12 +1070,12 @@ def CreatePushBranch(branch, git_repo, sync=True, remote_push_branch=None):
def SyncPushBranch(git_repo, remote, rebase_target):
"""Sync and rebase a local push branch to the latest remote version.
- Args:
- git_repo: Git repository to rebase in.
- remote: The remote returned by GetTrackingBranch(for_push=True)
- rebase_target: The branch name returned by GetTrackingBranch(). Must
- start with refs/remotes/ (specifically must be a proper remote
- target rather than an ambiguous name).
+ Args:
+ git_repo: Git repository to rebase in.
+ remote: The remote returned by GetTrackingBranch(for_push=True)
+ rebase_target: The branch name returned by GetTrackingBranch(). Must
+ start with refs/remotes/ (specifically must be a proper remote
+ target rather than an ambiguous name).
"""
if not rebase_target.startswith("refs/remotes/"):
raise Exception(
@@ -1091,19 +1099,19 @@ def SyncPushBranch(git_repo, remote, rebase_target):
def PushWithRetry(branch, git_repo, dryrun=False, retries=5):
"""General method to push local git changes.
- This method only works with branches created via the CreatePushBranch
- function.
+ This method only works with branches created via the CreatePushBranch
+ function.
- Args:
- branch: Local branch to push. Branch should have already been created
- with a local change committed ready to push to the remote branch. Must
- also already be checked out to that branch.
- git_repo: Git repository to push from.
- dryrun: Git push --dry-run if set to True.
- retries: The number of times to retry before giving up, default: 5
+ Args:
+ branch: Local branch to push. Branch should have already been created
+ with a local change committed ready to push to the remote branch. Must
+ also already be checked out to that branch.
+ git_repo: Git repository to push from.
+ dryrun: Git push --dry-run if set to True.
+ retries: The number of times to retry before giving up, default: 5
- Raises:
- GitPushFailed if push was unsuccessful after retries
+ Raises:
+ GitPushFailed if push was unsuccessful after retries
"""
remote, ref = GetTrackingBranch(git_repo, branch, for_checkout=False,
for_push=True)
diff --git a/lib/gob_util.py b/lib/gob_util.py
index 145f04fa7..97a99a94c 100755
--- a/lib/gob_util.py
+++ b/lib/gob_util.py
@@ -89,6 +89,7 @@ def ReadHttpResponse(conn, ignore_404=True):
ignore_404: For many requests, gerrit-on-borg will return 404 if the request
doesn't match the database contents. In most such cases, we
want the API to return None rather than raise an Exception.
+
Returns:
A string buffer containing the connection's reply.
"""
@@ -140,8 +141,7 @@ def ReadHttpJsonResponse(conn, ignore_404=True):
def QueryChanges(host, param_dict, first_param=None, limit=None, o_params=None,
sortkey=None):
- """
- Queries a gerrit-on-borg server for changes matching query terms.
+ """Queries a gerrit-on-borg server for changes matching query terms.
Args:
param_dict: A dictionary of search parameters, as documented here:
@@ -150,6 +150,7 @@ def QueryChanges(host, param_dict, first_param=None, limit=None, o_params=None,
limit: Maximum number of results to return.
o_params: A list of additional output specifiers, as documented here:
https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+
Returns:
A list of json-decoded query results.
"""
diff --git a/lib/gs.py b/lib/gs.py
index 5e614951b..e1bfe9893 100644
--- a/lib/gs.py
+++ b/lib/gs.py
@@ -51,6 +51,7 @@ def GetGsURL(bucket, for_gsutil=False, public=True, suburl=''):
for_gsutil: Do you want a URL for passing to `gsutil`?
public: Do we want the public or private url
suburl: A url fragment to tack onto the end
+
Returns:
The fully constructed URL
"""
@@ -101,6 +102,7 @@ class GSCounter(object):
does not exist.
operation: Function that takes the current counter value as a
parameter, and returns the new desired value.
+
Returns:
The new counter value. None if value could not be set.
"""
@@ -132,28 +134,32 @@ class GSCounter(object):
"""Decrement the counter.
Returns:
- The new counter value. None if value could not be set."""
+ The new counter value. None if value could not be set.
+ """
return self.AtomicCounterOperation(-1, lambda x: x - 1)
def Reset(self):
"""Reset the counter to zero.
Returns:
- The new counter value. None if value could not be set."""
+ The new counter value. None if value could not be set.
+ """
return self.AtomicCounterOperation(0, lambda x: 0)
def StreakIncrement(self):
"""Increment the counter if it is positive, otherwise set it to 1.
Returns:
- The new counter value. None if value could not be set."""
+ The new counter value. None if value could not be set.
+ """
return self.AtomicCounterOperation(1, lambda x: x + 1 if x > 0 else 1)
def StreakDecrement(self):
"""Decrement the counter if it is negative, otherwise set it to -1.
Returns:
- The new counter value. None if value could not be set."""
+ The new counter value. None if value could not be set.
+ """
return self.AtomicCounterOperation(-1, lambda x: x - 1 if x < 0 else -1)
@@ -360,6 +366,7 @@ class GSContext(object):
Args:
dest_path: either a GS path or an absolute local path.
+
Returns:
The list of potential tracker filenames.
"""
@@ -392,6 +399,7 @@ class GSContext(object):
e: Exception object to filter. Exception may be re-raised as
as different type, if _RetryFilter determines a more appropriate
exception type based on the contents of e.
+
Returns:
True for exceptions thrown by a RunCommand gsutil that should be retried.
"""
@@ -509,10 +517,11 @@ class GSContext(object):
to ensure you don't overwrite someone else's creation, a version of
0 states "only update if no version exists".
- Raises:
- RunCommandError if the command failed despite retries.
Returns:
Return the CommandResult from the run.
+
+ Raises:
+ RunCommandError if the command failed despite retries.
"""
cmd, headers = [], []
@@ -596,7 +605,8 @@ class GSContext(object):
def GetGeneration(self, path):
"""Get the generation and metageneration of the given |path|.
- Returns a tuple of the generation and metageneration.
+ Returns:
+ A tuple of the generation and metageneration.
"""
def _Header(name):
if res and res.returncode == 0 and res.output is not None:
diff --git a/lib/locking.py b/lib/locking.py
index 74ebd4de4..ec51b87c6 100644
--- a/lib/locking.py
+++ b/lib/locking.py
@@ -66,13 +66,14 @@ class _Lock(cros_build_lib.MasterPidContextManager):
fcntl.lockf(self.fd, flags)
def read_lock(self, message="taking read lock"):
- """
- Take a read lock (shared), downgrading from write if required.
+ """Take a read lock (shared), downgrading from write if required.
Args:
message: A description of what/why this lock is being taken.
+
Returns:
self, allowing it to be used as a `with` target.
+
Raises:
IOError if the operation fails in some way.
"""
@@ -80,8 +81,7 @@ class _Lock(cros_build_lib.MasterPidContextManager):
return self
def write_lock(self, message="taking write lock"):
- """
- Take a write lock (exclusive), upgrading from read if required.
+ """Take a write lock (exclusive), upgrading from read if required.
Note that if the lock state is being upgraded from read to write,
a deadlock potential exists- as such we *will* release the lock
@@ -91,8 +91,10 @@ class _Lock(cros_build_lib.MasterPidContextManager):
Args:
message: A description of what/why this lock is being taken.
+
Returns:
self, allowing it to be used as a `with` target.
+
Raises:
IOError if the operation fails in some way.
"""
@@ -100,8 +102,7 @@ class _Lock(cros_build_lib.MasterPidContextManager):
return self
def unlock(self):
- """
- Release any locks held. Noop if no locks are held.
+ """Release any locks held. Noop if no locks are held.
Raises:
IOError if the operation fails in some way.
@@ -115,9 +116,7 @@ class _Lock(cros_build_lib.MasterPidContextManager):
self.close()
def close(self):
- """
- Release the underlying lock and close the fd.
- """
+ """Release the underlying lock and close the fd."""
if self._fd is not None:
self.unlock()
os.close(self._fd)
@@ -168,7 +167,6 @@ class FileLock(_Lock):
class ProcessLock(_Lock):
-
"""Process level locking visible to parent/child only.
This lock is basically a more robust version of what
diff --git a/lib/namespaces.py b/lib/namespaces.py
index d09fb89fe..b16958620 100644
--- a/lib/namespaces.py
+++ b/lib/namespaces.py
@@ -24,6 +24,7 @@ def Unshare(flags):
Args:
flags: Namespaces to unshare; bitwise OR of CLONE_* flags.
+
Raises:
OSError: if unshare failed.
"""
diff --git a/lib/operation.py b/lib/operation.py
index eda6851ea..2ef7d057e 100644
--- a/lib/operation.py
+++ b/lib/operation.py
@@ -406,7 +406,6 @@ class Operation:
Args:
request: True to request verbose mode if available, False to do nothing.
-
"""
old_verbose = self.verbose
if request and not self.explicit_verbose:
diff --git a/lib/osutils.py b/lib/osutils.py
index d47dd3c75..a2650f5cd 100644
--- a/lib/osutils.py
+++ b/lib/osutils.py
@@ -27,7 +27,7 @@ def GetNonRootUser():
ran the emerge command. If running using sudo, returns the username
of the person who ran the sudo command. If no non-root user is
found, returns None.
-"""
+ """
uid = os.getuid()
if uid == 0:
user = os.environ.get('PORTAGE_USERNAME', os.environ.get('SUDO_USER'))
@@ -106,7 +106,8 @@ def ReadFile(path, mode='r'):
def SafeUnlink(path, sudo=False):
"""Unlink a file from disk, ignoring if it doesn't exist.
- Returns True if the file existed and was removed, False if it didn't exist.
+ Returns:
+ True if the file existed and was removed, False if it didn't exist.
"""
if sudo:
try:
@@ -136,12 +137,13 @@ def SafeMakedirs(path, mode=0o775, sudo=False, user='root'):
mode: The access permissions in the style of chmod.
sudo: If True, create it via sudo, thus root owned.
user: If |sudo| is True, run sudo as |user|.
- Raises:
- EnvironmentError: if the makedir failed and it was non sudo.
- RunCommandError: If sudo mode, and the command failed for any reason.
Returns:
True if the directory had to be created, False if otherwise.
+
+ Raises:
+ EnvironmentError: if the makedir failed and it was non sudo.
+ RunCommandError: If sudo mode, and the command failed for any reason.
"""
if sudo:
if os.path.isdir(path):
@@ -222,6 +224,7 @@ def Which(binary, path=None, mode=os.X_OK):
binary: The binary to look for.
path: Search path. Defaults to os.environ['PATH'].
mode: File mode to check on the binary.
+
Returns:
The full path to |binary| if found (with the right mode). Otherwise, None.
"""
@@ -601,6 +604,7 @@ def StrSignal(sig_num):
Args:
sig_num: The numeric signal you wish to convert
+
Returns:
A string of the signal name(s)
"""
diff --git a/lib/parallel.py b/lib/parallel.py
index 4c433daeb..77db90116 100644
--- a/lib/parallel.py
+++ b/lib/parallel.py
@@ -526,7 +526,7 @@ def RunParallelSteps(steps, max_parallel=None, halt_on_error=False,
# Blocks until all calls have completed.
"""
def ReturnWrapper(queue, fn):
- """A function that """
+ """Put the return value of |fn| into |queue|."""
queue.put(fn())
full_steps = []
diff --git a/lib/partial_mock.py b/lib/partial_mock.py
index c8c179b4d..23f0dc2e2 100644
--- a/lib/partial_mock.py
+++ b/lib/partial_mock.py
@@ -256,10 +256,11 @@ class MockedCallResults(object):
def LookupResult(self, args, kwargs=None, hook_args=None, hook_kwargs=None):
"""For a given mocked function call lookup the recorded internal results.
- args: A list containing positional args the function was called with.
- kwargs: A dict containing keyword args the function was called with.
- hook_args: A list of positional args to call the hook with.
- hook_kwargs: A dict of key/value args to call the hook with.
+ Args:
+ args: A list containing positional args the function was called with.
+ kwargs: A dict containing keyword args the function was called with.
+ hook_args: A list of positional args to call the hook with.
+ hook_kwargs: A dict of key/value args to call the hook with.
Returns:
The recorded result for the invocation.
diff --git a/lib/patch.py b/lib/patch.py
index 16f43a1fa..59fb0f9fc 100644
--- a/lib/patch.py
+++ b/lib/patch.py
@@ -546,6 +546,7 @@ class GitRepoPatch(object):
Args:
git_repo: The git repository to fetch this patch into.
+
Returns:
The sha1 of the patch.
"""
@@ -609,7 +610,8 @@ class GitRepoPatch(object):
Args:
git_repo: Git repository to operate upon.
- returns: A dictionary of path -> modification_type tuples. See
+ Returns:
+ A dictionary of path -> modification_type tuples. See
`git log --help`, specifically the --diff-filter section for details.
"""
@@ -903,14 +905,14 @@ class GitRepoPatch(object):
def GetCheckout(self, manifest, strict=True):
"""Get the ProjectCheckout associated with this patch.
- Raises:
- ChangeMatchesMultipleCheckouts if there are multiple checkouts that
- match this change.
-
Args:
manifest: A ManifestCheckout object.
strict: If the change refers to a project/branch that is not in the
manifest, raise a ChangeNotInManifest error.
+
+ Raises:
+ ChangeMatchesMultipleCheckouts if there are multiple checkouts that
+ match this change.
"""
checkouts = manifest.FindCheckouts(self.project, self.tracking_branch)
if len(checkouts) != 1:
@@ -1020,6 +1022,7 @@ class LocalPatch(GitRepoPatch):
dryrun: Do the git push with --dry-run
reviewers: Iterable of reviewers to add.
cc: Iterable of people to add to cc.
+
Returns:
A list of gerrit URLs found in the output
"""
@@ -1271,6 +1274,7 @@ class GerritPatch(GitRepoPatch):
Args:
field: Which field to check ('VRIF', 'CRVW', ...).
+
Returns:
Most recent field value (as str) or '0' if no such field.
"""
diff --git a/lib/remote_access.py b/lib/remote_access.py
index d37c37f0a..6ab8c2748 100644
--- a/lib/remote_access.py
+++ b/lib/remote_access.py
@@ -94,15 +94,16 @@ class RemoteAccess(object):
See ssh_error_ok.
ssh_error_ok: Does not throw an exception when the ssh command itself
fails (return code 255).
- debug_level: See cros_build_lib.RunCommand documentation.
+ debug_level: See cros_build_lib.RunCommand documentation.
Returns:
A CommandResult object. The returncode is the returncode of the command,
or 255 if ssh encountered an error (could not connect, connection
interrupted, etc.)
- Raises: RunCommandError when error is not ignored through error_code_ok and
- ssh_error_ok flags.
+ Raises:
+ RunCommandError when error is not ignored through error_code_ok and
+ ssh_error_ok flags.
"""
if not debug_level:
debug_level = self.debug_level
diff --git a/lib/rewrite_git_alternates.py b/lib/rewrite_git_alternates.py
index 583c8ef7c..80d26466a 100755
--- a/lib/rewrite_git_alternates.py
+++ b/lib/rewrite_git_alternates.py
@@ -175,8 +175,7 @@ def _RebuildRepoCheckout(target_root, reference_map,
def WalkReferences(repo_root, max_depth=5, suppress=()):
- """
- Given a repo checkout root, find the repo's it references up to max_depth
+ """Given a repo checkout root, find the repos it references up to max_depth.
Args:
repo_root: The root of a repo checkout to start from
@@ -222,8 +221,7 @@ def WalkReferences(repo_root, max_depth=5, suppress=()):
def RebuildRepoCheckout(repo_root, initial_reference,
chroot_reference_root=None):
- """
- Rebuild a repo checkouts ondisk 'alternate tree' rewriting the repo to use it
+ """Rebuild a repo checkout's 'alternate tree' rewriting the repo to use it.
Args:
repo_root: absolute path to the root of a repository checkout
diff --git a/lib/stats.py b/lib/stats.py
index 63a5f4f5d..8eb5325b4 100644
--- a/lib/stats.py
+++ b/lib/stats.py
@@ -144,12 +144,12 @@ class StatsUploader(object):
def Upload(cls, stats, url=None, timeout=None):
"""Upload |stats| to |url|.
- Does nothing if upload conditions aren't met.
+ Does nothing if upload conditions aren't met.
- Args:
- stats: A Stats object to upload.
- url: The url to send the request to.
- timeout: A timeout value to set, in seconds.
+ Args:
+ stats: A Stats object to upload.
+ url: The url to send the request to.
+ timeout: A timeout value to set, in seconds.
"""
if url is None:
url = cls.URL
diff --git a/lib/timeout_util_unittest.py b/lib/timeout_util_unittest.py
index 96966d446..16c8dc65f 100755
--- a/lib/timeout_util_unittest.py
+++ b/lib/timeout_util_unittest.py
@@ -162,7 +162,6 @@ class TestTreeStatus(cros_test_lib.MoxTestCase):
"""Mocks out urllib.urlopen commands to simulate a given tree status.
Args:
-
status_url: The status url that status will be fetched from.
final_tree_status: The final value of tree status that will be returned
by urlopen.
diff --git a/lib/toolchain.py b/lib/toolchain.py
index 679a12580..f239271f3 100644
--- a/lib/toolchain.py
+++ b/lib/toolchain.py
@@ -71,7 +71,8 @@ def GetTuplesForOverlays(overlays):
def GetAllTargets():
"""Get the complete list of targets.
- returns the list of cross targets for the current tree
+ Returns:
+ The list of cross targets for the current tree
"""
targets = GetToolchainsForBoard('all')
@@ -83,7 +84,8 @@ def GetAllTargets():
def GetToolchainsForBoard(board, buildroot=constants.SOURCE_ROOT):
"""Get a list of toolchain tuples for a given board name
- returns the list of toolchain tuples for the given board
+ Returns:
+ The list of toolchain tuples for the given board
"""
overlays = portage_utilities.FindOverlays(
constants.BOTH_OVERLAYS, None if board in ('all', 'sdk') else board,
@@ -101,6 +103,7 @@ def FilterToolchains(targets, key, value):
targets: dict of toolchains
key: metadata to examine
value: expected value for metadata
+
Returns:
dict where all targets whose metadata |key| does not match |value|
have been deleted
@@ -114,6 +117,7 @@ def GetSdkURL(for_gsutil=False, suburl=''):
Args:
for_gsutil: Do you want a URL for passing to `gsutil`?
suburl: A url fragment to tack onto the end
+
Returns:
The fully constructed URL
"""
diff --git a/licensing/licenses.py b/licensing/licenses.py
index 7999a6452..12ae0d833 100644
--- a/licensing/licenses.py
+++ b/licensing/licenses.py
@@ -1136,10 +1136,11 @@ def ReadUnknownEncodedFile(file_path, logging_text=None):
logging_text: what to display for logging depending on file read.
Returns:
- file content, possibly converted from latin1 to UTF-8.
+ File content, possibly converted from latin1 to UTF-8.
- Raises: Assertion error: if non-whitelisted illegal XML characters
- are found in the file.
+ Raises:
+ AssertionError: if non-whitelisted illegal XML characters
+ are found in the file.
"""
try:
diff --git a/licensing/process-pkg.py b/licensing/process-pkg.py
index c42c8d8d7..0ad845f22 100755
--- a/licensing/process-pkg.py
+++ b/licensing/process-pkg.py
@@ -265,12 +265,11 @@ def identifyLicenseText(workdir, metadata_licenses, stock_licenses):
license_text = _GetStockLicense(metadata_licenses, stock_licenses)
if not license_text:
raise Exception("failed finding a license in both the source, and a "
- "usable stock license")
+ "usable stock license")
return license_text
def EvaluateTemplate(template, env, escape=True):
- """Expand a template with variables like {{foo}} using a
- dictionary of expansions."""
+ """Expand |template| with content like {{foo}} using a dict of expansions."""
for key, val in env.items():
if escape:
val = cgi.escape(val)
diff --git a/scripts/cbuildbot.py b/scripts/cbuildbot.py
index 039688525..163947350 100644
--- a/scripts/cbuildbot.py
+++ b/scripts/cbuildbot.py
@@ -188,6 +188,7 @@ class Builder(object):
stage: A BuilderStage class.
args: args to pass to stage constructor.
kwargs: kwargs to pass to stage constructor.
+
Returns:
Whatever the stage's Run method returns.
"""
@@ -603,8 +604,9 @@ class DistributedBuilder(SimpleBuilder):
def GetCompletionInstance(self):
"""Returns the completion_stage_class instance that was used for this build.
- Returns None if the completion_stage instance was not yet created (this
- occurs during Publish).
+ Returns:
+ None if the completion_stage instance was not yet created (this
+ occurs during Publish).
"""
return self._completion_stage
diff --git a/scripts/cros_generate_breakpad_symbols.py b/scripts/cros_generate_breakpad_symbols.py
index 4c443e520..d136aa642 100644
--- a/scripts/cros_generate_breakpad_symbols.py
+++ b/scripts/cros_generate_breakpad_symbols.py
@@ -43,8 +43,10 @@ def ReadSymsHeader(sym_file):
Args:
sym_file: The symbol file to parse
+
Returns:
A SymbolHeader object
+
Raises:
ValueError if the first line of |sym_file| is invalid
"""
@@ -71,6 +73,7 @@ def GenerateBreakpadSymbol(elf_file, debug_file=None, breakpad_dir=None,
board: If |breakpad_dir| is not specified, use |board| to find it
strip_cfi: Do not generate CFI data
num_errors: An object to update with the error count (needs a .value member)
+
Returns:
The number of errors that were encountered.
"""
@@ -178,6 +181,7 @@ def GenerateBreakpadSymbols(board, breakpad_dir=None, strip_cfi=False,
file_list: Only generate symbols for files in this list. Each file must be a
full path (including |sysroot| prefix).
TODO(build): Support paths w/o |sysroot|.
+
Returns:
The number of errors that were encountered.
"""
diff --git a/scripts/cros_list_modified_packages.py b/scripts/cros_list_modified_packages.py
index ae9ff90fe..cb2099322 100644
--- a/scripts/cros_list_modified_packages.py
+++ b/scripts/cros_list_modified_packages.py
@@ -144,7 +144,8 @@ def ListWorkonPackagesInfo(board, host):
board: The board to look at. If host is True, this should be set to None.
host: Whether to look at workon packages for the host.
- Returns a list of unique packages being worked on.
+ Returns:
+ A list of unique packages being worked on.
"""
# Import portage late so that this script can be imported outside the chroot.
# pylint: disable=W0404
diff --git a/scripts/cros_mark_as_stable.py b/scripts/cros_mark_as_stable.py
index 2b71793d7..4d72bbda2 100644
--- a/scripts/cros_mark_as_stable.py
+++ b/scripts/cros_mark_as_stable.py
@@ -127,6 +127,7 @@ def PushChange(stable_branch, tracking_branch, dryrun, cwd):
tracking_branch: The tracking branch of the local branch.
dryrun: Use git push --dryrun to emulate a push.
cwd: The directory to run commands in.
+
Raises:
OSError: Error occurred while pushing.
"""
diff --git a/scripts/cros_mark_chrome_as_stable.py b/scripts/cros_mark_chrome_as_stable.py
index 667352e25..6f82d0fff 100644
--- a/scripts/cros_mark_chrome_as_stable.py
+++ b/scripts/cros_mark_chrome_as_stable.py
@@ -126,6 +126,7 @@ def _GetLatestRelease(base_url, branch=None):
Args:
branch: If set, gets the latest release for branch, otherwise latest
release.
+
Returns:
Latest version string.
"""
@@ -189,8 +190,10 @@ def FindChromeCandidates(overlay_dir):
Args:
overlay_dir: The path to chrome's portage overlay dir.
+
Returns:
Tuple [unstable_ebuild, stable_ebuilds].
+
Raises:
Exception: if no unstable ebuild exists for Chrome.
"""
@@ -230,8 +233,9 @@ def FindChromeUprevCandidate(stable_ebuilds, chrome_rev, sticky_branch):
sticky_branch: The the branch that is currently sticky with Major/Minor
components. For example: 9.0.553. Can be None but not if chrome_rev
is CHROME_REV_STICKY.
+
Returns:
- Returns the EBuild, otherwise None if none found.
+ The EBuild, otherwise None if none found.
"""
candidates = []
if chrome_rev in [constants.CHROME_REV_LOCAL, constants.CHROME_REV_TOT,
@@ -280,6 +284,7 @@ def GetChromeRevisionLinkFromVersions(old_chrome_version, chrome_version):
Args:
old_chrome_version: version to diff from
chrome_version: version to which to diff
+
Returns:
The desired URL.
"""
@@ -297,6 +302,7 @@ def GetChromeRevisionListLink(old_chrome, new_chrome, chrome_rev):
old_chrome: ebuild for the version to diff from
new_chrome: ebuild for the version to which to diff
chrome_rev: one of constants.VALID_CHROME_REVISIONS
+
Returns:
The desired URL.
"""
@@ -329,6 +335,7 @@ def MarkChromeEBuildAsStable(stable_candidate, unstable_ebuild, chrome_rev,
chrome_version: The \d.\d.\d.\d version of Chrome.
commit: Used with constants.CHROME_REV_TOT. The svn revision of chrome.
overlay_dir: Path to the chromeos-chrome package dir.
+
Returns:
Full portage version atom (including rc's, etc) that was revved.
"""
diff --git a/scripts/cros_mark_chrome_as_stable_unittest.py b/scripts/cros_mark_chrome_as_stable_unittest.py
index 62fae935f..5297845c9 100755
--- a/scripts/cros_mark_chrome_as_stable_unittest.py
+++ b/scripts/cros_mark_chrome_as_stable_unittest.py
@@ -223,8 +223,11 @@ class CrosMarkChromeAsStable(cros_test_lib.MoxTempDirTestCase):
self.assertEqual('8.0.224.2', release)
def testLatestChromeRevisionListLink(self):
- """Tests that we can generate a link to the revision list between the
- latest Chromium release and the last one we successfully built."""
+ """Tests link generation to rev lists.
+
+ Verifies that we can generate a link to the revision list between the
+ latest Chromium release and the last one we successfully built.
+ """
_TouchAndWrite(self.latest_new, stable_data)
expected = cros_mark_chrome_as_stable.GetChromeRevisionLinkFromVersions(
self.latest_stable_version, self.latest_new_version)
diff --git a/scripts/cros_merge_to_branch.py b/scripts/cros_merge_to_branch.py
index ed425ec93..9041c1238 100644
--- a/scripts/cros_merge_to_branch.py
+++ b/scripts/cros_merge_to_branch.py
@@ -86,6 +86,7 @@ def _UploadChangeToBranch(work_dir, patch, branch, draft, dryrun):
draft: If True, upload to refs/draft/|branch| rather than refs/for/|branch|.
dryrun: Don't actually upload a change but go through all the steps up to
and including git push --dry-run.
+
Returns:
A list of all the gerrit URLs found.
"""
diff --git a/scripts/cros_portage_upgrade.py b/scripts/cros_portage_upgrade.py
index a9abeb687..a2532fee9 100644
--- a/scripts/cros_portage_upgrade.py
+++ b/scripts/cros_portage_upgrade.py
@@ -478,9 +478,10 @@ class Upgrader(object):
This essentially runs emerge with the --pretend option to verify
that all dependencies for these package versions are satisfied.
- Return tuple with two elements:
- [0] True if |cpvlist| can be emerged.
- [1] Output from the emerge command.
+ Returns:
+ Tuple with two elements:
+ [0] True if |cpvlist| can be emerged.
+ [1] Output from the emerge command.
"""
envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
emerge = self._GetBoardCmd(self.EMERGE_CMD)
diff --git a/scripts/cros_portage_upgrade_unittest.py b/scripts/cros_portage_upgrade_unittest.py
index 1934a71d0..6084a78a9 100755
--- a/scripts/cros_portage_upgrade_unittest.py
+++ b/scripts/cros_portage_upgrade_unittest.py
@@ -420,19 +420,19 @@ class CpuTestBase(cros_test_lib.MoxOutputTestCase):
This leverages test code in existing Portage modules to create an ebuild
hierarchy. This can be a little slow.
- |ebuilds| is a list of hashes representing ebuild files in
- a portdir.
- |installed| is a list of hashes representing ebuilds files
- already installed.
- |world| is a list of lines to simulate in the world file.
- |active| True means that os.environ variables should be set
- to point to the created playground, such that Portage tools
- (such as emerge) can be run now using the playground as the active
- PORTDIR. Also saves the playground as self._playground. If |active|
- is False, then no os.environ variables are set and playground is
- not saved (only returned).
-
- Returns tuple (playground, envvars).
+ Args:
+ ebuilds: A list of hashes representing ebuild files in a portdir.
+    installed: A list of hashes representing ebuild files already installed.
+ world: A list of lines to simulate in the world file.
+ active: True means that os.environ variables should be set
+ to point to the created playground, such that Portage tools
+ (such as emerge) can be run now using the playground as the active
+ PORTDIR. Also saves the playground as self._playground. If |active|
+ is False, then no os.environ variables are set and playground is
+ not saved (only returned).
+
+ Returns:
+ Tuple (playground, envvars).
"""
# TODO(mtennant): Support multiple overlays? This essentially
@@ -544,15 +544,17 @@ class CopyUpstreamTest(CpuTestBase):
ebuilds=None, missing=False):
"""Hack to insert an eclass into the playground source.
- |eclass| Name of eclass to create (without .eclass suffix). Will be
- created as an empty file unless |lines| is specified.
- |lines| Lines of text to put into created eclass, if given.
- |ebuilds| List of ebuilds to put inherit line into. Should be path
- to ebuild from playground portdir.
- |missing| If True, do not actually create the eclass file. Only makes
- sense if |ebuilds| is non-empty, presumably to test inherit failure.
-
- Return full path to the eclass file, whether it was created or not.
+ Args:
+ eclass: Name of eclass to create (without .eclass suffix). Will be
+ created as an empty file unless |lines| is specified.
+ lines: Lines of text to put into created eclass, if given.
+    ebuilds: List of ebuilds to put an inherit line into. Should be path
+ to ebuild from playground portdir.
+ missing: If True, do not actually create the eclass file. Only makes
+ sense if |ebuilds| is non-empty, presumably to test inherit failure.
+
+ Returns:
+ Full path to the eclass file, whether it was created or not.
"""
portdir = self._GetPlaygroundPortdir()
eclass_path = os.path.join(portdir, 'eclass', '%s.eclass' % eclass)
diff --git a/scripts/cros_setup_toolchains.py b/scripts/cros_setup_toolchains.py
index a6ddb0a61..dce7e50e6 100644
--- a/scripts/cros_setup_toolchains.py
+++ b/scripts/cros_setup_toolchains.py
@@ -488,6 +488,7 @@ def ExpandTargets(targets_wanted):
Args:
targets_wanted: The targets specified by the user.
+
Returns:
Full list of tuples with pseudo targets removed.
"""
@@ -605,6 +606,7 @@ def FileIsCrosSdkElf(elf):
Args:
elf: The file to check
+
Returns:
True if we think |elf| is a native ELF
"""
@@ -631,6 +633,7 @@ def IsPathPackagable(ptype, path):
Args:
ptype: A string describing the path type (i.e. 'file' or 'dir' or 'sym')
path: The full path to inspect
+
Returns:
True if we want to include this path in the package
"""
@@ -646,6 +649,7 @@ def ReadlinkRoot(path, root):
Args:
path: The symlink to read
root: The path to use for resolving absolute symlinks
+
Returns:
A fully resolved symlink path
"""
@@ -662,6 +666,7 @@ def _GetFilesForTarget(target, root='/'):
Args:
target: The toolchain target name
root: The root path to pull all packages from
+
Returns:
A tuple of a set of all packable paths, and a set of all paths which
are also native ELFs
@@ -784,6 +789,7 @@ def _EnvdGetVar(envd, var):
Args:
envd: The env.d file to load (may be a glob path)
var: The var to extract
+
Returns:
The value of |var|
"""
diff --git a/scripts/merge_package_status.py b/scripts/merge_package_status.py
index ccfdb3276..e155d6cfc 100644
--- a/scripts/merge_package_status.py
+++ b/scripts/merge_package_status.py
@@ -34,7 +34,8 @@ def _GetCrosTargetRank(target):
All valid rankings are greater than zero.
- Returns valid ranking for target or a false value if target is unrecognized.
+ Returns:
+ Valid ranking for target or a false value if target is unrecognized.
"""
for ix, targ in enumerate(CHROMEOS_TARGET_ORDER):
if target == targ:
diff --git a/scripts/parallel_emerge.py b/scripts/parallel_emerge.py
index fc7797487..db54021ea 100644
--- a/scripts/parallel_emerge.py
+++ b/scripts/parallel_emerge.py
@@ -226,8 +226,10 @@ class DepGraphGenerator(object):
We need to be compatible with emerge arg format. We scrape arguments that
are specific to parallel_emerge, and pass through the rest directly to
emerge.
+
Args:
argv: arguments list
+
Returns:
Arguments that don't belong to parallel_emerge
"""
@@ -525,6 +527,7 @@ class DepGraphGenerator(object):
Args:
deps_tree: Dependency tree structure.
deps_info: More details on the dependencies.
+
Returns:
Deps graph in the form of a dict of packages, with each package
specifying a "needs" list and "provides" list.
@@ -547,8 +550,10 @@ class DepGraphGenerator(object):
Take the tree of package -> requirements and reverse it to a digraph of
buildable packages -> packages they unblock.
+
Args:
packages: Tree(s) of dependencies.
+
Returns:
Unsanitized digraph.
"""
diff --git a/scripts/update_manifest_remotes.py b/scripts/update_manifest_remotes.py
index e8e29a461..95a2768ab 100755
--- a/scripts/update_manifest_remotes.py
+++ b/scripts/update_manifest_remotes.py
@@ -76,6 +76,7 @@ def UpdateRemotes(manifest):
Args:
manifest: Path to manifest file to modify in place.
+
Returns:
True if file was modified.
"""
@@ -104,6 +105,7 @@ def GetRemotes(manifest):
Args:
manifest: Path to manifest file to scan for remotes.
+
Returns:
List of Remote tuples.
"""
diff --git a/scripts/upload_prebuilts.py b/scripts/upload_prebuilts.py
index 4c34d9625..272630d63 100644
--- a/scripts/upload_prebuilts.py
+++ b/scripts/upload_prebuilts.py
@@ -84,6 +84,7 @@ class BuildTarget(object):
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
"""Update the key in file with the value passed.
+
File format:
key="value"
Note quotes are added automatically
@@ -224,9 +225,11 @@ def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
def GetBoardOverlay(build_path, target):
"""Get the path to the board variant.
+
Args:
build_path: The path to the root of the build directory
target: The target board as a BuildTarget object.
+
Returns:
The last overlay configured for the given board as a string.
"""
@@ -240,13 +243,13 @@ def GetBoardOverlay(build_path, target):
def DeterminePrebuiltConfFile(build_path, target):
"""Determine the prebuilt.conf file that needs to be updated for prebuilts.
- Args:
- build_path: The path to the root of the build directory
- target: String representation of the board. This includes host and board
- targets
+ Args:
+ build_path: The path to the root of the build directory
+ target: String representation of the board. This includes host and board
+ targets
- Returns
- A string path to a prebuilt.conf file to be updated.
+ Returns:
+ A string path to a prebuilt.conf file to be updated.
"""
if _HOST_ARCH == target:
# We are host.
@@ -358,9 +361,7 @@ class PrebuiltUploader(object):
Args:
package_path: The path to the packages dir.
url_suffix: The remote subdirectory where we should upload the packages.
-
"""
-
# Process Packages file, removing duplicates and filtered packages.
pkg_index = binpkg.GrabLocalPackageIndex(package_path)
pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
@@ -577,8 +578,9 @@ def _AddSlaveProfile(_option, _opt_str, value, parser):
def ParseOptions():
"""Returns options given by the user and the target specified.
- Returns a tuple containing a parsed options object and BuildTarget.
- target instance is None if no board is specified.
+ Returns:
+ A tuple containing a parsed options object and BuildTarget.
+ The target instance is None if no board is specified.
"""
parser = optparse.OptionParser()
parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
diff --git a/scripts/upload_symbols.py b/scripts/upload_symbols.py
index 877c59c25..b999ae879 100644
--- a/scripts/upload_symbols.py
+++ b/scripts/upload_symbols.py
@@ -149,6 +149,7 @@ def ErrorLimitHit(num_errors, watermark_errors):
Args:
num_errors: A multiprocessing.Value of the raw number of failures.
watermark_errors: A multiprocessing.Value of the current rate of failures.
+
Returns:
True if our error limits have been exceeded.
"""
@@ -192,6 +193,7 @@ def UploadSymbol(sym_file, upload_url, file_limit=DEFAULT_FILE_LIMIT,
num_errors: An object to update with the error count (needs a .value member)
watermark_errors: An object to track current error behavior (needs a .value)
failed_queue: When a symbol fails, add it to this queue
+
Returns:
The number of errors that were encountered.
"""
@@ -268,6 +270,7 @@ def SymbolFinder(paths):
Args:
paths: A list of input paths to walk. Files are returned w/out any checks.
Dirs are searched for files that end in ".sym".
+
Returns:
Yield every viable sym file.
"""
@@ -302,6 +305,7 @@ def UploadSymbols(board=None, official=False, breakpad_dir=None,
otherwise search |breakpad_dir|
root: The tree to prefix to |breakpad_dir| (if |breakpad_dir| is not set)
retry: Whether we should retry failures.
+
Returns:
The number of errors that were encountered.
"""
diff --git a/scripts/wrapper.py b/scripts/wrapper.py
index 25d722f3c..57f308fcf 100755
--- a/scripts/wrapper.py
+++ b/scripts/wrapper.py
@@ -88,6 +88,7 @@ def FindTarget(target):
Args:
target: Path to the script we're trying to run.
+
Returns:
The module main functor.
"""