aboutsummaryrefslogtreecommitdiff
path: root/cros_utils
diff options
context:
space:
mode:
Diffstat (limited to 'cros_utils')
-rwxr-xr-xcros_utils/buildbot_json.py1534
-rw-r--r--cros_utils/buildbot_utils.py59
-rwxr-xr-xcros_utils/buildbot_utils_unittest.py59
-rwxr-xr-xcros_utils/command_executer.py68
-rwxr-xr-xcros_utils/device_setup_utils_unittest.py3
-rw-r--r--cros_utils/logger.py1
-rw-r--r--cros_utils/manifest_versions.py12
-rw-r--r--cros_utils/misc.py12
-rw-r--r--cros_utils/tabulator.py100
-rwxr-xr-xcros_utils/tabulator_test.py14
-rw-r--r--cros_utils/tiny_render.py181
-rwxr-xr-xcros_utils/tiny_render_test.py177
12 files changed, 571 insertions, 1649 deletions
diff --git a/cros_utils/buildbot_json.py b/cros_utils/buildbot_json.py
deleted file mode 100755
index 08a8ae05..00000000
--- a/cros_utils/buildbot_json.py
+++ /dev/null
@@ -1,1534 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# NOTE: This file is NOT under GPL. See above.
-
-"""Queries buildbot through the json interface.
-"""
-
-from __future__ import print_function
-
-__author__ = 'maruel@chromium.org'
-__version__ = '1.2'
-
-import code
-import datetime
-import functools
-import json
-
-# Pylint recommends we use "from chromite.lib import cros_logging as logging".
-# Chromite specific policy message, we want to keep using the standard logging.
-# pylint: disable=cros-logging-import
-import logging
-
-# pylint: disable=deprecated-module
-import optparse
-
-import time
-import sys
-import urllib.error
-import urllib.parse
-import urllib.request
-
-try:
- from natsort import natsorted
-except ImportError:
- # natsorted is a simple helper to sort "naturally", e.g. "vm40" is sorted
- # after "vm7". Defaults to normal sorting.
- natsorted = sorted
-
-# These values are buildbot constants used for Build and BuildStep.
-# This line was copied from master/buildbot/status/builder.py.
-SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = list(range(6))
-
-## Generic node caching code.
-
-
-class Node(object):
- """Root class for all nodes in the graph.
-
- Provides base functionality for any node in the graph, independent if it has
- children or not or if its content can be addressed through an url or needs to
- be fetched as part of another node.
-
- self.printable_attributes is only used for self documentation and for str()
- implementation.
- """
- printable_attributes = []
-
- def __init__(self, parent, url):
- self.printable_attributes = self.printable_attributes[:]
- if url:
- self.printable_attributes.append('url')
- url = url.rstrip('/')
- if parent is not None:
- self.printable_attributes.append('parent')
- self.url = url
- self.parent = parent
-
- def __str__(self):
- return self.to_string()
-
- def __repr__(self):
- """Embeds key if present."""
- key = getattr(self, 'key', None)
- if key is not None:
- return '<%s key=%s>' % (self.__class__.__name__, key)
- cached_keys = getattr(self, 'cached_keys', None)
- if cached_keys is not None:
- return '<%s keys=%s>' % (self.__class__.__name__, cached_keys)
- return super(Node, self).__repr__()
-
- def to_string(self, maximum=100):
- out = ['%s:' % self.__class__.__name__]
- assert not 'printable_attributes' in self.printable_attributes
-
- def limit(txt):
- txt = str(txt)
- if maximum > 0:
- if len(txt) > maximum + 2:
- txt = txt[:maximum] + '...'
- return txt
-
- for k in sorted(self.printable_attributes):
- if k == 'parent':
- # Avoid infinite recursion.
- continue
- out.append(limit(' %s: %r' % (k, getattr(self, k))))
- return '\n'.join(out)
-
- def refresh(self):
- """Refreshes the data."""
- self.discard()
- return self.cache()
-
- def cache(self): # pragma: no cover
- """Caches the data."""
- raise NotImplementedError()
-
- def discard(self): # pragma: no cover
- """Discards cached data.
-
- Pretty much everything is temporary except completed Build.
- """
- raise NotImplementedError()
-
-
-class AddressableBaseDataNode(Node): # pylint: disable=W0223
- """A node that contains a dictionary of data that can be fetched with an url.
-
- The node is directly addressable. It also often can be fetched by the parent.
- """
- printable_attributes = Node.printable_attributes + ['data']
-
- def __init__(self, parent, url, data):
- super(AddressableBaseDataNode, self).__init__(parent, url)
- self._data = data
-
- @property
- def cached_data(self):
- return self._data
-
- @property
- def data(self):
- self.cache()
- return self._data
-
- def cache(self):
- if self._data is None:
- self._data = self._readall()
- return True
- return False
-
- def discard(self):
- self._data = None
-
- def read(self, suburl):
- assert self.url, self.__class__.__name__
- url = self.url
- if suburl:
- url = '%s/%s' % (self.url, suburl)
- return self.parent.read(url)
-
- def _readall(self):
- return self.read('')
-
-
-class AddressableDataNode(AddressableBaseDataNode): # pylint: disable=W0223
- """Automatically encodes the url."""
-
- def __init__(self, parent, url, data):
- super(AddressableDataNode, self).__init__(parent, urllib.parse.quote(url),
- data)
-
-
-class NonAddressableDataNode(Node): # pylint: disable=W0223
- """A node that cannot be addressed by an unique url.
-
- The data comes directly from the parent.
- """
-
- def __init__(self, parent, subkey):
- super(NonAddressableDataNode, self).__init__(parent, None)
- self.subkey = subkey
-
- @property
- def cached_data(self):
- if self.parent.cached_data is None:
- return None
- return self.parent.cached_data[self.subkey]
-
- @property
- def data(self):
- return self.parent.data[self.subkey]
-
- def cache(self):
- self.parent.cache()
-
- def discard(self): # pragma: no cover
- """Avoid invalid state when parent recreate the object."""
- raise AttributeError('Call parent discard() instead')
-
-
-class VirtualNodeList(Node):
- """Base class for every node that has children.
-
- Adds partial supports for keys and iterator functionality. 'key' can be a
- string or a int. Not to be used directly.
- """
- printable_attributes = Node.printable_attributes + ['keys']
-
- def __init__(self, parent, url):
- super(VirtualNodeList, self).__init__(parent, url)
- # Keeps the keys independently when ordering is needed.
- self._is_cached = False
- self._has_keys_cached = False
-
- def __contains__(self, key):
- """Enables 'if i in obj:'."""
- return key in self.keys
-
- def __iter__(self):
- """Enables 'for i in obj:'. It returns children."""
- self.cache_keys()
- for key in self.keys:
- yield self[key]
-
- def __len__(self):
- """Enables 'len(obj)' to get the number of childs."""
- return len(self.keys)
-
- def discard(self):
- """Discards data.
-
- The default behavior is to not invalidate cached keys. The only place where
- keys need to be invalidated is with Builds.
- """
- self._is_cached = False
- self._has_keys_cached = False
-
- @property
- def cached_children(self): # pragma: no cover
- """Returns an iterator over the children that are cached."""
- raise NotImplementedError()
-
- @property
- def cached_keys(self): # pragma: no cover
- raise NotImplementedError()
-
- @property
- def keys(self): # pragma: no cover
- """Returns the keys for every children."""
- raise NotImplementedError()
-
- def __getitem__(self, key): # pragma: no cover
- """Returns a child, without fetching its data.
-
- The children could be invalid since no verification is done.
- """
- raise NotImplementedError()
-
- def cache(self): # pragma: no cover
- """Cache all the children."""
- raise NotImplementedError()
-
- def cache_keys(self): # pragma: no cover
- """Cache all children's keys."""
- raise NotImplementedError()
-
-
-class NodeList(VirtualNodeList): # pylint: disable=W0223
- """Adds a cache of the keys."""
-
- def __init__(self, parent, url):
- super(NodeList, self).__init__(parent, url)
- self._keys = []
-
- @property
- def cached_keys(self):
- return self._keys
-
- @property
- def keys(self):
- self.cache_keys()
- return self._keys
-
-
-class NonAddressableNodeList(VirtualNodeList): # pylint: disable=W0223
- """A node that contains children but retrieves all its data from its parent.
-
- I.e. there's no url to get directly this data.
- """
- # Child class object for children of this instance. For example, BuildSteps
- # has BuildStep children.
- _child_cls = None
-
- def __init__(self, parent, subkey):
- super(NonAddressableNodeList, self).__init__(parent, None)
- self.subkey = subkey
- assert (not isinstance(self._child_cls, NonAddressableDataNode) and
- issubclass(self._child_cls, NonAddressableDataNode)), (
- self._child_cls.__name__)
-
- @property
- def cached_children(self):
- if self.parent.cached_data is not None:
- for i in range(len(self.parent.cached_data[self.subkey])):
- yield self[i]
-
- @property
- def cached_data(self):
- if self.parent.cached_data is None:
- return None
- return self.parent.data.get(self.subkey, None)
-
- @property
- def cached_keys(self):
- if self.parent.cached_data is None:
- return None
- return list(range(len(self.parent.data.get(self.subkey, []))))
-
- @property
- def data(self):
- return self.parent.data[self.subkey]
-
- def cache(self):
- self.parent.cache()
-
- def cache_keys(self):
- self.parent.cache()
-
- def discard(self): # pragma: no cover
- """Do not call.
-
- Avoid infinite recursion by having the caller calls the parent's
- discard() explicitely.
- """
- raise AttributeError('Call parent discard() instead')
-
- def __iter__(self):
- """Enables 'for i in obj:'. It returns children."""
- if self.data:
- for i in range(len(self.data)):
- yield self[i]
-
- def __getitem__(self, key):
- """Doesn't cache the value, it's not needed.
-
- TODO(maruel): Cache?
- """
- if isinstance(key, int) and key < 0:
- key = len(self.data) + key
- # pylint: disable=E1102
- return self._child_cls(self, key)
-
-
-class AddressableNodeList(NodeList):
- """A node that has children that can be addressed with an url."""
-
- # Child class object for children of this instance. For example, Builders has
- # Builder children and Builds has Build children.
- _child_cls = None
-
- def __init__(self, parent, url):
- super(AddressableNodeList, self).__init__(parent, url)
- self._cache = {}
- assert (not isinstance(self._child_cls, AddressableDataNode) and
- issubclass(self._child_cls, AddressableDataNode)), (
- self._child_cls.__name__)
-
- @property
- def cached_children(self):
- for item in self._cache.values():
- if item.cached_data is not None:
- yield item
-
- @property
- def cached_keys(self):
- return list(self._cache.keys())
-
- def __getitem__(self, key):
- """Enables 'obj[i]'."""
- if self._has_keys_cached and not key in self._keys:
- raise KeyError(key)
-
- if not key in self._cache:
- # Create an empty object.
- self._create_obj(key, None)
- return self._cache[key]
-
- def cache(self):
- if not self._is_cached:
- data = self._readall()
- for key in sorted(data):
- self._create_obj(key, data[key])
- self._is_cached = True
- self._has_keys_cached = True
-
- def cache_partial(self, children):
- """Caches a partial number of children.
-
- This method is more efficient since it does a single request for all the
- children instead of one request per children.
-
- It only grab objects not already cached.
- """
- # pylint: disable=W0212
- if not self._is_cached:
- to_fetch = [
- child for child in children
- if not (child in self._cache and self._cache[child].cached_data)
- ]
- if to_fetch:
- # Similar to cache(). The only reason to sort is to simplify testing.
- params = '&'.join(
- 'select=%s' % urllib.parse.quote(str(v)) for v in sorted(to_fetch))
- data = self.read('?' + params)
- for key in sorted(data):
- self._create_obj(key, data[key])
-
- def cache_keys(self):
- """Implement to speed up enumeration. Defaults to call cache()."""
- if not self._has_keys_cached:
- self.cache()
- assert self._has_keys_cached
-
- def discard(self):
- """Discards temporary children."""
- super(AddressableNodeList, self).discard()
- for v in self._cache.values():
- v.discard()
-
- def read(self, suburl):
- assert self.url, self.__class__.__name__
- url = self.url
- if suburl:
- url = '%s/%s' % (self.url, suburl)
- return self.parent.read(url)
-
- def _create_obj(self, key, data):
- """Creates an object of type self._child_cls."""
- # pylint: disable=E1102
- obj = self._child_cls(self, key, data)
- # obj.key and key may be different.
- # No need to overide cached data with None.
- if data is not None or obj.key not in self._cache:
- self._cache[obj.key] = obj
- if obj.key not in self._keys:
- self._keys.append(obj.key)
-
- def _readall(self):
- return self.read('')
-
-
-class SubViewNodeList(VirtualNodeList): # pylint: disable=W0223
- """A node that shows a subset of children that comes from another structure.
-
- The node is not addressable.
-
- E.g. the keys are retrieved from parent but the actual data comes from
- virtual_parent.
- """
-
- def __init__(self, parent, virtual_parent, subkey):
- super(SubViewNodeList, self).__init__(parent, None)
- self.subkey = subkey
- self.virtual_parent = virtual_parent
- assert isinstance(self.parent, AddressableDataNode)
- assert isinstance(self.virtual_parent, NodeList)
-
- @property
- def cached_children(self):
- if self.parent.cached_data is not None:
- for item in self.keys:
- if item in self.virtual_parent.keys:
- child = self[item]
- if child.cached_data is not None:
- yield child
-
- @property
- def cached_keys(self):
- return (self.parent.cached_data or {}).get(self.subkey, [])
-
- @property
- def keys(self):
- self.cache_keys()
- return self.parent.data.get(self.subkey, [])
-
- def cache(self):
- """Batch request for each child in a single read request."""
- if not self._is_cached:
- self.virtual_parent.cache_partial(self.keys)
- self._is_cached = True
-
- def cache_keys(self):
- if not self._has_keys_cached:
- self.parent.cache()
- self._has_keys_cached = True
-
- def discard(self):
- if self.parent.cached_data is not None:
- for child in self.virtual_parent.cached_children:
- if child.key in self.keys:
- child.discard()
- self.parent.discard()
- super(SubViewNodeList, self).discard()
-
- def __getitem__(self, key):
- """Makes sure the key is in our key but grab it from the virtual parent."""
- return self.virtual_parent[key]
-
- def __iter__(self):
- self.cache()
- return super(SubViewNodeList, self).__iter__()
-
-
-# Buildbot-specific code
-
-
-class Slave(AddressableDataNode):
- """Buildbot slave class."""
- printable_attributes = AddressableDataNode.printable_attributes + [
- 'name',
- 'key',
- 'connected',
- 'version',
- ]
-
- def __init__(self, parent, name, data):
- super(Slave, self).__init__(parent, name, data)
- self.name = name
- self.key = self.name
- # TODO(maruel): Add SlaveBuilders and a 'builders' property.
- # TODO(maruel): Add a 'running_builds' property.
-
- @property
- def connected(self):
- return self.data.get('connected', False)
-
- @property
- def version(self):
- return self.data.get('version')
-
-
-class Slaves(AddressableNodeList):
- """Buildbot slaves."""
- _child_cls = Slave
- printable_attributes = AddressableNodeList.printable_attributes + ['names']
-
- def __init__(self, parent):
- super(Slaves, self).__init__(parent, 'slaves')
-
- @property
- def names(self):
- return self.keys
-
-
-class BuilderSlaves(SubViewNodeList):
- """Similar to Slaves but only list slaves connected to a specific builder."""
- printable_attributes = SubViewNodeList.printable_attributes + ['names']
-
- def __init__(self, parent):
- super(BuilderSlaves, self).__init__(parent, parent.parent.parent.slaves,
- 'slaves')
-
- @property
- def names(self):
- return self.keys
-
-
-class BuildStep(NonAddressableDataNode):
- """Class for a buildbot build step."""
- printable_attributes = NonAddressableDataNode.printable_attributes + [
- 'name',
- 'number',
- 'start_time',
- 'end_time',
- 'duration',
- 'is_started',
- 'is_finished',
- 'is_running',
- 'result',
- 'simplified_result',
- ]
-
- def __init__(self, parent, number):
- """Pre-loaded, since the data is retrieved via the Build object."""
- assert isinstance(number, int)
- super(BuildStep, self).__init__(parent, number)
- self.number = number
-
- @property
- def start_time(self):
- if self.data.get('times'):
- return int(round(self.data['times'][0]))
-
- @property
- def end_time(self):
- times = self.data.get('times')
- if times and len(times) == 2 and times[1]:
- return int(round(times[1]))
-
- @property
- def duration(self):
- if self.start_time:
- return (self.end_time or int(round(time.time()))) - self.start_time
-
- @property
- def name(self):
- return self.data['name']
-
- @property
- def is_started(self):
- return self.data.get('isStarted', False)
-
- @property
- def is_finished(self):
- return self.data.get('isFinished', False)
-
- @property
- def is_running(self):
- return self.is_started and not self.is_finished
-
- @property
- def result(self):
- result = self.data.get('results')
- if result is None:
- # results may be 0, in that case with filter=1, the value won't be
- # present.
- if self.data.get('isFinished'):
- result = self.data.get('results', 0)
- while isinstance(result, list):
- result = result[0]
- return result
-
- @property
- def simplified_result(self):
- """Returns a simplified 3 state value, True, False or None."""
- result = self.result
- if result in (SUCCESS, WARNINGS):
- return True
- elif result in (FAILURE, EXCEPTION, RETRY):
- return False
- assert result in (None, SKIPPED), (result, self.data)
- return None
-
-
-class BuildSteps(NonAddressableNodeList):
- """Duplicates keys to support lookup by both step number and step name."""
- printable_attributes = NonAddressableNodeList.printable_attributes + [
- 'failed',
- ]
- _child_cls = BuildStep
-
- def __init__(self, parent):
- """Pre-loaded, since the data is retrieved via the Build object."""
- super(BuildSteps, self).__init__(parent, 'steps')
-
- @property
- def keys(self):
- """Returns the steps name in order."""
- return [i['name'] for i in self.data or []]
-
- @property
- def failed(self):
- """Shortcuts that lists the step names of steps that failed."""
- return [step.name for step in self if step.simplified_result is False]
-
- def __getitem__(self, key):
- """Accept step name in addition to index number."""
- if isinstance(key, str):
- # It's a string, try to find the corresponding index.
- for i, step in enumerate(self.data):
- if step['name'] == key:
- key = i
- break
- else:
- raise KeyError(key)
- return super(BuildSteps, self).__getitem__(key)
-
-
-class Build(AddressableDataNode):
- """Buildbot build info."""
- printable_attributes = AddressableDataNode.printable_attributes + [
- 'key',
- 'number',
- 'steps',
- 'blame',
- 'reason',
- 'revision',
- 'result',
- 'simplified_result',
- 'start_time',
- 'end_time',
- 'duration',
- 'slave',
- 'properties',
- 'completed',
- ]
-
- def __init__(self, parent, key, data):
- super(Build, self).__init__(parent, str(key), data)
- self.number = int(key)
- self.key = self.number
- self.steps = BuildSteps(self)
-
- @property
- def blame(self):
- return self.data.get('blame', [])
-
- @property
- def builder(self):
- """Returns the Builder object.
-
- Goes up the hierarchy to find the Buildbot.builders[builder] instance.
- """
- return self.parent.parent.parent.parent.builders[self.data['builderName']]
-
- @property
- def start_time(self):
- if self.data.get('times'):
- return int(round(self.data['times'][0]))
-
- @property
- def end_time(self):
- times = self.data.get('times')
- if times and len(times) == 2 and times[1]:
- return int(round(times[1]))
-
- @property
- def duration(self):
- if self.start_time:
- return (self.end_time or int(round(time.time()))) - self.start_time
-
- @property
- def eta(self):
- return self.data.get('eta', 0)
-
- @property
- def completed(self):
- return self.data.get('currentStep') is None
-
- @property
- def properties(self):
- return self.data.get('properties', [])
-
- @property
- def reason(self):
- return self.data.get('reason')
-
- @property
- def result(self):
- result = self.data.get('results')
- while isinstance(result, list):
- result = result[0]
- if result is None and self.steps:
- # results may be 0, in that case with filter=1, the value won't be
- # present.
- result = self.steps[-1].result
- return result
-
- @property
- def revision(self):
- return self.data.get('sourceStamp', {}).get('revision')
-
- @property
- def simplified_result(self):
- """Returns a simplified 3 state value, True, False or None."""
- result = self.result
- if result in (SUCCESS, WARNINGS, SKIPPED):
- return True
- elif result in (FAILURE, EXCEPTION, RETRY):
- return False
- assert result is None, (result, self.data)
- return None
-
- @property
- def slave(self):
- """Returns the Slave object.
-
- Goes up the hierarchy to find the Buildbot.slaves[slave] instance.
- """
- return self.parent.parent.parent.parent.slaves[self.data['slave']]
-
- def discard(self):
- """Completed Build isn't discarded."""
- if self._data and self.result is None:
- assert not self.steps or not self.steps[-1].data.get('isFinished')
- self._data = None
-
-
-class CurrentBuilds(SubViewNodeList):
- """Lists of the current builds."""
-
- def __init__(self, parent):
- super(CurrentBuilds, self).__init__(parent, parent.builds, 'currentBuilds')
-
-
-class PendingBuilds(AddressableDataNode):
- """List of the pending builds."""
-
- def __init__(self, parent):
- super(PendingBuilds, self).__init__(parent, 'pendingBuilds', None)
-
-
-class Builds(AddressableNodeList):
- """Supports iteration.
-
- Recommends using .cache() to speed up if a significant number of builds are
- iterated over.
- """
- _child_cls = Build
-
- def __init__(self, parent):
- super(Builds, self).__init__(parent, 'builds')
-
- def __getitem__(self, key):
- """Support for negative reference and enable retrieving non-cached builds.
-
- e.g. -1 is the last build, -2 is the previous build before the last one.
- """
- key = int(key)
- if key < 0:
- # Convert negative to positive build number.
- self.cache_keys()
- # Since the negative value can be outside of the cache keys range, use the
- # highest key value and calculate from it.
- key = max(self._keys) + key + 1
-
- if key not in self._cache:
- # Create an empty object.
- self._create_obj(key, None)
- return self._cache[key]
-
- def __iter__(self):
- """Returns cached Build objects in reversed order.
-
- The most recent build is returned first and then in reverse chronological
- order, up to the oldest cached build by the server. Older builds can be
- accessed but will trigger significantly more I/O so they are not included by
- default in the iteration.
-
- To access the older builds, use self.iterall() instead.
- """
- self.cache()
- return reversed(list(self._cache.values()))
-
- def iterall(self):
- """Returns Build objects in decreasing order unbounded up to build 0.
-
- The most recent build is returned first and then in reverse chronological
- order. Older builds can be accessed and will trigger significantly more I/O
- so use this carefully.
- """
- # Only cache keys here.
- self.cache_keys()
- if self._keys:
- for i in range(max(self._keys), -1, -1):
- yield self[i]
-
- def cache_keys(self):
- """Grabs the keys (build numbers) from the builder."""
- if not self._has_keys_cached:
- for i in self.parent.data.get('cachedBuilds', []):
- i = int(i)
- self._cache.setdefault(i, Build(self, i, None))
- if i not in self._keys:
- self._keys.append(i)
- self._has_keys_cached = True
-
- def discard(self):
- super(Builds, self).discard()
- # Can't keep keys.
- self._has_keys_cached = False
-
- def _readall(self):
- return self.read('_all')
-
-
-class Builder(AddressableDataNode):
- """Builder status."""
- printable_attributes = AddressableDataNode.printable_attributes + [
- 'name',
- 'key',
- 'builds',
- 'slaves',
- 'pending_builds',
- 'current_builds',
- ]
-
- def __init__(self, parent, name, data):
- super(Builder, self).__init__(parent, name, data)
- self.name = name
- self.key = name
- self.builds = Builds(self)
- self.slaves = BuilderSlaves(self)
- self.current_builds = CurrentBuilds(self)
- self.pending_builds = PendingBuilds(self)
-
- def discard(self):
- super(Builder, self).discard()
- self.builds.discard()
- self.slaves.discard()
- self.current_builds.discard()
-
-
-class Builders(AddressableNodeList):
- """Root list of builders."""
- _child_cls = Builder
-
- def __init__(self, parent):
- super(Builders, self).__init__(parent, 'builders')
-
-
-class Buildbot(AddressableBaseDataNode):
- """This object should be recreated on a master restart as it caches data."""
- # Throttle fetches to not kill the server.
- auto_throttle = None
- printable_attributes = AddressableDataNode.printable_attributes + [
- 'slaves',
- 'builders',
- 'last_fetch',
- ]
-
- def __init__(self, url):
- super(Buildbot, self).__init__(None, url.rstrip('/') + '/json', None)
- self._builders = Builders(self)
- self._slaves = Slaves(self)
- self.last_fetch = None
-
- @property
- def builders(self):
- return self._builders
-
- @property
- def slaves(self):
- return self._slaves
-
- def discard(self):
- """Discards information about Builders and Slaves."""
- super(Buildbot, self).discard()
- self._builders.discard()
- self._slaves.discard()
-
- def read(self, suburl):
- if self.auto_throttle:
- if self.last_fetch:
- delta = datetime.datetime.utcnow() - self.last_fetch
- remaining = (datetime.timedelta(seconds=self.auto_throttle) - delta)
- if remaining > datetime.timedelta(seconds=0):
- logging.debug('Sleeping for %ss', remaining)
- time.sleep(remaining.seconds)
- self.last_fetch = datetime.datetime.utcnow()
- url = '%s/%s' % (self.url, suburl)
- if '?' in url:
- url += '&filter=1'
- else:
- url += '?filter=1'
- logging.info('read(%s)', suburl)
- channel = urllib.request.urlopen(url)
- data = channel.read()
- try:
- return json.loads(data)
- except ValueError:
- if channel.getcode() >= 400:
- # Convert it into an HTTPError for easier processing.
- raise urllib.error.HTTPError(url, channel.getcode(),
- '%s:\n%s' % (url, data), channel.headers,
- None)
- raise
-
- def _readall(self):
- return self.read('project')
-
-
-# Controller code
-
-
-def usage(more):
-
- def hook(fn):
- fn.func_usage_more = more
- return fn
-
- return hook
-
-
-def need_buildbot(fn):
- """Post-parse args to create a buildbot object."""
-
- @functools.wraps(fn)
- def hook(parser, args, *extra_args, **kwargs):
- old_parse_args = parser.parse_args
-
- def new_parse_args(args):
- options, args = old_parse_args(args)
- if len(args) < 1:
- parser.error('Need to pass the root url of the buildbot')
- url = args.pop(0)
- if not url.startswith('http'):
- url = 'http://' + url
- buildbot = Buildbot(url)
- buildbot.auto_throttle = options.throttle
- return options, args, buildbot
-
- parser.parse_args = new_parse_args
- # Call the original function with the modified parser.
- return fn(parser, args, *extra_args, **kwargs)
-
- hook.func_usage_more = '[options] <url>'
- return hook
-
-
-@need_buildbot
-def CMDpending(parser, args):
- """Lists pending jobs."""
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- if not options.builders:
- options.builders = buildbot.builders.keys
- for builder in options.builders:
- builder = buildbot.builders[builder]
- pending_builds = builder.data.get('pendingBuilds', 0)
- if not pending_builds:
- continue
- print('Builder %s: %d' % (builder.name, pending_builds))
- if not options.quiet:
- for pending in builder.pending_builds.data:
- if 'revision' in pending['source']:
- print(' revision: %s' % pending['source']['revision'])
- for change in pending['source']['changes']:
- print(' change:')
- print(' comment: %r' % change['comments'][:50])
- print(' who: %s' % change['who'])
- return 0
-
-
-@usage('[options] <url> [commands] ...')
-@need_buildbot
-def CMDrun(parser, args):
- """Runs commands passed as parameters.
-
- When passing commands on the command line, each command will be run as if it
- was on its own line.
- """
- parser.add_option('-f', '--file', help='Read script from file')
- parser.add_option(
- '-i', dest='use_stdin', action='store_true', help='Read script on stdin')
- # Variable 'buildbot' is not used directly.
- # pylint: disable=W0612
- options, args, _ = parser.parse_args(args)
- if (bool(args) + bool(options.use_stdin) + bool(options.file)) != 1:
- parser.error('Need to pass only one of: <commands>, -f <file> or -i')
- if options.use_stdin:
- cmds = sys.stdin.read()
- elif options.file:
- cmds = open(options.file).read()
- else:
- cmds = '\n'.join(args)
- compiled = compile(cmds, '<cmd line>', 'exec')
- # pylint: disable=eval-used
- eval(compiled, globals(), locals())
- return 0
-
-
-@need_buildbot
-def CMDinteractive(parser, args):
- """Runs an interactive shell to run queries."""
- _, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- prompt = ('Buildbot interactive console for "%s".\n'
- "Hint: Start with typing: 'buildbot.printable_attributes' or "
- "'print str(buildbot)' to explore.") % buildbot.url[:-len('/json')]
- local_vars = {'buildbot': buildbot, 'b': buildbot}
- code.interact(prompt, None, local_vars)
-
-
-@need_buildbot
-def CMDidle(parser, args):
- """Lists idle slaves."""
- return find_idle_busy_slaves(parser, args, True)
-
-
-@need_buildbot
-def CMDbusy(parser, args):
- """Lists idle slaves."""
- return find_idle_busy_slaves(parser, args, False)
-
-
-@need_buildbot
-def CMDdisconnected(parser, args):
- """Lists disconnected slaves."""
- _, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- for slave in buildbot.slaves:
- if not slave.connected:
- print(slave.name)
- return 0
-
-
-def find_idle_busy_slaves(parser, args, show_idle):
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- parser.add_option(
- '-s',
- '--slave',
- dest='slaves',
- action='append',
- default=[],
- help='Slaves to filter on')
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- if not options.builders:
- options.builders = buildbot.builders.keys
- for builder in options.builders:
- builder = buildbot.builders[builder]
- if options.slaves:
- # Only the subset of slaves connected to the builder.
- slaves = list(set(options.slaves).intersection(set(builder.slaves.names)))
- if not slaves:
- continue
- else:
- slaves = builder.slaves.names
- busy_slaves = [build.slave.name for build in builder.current_builds]
- if show_idle:
- slaves = natsorted(set(slaves) - set(busy_slaves))
- else:
- slaves = natsorted(set(slaves) & set(busy_slaves))
- if options.quiet:
- for slave in slaves:
- print(slave)
- else:
- if slaves:
- print('Builder %s: %s' % (builder.name, ', '.join(slaves)))
- return 0
-
-
-def last_failure(buildbot,
- builders=None,
- slaves=None,
- steps=None,
- no_cache=False):
- """Returns Build object with last failure with the specific filters."""
- builders = builders or buildbot.builders.keys
- for builder in builders:
- builder = buildbot.builders[builder]
- if slaves:
- # Only the subset of slaves connected to the builder.
- builder_slaves = list(set(slaves).intersection(set(builder.slaves.names)))
- if not builder_slaves:
- continue
- else:
- builder_slaves = builder.slaves.names
-
- if not no_cache and len(builder.slaves) > 2:
- # Unless you just want the last few builds, it's often faster to
- # fetch the whole thing at once, at the cost of a small hickup on
- # the buildbot.
- # TODO(maruel): Cache only N last builds or all builds since
- # datetime.
- builder.builds.cache()
-
- found = []
- for build in builder.builds:
- if build.slave.name not in builder_slaves or build.slave.name in found:
- continue
- # Only add the slave for the first completed build but still look for
- # incomplete builds.
- if build.completed:
- found.append(build.slave.name)
-
- if steps:
- if any(build.steps[step].simplified_result is False for step in steps):
- yield build
- elif build.simplified_result is False:
- yield build
-
- if len(found) == len(builder_slaves):
- # Found all the slaves, quit.
- break
-
-
-@need_buildbot
-def CMDlast_failure(parser, args):
- """Lists all slaves that failed on that step on their last build.
-
- Examples:
- To find all slaves where their last build was a compile failure,
- run with --step compile
- """
- parser.add_option(
- '-S',
- '--step',
- dest='steps',
- action='append',
- default=[],
- help='List all slaves that failed on that step on their last build')
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- parser.add_option(
- '-s',
- '--slave',
- dest='slaves',
- action='append',
- default=[],
- help='Slaves to filter on')
- parser.add_option(
- '-n',
- '--no_cache',
- action='store_true',
- help="Don't load all builds at once")
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- print_builders = not options.quiet and len(options.builders) != 1
- last_builder = None
- for build in last_failure(
- buildbot,
- builders=options.builders,
- slaves=options.slaves,
- steps=options.steps,
- no_cache=options.no_cache):
-
- if print_builders and last_builder != build.builder:
- print(build.builder.name)
- last_builder = build.builder
-
- if options.quiet:
- if options.slaves:
- print('%s: %s' % (build.builder.name, build.slave.name))
- else:
- print(build.slave.name)
- else:
- out = '%d on %s: blame:%s' % (build.number, build.slave.name, ', '.join(
- build.blame))
- if print_builders:
- out = ' ' + out
- print(out)
-
- if len(options.steps) != 1:
- for step in build.steps:
- if step.simplified_result is False:
- # Assume the first line is the text name anyway.
- summary = ', '.join(step.data['text'][1:])[:40]
- out = ' %s: "%s"' % (step.data['name'], summary)
- if print_builders:
- out = ' ' + out
- print(out)
- return 0
-
-
-@need_buildbot
-def CMDcurrent(parser, args):
- """Lists current jobs."""
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- parser.add_option(
- '--blame', action='store_true', help='Only print the blame list')
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- if not options.builders:
- options.builders = buildbot.builders.keys
-
- if options.blame:
- blame = set()
- for builder in options.builders:
- for build in buildbot.builders[builder].current_builds:
- if build.blame:
- for blamed in build.blame:
- blame.add(blamed)
- print('\n'.join(blame))
- return 0
-
- for builder in options.builders:
- builder = buildbot.builders[builder]
- if not options.quiet and builder.current_builds:
- print(builder.name)
- for build in builder.current_builds:
- if options.quiet:
- print(build.slave.name)
- else:
- out = '%4d: slave=%10s' % (build.number, build.slave.name)
- out += ' duration=%5d' % (build.duration or 0)
- if build.eta:
- out += ' eta=%5.0f' % build.eta
- else:
- out += ' '
- if build.blame:
- out += ' blame=' + ', '.join(build.blame)
- print(out)
-
- return 0
-
-
-@need_buildbot
-def CMDbuilds(parser, args):
- """Lists all builds.
-
- Examples:
- To find all builds on a single slave, run with -b bar -s foo.
- """
- parser.add_option(
- '-r', '--result', type='int', help='Build result to filter on')
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- parser.add_option(
- '-s',
- '--slave',
- dest='slaves',
- action='append',
- default=[],
- help='Slaves to filter on')
- parser.add_option(
- '-n',
- '--no_cache',
- action='store_true',
- help="Don't load all builds at once")
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- builders = options.builders or buildbot.builders.keys
- for builder in builders:
- builder = buildbot.builders[builder]
- for build in builder.builds:
- if not options.slaves or build.slave.name in options.slaves:
- if options.quiet:
- out = ''
- if options.builders:
- out += '%s/' % builder.name
- if len(options.slaves) != 1:
- out += '%s/' % build.slave.name
- out += '%d revision:%s result:%s blame:%s' % (
- build.number, build.revision, build.result, ','.join(build.blame))
- print(out)
- else:
- print(build)
- return 0
-
-
-@need_buildbot
-def CMDcount(parser, args):
- """Count the number of builds that occured during a specific period."""
- parser.add_option(
- '-o', '--over', type='int', help='Number of seconds to look for')
- parser.add_option(
- '-b',
- '--builder',
- dest='builders',
- action='append',
- default=[],
- help='Builders to filter on')
- options, args, buildbot = parser.parse_args(args)
- if args:
- parser.error('Unrecognized parameters: %s' % ' '.join(args))
- if not options.over:
- parser.error(
- 'Specify the number of seconds, e.g. --over 86400 for the last 24 '
- 'hours')
- builders = options.builders or buildbot.builders.keys
- counts = {}
- since = time.time() - options.over
- for builder in builders:
- builder = buildbot.builders[builder]
- counts[builder.name] = 0
- if not options.quiet:
- print(builder.name)
- for build in builder.builds.iterall():
- try:
- start_time = build.start_time
- except urllib.error.HTTPError:
- # The build was probably trimmed.
- print(
- 'Failed to fetch build %s/%d' % (builder.name, build.number),
- file=sys.stderr)
- continue
- if start_time >= since:
- counts[builder.name] += 1
- else:
- break
- if not options.quiet:
- print('.. %d' % counts[builder.name])
-
- align_name = max(len(b) for b in counts)
- align_number = max(len(str(c)) for c in counts.values())
- for builder in sorted(counts):
- print('%*s: %*d' % (align_name, builder, align_number, counts[builder]))
- print('Total: %d' % sum(counts.values()))
- return 0
-
-
-def gen_parser():
- """Returns an OptionParser instance with default options.
-
- It should be then processed with gen_usage() before being used.
- """
- parser = optparse.OptionParser(version=__version__)
- # Remove description formatting
- parser.format_description = lambda x: parser.description
- # Add common parsing.
- old_parser_args = parser.parse_args
-
- def Parse(*args, **kwargs):
- options, args = old_parser_args(*args, **kwargs)
- if options.verbose >= 2:
- logging.basicConfig(level=logging.DEBUG)
- elif options.verbose:
- logging.basicConfig(level=logging.INFO)
- else:
- logging.basicConfig(level=logging.WARNING)
- return options, args
-
- parser.parse_args = Parse
-
- parser.add_option(
- '-v',
- '--verbose',
- action='count',
- help='Use multiple times to increase logging leve')
- parser.add_option(
- '-q',
- '--quiet',
- action='store_true',
- help='Reduces the output to be parsed by scripts, independent of -v')
- parser.add_option(
- '--throttle',
- type='float',
- help='Minimum delay to sleep between requests')
- return parser
-
-
-# Generic subcommand handling code
-
-
-def Command(name):
- return getattr(sys.modules[__name__], 'CMD' + name, None)
-
-
-@usage('<command>')
-def CMDhelp(parser, args):
- """Print list of commands or use 'help <command>'."""
- _, args = parser.parse_args(args)
- if len(args) == 1:
- return main(args + ['--help'])
- parser.print_help()
- return 0
-
-
-def gen_usage(parser, command):
- """Modifies an OptionParser object with the command's documentation.
-
- The documentation is taken from the function's docstring.
- """
- obj = Command(command)
- more = getattr(obj, 'func_usage_more')
- # OptParser.description prefer nicely non-formatted strings.
- parser.description = obj.__doc__ + '\n'
- parser.set_usage('usage: %%prog %s %s' % (command, more))
-
-
-def main(args=None):
- # Do it late so all commands are listed.
- # pylint: disable=E1101
- CMDhelp.__doc__ += '\n\nCommands are:\n' + '\n'.join(
- ' %-12s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n', 1)[0])
- for fn in dir(sys.modules[__name__])
- if fn.startswith('CMD'))
-
- parser = gen_parser()
- if args is None:
- args = sys.argv[1:]
- if args:
- command = Command(args[0])
- if command:
- # "fix" the usage and the description now that we know the subcommand.
- gen_usage(parser, args[0])
- return command(parser, args[1:])
-
- # Not a known command. Default to help.
- gen_usage(parser, 'help')
- return CMDhelp(parser, args)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/cros_utils/buildbot_utils.py b/cros_utils/buildbot_utils.py
index a06abd26..b600c6aa 100644
--- a/cros_utils/buildbot_utils.py
+++ b/cros_utils/buildbot_utils.py
@@ -21,13 +21,12 @@ INITIAL_SLEEP_TIME = 7200 # 2 hours; wait time before polling buildbot.
SLEEP_TIME = 600 # 10 minutes; time between polling of buildbot.
# Some of our slower builders (llvm-next) are taking more
-# than 11 hours. So, increase this TIME_OUT to 12 hours.
-TIME_OUT = 43200 # Decide the build is dead or will never finish
+# than 12 hours. So, increase this TIME_OUT to 15 hours.
+TIME_OUT = 15 * 60 * 60 # Decide the build is dead or will never finish
class BuildbotTimeout(Exception):
"""Exception to throw when a buildbot operation timesout."""
- pass
def RunCommandInPath(path, cmd):
@@ -51,8 +50,8 @@ def PeekTrybotImage(chromeos_root, buildbucket_id):
and url looks like:
gs://chromeos-image-archive/trybot-elm-release-tryjob/R67-10468.0.0-b20789
"""
- command = (
- 'cros buildresult --report json --buildbucket-id %s' % buildbucket_id)
+ command = ('cros buildresult --report json --buildbucket-id %s' %
+ buildbucket_id)
rc, out, _ = RunCommandInPath(chromeos_root, command)
# Current implementation of cros buildresult returns fail when a job is still
@@ -195,9 +194,9 @@ def GetTrybotImage(chromeos_root,
image = ''
if not image:
- logger.GetLogger().LogError(
- 'Trybot job (buildbucket id: %s) failed with'
- 'status %s; no trybot image generated. ' % (buildbucket_id, status))
+ logger.GetLogger().LogError('Trybot job (buildbucket id: %s) failed with'
+ 'status %s; no trybot image generated. ' %
+ (buildbucket_id, status))
else:
# Convert full gs path to what crosperf expects. For example, convert
# gs://chromeos-image-archive/trybot-elm-release-tryjob/R67-10468.0.0-b20789
@@ -227,13 +226,13 @@ def WaitForImage(chromeos_root, build):
while elapsed_time < TIME_OUT:
if DoesImageExist(chromeos_root, build):
return
- logger.GetLogger().LogOutput(
- 'Image %s not ready, waiting for 10 minutes' % build)
+ logger.GetLogger().LogOutput('Image %s not ready, waiting for 10 minutes' %
+ build)
time.sleep(SLEEP_TIME)
elapsed_time += SLEEP_TIME
- logger.GetLogger().LogOutput(
- 'Image %s not found, waited for %d hours' % (build, (TIME_OUT / 3600)))
+ logger.GetLogger().LogOutput('Image %s not found, waited for %d hours' %
+ (build, (TIME_OUT / 3600)))
raise BuildbotTimeout('Timeout while waiting for image %s' % build)
@@ -244,18 +243,50 @@ def GetLatestImage(chromeos_root, path):
ce = command_executer.GetCommandExecuter()
command = ('gsutil ls gs://chromeos-image-archive/%s' % path)
- _, out, _ = ce.ChrootRunCommandWOutput(
+ ret, out, _ = ce.ChrootRunCommandWOutput(
chromeos_root, command, print_to_console=False)
+ if ret != 0:
+ raise RuntimeError('Failed to list buckets with command: %s.' % command)
candidates = [l.split('/')[-2] for l in out.split()]
candidates = [fmt.match(c) for c in candidates]
candidates = [[int(r) for r in m.group(1, 2, 3, 4)] for m in candidates if m]
candidates.sort(reverse=True)
for c in candidates:
build = '%s/R%d-%d.%d.%d' % (path, c[0], c[1], c[2], c[3])
- # Blacklist "R79-12384.0.0" image released by mistake.
+ # Denylist "R79-12384.0.0" image released by mistake.
# TODO(crbug.com/992242): Remove the filter by 2019-09-05.
if c == [79, 12384, 0, 0]:
continue
if DoesImageExist(chromeos_root, build):
return build
+
+
+def GetLatestRecipeImage(chromeos_root, path):
+ """Get latest nightly test image from recipe bucket.
+
+ Image location example:
+ $ARCHIVE/lulu-llvm-next-nightly/R84-13037.0.0-31011-8883172717979984032
+ """
+
+ fmt = re.compile(r'R([0-9]+)-([0-9]+).([0-9]+).([0-9]+)-([0-9]+)')
+
+ ce = command_executer.GetCommandExecuter()
+ command = ('gsutil ls gs://chromeos-image-archive/%s' % path)
+ ret, out, _ = ce.ChrootRunCommandWOutput(
+ chromeos_root, command, print_to_console=False)
+ if ret != 0:
+ raise RuntimeError('Failed to list buckets with command: %s.' % command)
+ candidates = [l.split('/')[-2] for l in out.split()]
+ candidates = [(fmt.match(c), c) for c in candidates]
+ candidates = [([int(r)
+ for r in m[0].group(1, 2, 3, 4, 5)], m[1])
+ for m in candidates
+ if m]
+ candidates.sort(key=lambda x: x[0], reverse=True)
+ # Try to get ony last two days of images since nightly tests are run once
+ # another day.
+ for c in candidates[:2]:
+ build = '%s/%s' % (path, c[1])
+ if DoesImageExist(chromeos_root, build):
+ return build
diff --git a/cros_utils/buildbot_utils_unittest.py b/cros_utils/buildbot_utils_unittest.py
index 4fc3d170..c615c95f 100755
--- a/cros_utils/buildbot_utils_unittest.py
+++ b/cros_utils/buildbot_utils_unittest.py
@@ -21,18 +21,6 @@ from cros_utils import command_executer
class TrybotTest(unittest.TestCase):
"""Test for CommandExecuter class."""
- old_tryjob_out = (
- 'Verifying patches...\n'
- 'Submitting tryjob...\n'
- 'Successfully sent PUT request to [buildbucket_bucket:master.chromiumos.t'
- 'ryserver] with [config:success-build] [buildbucket_id:895272114382368817'
- '6].\n'
- 'Tryjob submitted!\n'
- 'To view your tryjobs, visit:\n'
- ' http://cros-goldeneye/chromeos/healthmonitoring/buildDetails?buildbuck'
- 'etId=8952721143823688176\n'
- ' https://uberchromegw.corp.google.com/i/chromiumos.tryserver/waterfall?'
- 'committer=laszio@chromium.org&builder=etc\n')
tryjob_out = (
'[{"buildbucket_id": "8952721143823688176", "build_config": '
'"cave-llvm-toolchain-tryjob", "url": '
@@ -44,9 +32,12 @@ class TrybotTest(unittest.TestCase):
'gs://chromeos-image-archive/{0}/R78-12421.0.0/',
'gs://chromeos-image-archive/{0}/R78-12422.0.0/',
'gs://chromeos-image-archive/{0}/R78-12423.0.0/',
- # "R79-12384.0.0" image should be blacklisted.
- # TODO(crbug.com/992242): Remove the filter by 2019-09-05.
- 'gs://chromeos-image-archive/{0}/R79-12384.0.0/',
+ ])
+
+ GSUTILS_LS_RECIPE = '\n'.join([
+ 'gs://chromeos-image-archive/{0}/R83-12995.0.0-30031-8885075268947031/',
+ 'gs://chromeos-image-archive/{0}/R83-13003.0.0-30196-8884755532184725/',
+ 'gs://chromeos-image-archive/{0}/R83-13003.0.0-30218-8884712858556419/',
])
buildresult_out = (
@@ -144,6 +135,44 @@ class TrybotTest(unittest.TestCase):
image = buildbot_utils.GetLatestImage('', IMAGE_DIR)
self.assertIsNone(image)
+ def testGetLatestRecipeImageValid(self):
+ with patch.object(command_executer.CommandExecuter,
+ 'ChrootRunCommandWOutput') as mocked_run:
+ with patch.object(buildbot_utils, 'DoesImageExist') as mocked_imageexist:
+ IMAGE_DIR = 'lulu-llvm-next-nightly'
+ mocked_run.return_value = (0, self.GSUTILS_LS_RECIPE.format(IMAGE_DIR),
+ '')
+ mocked_imageexist.return_value = True
+ image = buildbot_utils.GetLatestRecipeImage('', IMAGE_DIR)
+ self.assertEqual(
+ image, '{0}/R83-13003.0.0-30218-8884712858556419'.format(IMAGE_DIR))
+
+ def testGetLatestRecipeImageInvalid(self):
+ with patch.object(command_executer.CommandExecuter,
+ 'ChrootRunCommandWOutput') as mocked_run:
+ with patch.object(buildbot_utils, 'DoesImageExist') as mocked_imageexist:
+ IMAGE_DIR = 'kefka-llvm-next-nightly'
+ mocked_run.return_value = (0, self.GSUTILS_LS_RECIPE.format(IMAGE_DIR),
+ '')
+ mocked_imageexist.return_value = False
+ image = buildbot_utils.GetLatestRecipeImage('', IMAGE_DIR)
+ self.assertIsNone(image)
+
+ def testGetLatestRecipeImageTwodays(self):
+ with patch.object(command_executer.CommandExecuter,
+ 'ChrootRunCommandWOutput') as mocked_run:
+ with patch.object(buildbot_utils, 'DoesImageExist') as mocked_imageexist:
+ IMAGE_DIR = 'lulu-llvm-next-nightly'
+ mocked_run.return_value = (0, self.GSUTILS_LS_RECIPE.format(IMAGE_DIR),
+ '')
+ mocked_imageexist.side_effect = [False, False, True]
+ image = buildbot_utils.GetLatestRecipeImage('', IMAGE_DIR)
+ self.assertIsNone(image)
+ mocked_imageexist.side_effect = [False, True, True]
+ image = buildbot_utils.GetLatestRecipeImage('', IMAGE_DIR)
+ self.assertEqual(
+ image, '{0}/R83-13003.0.0-30196-8884755532184725'.format(IMAGE_DIR))
+
if __name__ == '__main__':
unittest.main()
diff --git a/cros_utils/command_executer.py b/cros_utils/command_executer.py
index 39bff5ed..aeedf3ea 100755
--- a/cros_utils/command_executer.py
+++ b/cros_utils/command_executer.py
@@ -229,9 +229,11 @@ class CommandExecuter(object):
kwargs['return_output'] = True
return self.RunCommandGeneric(*args, **kwargs)
- def RemoteAccessInitCommand(self, chromeos_root, machine):
+ def RemoteAccessInitCommand(self, chromeos_root, machine, port=None):
command = ''
command += '\nset -- --remote=' + machine
+ if port:
+ command += ' --ssh_port=' + port
command += '\n. ' + chromeos_root + '/src/scripts/common.sh'
command += '\n. ' + chromeos_root + '/src/scripts/remote_access.sh'
command += '\nTMP=$(mktemp -d)'
@@ -240,10 +242,9 @@ class CommandExecuter(object):
return command
def WriteToTempShFile(self, contents):
- # TODO(crbug.com/1048938): use encoding='utf-8' when all dependencies have
- # migrated to python 3.
with tempfile.NamedTemporaryFile(
- 'w', delete=False, prefix=os.uname()[1], suffix='.sh') as f:
+ 'w', encoding='utf-8', delete=False, prefix=os.uname()[1],
+ suffix='.sh') as f:
f.write('#!/bin/bash\n')
f.write(contents)
f.flush()
@@ -286,12 +287,16 @@ class CommandExecuter(object):
sys.exit(1)
chromeos_root = os.path.expanduser(chromeos_root)
+ port = None
+ if ':' in machine:
+ machine, port = machine.split(':')
# Write all commands to a file.
command_file = self.WriteToTempShFile(cmd)
retval = self.CopyFiles(
command_file,
command_file,
dest_machine=machine,
+ dest_port=port,
command_terminator=command_terminator,
chromeos_root=chromeos_root,
dest_cros=True,
@@ -303,7 +308,7 @@ class CommandExecuter(object):
' Is the machine up?')
return (retval, '', '')
- command = self.RemoteAccessInitCommand(chromeos_root, machine)
+ command = self.RemoteAccessInitCommand(chromeos_root, machine, port)
command += '\nremote_sh bash %s' % command_file
command += '\nl_retval=$?; echo "$REMOTE_OUT"; exit $l_retval'
retval = self.RunCommandGeneric(
@@ -314,8 +319,8 @@ class CommandExecuter(object):
terminated_timeout=terminated_timeout,
print_to_console=print_to_console)
if return_output:
- connect_signature = (
- 'Initiating first contact with remote host\n' + 'Connection OK\n')
+ connect_signature = ('Initiating first contact with remote host\n' +
+ 'Connection OK\n')
connect_signature_re = re.compile(connect_signature)
modded_retval = list(retval)
modded_retval[1] = connect_signature_re.sub('', retval[1])
@@ -367,10 +372,9 @@ class CommandExecuter(object):
if self.logger:
self.logger.LogCmd(command, print_to_console=print_to_console)
- # TODO(crbug.com/1048938): use encoding='utf-8' when all dependencies have
- # migrated to python 3.
with tempfile.NamedTemporaryFile(
'w',
+ encoding='utf-8',
delete=False,
dir=os.path.join(chromeos_root, 'src/scripts'),
suffix='.sh',
@@ -383,7 +387,7 @@ class CommandExecuter(object):
command_file = f.name
os.chmod(command_file, 0o777)
- # if return_output is set, run a dummy command first to make sure that
+ # if return_output is set, run a test command first to make sure that
# the chroot already exists. We want the final returned output to skip
# the output from chroot creation steps.
if return_output:
@@ -451,7 +455,9 @@ class CommandExecuter(object):
src,
dest,
src_machine=None,
+ src_port=None,
dest_machine=None,
+ dest_port=None,
src_user=None,
dest_user=None,
recursive=True,
@@ -477,30 +483,34 @@ class CommandExecuter(object):
sys.exit(1)
if src_cros:
cros_machine = src_machine
+ cros_port = src_port
+ host_machine = dest_machine
+ host_user = dest_user
else:
cros_machine = dest_machine
-
- command = self.RemoteAccessInitCommand(chromeos_root, cros_machine)
- ssh_command = (
- 'ssh -p ${FLAGS_ssh_port}' + ' -o StrictHostKeyChecking=no' +
- ' -o UserKnownHostsFile=$(mktemp)' + ' -i $TMP_PRIVATE_KEY')
+ cros_port = dest_port
+ host_machine = src_machine
+ host_user = src_user
+
+ command = self.RemoteAccessInitCommand(chromeos_root, cros_machine,
+ cros_port)
+ ssh_command = ('ssh -o StrictHostKeyChecking=no' +
+ ' -o UserKnownHostsFile=$(mktemp)' +
+ ' -i $TMP_PRIVATE_KEY')
+ if cros_port:
+ ssh_command += ' -p %s' % cros_port
rsync_prefix = '\nrsync -r -e "%s" ' % ssh_command
if dest_cros:
- command += rsync_prefix + '%s root@%s:%s' % (src, dest_machine, dest)
- return self.RunCommand(
- command,
- machine=src_machine,
- username=src_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ command += rsync_prefix + '%s root@%s:%s' % (src, cros_machine, dest)
else:
- command += rsync_prefix + 'root@%s:%s %s' % (src_machine, src, dest)
- return self.RunCommand(
- command,
- machine=dest_machine,
- username=dest_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ command += rsync_prefix + 'root@%s:%s %s' % (cros_machine, src, dest)
+
+ return self.RunCommand(
+ command,
+ machine=host_machine,
+ username=host_user,
+ command_terminator=command_terminator,
+ print_to_console=print_to_console)
if dest_machine == src_machine:
command = 'rsync -a %s %s' % (src, dest)
diff --git a/cros_utils/device_setup_utils_unittest.py b/cros_utils/device_setup_utils_unittest.py
index 63f9bf66..12a70811 100755
--- a/cros_utils/device_setup_utils_unittest.py
+++ b/cros_utils/device_setup_utils_unittest.py
@@ -14,10 +14,9 @@ import time
import unittest
from unittest import mock
-from device_setup_utils import DutWrapper
-
from cros_utils import command_executer
from cros_utils import logger
+from cros_utils.device_setup_utils import DutWrapper
BIG_LITTLE_CPUINFO = """processor : 0
model name : ARMv8 Processor rev 4 (v8l)
diff --git a/cros_utils/logger.py b/cros_utils/logger.py
index 4cc4618e..e304fe12 100644
--- a/cros_utils/logger.py
+++ b/cros_utils/logger.py
@@ -207,7 +207,6 @@ class MockLogger(object):
def __init__(self, *_args, **_kwargs):
self.stdout = sys.stdout
self.stderr = sys.stderr
- return None
def _AddSuffix(self, basename, suffix):
return '%s%s' % (basename, suffix)
diff --git a/cros_utils/manifest_versions.py b/cros_utils/manifest_versions.py
index 115c6046..4838de3c 100644
--- a/cros_utils/manifest_versions.py
+++ b/cros_utils/manifest_versions.py
@@ -19,6 +19,8 @@ import time
from cros_utils import command_executer
from cros_utils import logger
+MANIFEST_VERSION_MAIN_BRANCH = 'master'
+
def IsCrosVersion(version):
match = re.search(r'(\d+\.\d+\.\d+\.\d+)', version)
@@ -72,7 +74,8 @@ class ManifestVersions(object):
commands = [
'cd {0}'.format(self.clone_location), 'cd manifest-versions',
'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/master)'.format(my_time)
+ ' --max-count=1 --before={0} origin/{1})'.format(
+ my_time, MANIFEST_VERSION_MAIN_BRANCH)
]
ret = self.ce.RunCommands(commands)
if ret:
@@ -106,7 +109,7 @@ class ManifestVersions(object):
version = pp[-2] + '.' + small
commands = [
'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout master'
+ 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
]
self.ce.RunCommands(commands)
return version
@@ -120,7 +123,8 @@ class ManifestVersions(object):
commands = [
'cd {0}'.format(self.clone_location), 'cd manifest-versions',
'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/master)'.format(my_time)
+ ' --max-count=1 --before={0} origin/{1})'.format(
+ my_time, MANIFEST_VERSION_MAIN_BRANCH)
]
ret = self.ce.RunCommands(commands)
if ret:
@@ -133,7 +137,7 @@ class ManifestVersions(object):
version = pp[-2] + '.' + small
commands = [
'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout master'
+ 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
]
self.ce.RunCommands(commands)
return version
diff --git a/cros_utils/misc.py b/cros_utils/misc.py
index 246767f0..93d1b3b6 100644
--- a/cros_utils/misc.py
+++ b/cros_utils/misc.py
@@ -24,12 +24,15 @@ CHROMEOS_SCRIPTS_DIR = '/mnt/host/source/src/scripts'
TOOLCHAIN_UTILS_PATH = ('/mnt/host/source/src/third_party/toolchain-utils/'
'cros_utils/toolchain_utils.sh')
+CROS_MAIN_BRANCH = 'cros/master'
+
def GetChromeOSVersionFromLSBVersion(lsb_version):
"""Get Chromeos version from Lsb version."""
ce = command_executer.GetCommandExecuter()
command = ('git ls-remote '
- 'https://chromium.googlesource.com/chromiumos/manifest.git')
+ 'https://chromium.googlesource.com/chromiumos/manifest.git '
+ 'refs/heads/release-R*')
ret, out, _ = ce.RunCommandWOutput(command, print_to_console=False)
assert ret == 0, 'Command %s failed' % command
lower = []
@@ -444,8 +447,9 @@ def DeleteChromeOsTree(chromeos_root, dry_run=False):
cmd1, print_to_console=True) == 0
-def ApplyGerritPatches(chromeos_root, gerrit_patch_string,
- branch='cros/master'):
+def ApplyGerritPatches(chromeos_root,
+ gerrit_patch_string,
+ branch=CROS_MAIN_BRANCH):
"""Apply gerrit patches on a chromeos tree.
Args:
@@ -462,7 +466,7 @@ def ApplyGerritPatches(chromeos_root, gerrit_patch_string,
sys.path.append(os.path.join(chromeos_root, 'chromite'))
# Imports below are ok after modifying path to add chromite.
# Pylint cannot detect that and complains.
- # pylint: disable=import-error
+ # pylint: disable=import-error, import-outside-toplevel
from lib import git
from lib import gerrit
manifest = git.ManifestCheckout(chromeos_root)
diff --git a/cros_utils/tabulator.py b/cros_utils/tabulator.py
index 300c2d79..1a3fd4a7 100644
--- a/cros_utils/tabulator.py
+++ b/cros_utils/tabulator.py
@@ -64,12 +64,13 @@ table:
from __future__ import division
from __future__ import print_function
+import collections
import getpass
import math
+import statistics
import sys
-# TODO(zhizhouy): Drop numpy in the future
+# TODO(crbug.com/980719): Drop scipy in the future.
# pylint: disable=import-error
-import numpy
import scipy
from cros_utils.email_sender import EmailSender
@@ -112,21 +113,22 @@ class TableGenerator(object):
SORT_BY_KEYS_DESC = 1
SORT_BY_VALUES = 2
SORT_BY_VALUES_DESC = 3
+ NO_SORT = 4
MISSING_VALUE = 'x'
- def __init__(self, d, l, sort=SORT_BY_KEYS, key_name='keys'):
+ def __init__(self, d, l, sort=NO_SORT, key_name='keys'):
self._runs = d
self._labels = l
self._sort = sort
self._key_name = key_name
def _AggregateKeys(self):
- keys = set([])
+ keys = collections.OrderedDict()
for run_list in self._runs:
for run in run_list:
- keys = keys.union(run.keys())
- return keys
+ keys.update(dict.fromkeys(run.keys()))
+ return list(keys.keys())
def _GetHighestValue(self, key):
values = []
@@ -159,6 +161,8 @@ class TableGenerator(object):
elif self._sort == self.SORT_BY_VALUES_DESC:
# pylint: disable=unnecessary-lambda
return sorted(keys, key=lambda x: self._GetHighestValue(x), reverse=True)
+ elif self._sort == self.NO_SORT:
+ return keys
else:
assert 0, 'Unimplemented sort %s' % self._sort
@@ -295,8 +299,8 @@ class SamplesTableGenerator(TableGenerator):
all_runs_empty = all(not dict for label in bench_runs for dict in label)
if all_runs_empty:
cell = Cell()
- cell.string_value = 'Benchmark %s contains no result.' + \
- ' Is the benchmark name valid?' % k
+ cell.string_value = ('Benchmark %s contains no result.'
+ ' Is the benchmark name valid?' % k)
table.append([cell])
else:
row = [k]
@@ -318,7 +322,7 @@ class SamplesTableGenerator(TableGenerator):
v.append(None)
run_fail += 1
one_tuple = ((run_pass, run_fail), v)
- if iterations != 0 and iterations != run_pass + run_fail:
+ if iterations not in (0, run_pass + run_fail):
raise ValueError('Iterations of each benchmark run ' \
'are not the same')
iterations = run_pass + run_fail
@@ -358,7 +362,7 @@ class SamplesTableGenerator(TableGenerator):
# Accumulate each run result to composite benchmark run
# If any run fails, then we set this run for composite benchmark
# to None so that we know it fails.
- if bench_runs[index] and row[label_index][index] != None:
+ if bench_runs[index] and row[label_index][index] is not None:
row[label_index][index] += bench_runs[index]
else:
row[label_index][index] = None
@@ -552,17 +556,15 @@ class AmeanResult(StringMeanResult):
def _ComputeFloat(self, cell, values, baseline_values):
if self.ignore_min_max:
values = _RemoveMinMax(cell, values)
- cell.value = numpy.mean(values)
+ cell.value = statistics.mean(values)
class RawResult(Result):
"""Raw result."""
- pass
class IterationResult(Result):
"""Iteration result."""
- pass
class MinResult(Result):
@@ -608,7 +610,7 @@ class StdResult(NumericalResult):
def _ComputeFloat(self, cell, values, baseline_values):
if self.ignore_min_max:
values = _RemoveMinMax(cell, values)
- cell.value = numpy.std(values)
+ cell.value = statistics.pstdev(values)
class CoeffVarResult(NumericalResult):
@@ -621,8 +623,8 @@ class CoeffVarResult(NumericalResult):
def _ComputeFloat(self, cell, values, baseline_values):
if self.ignore_min_max:
values = _RemoveMinMax(cell, values)
- if numpy.mean(values) != 0.0:
- noise = numpy.abs(numpy.std(values) / numpy.mean(values))
+ if statistics.mean(values) != 0.0:
+ noise = abs(statistics.pstdev(values) / statistics.mean(values))
else:
noise = 0.0
cell.value = noise
@@ -707,7 +709,7 @@ class KeyAwareComparisonResult(ComparisonResult):
'dropped_percent', '(ms)', '(seconds)', '--ms',
'--average_num_missing_tiles', '--experimental_jank',
'--experimental_mean_frame', '--experimental_median_frame_time',
- '--total_deferred_image_decode_count', '--seconds', 'samples'
+ '--total_deferred_image_decode_count', '--seconds', 'samples', 'bytes'
]
return any([l in key for l in lower_is_better_keys])
@@ -729,9 +731,12 @@ class AmeanRatioResult(KeyAwareComparisonResult):
if self.ignore_min_max:
values = _RemoveMinMax(cell, values)
baseline_values = _RemoveMinMax(cell, baseline_values)
- if numpy.mean(baseline_values) != 0:
- cell.value = numpy.mean(values) / numpy.mean(baseline_values)
- elif numpy.mean(values) != 0:
+
+ baseline_mean = statistics.mean(baseline_values)
+ values_mean = statistics.mean(values)
+ if baseline_mean != 0:
+ cell.value = values_mean / baseline_mean
+ elif values_mean != 0:
cell.value = 0.00
# cell.value = 0 means the values and baseline_values have big difference
else:
@@ -1148,8 +1153,10 @@ class TableFormatter(object):
result_name = column.result.__class__.__name__
format_name = column.fmt.__class__.__name__
- cell.string_value = '%s %s' % (result_name.replace('Result', ''),
- format_name.replace('Format', ''))
+ cell.string_value = '%s %s' % (
+ result_name.replace('Result', ''),
+ format_name.replace('Format', ''),
+ )
header.append(cell)
@@ -1491,7 +1498,7 @@ def GetComplexTable(runs, labels, out_to=TablePrinter.CONSOLE):
if __name__ == '__main__':
# Run a few small tests here.
- runs = [[{
+ run1 = {
'k1': '10',
'k2': '12',
'k5': '40',
@@ -1501,29 +1508,30 @@ if __name__ == '__main__':
'k8': 'PASS',
'k9': 'PASS',
'k10': '0'
- },
- {
- 'k1': '13',
- 'k2': '14',
- 'k3': '15',
- 'ms_1': '10',
- 'k8': 'PASS',
- 'k9': 'FAIL',
- 'k10': '0'
- }],
- [{
- 'k1': '50',
- 'k2': '51',
- 'k3': '52',
- 'k4': '53',
- 'k5': '35',
- 'k6': '45',
- 'ms_1': '200',
- 'ms_2': '20',
- 'k7': 'FAIL',
- 'k8': 'PASS',
- 'k9': 'PASS'
- }]]
+ }
+ run2 = {
+ 'k1': '13',
+ 'k2': '14',
+ 'k3': '15',
+ 'ms_1': '10',
+ 'k8': 'PASS',
+ 'k9': 'FAIL',
+ 'k10': '0'
+ }
+ run3 = {
+ 'k1': '50',
+ 'k2': '51',
+ 'k3': '52',
+ 'k4': '53',
+ 'k5': '35',
+ 'k6': '45',
+ 'ms_1': '200',
+ 'ms_2': '20',
+ 'k7': 'FAIL',
+ 'k8': 'PASS',
+ 'k9': 'PASS'
+ }
+ runs = [[run1, run2], [run3]]
labels = ['vanilla', 'modified']
t = GetComplexTable(runs, labels, TablePrinter.CONSOLE)
print(t)
diff --git a/cros_utils/tabulator_test.py b/cros_utils/tabulator_test.py
index 227e2d70..9dd4828e 100755
--- a/cros_utils/tabulator_test.py
+++ b/cros_utils/tabulator_test.py
@@ -33,6 +33,20 @@ class TabulatorTest(unittest.TestCase):
result.Compute(cell, table[2], table[1])
self.assertTrue(cell.value == float(table[2][0]))
+ def testStdResult(self):
+ table = ['k1', [], ['1', '2']]
+ result = tabulator.StdResult()
+ cell = tabulator.Cell()
+ result.Compute(cell, table[2], table[1])
+ self.assertTrue(cell.value == 0.5)
+
+ def testStdResultOfSampleSize1(self):
+ table = ['k1', [], ['1']]
+ result = tabulator.StdResult()
+ cell = tabulator.Cell()
+ result.Compute(cell, table[2], table[1])
+ self.assertTrue(cell.value == 0.0)
+
def testStringMean(self):
smr = tabulator.StringMeanResult()
cell = tabulator.Cell()
diff --git a/cros_utils/tiny_render.py b/cros_utils/tiny_render.py
new file mode 100644
index 00000000..629e7719
--- /dev/null
+++ b/cros_utils/tiny_render.py
@@ -0,0 +1,181 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A super minimal module that allows rendering of readable text/html.
+
+Usage should be relatively straightforward. You wrap things you want to write
+out in some of the nice types defined here, and then pass the result to one of
+render_text_pieces/render_html_pieces.
+
+In HTML, the types should all nest nicely. In text, eh (nesting anything in
+Bold is going to be pretty ugly, probably).
+
+Lists and tuples may be used to group different renderable elements.
+
+Example:
+
+render_text_pieces([
+ Bold("Daily to-do list:"),
+ UnorderedList([
+ "Write code",
+ "Go get lunch",
+ ["Fix ", Bold("some"), " of the bugs in the aforementioned code"],
+ [
+ "Do one of the following:",
+ UnorderedList([
+ "Nap",
+ "Round 2 of lunch",
+ ["Look at ", Link("https://google.com/?q=memes", "memes")],
+ ]),
+ ],
+ "What a rough day; time to go home",
+ ]),
+])
+
+Turns into
+
+**Daily to-do list:**
+ - Write code
+ - Go get lunch
+ - Fix **some** of the bugs in the aforementioned code
+ - Do one of the following:
+ - Nap
+ - Round 2 of lunch
+ - Look at memes
+ - What a rough day; time to go home
+
+...And similarly in HTML, though with an actual link.
+
+The rendering functions should never mutate your input.
+"""
+
+from __future__ import print_function
+
+import collections
+import html
+import typing as t
+
+Bold = collections.namedtuple('Bold', ['inner'])
+LineBreak = collections.namedtuple('LineBreak', [])
+Link = collections.namedtuple('Link', ['href', 'inner'])
+UnorderedList = collections.namedtuple('UnorderedList', ['items'])
+# Outputs different data depending on whether we're emitting text or HTML.
+Switch = collections.namedtuple('Switch', ['text', 'html'])
+
+line_break = LineBreak()
+
+# Note that these build up their values in a funky way: they append to a list
+# that ends up being fed to `''.join(into)`. This avoids quadratic string
+# concatenation behavior. Probably doesn't matter, but I care.
+
+# Pieces are really a recursive type:
+# Union[
+# Bold,
+# LineBreak,
+# Link,
+# List[Piece],
+# Tuple[...Piece],
+# UnorderedList,
+# str,
+# ]
+#
+# It doesn't seem possible to have recursive types, so just go with Any.
+Piece = t.Any # pylint: disable=invalid-name
+
+
+def _render_text_pieces(piece: Piece, indent_level: int,
+ into: t.List[str]) -> None:
+ """Helper for |render_text_pieces|. Accumulates strs into |into|."""
+ if isinstance(piece, LineBreak):
+ into.append('\n' + indent_level * ' ')
+ return
+
+ if isinstance(piece, str):
+ into.append(piece)
+ return
+
+ if isinstance(piece, Bold):
+ into.append('**')
+ _render_text_pieces(piece.inner, indent_level, into)
+ into.append('**')
+ return
+
+ if isinstance(piece, Link):
+ # Don't even try; it's ugly more often than not.
+ _render_text_pieces(piece.inner, indent_level, into)
+ return
+
+ if isinstance(piece, UnorderedList):
+ for p in piece.items:
+ _render_text_pieces([line_break, '- ', p], indent_level + 2, into)
+ return
+
+ if isinstance(piece, Switch):
+ _render_text_pieces(piece.text, indent_level, into)
+ return
+
+ if isinstance(piece, (list, tuple)):
+ for p in piece:
+ _render_text_pieces(p, indent_level, into)
+ return
+
+ raise ValueError('Unknown piece type: %s' % type(piece))
+
+
+def render_text_pieces(piece: Piece) -> str:
+ """Renders the given Pieces into text."""
+ into = []
+ _render_text_pieces(piece, 0, into)
+ return ''.join(into)
+
+
+def _render_html_pieces(piece: Piece, into: t.List[str]) -> None:
+ """Helper for |render_html_pieces|. Accumulates strs into |into|."""
+ if piece is line_break:
+ into.append('<br />\n')
+ return
+
+ if isinstance(piece, str):
+ into.append(html.escape(piece))
+ return
+
+ if isinstance(piece, Bold):
+ into.append('<b>')
+ _render_html_pieces(piece.inner, into)
+ into.append('</b>')
+ return
+
+ if isinstance(piece, Link):
+ into.append('<a href="' + piece.href + '">')
+ _render_html_pieces(piece.inner, into)
+ into.append('</a>')
+ return
+
+ if isinstance(piece, UnorderedList):
+ into.append('<ul>\n')
+ for p in piece.items:
+ into.append('<li>')
+ _render_html_pieces(p, into)
+ into.append('</li>\n')
+ into.append('</ul>\n')
+ return
+
+ if isinstance(piece, Switch):
+ _render_html_pieces(piece.html, into)
+ return
+
+ if isinstance(piece, (list, tuple)):
+ for p in piece:
+ _render_html_pieces(p, into)
+ return
+
+ raise ValueError('Unknown piece type: %s' % type(piece))
+
+
+def render_html_pieces(piece: Piece) -> str:
+ """Renders the given Pieces into HTML."""
+ into = []
+ _render_html_pieces(piece, into)
+ return ''.join(into)
diff --git a/cros_utils/tiny_render_test.py b/cros_utils/tiny_render_test.py
new file mode 100755
index 00000000..114a1796
--- /dev/null
+++ b/cros_utils/tiny_render_test.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for tiny_render."""
+
+from __future__ import print_function
+
+import unittest
+
+import tiny_render
+
+
+# Admittedly, the HTML generated by this isn't always _beautiful_ to read
+# (especially with e.g., ordered lists). Since the intent is for the HTML to be
+# shipped alongside the plain-text, the hope is that people won't have to
+# subject themselves to reading the HTML often. :)
+class Test(unittest.TestCase):
+ """Tests for tiny_render."""
+
+ def test_bold(self):
+ pieces = [
+ tiny_render.Bold('hello'),
+ ', ',
+ tiny_render.Bold(['world', '!']),
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces),
+ '**hello**, **world!**',
+ )
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces),
+ '<b>hello</b>, <b>world!</b>',
+ )
+
+ def test_line_break(self):
+ pieces = [
+ 'hello',
+ tiny_render.line_break,
+ ['world', '!'],
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces),
+ 'hello\nworld!',
+ )
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces),
+ 'hello<br />\nworld!',
+ )
+
+ def test_linkification(self):
+ pieces = [
+ 'hello ',
+ tiny_render.Link(href='https://google.com', inner='world!'),
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces),
+ 'hello world!',
+ )
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces),
+ 'hello <a href="https://google.com">world!</a>',
+ )
+
+ def test_unordered_list(self):
+ pieces = [
+ 'hello:',
+ tiny_render.UnorderedList([
+ 'world',
+ 'w o r l d',
+ ]),
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces),
+ '\n'.join((
+ 'hello:',
+ ' - world',
+ ' - w o r l d',
+ )),
+ )
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces),
+ '\n'.join((
+ 'hello:<ul>',
+ '<li>world</li>',
+ '<li>w o r l d</li>',
+ '</ul>',
+ '',
+ )),
+ )
+
+ def test_nested_unordered_list(self):
+ pieces = [
+ 'hello:',
+ tiny_render.UnorderedList([
+ 'world',
+ ['and more:', tiny_render.UnorderedList(['w o r l d'])],
+ 'world2',
+ ])
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces),
+ '\n'.join((
+ 'hello:',
+ ' - world',
+ ' - and more:',
+ ' - w o r l d',
+ ' - world2',
+ )),
+ )
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces),
+ '\n'.join((
+ 'hello:<ul>',
+ '<li>world</li>',
+ '<li>and more:<ul>',
+ '<li>w o r l d</li>',
+ '</ul>',
+ '</li>',
+ '<li>world2</li>',
+ '</ul>',
+ '',
+ )),
+ )
+
+ def test_switch(self):
+ pieces = ['hello ', tiny_render.Switch(text='text', html='html')]
+ self.assertEqual(tiny_render.render_text_pieces(pieces), 'hello text')
+ self.assertEqual(tiny_render.render_html_pieces(pieces), 'hello html')
+
+ def test_golden(self):
+ pieces = [
+ 'hello',
+ tiny_render.UnorderedList([
+ tiny_render.Switch(text='text', html=tiny_render.Bold('html')),
+ 'the',
+ tiny_render.Bold('sun'),
+ ]),
+ tiny_render.line_break,
+ ['is', ' out!'],
+ ]
+
+ self.assertEqual(
+ tiny_render.render_text_pieces(pieces), '\n'.join((
+ 'hello',
+ ' - text',
+ ' - the',
+ ' - **sun**',
+ 'is out!',
+ )))
+
+ self.assertEqual(
+ tiny_render.render_html_pieces(pieces), '\n'.join((
+ 'hello<ul>',
+ '<li><b>html</b></li>',
+ '<li>the</li>',
+ '<li><b>sun</b></li>',
+ '</ul>',
+ '<br />',
+ 'is out!',
+ )))
+
+
+if __name__ == '__main__':
+ unittest.main()