aboutsummaryrefslogtreecommitdiff
path: root/crosperf
diff options
context:
space:
mode:
authorcmtice <cmtice@google.com>2014-06-13 15:38:45 -0700
committerchrome-internal-fetch <chrome-internal-fetch@google.com>2014-07-09 22:28:29 +0000
commitc490e073c811f60d6fdfec5c193e7f042302b5e8 (patch)
tree1150998b5e56c1e8ed92f8999c3e8892163a24e1 /crosperf
parent4536ef6c9da0f8c0c544552798c0bcf516519f73 (diff)
downloadtoolchain-utils-c490e073c811f60d6fdfec5c193e7f042302b5e8.tar.gz
Update and expand regression tests for results_cache.py
BUG=None TEST=Ran regression tests successfully. Ran 'normal' crosperf tests successfully as well. Change-Id: Ib3387cb63f1050cc0d561cc676d91be2334e0c79 Reviewed-on: https://chrome-internal-review.googlesource.com/166133 Reviewed-by: Caroline Tice <cmtice@google.com> Commit-Queue: Caroline Tice <cmtice@google.com> Tested-by: Caroline Tice <cmtice@google.com>
Diffstat (limited to 'crosperf')
-rw-r--r--crosperf/results_cache.py17
-rwxr-xr-xcrosperf/results_cache_unittest.py883
-rw-r--r--crosperf/test_cache/compare_output/autotest.tbz2bin0 -> 847904 bytes
-rw-r--r--crosperf/test_cache/compare_output/machine.txt1
-rw-r--r--crosperf/test_cache/compare_output/results.txt6
-rw-r--r--crosperf/test_cache/test_input/autotest.tbz2bin0 -> 110940 bytes
-rw-r--r--crosperf/test_cache/test_input/machine.txt1
-rw-r--r--crosperf/test_cache/test_input/results.txt6
-rw-r--r--crosperf/test_cache/test_puretelemetry_input/machine.txt1
-rw-r--r--crosperf/test_cache/test_puretelemetry_input/results.txt6
-rw-r--r--crosperf/unittest_keyval_file.txt20
11 files changed, 921 insertions, 20 deletions
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index 35adf0d6..d584cbc4 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -34,10 +34,10 @@ class Result(object):
perf.report, etc. The key generation is handled by the ResultsCache class.
"""
- def __init__(self, logger, label, log_level):
+ def __init__(self, logger, label, log_level, cmd_exec=None):
self._chromeos_root = label.chromeos_root
self._logger = logger
- self._ce = command_executer.GetCommandExecuter(self._logger,
+ self._ce = cmd_exec or command_executer.GetCommandExecuter(self._logger,
log_level=log_level)
self._temp_dir = None
self.label = label
@@ -81,7 +81,7 @@ class Result(object):
# Otherwise get the base filename and create the correct
# path for it.
f_dir, f_base = misc.GetRoot(f)
- data_filename = os.path.join(self._chromeos_root, "/tmp",
+ data_filename = os.path.join(self._chromeos_root, "chroot/tmp",
self._temp_dir, f_base)
if os.path.exists(data_filename):
with open(data_filename, "r") as data_file:
@@ -384,8 +384,8 @@ class Result(object):
class TelemetryResult(Result):
- def __init__(self, logger, label, log_level):
- super(TelemetryResult, self).__init__(logger, label, log_level)
+ def __init__(self, logger, label, log_level, cmd_exec=None):
+ super(TelemetryResult, self).__init__(logger, label, log_level, cmd_exec)
def _PopulateFromRun(self, out, err, retval, show_all, test, suite):
self.out = out
@@ -512,9 +512,10 @@ class ResultsCache(object):
else:
cache_path = [os.path.join(SCRATCH_DIR, cache_dir)]
- for i in [x.strip() for x in self.share_users.split(",")]:
- path = SCRATCH_BASE % i
- cache_path.append(os.path.join(path, cache_dir))
+ if len(self.share_users):
+ for i in [x.strip() for x in self.share_users.split(",")]:
+ path = SCRATCH_BASE % i
+ cache_path.append(os.path.join(path, cache_dir))
return cache_path
diff --git a/crosperf/results_cache_unittest.py b/crosperf/results_cache_unittest.py
index 6713a3eb..dff51b3a 100755
--- a/crosperf/results_cache_unittest.py
+++ b/crosperf/results_cache_unittest.py
@@ -4,16 +4,26 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import hashlib
+import mock
+import mock_instance
+import os
+import tempfile
import unittest
-from utils import logger
+import image_checksummer
+import machine_manager
+from label import MockLabel
+from results_cache import CacheConditions
from results_cache import Result
from results_cache import ResultsCache
-from label import MockLabel
-import mock_instance
+from results_cache import TelemetryResult
+from utils import command_executer
+from utils import logger
+from utils import misc
-output = """CMD (True): ./run_remote_tests.sh --remote=172.17.128.241 --board=lumpy LibCBench
+OUTPUT = """CMD (True): ./run_remote_tests.sh --remote=172.17.128.241 --board=lumpy LibCBench
CMD (None): cd /usr/local/google/home/yunlian/gd/src/build/images/lumpy/latest/../../../../..; cros_sdk -- ./in_chroot_cmd6X7Cxu.sh
Identity added: /tmp/run_remote_tests.PO1234567/autotest_key (/tmp/run_remote_tests.PO1234567/autotest_key)
INFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest.
@@ -112,31 +122,880 @@ INFO : Elapsed time: 0m18s
keyvals = {'': 'PASS', 'b_stdio_putcgetc__0_': '0.100005711667', 'b_string_strstr___azbycxdwevfugthsirjqkplomn__': '0.0133123556667', 'b_malloc_thread_local__0_': '0.01138439', 'b_string_strlen__0_': '0.044893587', 'b_malloc_sparse__0_': '0.015053784', 'b_string_memset__0_': '0.00275405066667', 'platform_LibCBench': 'PASS', 'b_pthread_uselesslock__0_': '0.0294113346667', 'b_string_strchr__0_': '0.00456903', 'b_pthread_create_serial1__0_': '0.0291785246667', 'b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__': '0.118360778', 'b_string_strstr___aaaaaaaaaaaaaacccccccccccc__': '0.0135694476667', 'b_pthread_createjoin_serial1__0_': '0.031907936', 'b_malloc_thread_stress__0_': '0.0367894733333', 'b_regex_search____a_b_c__d_b__': '0.00165455066667', 'b_malloc_bubble__0_': '0.015066374', 'b_malloc_big2__0_': '0.002951359', 'b_stdio_putcgetc_unlocked__0_': '0.0371443833333', 'b_pthread_createjoin_serial2__0_': '0.043485347', 'b_regex_search___a_25_b__': '0.0496191923333', 'b_utf8_bigbuf__0_': '0.0473772253333', 'b_malloc_big1__0_': '0.00375231466667', 'b_regex_compile____a_b_c__d_b__': '0.00529833933333', 'b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__': '0.068957325', 'b_malloc_tiny2__0_': '0.000581407333333', 'b_utf8_onebyone__0_': '0.130938538333', 'b_malloc_tiny1__0_': '0.000768474333333', 'b_string_strstr___abcdefghijklmnopqrstuvwxyz__': '0.0134553343333'}
+
+TMP_DIR1 = '/tmp/tmpAbcXyz'
+
class MockResult(Result):
def __init__(self, logger, label, logging_level):
super(MockResult, self).__init__(logger, label, logging_level)
def _FindFilesInResultsDir(self, find_args):
- return ""
+ return ''
def _GetKeyvals(self, show_all_results):
return keyvals
class ResultTest(unittest.TestCase):
- mock_label = MockLabel("mock_label", "chromeos_image", "/tmp", "lumpy",
- "remote", "image_args", "cache_dir", False)
+
+ mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
+ 'remote', 'image_args', 'cache_dir', False)
+ mock_logger = mock.Mock(spec=logger.Logger)
+ mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
+
def testCreateFromRun(self):
- result = MockResult.CreateFromRun(logger.GetLogger(), "average",
+ result = MockResult.CreateFromRun(logger.GetLogger(), 'average',
self.mock_label,
- output, error, 0, True, 0)
+ OUTPUT, error, 0, True, 0)
self.assertEqual(result.keyvals, keyvals)
self.assertEqual(result.chroot_results_dir,
- "/tmp/run_remote_tests.PO1234567/platform_LibCBench")
+ '/tmp/run_remote_tests.PO1234567/platform_LibCBench')
self.assertEqual(result.results_dir,
- "/tmp/chroot/tmp/run_remote_tests.PO1234567/platform_LibCBench")
+ '/tmp/chroot/tmp/run_remote_tests.PO1234567/platform_LibCBench')
self.assertEqual(result.retval, 0)
-if __name__ == "__main__":
+
+
+ def setUp(self):
+ self.result = Result(self.mock_logger, self.mock_label, 'average',
+ self.mock_cmd_exec)
+
+ @mock.patch.object(os.path, 'isdir')
+ @mock.patch.object(command_executer.CommandExecuter, 'RunCommand')
+ @mock.patch.object(command_executer.CommandExecuter, 'CopyFiles')
+ def test_copy_files_to(self, mock_copyfiles, mock_runcmd, mock_isdir):
+
+ files = ['src_file_1', 'src_file_2', 'src_file_3']
+ dest_dir = '/tmp/test'
+ self.mock_cmd_exec.RunCommand = mock_runcmd
+ self.mock_cmd_exec.CopyFiles = mock_copyfiles
+
+ mock_copyfiles.return_value = 0
+
+ #test 1. dest_dir exists; CopyFiles returns 0.
+ mock_isdir.return_value = True
+ self.result._CopyFilesTo(dest_dir, files)
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertEqual(mock_copyfiles.call_count, 3)
+ first_args = mock_copyfiles.call_args_list[0][0]
+ second_args = mock_copyfiles.call_args_list[1][0]
+ third_args = mock_copyfiles.call_args_list[2][0]
+ self.assertEqual(first_args, ('src_file_1', '/tmp/test/src_file_1.0'))
+ self.assertEqual(second_args, ('src_file_2', '/tmp/test/src_file_2.0'))
+ self.assertEqual(third_args, ('src_file_3', '/tmp/test/src_file_3.0'))
+
+ mock_runcmd.reset_mock()
+ mock_copyfiles.reset_mock()
+ #test 2. dest_dir does not exist; CopyFiles returns 0.
+ mock_isdir.return_value = False
+ self.result._CopyFilesTo(dest_dir, files)
+ self.assertEqual(mock_runcmd.call_count, 3)
+ self.assertEqual(mock_copyfiles.call_count, 3)
+ self.assertEqual(mock_runcmd.call_args_list[0],
+ mock_runcmd.call_args_list[1])
+ self.assertEqual(mock_runcmd.call_args_list[0],
+ mock_runcmd.call_args_list[2])
+ self.assertEqual(mock_runcmd.call_args_list[0][0], ('mkdir -p /tmp/test',))
+
+ #test 3. CopyFiles returns 1 (fails).
+ mock_copyfiles.return_value = 1
+ self.assertRaises(Exception, self.result._CopyFilesTo, dest_dir, files)
+
+
+ @mock.patch.object (Result, '_CopyFilesTo')
+ def test_copy_results_to(self, mock_CopyFilesTo):
+ perf_data_files = ['/tmp/perf.data.0', '/tmp/perf.data.1',
+ '/tmp/perf.data.2']
+ perf_report_files = ['/tmp/perf.report.0', '/tmp/perf.report.1',
+ '/tmp/perf.report.2']
+
+ self.result.perf_data_files = perf_data_files
+ self.result.perf_report_files = perf_report_files
+
+ self.result._CopyFilesTo = mock_CopyFilesTo
+ self.result.CopyResultsTo('/tmp/results/')
+ self.assertEqual(mock_CopyFilesTo.call_count, 2)
+ self.assertEqual(len(mock_CopyFilesTo.call_args_list), 2)
+ self.assertEqual(mock_CopyFilesTo.call_args_list[0][0],
+ ('/tmp/results/', perf_data_files))
+ self.assertEqual(mock_CopyFilesTo.call_args_list[1][0],
+ ('/tmp/results/', perf_report_files))
+
+
+ def test_get_new_keyvals(self):
+ kv_dict = {}
+
+ def FakeGetDataMeasurementsFiles():
+ filename = os.path.join(os.getcwd(), 'unittest_keyval_file.txt')
+ return [filename]
+
+ self.result._GetDataMeasurementsFiles = FakeGetDataMeasurementsFiles
+ kv_dict2, udict = self.result._GetNewKeyvals(kv_dict)
+ self.assertEqual(kv_dict2,
+ {u'Box2D__Box2D': 4775, u'Mandreel__Mandreel': 6620,
+ u'Gameboy__Gameboy': 9901, u'Crypto__Crypto': 8737,
+ u'telemetry_page_measurement_results__num_errored': 0,
+ u'telemetry_page_measurement_results__num_failed': 0,
+ u'PdfJS__PdfJS': 6455, u'Total__Score': 7918,
+ u'EarleyBoyer__EarleyBoyer': 14340,
+ u'MandreelLatency__MandreelLatency': 5188,
+ u'CodeLoad__CodeLoad': 6271, u'DeltaBlue__DeltaBlue': 14401,
+ u'Typescript__Typescript': 9815,
+ u'SplayLatency__SplayLatency': 7653, u'zlib__zlib': 16094,
+ u'Richards__Richards': 10358, u'RegExp__RegExp': 1765,
+ u'NavierStokes__NavierStokes': 9815, u'Splay__Splay': 4425,
+ u'RayTrace__RayTrace': 16600})
+ self.assertEqual(udict,
+ {u'Box2D__Box2D': u'score', u'Mandreel__Mandreel': u'score',
+ u'Gameboy__Gameboy': u'score', u'Crypto__Crypto': u'score',
+ u'telemetry_page_measurement_results__num_errored': u'count',
+ u'telemetry_page_measurement_results__num_failed': u'count',
+ u'PdfJS__PdfJS': u'score', u'Total__Score': u'score',
+ u'EarleyBoyer__EarleyBoyer': u'score',
+ u'MandreelLatency__MandreelLatency': u'score',
+ u'CodeLoad__CodeLoad': u'score',
+ u'DeltaBlue__DeltaBlue': u'score',
+ u'Typescript__Typescript': u'score',
+ u'SplayLatency__SplayLatency': u'score', u'zlib__zlib': u'score',
+ u'Richards__Richards': u'score', u'RegExp__RegExp': u'score',
+ u'NavierStokes__NavierStokes': u'score',
+ u'Splay__Splay': u'score', u'RayTrace__RayTrace': u'score'})
+
+
+ def test_append_telemetry_units(self):
+ kv_dict = {u'Box2D__Box2D': 4775, u'Mandreel__Mandreel': 6620,
+ u'Gameboy__Gameboy': 9901, u'Crypto__Crypto': 8737,
+ u'PdfJS__PdfJS': 6455, u'Total__Score': 7918,
+ u'EarleyBoyer__EarleyBoyer': 14340,
+ u'MandreelLatency__MandreelLatency': 5188,
+ u'CodeLoad__CodeLoad': 6271, u'DeltaBlue__DeltaBlue': 14401,
+ u'Typescript__Typescript': 9815,
+ u'SplayLatency__SplayLatency': 7653, u'zlib__zlib': 16094,
+ u'Richards__Richards': 10358, u'RegExp__RegExp': 1765,
+ u'NavierStokes__NavierStokes': 9815, u'Splay__Splay': 4425,
+ u'RayTrace__RayTrace': 16600}
+ units_dict = {u'Box2D__Box2D': u'score', u'Mandreel__Mandreel': u'score',
+ u'Gameboy__Gameboy': u'score', u'Crypto__Crypto': u'score',
+ u'PdfJS__PdfJS': u'score', u'Total__Score': u'score',
+ u'EarleyBoyer__EarleyBoyer': u'score',
+ u'MandreelLatency__MandreelLatency': u'score',
+ u'CodeLoad__CodeLoad': u'score',
+ u'DeltaBlue__DeltaBlue': u'score',
+ u'Typescript__Typescript': u'score',
+ u'SplayLatency__SplayLatency': u'score',
+ u'zlib__zlib': u'score',
+ u'Richards__Richards': u'score', u'RegExp__RegExp': u'score',
+ u'NavierStokes__NavierStokes': u'score',
+ u'Splay__Splay': u'score', u'RayTrace__RayTrace': u'score'}
+
+ results_dict = self.result._AppendTelemetryUnits(kv_dict, units_dict)
+ self.assertEqual(results_dict,
+ {u'Box2D__Box2D': [4775, u'score'],
+ u'Splay__Splay': [4425, u'score'],
+ u'Gameboy__Gameboy': [9901, u'score'],
+ u'Crypto__Crypto': [8737, u'score'],
+ u'PdfJS__PdfJS': [6455, u'score'],
+ u'Total__Score': [7918, u'score'],
+ u'EarleyBoyer__EarleyBoyer': [14340, u'score'],
+ u'MandreelLatency__MandreelLatency': [5188, u'score'],
+ u'DeltaBlue__DeltaBlue': [14401, u'score'],
+ u'SplayLatency__SplayLatency': [7653, u'score'],
+ u'Mandreel__Mandreel': [6620, u'score'],
+ u'Richards__Richards': [10358, u'score'],
+ u'zlib__zlib': [16094, u'score'],
+ u'CodeLoad__CodeLoad': [6271, u'score'],
+ u'Typescript__Typescript': [9815, u'score'],
+ u'RegExp__RegExp': [1765, u'score'],
+ u'RayTrace__RayTrace': [16600, u'score'],
+ u'NavierStokes__NavierStokes': [9815, u'score']})
+
+
+ @mock.patch.object (misc, 'GetInsideChrootPath')
+ @mock.patch.object (tempfile, 'mkdtemp')
+ @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
+ @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
+ def test_get_keyvals(self, mock_chrootruncmd, mock_runcmd, mock_mkdtemp,
+ mock_getpath):
+
+ self.kv_dict = {}
+ self.call_GetNewKeyvals = False
+
+ def reset():
+ self.kv_dict = {}
+ self.call_GetNewKeyvals = False
+ mock_chrootruncmd.reset_mock()
+ mock_runcmd.reset_mock()
+ mock_mkdtemp.reset_mock()
+ mock_getpath.reset_mock()
+
+ def FakeGetNewKeyvals(kv_dict):
+ self.kv_dict = kv_dict
+ self.call_GetNewKeyvals = True
+ return_kvdict = { 'first_time' : 680, 'Total' : 10}
+ return_udict = { 'first_time' : 'ms', 'Total' : 'score'}
+ return return_kvdict, return_udict
+
+
+ mock_mkdtemp.return_value = TMP_DIR1
+ mock_chrootruncmd.return_value = ['',
+ ('%s,PASS\n%s/telemetry_Crosperf,PASS\n')
+ % (TMP_DIR1, TMP_DIR1),
+ '']
+ mock_getpath.return_value = TMP_DIR1
+ self.result._ce.ChrootRunCommand = mock_chrootruncmd
+ self.result._ce.RunCommand = mock_runcmd
+ self.result._GetNewKeyvals = FakeGetNewKeyvals
+ self.result.suite = 'telemetry_Crosperf'
+ self.result.results_dir = '/tmp/test_that_resultsNmq'
+
+ # Test 1. no self._temp_dir.
+ res = self.result._GetKeyvals(True)
+ self.assertTrue(self.call_GetNewKeyvals)
+ self.assertEqual(self.kv_dict, { '': 'PASS', 'telemetry_Crosperf': 'PASS' })
+ self.assertEqual(mock_runcmd.call_count, 1)
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1,))
+ self.assertEqual(mock_chrootruncmd.call_count, 1)
+ self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
+ ('/tmp',
+ ('python generate_test_report --no-color --csv %s') %
+ TMP_DIR1))
+ self.assertEqual(mock_getpath.call_count, 1)
+ self.assertEqual(mock_mkdtemp.call_count, 1)
+ self.assertEqual(res, {'Total': [10, 'score'], 'first_time': [680, 'ms']})
+
+
+ # Test 2. self._temp_dir
+ reset()
+ mock_chrootruncmd.return_value = ['',
+ ('/tmp/tmpJCajRG,PASS\n/tmp/tmpJCajRG/'
+ 'telemetry_Crosperf,PASS\n'),
+ '']
+ mock_getpath.return_value = '/tmp/tmpJCajRG'
+ self.result._temp_dir = '/tmp/tmpJCajRG'
+ res = self.result._GetKeyvals(True)
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertEqual(mock_mkdtemp.call_count, 0)
+ self.assertEqual(mock_chrootruncmd.call_count, 1)
+ self.assertTrue(self.call_GetNewKeyvals)
+ self.assertEqual(self.kv_dict, { '': 'PASS', 'telemetry_Crosperf': 'PASS' })
+ self.assertEqual(res, {'Total': [10, 'score'], 'first_time': [680, 'ms']})
+
+ # Test 3. suite != telemetry_Crosperf. Normally this would be for
+ # running non-Telemetry autotests, such as BootPerfServer. In this test
+ # case, the keyvals we have set up were returned from a Telemetry test run;
+ # so this pass is basically testing that we don't append the units to the
+ # test results (which we do for Telemetry autotest runs).
+ reset()
+ self.result.suite = ''
+ res = self.result._GetKeyvals(True)
+ self.assertEqual(res, {'Total': 10, 'first_time': 680 })
+
+
+ def test_get_results_dir(self):
+
+ self.result.out = ''
+ self.assertRaises(Exception, self.result._GetResultsDir)
+
+ self.result.out = OUTPUT
+ resdir = self.result._GetResultsDir()
+ self.assertEqual(resdir,
+ '/tmp/run_remote_tests.PO1234567/platform_LibCBench')
+
+
+ @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
+ def test_find_files_in_results_dir(self, mock_runcmd):
+
+ self.result.results_dir = None
+ res = self.result._FindFilesInResultsDir('-name perf.data')
+ self.assertIsNone(res)
+
+ self.result._ce.RunCommand = mock_runcmd
+ self.result.results_dir = '/tmp/test_results'
+ mock_runcmd.return_value = [0, '/tmp/test_results/perf.data', '']
+ res = self.result._FindFilesInResultsDir('-name perf.data')
+ self.assertEqual(mock_runcmd.call_count, 1)
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('find /tmp/test_results -name perf.data',))
+ self.assertEqual(res, '/tmp/test_results/perf.data')
+
+ mock_runcmd.reset_mock()
+ mock_runcmd.return_value = [1, '', '']
+ self.assertRaises(Exception, self.result._FindFilesInResultsDir,
+ '-name perf.data')
+
+
+
+ @mock.patch.object (Result, '_FindFilesInResultsDir')
+ def test_get_perf_data_files(self, mock_findfiles):
+ self.args = None
+
+ mock_findfiles.return_value = 'line1\nline1\n'
+ self.result._FindFilesInResultsDir = mock_findfiles
+ res = self.result._GetPerfDataFiles()
+ self.assertEqual(res, ['line1', 'line1'])
+ self.assertEqual(mock_findfiles.call_args_list[0][0], ('-name perf.data',))
+
+
+ def test_get_perf_report_files(self):
+ self.args = None
+
+ def FakeFindFiles(find_args):
+ self.args = find_args
+ return 'line1\nline1\n'
+
+ self.result._FindFilesInResultsDir = FakeFindFiles
+ res = self.result._GetPerfReportFiles()
+ self.assertEqual(res, ['line1', 'line1'])
+ self.assertEqual(self.args, '-name perf.data.report')
+
+
+ def test_get_data_measurement_files(self):
+ self.args = None
+
+ def FakeFindFiles(find_args):
+ self.args = find_args
+ return 'line1\nline1\n'
+
+ self.result._FindFilesInResultsDir = FakeFindFiles
+ res = self.result._GetDataMeasurementsFiles()
+ self.assertEqual(res, ['line1', 'line1'])
+ self.assertEqual(self.args, '-name perf_measurements')
+
+
+ @mock.patch.object (misc, 'GetInsideChrootPath')
+ @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
+ def test_generate_perf_report_files(self, mock_chrootruncmd, mock_getpath):
+ fake_file = '/usr/chromeos/chroot/tmp/results/fake_file'
+ self.result.perf_data_files = ['/tmp/results/perf.data']
+ self.result._board = 'lumpy'
+ mock_getpath.return_value = fake_file
+ self.result._ce.ChrootRunCommand = mock_chrootruncmd
+ tmp = self.result._GeneratePerfReportFiles()
+ self.assertEqual(tmp, ['/tmp/chroot%s' % fake_file])
+ self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
+ ('/tmp',
+ ('/tmp/perf.static report -n --symfs /build/lumpy '
+ '--vmlinux /build/lumpy/usr/lib/debug/boot/vmlinux '
+ '--kallsyms /build/lumpy/boot/System.map-* -i '
+ '%s --stdio > %s') % (fake_file, fake_file)))
+
+
+
+ @mock.patch.object (misc, 'GetOutsideChrootPath')
+ def test_populate_from_run(self, mock_getpath):
+
+ def FakeGetResultsDir():
+ self.callGetResultsDir = True
+ return '/tmp/results_dir'
+
+ def FakeGetPerfDataFiles():
+ self.callGetPerfDataFiles = True
+ return []
+
+ def FakeGetPerfReportFiles():
+ self.callGetPerfReportFiles = True
+ return []
+
+ def FakeProcessResults(show_results):
+ self.callProcessResults = True
+
+ mock.get_path = '/tmp/chromeos/tmp/results_dir'
+ self.result._chromeos_root = '/tmp/chromeos'
+
+ self.callGetResultsDir = False
+ self.callGetPerfDataFiles = False
+ self.callGetPerfReportFiles = False
+ self.callProcessResults = False
+
+ self.result._GetResultsDir = FakeGetResultsDir
+ self.result._GetPerfDataFiles = FakeGetPerfDataFiles
+ self.result._GeneratePerfReportFiles = FakeGetPerfReportFiles
+ self.result._ProcessResults = FakeProcessResults
+
+ self.result._PopulateFromRun(OUTPUT, '', 0, True, 'test',
+ 'telemetry_Crosperf')
+ self.assertTrue(self.callGetResultsDir)
+ self.assertTrue(self.callGetPerfDataFiles)
+ self.assertTrue(self.callGetPerfReportFiles)
+ self.assertTrue(self.callProcessResults)
+
+ def test_process_results(self):
+
+ def FakeGetKeyvals(show_all):
+ if show_all:
+ return { 'first_time' : 680, 'Total' : 10}
+ else:
+ return { 'Total' : 10}
+
+ def FakeGatherPerfResults():
+ self.callGatherPerfResults = True
+
+ self.callGatherPerfResults = False
+
+ self.result._GetKeyvals = FakeGetKeyvals
+ self.result._GatherPerfResults = FakeGatherPerfResults
+
+ self.result.retval = 0
+ self.result._ProcessResults(True)
+ self.assertTrue(self.callGatherPerfResults)
+ self.assertEqual(len(self.result.keyvals), 3)
+ self.assertEqual(self.result.keyvals,
+ { 'first_time' : 680, 'Total' : 10, 'retval' : 0 })
+
+ self.result.retval = 1
+ self.result._ProcessResults(False)
+ self.assertEqual(len(self.result.keyvals), 2)
+ self.assertEqual(self.result.keyvals,
+ { 'Total' : 10, 'retval' : 1 })
+
+
+ @mock.patch.object (misc, 'GetInsideChrootPath')
+ @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
+ def test_populate_from_cache_dir(self, mock_runchrootcmd, mock_getpath):
+
+ def FakeMkdtemp(dir=''):
+ return self.tmpdir
+
+ current_path = os.getcwd()
+ cache_dir = os.path.join(current_path, 'test_cache/test_input')
+ self.result._ce = command_executer.GetCommandExecuter(log_level='average')
+ self.result._ce.ChrootRunCommand = mock_runchrootcmd
+ mock_runchrootcmd.return_value = ['',
+ ('%s,PASS\n%s/\telemetry_Crosperf,PASS\n')
+ % (TMP_DIR1, TMP_DIR1),
+ '']
+ mock_getpath.return_value = TMP_DIR1
+ self.tmpdir = tempfile.mkdtemp()
+ save_real_mkdtemp = tempfile.mkdtemp
+ tempfile.mkdtemp = FakeMkdtemp
+
+ self.result._PopulateFromCacheDir(cache_dir, True, 'sunspider',
+ 'telemetry_Crosperf')
+ self.assertEqual(self.result.keyvals,
+ {u'Total__Total': [444.0, u'ms'],
+ u'regexp-dna__regexp-dna': [16.2, u'ms'],
+ u'telemetry_page_measurement_results__num_failed':
+ [0, u'count'],
+ u'telemetry_page_measurement_results__num_errored':
+ [0, u'count'],
+ u'string-fasta__string-fasta': [23.2, u'ms'],
+ u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte':
+ [3.2, u'ms'],
+ u'access-nsieve__access-nsieve': [7.9, u'ms'],
+ u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
+ u'string-validate-input__string-validate-input':
+ [19.3, u'ms'],
+ u'3d-raytrace__3d-raytrace': [24.7, u'ms'],
+ u'3d-cube__3d-cube': [28.0, u'ms'],
+ u'string-unpack-code__string-unpack-code': [46.7, u'ms'],
+ u'date-format-tofte__date-format-tofte': [26.3, u'ms'],
+ u'math-partial-sums__math-partial-sums': [22.0, u'ms'],
+ '\telemetry_Crosperf': ['PASS', ''],
+ u'crypto-aes__crypto-aes': [15.2, u'ms'],
+ u'bitops-bitwise-and__bitops-bitwise-and': [8.4, u'ms'],
+ u'crypto-md5__crypto-md5': [10.5, u'ms'],
+ u'string-tagcloud__string-tagcloud': [52.8, u'ms'],
+ u'access-nbody__access-nbody': [8.5, u'ms'],
+ 'retval': 0,
+ u'math-spectral-norm__math-spectral-norm': [6.6, u'ms'],
+ u'math-cordic__math-cordic': [8.7, u'ms'],
+ u'access-binary-trees__access-binary-trees': [4.5, u'ms'],
+ u'controlflow-recursive__controlflow-recursive':
+ [4.4, u'ms'],
+ u'access-fannkuch__access-fannkuch': [17.8, u'ms'],
+ u'string-base64__string-base64': [16.0, u'ms'],
+ u'date-format-xparb__date-format-xparb': [20.9, u'ms'],
+ u'3d-morph__3d-morph': [22.1, u'ms'],
+ u'bitops-bits-in-byte__bitops-bits-in-byte': [9.1, u'ms']
+ })
+
+
+ # Clean up after test.
+ tempfile.mkdtemp = save_real_mkdtemp
+ command = 'rm -Rf %s' % self.tmpdir
+ self.result._ce.RunCommand(command)
+
+
+ @mock.patch.object (misc, 'GetRoot')
+ @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
+ def test_cleanup(self, mock_runcmd, mock_getroot):
+
+ # Test 1. 'rm_chroot_tmp' is True; self.results_dir exists;
+ # self._temp_dir exists; results_dir name contains 'test_that_results_'.
+ mock_getroot.return_value = ['/tmp/tmp_AbcXyz', 'test_that_results_fake']
+ self.result._ce.RunCommand = mock_runcmd
+ self.result.results_dir = 'test_results_dir'
+ self.result._temp_dir = 'test_temp_dir'
+ self.result.CleanUp(True)
+ self.assertEqual(mock_getroot.call_count, 1)
+ self.assertEqual(mock_runcmd.call_count, 2)
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('rm -rf test_results_dir',))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf test_temp_dir',))
+
+  # Test 2. Same, except that the results_dir name does not contain
+  # 'test_that_results_'
+ mock_getroot.reset_mock()
+ mock_runcmd.reset_mock()
+ mock_getroot.return_value = ['/tmp/tmp_AbcXyz', 'other_results_fake']
+ self.result._ce.RunCommand = mock_runcmd
+ self.result.results_dir = 'test_results_dir'
+ self.result._temp_dir = 'test_temp_dir'
+ self.result.CleanUp(True)
+ self.assertEqual(mock_getroot.call_count, 1)
+ self.assertEqual(mock_runcmd.call_count, 2)
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('rm -rf /tmp/tmp_AbcXyz',))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf test_temp_dir',))
+
+ # Test 3. mock_getroot returns nothing; 'rm_chroot_tmp' is False.
+ mock_getroot.reset_mock()
+ mock_runcmd.reset_mock()
+ self.result.CleanUp(False)
+ self.assertEqual(mock_getroot.call_count, 0)
+ self.assertEqual(mock_runcmd.call_count, 1)
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('rm -rf test_temp_dir',))
+
+ # Test 4. 'rm_chroot_tmp' is True, but result_dir & _temp_dir are None.
+ mock_getroot.reset_mock()
+ mock_runcmd.reset_mock()
+ self.result.results_dir = None
+ self.result._temp_dir = None
+ self.result.CleanUp(True)
+ self.assertEqual(mock_getroot.call_count, 0)
+ self.assertEqual(mock_runcmd.call_count, 0)
+
+
+ @mock.patch.object (misc, 'GetInsideChrootPath')
+ @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
+ def test_store_to_cache_dir(self, mock_chrootruncmd, mock_getpath):
+
+ def FakeMkdtemp(dir=''):
+ return self.tmpdir
+
+ current_path = os.getcwd()
+ cache_dir = os.path.join(current_path, 'test_cache/test_output')
+
+ self.result._ce = command_executer.GetCommandExecuter(log_level='average')
+ self.result.out = OUTPUT
+ self.result.err = error
+ self.result.retval = 0
+ self.tmpdir = tempfile.mkdtemp()
+ if not os.path.exists(self.tmpdir):
+ os.makedirs(self.tmpdir)
+ self.result.results_dir = os.path.join(os.getcwd(), 'test_cache')
+ save_real_mkdtemp = tempfile.mkdtemp
+ tempfile.mkdtemp = FakeMkdtemp
+
+ mock_mm = machine_manager.MockMachineManager('/tmp/chromeos_root', 0,
+ 'average')
+ mock_mm.machine_checksum_string['mock_label'] = 'fake_machine_checksum123'
+
+ self.result.StoreToCacheDir(cache_dir, mock_mm)
+
+ # Check that the correct things were written to the 'cache'.
+ test_dir = os.path.join(os.getcwd(), 'test_cache/test_output')
+ base_dir = os.path.join(os.getcwd(), 'test_cache/compare_output')
+ self.assertTrue(os.path.exists(os.path.join(test_dir, 'autotest.tbz2')))
+ self.assertTrue(os.path.exists(os.path.join(test_dir, 'machine.txt')))
+ self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.txt')))
+
+ f1 = os.path.join(test_dir, 'machine.txt')
+ f2 = os.path.join(base_dir, 'machine.txt')
+ cmd = 'diff %s %s' % (f1, f2)
+ [_, out, _] = self.result._ce.RunCommand(cmd, return_output=True)
+ self.assertEqual(len(out), 0)
+
+ f1 = os.path.join(test_dir, 'results.txt')
+ f2 = os.path.join(base_dir, 'results.txt')
+ cmd = 'diff %s %s' % (f1, f2)
+ [_, out, _] = self.result._ce.RunCommand(cmd, return_output=True)
+ self.assertEqual(len(out), 0)
+
+ # Clean up after test.
+ tempfile.mkdtemp = save_real_mkdtemp
+ command = 'rm %s/*' % test_dir
+ self.result._ce.RunCommand(command)
+
+
+TELEMETRY_RESULT_KEYVALS = {'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-cordic (ms)': '11.4', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-nbody (ms)': '6.9', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-fannkuch (ms)': '26.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-spectral-norm (ms)': '6.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-nsieve-bits (ms)': '9.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-partial-sums (ms)': '32.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html regexp-dna (ms)': '16.1', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-cube (ms)': '42.7', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-md5 (ms)': '10.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-sha1 (ms)': '12.4', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-tagcloud (ms)': '47.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-fasta (ms)': '36.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-binary-trees (ms)': '7.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html date-format-xparb (ms)': '138.1', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-aes (ms)': '19.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html Total (ms)': '656.5', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-base64 (ms)': '17.5', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-validate-input (ms)': '24.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-raytrace (ms)': '28.7', 
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html controlflow-recursive (ms)': '5.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-bits-in-byte (ms)': '9.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-morph (ms)': '50.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-bitwise-and (ms)': '8.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-nsieve (ms)': '8.6', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html date-format-tofte (ms)': '31.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-3bit-bits-in-byte (ms)': '3.5', 'retval': 0, 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-unpack-code (ms)': '45.0'}
+
+PURE_TELEMETRY_OUTPUT = """page_name,3d-cube (ms),3d-morph (ms),3d-raytrace (ms),Total (ms),access-binary-trees (ms),access-fannkuch (ms),access-nbody (ms),access-nsieve (ms),bitops-3bit-bits-in-byte (ms),bitops-bits-in-byte (ms),bitops-bitwise-and (ms),bitops-nsieve-bits (ms),controlflow-recursive (ms),crypto-aes (ms),crypto-md5 (ms),crypto-sha1 (ms),date-format-tofte (ms),date-format-xparb (ms),math-cordic (ms),math-partial-sums (ms),math-spectral-norm (ms),regexp-dna (ms),string-base64 (ms),string-fasta (ms),string-tagcloud (ms),string-unpack-code (ms),string-validate-input (ms)\r\nhttp://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html,42.7,50.2,28.7,656.5,7.3,26.3,6.9,8.6,3.5,9.8,8.8,9.3,5.3,19.2,10.8,12.4,31.2,138.1,11.4,32.8,6.3,16.1,17.5,36.3,47.2,45.0,24.8\r\n"""
+
+class TelemetryResultTest(unittest.TestCase):
+
+ mock_logger = mock.Mock(spec=logger.Logger)
+ mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
+ mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
+ 'remote', 'image_args', 'cache_dir', False)
+
+ def test_populate_from_run(self):
+
+ def FakeProcessResults():
+ self.callFakeProcessResults = True
+
+ self.callFakeProcessResults = False
+ self.result = TelemetryResult(self.mock_logger, self.mock_label,
+ 'average', self.mock_cmd_exec)
+ self.result._ProcessResults = FakeProcessResults
+ self.result._PopulateFromRun(OUTPUT, error, 3, False, 'fake_test',
+ 'telemetry_Crosperf')
+ self.assertTrue(self.callFakeProcessResults)
+ self.assertEqual(self.result.out, OUTPUT)
+ self.assertEqual(self.result.err, error)
+ self.assertEqual(self.result.retval, 3)
+
+
+ def test_populate_from_cache_dir_and_process_results(self):
+
+ self.result = TelemetryResult(self.mock_logger, self.mock_label,
+ 'average')
+ current_path = os.getcwd()
+ cache_dir = os.path.join(current_path,
+ 'test_cache/test_puretelemetry_input')
+ self.result._PopulateFromCacheDir(cache_dir)
+ self.assertEqual(self.result.out, PURE_TELEMETRY_OUTPUT)
+ self.assertEqual(self.result.err, '')
+ self.assertEqual(self.result.retval, 0)
+ self.assertEqual(self.result.keyvals, TELEMETRY_RESULT_KEYVALS)
+
+
+class ResultsCacheTest(unittest.TestCase):
+
+ mock_logger = mock.Mock(spec=logger.Logger)
+ mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
+ 'remote', 'image_args', 'cache_dir', False)
+ def setUp(self):
+ self.results_cache = ResultsCache()
+
+
+ mock_mm = machine_manager.MockMachineManager('/tmp/chromeos_root', 0,
+ 'average')
+ mock_mm.machine_checksum_string['mock_label'] = 'fake_machine_checksum123'
+
+ self.results_cache.Init(self.mock_label.chromeos_image,
+ self.mock_label.chromeos_root,
+ 'sunspider',
+ 1, # benchmark_run.iteration,
+ '', # benchmark_run.test_args,
+ '', # benchmark_run.profiler_args,
+ mock_mm,
+ self.mock_label.board,
+ [CacheConditions.CACHE_FILE_EXISTS,
+ CacheConditions.CHECKSUMS_MATCH],
+ self.mock_logger,
+ 'average',
+ self.mock_label,
+ '', # benchmark_run.share_users
+ 'telemetry_Crosperf',
+ True) # benchmark_run.show_all_results
+
+
+ @mock.patch.object (image_checksummer.ImageChecksummer, 'Checksum')
+ def test_get_cache_dir_for_write(self, mock_checksum):
+
+ def FakeGetMachines(label):
+ m1 = machine_manager.MockCrosMachine('lumpy1.cros',
+ self.results_cache.chromeos_root,
+ 'average')
+ m2 = machine_manager.MockCrosMachine('lumpy2.cros',
+ self.results_cache.chromeos_root,
+ 'average')
+ return [m1, m2]
+
+
+ mock_checksum.return_value = 'FakeImageChecksumabc123'
+ self.results_cache.machine_manager.GetMachines = FakeGetMachines
+ self.results_cache.machine_manager.machine_checksum['mock_label'] = \
+ 'FakeMachineChecksumabc987'
+ # Based on the label, benchmark and machines, get the directory in which
+ # to store the cache information for this test run.
+ result_path = self.results_cache._GetCacheDirForWrite()
+ # Verify that the returned directory is correct (since the label
+ # contained a cache_dir, named 'cache_dir', that's what is expected in
+ # the result, rather than '~/cros_scratch').
+ comp_path = os.path.join(os.getcwd(),
+ 'cache_dir/54524606abaae4fdf7b02f49f7ae7127_'
+ 'sunspider_1_7215ee9c7d9dc229d2921a40e899ec5f_'
+ 'FakeImageChecksumabc123_FakeMachineChecksum'
+ 'abc987__6')
+ self.assertEqual(result_path, comp_path)
+
+
+ def test_form_cache_dir(self):
+ # This is very similar to the previous test (_FormCacheDir is called
+ # from _GetCacheDirForWrite).
+ cache_key_list = ('54524606abaae4fdf7b02f49f7ae7127', 'sunspider', '1',
+ '7215ee9c7d9dc229d2921a40e899ec5f',
+ 'FakeImageChecksumabc123', '*', '*', '6')
+ path = self.results_cache._FormCacheDir(cache_key_list)
+ self.assertEqual(len(path), 1)
+ path1 = path[0]
+ test_dirname = ('54524606abaae4fdf7b02f49f7ae7127_sunspider_1_7215ee9'
+ 'c7d9dc229d2921a40e899ec5f_FakeImageChecksumabc123_*_*_6')
+ comp_path = os.path.join(os.getcwd(), 'cache_dir', test_dirname)
+ self.assertEqual(path1, comp_path)
+
+
+ @mock.patch.object (image_checksummer.ImageChecksummer, 'Checksum')
+ def test_get_cache_key_list(self, mock_checksum):
+ # This tests the mechanism that generates the various pieces of the
+ # cache directory name, based on various conditions.
+
+ def FakeGetMachines(label):
+ m1 = machine_manager.MockCrosMachine('lumpy1.cros',
+ self.results_cache.chromeos_root,
+ 'average')
+ m2 = machine_manager.MockCrosMachine('lumpy2.cros',
+ self.results_cache.chromeos_root,
+ 'average')
+ return [m1, m2]
+
+
+ mock_checksum.return_value = 'FakeImageChecksumabc123'
+ self.results_cache.machine_manager.GetMachines = FakeGetMachines
+ self.results_cache.machine_manager.machine_checksum['mock_label'] = \
+ 'FakeMachineChecksumabc987'
+
+ # Test 1. Generating cache name for reading (not writing).
+ key_list = self.results_cache._GetCacheKeyList(True)
+ self.assertEqual(key_list[0], '*') # Machine checksum value, for read.
+ self.assertEqual(key_list[1], 'sunspider')
+ self.assertEqual(key_list[2], '1')
+ self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
+ self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
+ self.assertEqual(key_list[5], '*')
+ self.assertEqual(key_list[6], '*')
+ self.assertEqual(key_list[7], '6')
+
+ # Test 2. Generating cache name for writing, with local image type.
+ key_list = self.results_cache._GetCacheKeyList(False)
+ self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
+ self.assertEqual(key_list[1], 'sunspider')
+ self.assertEqual(key_list[2], '1')
+ self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
+ self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
+ self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
+ self.assertEqual(key_list[6], '')
+ self.assertEqual(key_list[7], '6')
+
+ # Test 3. Generating cache name for writing, with trybot image type.
+ self.results_cache.label.image_type = 'trybot'
+ key_list = self.results_cache._GetCacheKeyList(False)
+ self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
+ self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
+ self.assertEqual(key_list[4], '54524606abaae4fdf7b02f49f7ae7127')
+ self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
+
+ # Test 4. Generating cache name for writing, with official image type.
+ self.results_cache.label.image_type = 'official'
+ key_list = self.results_cache._GetCacheKeyList(False)
+ self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
+ self.assertEqual(key_list[1], 'sunspider')
+ self.assertEqual(key_list[2], '1')
+ self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
+ self.assertEqual(key_list[4], '*')
+ self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
+ self.assertEqual(key_list[6], '')
+ self.assertEqual(key_list[7], '6')
+
+ # Test 5. Generating cache name for writing, with local image type, and
+ # specifying that the image path must match the cached image path.
+ self.results_cache.label.image_type = 'local'
+ self.results_cache.cache_conditions.append(CacheConditions.IMAGE_PATH_MATCH)
+ key_list = self.results_cache._GetCacheKeyList(False)
+ self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
+ self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
+ self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
+ self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
+
+
+ @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
+ @mock.patch.object (os.path, 'isdir')
+ @mock.patch.object (Result, 'CreateFromCacheHit')
+ def test_read_result(self, mock_create, mock_isdir, mock_runcmd):
+
+ self.fakeCacheReturnResult = None
+ def FakeGetCacheDirForRead():
+ return self.fakeCacheReturnResult
+
+ def FakeGetCacheDirForWrite():
+ return self.fakeCacheReturnResult
+
+ mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
+ fake_result = Result(self.mock_logger, self.mock_label, 'average',
+ mock_cmd_exec)
+ fake_result.retval = 0
+
+ # Set up results_cache _GetCacheDirFor{Read,Write} to return
+ # self.fakeCacheReturnResult, which is initially None (see above).
+ # So initially, no cache dir is returned.
+ self.results_cache._GetCacheDirForRead = FakeGetCacheDirForRead
+ self.results_cache._GetCacheDirForWrite = FakeGetCacheDirForWrite
+
+ mock_isdir.return_value = True
+ save_cc = [CacheConditions.CACHE_FILE_EXISTS,
+ CacheConditions.CHECKSUMS_MATCH]
+ self.results_cache.cache_conditions.append(CacheConditions.FALSE)
+
+ # Test 1. CacheCondition.FALSE, which means do not read from the cache.
+ # (force re-running of test). Result should be None.
+ res = self.results_cache.ReadResult()
+ self.assertIsNone(res)
+ self.assertEqual(mock_runcmd.call_count, 1)
+
+ # Test 2. Remove CacheCondition.FALSE. Result should still be None,
+ # because _GetCacheDirForRead is returning None at the moment.
+ mock_runcmd.reset_mock()
+ self.results_cache.cache_conditions = save_cc
+ res = self.results_cache.ReadResult()
+ self.assertIsNone(res)
+ self.assertEqual(mock_runcmd.call_count, 0)
+
+ # Test 3. Now set up cache dir to be returned by _GetCacheDirForRead.
+ # Since cache_dir is found, will call Result.CreateFromCacheHit, which
+    # will actually call our mock_create and should return fake_result.
+ self.fakeCacheReturnResult = 'fake/cache/dir'
+ mock_create.return_value = fake_result
+ res = self.results_cache.ReadResult()
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertEqual(res, fake_result)
+
+ # Test 4. os.path.isdir(cache_dir) will now return false, so result
+ # should be None again (no cache found).
+ mock_isdir.return_value = False
+ res = self.results_cache.ReadResult()
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertIsNone(res)
+
+ # Test 5. os.path.isdir returns true, but mock_create now returns None
+    # (the call to CreateFromCacheHit returns None), so overall result is None.
+ mock_isdir.return_value = True
+ mock_create.return_value = None
+ res = self.results_cache.ReadResult()
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertIsNone(res)
+
+ # Test 6. Everything works 'as expected', result should be fake_result.
+ mock_create.return_value = fake_result
+ res = self.results_cache.ReadResult()
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertEqual(res, fake_result)
+
+ # Test 7. The run failed; result should be None.
+ mock_create.return_value = fake_result
+ fake_result.retval = 1
+ self.results_cache.cache_conditions.append(CacheConditions.RUN_SUCCEEDED)
+ res = self.results_cache.ReadResult()
+ self.assertEqual(mock_runcmd.call_count, 0)
+ self.assertIsNone(res)
+
+
+if __name__ == '__main__':
unittest.main()
diff --git a/crosperf/test_cache/compare_output/autotest.tbz2 b/crosperf/test_cache/compare_output/autotest.tbz2
new file mode 100644
index 00000000..066dd9ac
--- /dev/null
+++ b/crosperf/test_cache/compare_output/autotest.tbz2
Binary files differ
diff --git a/crosperf/test_cache/compare_output/machine.txt b/crosperf/test_cache/compare_output/machine.txt
new file mode 100644
index 00000000..a82af3aa
--- /dev/null
+++ b/crosperf/test_cache/compare_output/machine.txt
@@ -0,0 +1 @@
+fake_machine_checksum123 \ No newline at end of file
diff --git a/crosperf/test_cache/compare_output/results.txt b/crosperf/test_cache/compare_output/results.txt
new file mode 100644
index 00000000..134b8822
--- /dev/null
+++ b/crosperf/test_cache/compare_output/results.txt
@@ -0,0 +1,6 @@
+S"CMD (True): ./run_remote_tests.sh --remote=172.17.128.241 --board=lumpy LibCBench\nCMD (None): cd /usr/local/google/home/yunlian/gd/src/build/images/lumpy/latest/../../../../..; cros_sdk -- ./in_chroot_cmd6X7Cxu.sh\nIdentity added: /tmp/run_remote_tests.PO1234567/autotest_key (/tmp/run_remote_tests.PO1234567/autotest_key)\nINFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest.\n\nINFO : Running the following control files 1 times:\nINFO : * 'client/site_tests/platform_LibCBench/control'\n\nINFO : Running client test client/site_tests/platform_LibCBench/control\n./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/run_remote_tests.PO1234567/platform_LibCBench --test-retry=0 --args \nERROR:root:import statsd failed, no stats will be reported.\n14:20:22 INFO | Results placed in /tmp/run_remote_tests.PO1234567/platform_LibCBench\n14:20:22 INFO | Processing control file\n14:20:23 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'\n14:20:23 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.\n14:20:23 INFO | INFO\t----\t----\tkernel=3.8.11\tlocaltime=May 22 14:20:23\ttimestamp=1369257623\t\n14:20:23 INFO | Installing autotest on 172.17.128.241\n14:20:23 INFO | Using installation dir /usr/local/autotest\n14:20:23 WARNI| No job_repo_url for <remote host: 172.17.128.241>\n14:20:23 INFO | Could not install autotest using the packaging system: No repos to install an autotest client from. 
Trying other methods\n14:20:23 INFO | Installation of autotest completed\n14:20:24 WARNI| No job_repo_url for <remote host: 172.17.128.241>\n14:20:24 INFO | Executing /usr/local/autotest/bin/autotest /usr/local/autotest/control phase 0\n14:20:24 INFO | Entered autotestd_monitor.\n14:20:24 INFO | Finished launching tail subprocesses.\n14:20:24 INFO | Finished waiting on autotestd to start.\n14:20:26 INFO | START\t----\t----\ttimestamp=1369257625\tlocaltime=May 22 14:20:25\t\n14:20:26 INFO | \tSTART\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257625\tlocaltime=May 22 14:20:25\t\n14:20:30 INFO | \t\tGOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\tcompleted successfully\n14:20:30 INFO | \tEND GOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\t\n14:20:31 INFO | END GOOD\t----\t----\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\t\n14:20:31 INFO | Got lock of exit_code_file.\n14:20:31 INFO | Released lock of exit_code_file and closed it.\nOUTPUT: ==============================\nOUTPUT: Current time: 2013-05-22 14:20:32.818831 Elapsed: 0:01:30 ETA: Unknown\nDone: 0% [ ]\nOUTPUT: Thread Status:\nRUNNING: 1 ('ttt: LibCBench (1)' 0:01:21)\nMachine Status:\nMachine Thread Lock Status Checksum \n172.17.128.241 ttt: LibCBench (1) True RUNNING 3ba9f2ecbb222f20887daea5583d86ba\n\nOUTPUT: ==============================\n14:20:33 INFO | Killing child processes.\n14:20:33 INFO | Client complete\n14:20:33 INFO | Finished processing control file\n14:20:33 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'\n14:20:33 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the 
list of known hosts.\n\nINFO : Test results:\n-------------------------------------------------------------------\nplatform_LibCBench [ PASSED ]\nplatform_LibCBench/platform_LibCBench [ PASSED ]\nplatform_LibCBench/platform_LibCBench b_malloc_big1__0_ 0.00375231466667\nplatform_LibCBench/platform_LibCBench b_malloc_big2__0_ 0.002951359\nplatform_LibCBench/platform_LibCBench b_malloc_bubble__0_ 0.015066374\nplatform_LibCBench/platform_LibCBench b_malloc_sparse__0_ 0.015053784\nplatform_LibCBench/platform_LibCBench b_malloc_thread_local__0_ 0.01138439\nplatform_LibCBench/platform_LibCBench b_malloc_thread_stress__0_ 0.0367894733333\nplatform_LibCBench/platform_LibCBench b_malloc_tiny1__0_ 0.000768474333333\nplatform_LibCBench/platform_LibCBench b_malloc_tiny2__0_ 0.000581407333333\nplatform_LibCBench/platform_LibCBench b_pthread_create_serial1__0_ 0.0291785246667\nplatform_LibCBench/platform_LibCBench b_pthread_createjoin_serial1__0_ 0.031907936\nplatform_LibCBench/platform_LibCBench b_pthread_createjoin_serial2__0_ 0.043485347\nplatform_LibCBench/platform_LibCBench b_pthread_uselesslock__0_ 0.0294113346667\nplatform_LibCBench/platform_LibCBench b_regex_compile____a_b_c__d_b__ 0.00529833933333\nplatform_LibCBench/platform_LibCBench b_regex_search____a_b_c__d_b__ 0.00165455066667\nplatform_LibCBench/platform_LibCBench b_regex_search___a_25_b__ 0.0496191923333\nplatform_LibCBench/platform_LibCBench b_stdio_putcgetc__0_ 0.100005711667\nplatform_LibCBench/platform_LibCBench b_stdio_putcgetc_unlocked__0_ 0.0371443833333\nplatform_LibCBench/platform_LibCBench b_string_memset__0_ 0.00275405066667\nplatform_LibCBench/platform_LibCBench b_string_strchr__0_ 0.00456903\nplatform_LibCBench/platform_LibCBench b_string_strlen__0_ 0.044893587\nplatform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__ 0.118360778\nplatform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__ 0.068957325\nplatform_LibCBench/platform_LibCBench 
b_string_strstr___aaaaaaaaaaaaaacccccccccccc__ 0.0135694476667\nplatform_LibCBench/platform_LibCBench b_string_strstr___abcdefghijklmnopqrstuvwxyz__ 0.0134553343333\nplatform_LibCBench/platform_LibCBench b_string_strstr___azbycxdwevfugthsirjqkplomn__ 0.0133123556667\nplatform_LibCBench/platform_LibCBench b_utf8_bigbuf__0_ 0.0473772253333\nplatform_LibCBench/platform_LibCBench b_utf8_onebyone__0_ 0.130938538333\n-------------------------------------------------------------------\nTotal PASS: 2/2 (100%)\n\nINFO : Elapsed time: 0m16s \n"
+p0
+.S"\nERROR: Identity added: /tmp/run_remote_tests.Z4Ld/autotest_key (/tmp/run_remote_tests.Z4Ld/autotest_key)\nINFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest.\nINFO : Running the following control files 1 times:\nINFO : * 'client/site_tests/platform_LibCBench/control'\nINFO : Running client test client/site_tests/platform_LibCBench/control\nINFO : Test results:\nINFO : Elapsed time: 0m18s\n"
+p0
+.I0
+. \ No newline at end of file
diff --git a/crosperf/test_cache/test_input/autotest.tbz2 b/crosperf/test_cache/test_input/autotest.tbz2
new file mode 100644
index 00000000..6ddbc6bf
--- /dev/null
+++ b/crosperf/test_cache/test_input/autotest.tbz2
Binary files differ
diff --git a/crosperf/test_cache/test_input/machine.txt b/crosperf/test_cache/test_input/machine.txt
new file mode 100644
index 00000000..9bd78434
--- /dev/null
+++ b/crosperf/test_cache/test_input/machine.txt
@@ -0,0 +1 @@
+processor : 0vendor_id : GenuineIntelcpu family : 6model : 42model name : Intel(R) Celeron(R) CPU 867 @ 1.30GHzstepping : 7microcode : 0x25cache size : 2048 KBphysical id : 0siblings : 2core id : 0cpu cores : 2apicid : 0initial apicid : 0fpu : yesfpu_exception : yescpuid level : 13wp : yesflags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic popcnt tsc_deadline_timer xsave lahf_lm arat epb xsaveopt pln pts dts tpr_shadow vnmi flexpriority ept vpidclflush size : 64cache_alignment : 64address sizes : 36 bits physical, 48 bits virtualpower management:processor : 1vendor_id : GenuineIntelcpu family : 6model : 42model name : Intel(R) Celeron(R) CPU 867 @ 1.30GHzstepping : 7microcode : 0x25cache size : 2048 KBphysical id : 0siblings : 2core id : 1cpu cores : 2apicid : 2initial apicid : 2fpu : yesfpu_exception : yescpuid level : 13wp : yesflags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic popcnt tsc_deadline_timer xsave lahf_lm arat epb xsaveopt pln pts dts tpr_shadow vnmi flexpriority ept vpidclflush size : 64cache_alignment : 64address sizes : 36 bits physical, 48 bits virtualpower management: 4194304 \ No newline at end of file
diff --git a/crosperf/test_cache/test_input/results.txt b/crosperf/test_cache/test_input/results.txt
new file mode 100644
index 00000000..33ba6ab7
--- /dev/null
+++ b/crosperf/test_cache/test_input/results.txt
@@ -0,0 +1,6 @@
+S"11:22:08 INFO | Running autotest_quickmerge step.\n11:22:08 INFO | quickmerge| 11:22:08: INFO: RunCommand: sudo -- /usr/bin/python2.7 /mnt/host/source/chromite/bin/autotest_quickmerge '--board=lumpy'\n11:22:08 INFO | quickmerge| 11:22:08: INFO: RunCommand: find /build/lumpy/usr/local/build/autotest/ -path /build/lumpy/usr/local/build/autotest/ExternalSource -prune -o -path /build/lumpy/usr/local/build/autotest/logs -prune -o -path /build/lumpy/usr/local/build/autotest/results -prune -o -path /build/lumpy/usr/local/build/autotest/site-packages -prune -o -printf '%T@\\n'\n11:22:22 INFO | quickmerge| 11:22:22: INFO: RunCommand: find /mnt/host/source/src/third_party/autotest/files/ -path /mnt/host/source/src/third_party/autotest/files/ExternalSource -prune -o -path /mnt/host/source/src/third_party/autotest/files/logs -prune -o -path /mnt/host/source/src/third_party/autotest/files/results -prune -o -path /mnt/host/source/src/third_party/autotest/files/site-packages -prune -o -printf '%T@\\n'\n11:22:32 INFO | quickmerge| 11:22:32: INFO: The sysroot appears to be newer than the source tree, doing nothing and exiting now.\n11:22:32 INFO | Re-running test_that script in /build/lumpy/usr/local/build/autotest copy of autotest.\n11:22:33 INFO | Began logging to /tmp/test_that_results_zZZfQa\nAdding labels [u'cros-version:ad_hoc_build', u'board:lumpy'] to host chromeos2-row2-rack4-host11.cros\n13:22:33 INFO | Fetching suite for job named telemetry_Crosperf...\n13:22:43 INFO | Scheduling suite for job named telemetry_Crosperf...\n13:22:43 INFO | ... 
scheduled 1 job(s).\n13:22:43 INFO | autoserv| DEBUG:root:import statsd failed, no stats will be reported.\n13:22:43 INFO | autoserv| Results placed in /tmp/test_that_results_zZZfQa/results-1-telemetry_Crosperf\n13:22:43 INFO | autoserv| Logged pid 25397 to /tmp/test_that_results_zZZfQa/results-1-telemetry_Crosperf/.autoserv_execute\n13:22:43 INFO | autoserv| I am PID 25397\n13:22:43 INFO | autoserv| Not checking if job_repo_url contains autotest packages on ['chromeos2-row2-rack4-host11.cros']\n13:22:43 INFO | autoserv| Processing control file\n13:22:44 INFO | autoserv| START\ttelemetry_Crosperf\ttelemetry_Crosperf\ttimestamp=1401301364\tlocaltime=May 28 11:22:44\n13:22:44 INFO | autoserv| Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_HsB3vQssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/tmp/tmpxFy6lj -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=300 -l root -p 22 chromeos2-row2-rack4-host11.cros'\n13:22:45 INFO | autoserv| Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_YTu9wYssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 chromeos2-row2-rack4-host11.cros'\n13:22:45 INFO | autoserv| Installing autotest on chromeos2-row2-rack4-host11.cros\n13:22:45 INFO | autoserv| Using installation dir /tmp/sysinfo/autoserv-MxOMOw\n13:22:46 INFO | autoserv| No job_repo_url for <remote host: chromeos2-row2-rack4-host11.cros>\n13:22:46 INFO | autoserv| Could not install autotest using the packaging system: No repos to install an autotest client from. 
Trying other methods\n13:22:47 INFO | autoserv| Installation of autotest completed\n13:22:47 INFO | autoserv| Installing updated global_config.ini.\n13:22:48 INFO | autoserv| No job_repo_url for <remote host: chromeos2-row2-rack4-host11.cros>\n13:22:48 INFO | autoserv| Executing /tmp/sysinfo/autoserv-MxOMOw/bin/autotest /tmp/sysinfo/autoserv-MxOMOw/control phase 0\n13:22:48 INFO | autoserv| Entered autotestd_monitor.\n13:22:48 INFO | autoserv| Finished launching tail subprocesses.\n13:22:48 INFO | autoserv| Finished waiting on autotestd to start.\n13:22:48 INFO | autoserv| START\t----\t----\ttimestamp=1401301368\tlocaltime=May 28 11:22:48\n13:22:48 INFO | autoserv| GOOD\t----\tsysinfo.before\ttimestamp=1401301368\tlocaltime=May 28 11:22:48\n13:22:48 INFO | autoserv| END GOOD\t----\t----\ttimestamp=1401301368\tlocaltime=May 28 11:22:48\n13:22:48 INFO | autoserv| Got lock of exit_code_file.\n13:22:48 INFO | autoserv| Released lock of exit_code_file and closed it.\n13:22:50 INFO | autoserv| Killing child processes.\n13:22:50 INFO | autoserv| Client complete\n13:22:52 INFO | autoserv| No job_repo_url for <remote host: chromeos2-row2-rack4-host11.cros>\n13:22:52 INFO | autoserv| Executing /tmp/sysinfo/autoserv-MxOMOw/bin/autotest /tmp/sysinfo/autoserv-MxOMOw/control phase 0\n13:22:53 INFO | autoserv| Entered autotestd_monitor.\n13:22:53 INFO | autoserv| Finished launching tail subprocesses.\n13:22:53 INFO | autoserv| Finished waiting on autotestd to start.\n13:22:53 INFO | autoserv| START\t----\t----\ttimestamp=1401301373\tlocaltime=May 28 11:22:53\n13:22:53 INFO | autoserv| GOOD\t----\tsysinfo.iteration.before\ttimestamp=1401301373\tlocaltime=May 28 11:22:53\n13:22:53 INFO | autoserv| END GOOD\t----\t----\ttimestamp=1401301373\tlocaltime=May 28 11:22:53\n13:22:53 INFO | autoserv| Got lock of exit_code_file.\n13:22:53 INFO | autoserv| Released lock of exit_code_file and closed it.\n13:22:55 INFO | autoserv| Killing child processes.\n13:22:55 INFO | autoserv| Client 
complete\n13:22:55 INFO | autoserv| Using Chrome source tree at /tmp/chrome_root\n13:22:55 INFO | autoserv| CMD: /tmp/chrome_root/src/tools/perf/run_benchmark --browser=cros-chrome --remote=chromeos2-row2-rack4-host11.cros sunspider\n13:23:35 INFO | autoserv| Telemetry completed with exit code: 0.\n13:23:35 INFO | autoserv| stdout:Pages: [http___www.webkit.org_perf_sunspider-1.0.2_sunspider-1.0.2_driver.html]\n13:23:35 INFO | autoserv| RESULT 3d-cube: 3d-cube= [28,28,28,28,31,26,28,28,28,27] ms\n13:23:35 INFO | autoserv| Avg 3d-cube: 28.000000ms\n13:23:35 INFO | autoserv| Sd 3d-cube: 1.247219ms\n13:23:35 INFO | autoserv| RESULT 3d-morph: 3d-morph= [23,22,22,22,22,22,22,22,22,22] ms\n13:23:35 INFO | autoserv| Avg 3d-morph: 22.100000ms\n13:23:35 INFO | autoserv| Sd 3d-morph: 0.316228ms\n13:23:35 INFO | autoserv| RESULT 3d-raytrace: 3d-raytrace= [26,23,24,25,25,25,26,24,24,25] ms\n13:23:35 INFO | autoserv| Avg 3d-raytrace: 24.700000ms\n13:23:35 INFO | autoserv| Sd 3d-raytrace: 0.948683ms\n13:23:35 INFO | autoserv| *RESULT Total: Total= [443,440,440,447,451,435,441,449,449,445] ms\n13:23:35 INFO | autoserv| Avg Total: 444.000000ms\n13:23:35 INFO | autoserv| Sd Total: 5.077182ms\n13:23:35 INFO | autoserv| RESULT access-binary-trees: access-binary-trees= [4,3,5,6,5,5,3,5,5,4] ms\n13:23:35 INFO | autoserv| Avg access-binary-trees: 4.500000ms\n13:23:35 INFO | autoserv| Sd access-binary-trees: 0.971825ms\n13:23:35 INFO | autoserv| RESULT access-fannkuch: access-fannkuch= [19,18,17,18,17,18,18,18,17,18] ms\n13:23:35 INFO | autoserv| Avg access-fannkuch: 17.800000ms\n13:23:35 INFO | autoserv| Sd access-fannkuch: 0.632456ms\n13:23:35 INFO | autoserv| RESULT access-nbody: access-nbody= [7,9,8,7,12,8,9,10,8,7] ms\n13:23:35 INFO | autoserv| Avg access-nbody: 8.500000ms\n13:23:35 INFO | autoserv| Sd access-nbody: 1.581139ms\n13:23:35 INFO | autoserv| RESULT access-nsieve: access-nsieve= [9,8,8,8,8,7,8,7,8,8] ms\n13:23:35 INFO | autoserv| Avg access-nsieve: 7.900000ms\n13:23:35 
INFO | autoserv| Sd access-nsieve: 0.567646ms\n13:23:35 INFO | autoserv| RESULT bitops-3bit-bits-in-byte: bitops-3bit-bits-in-byte= [3,3,3,3,3,3,3,4,4,3] ms\n13:23:35 INFO | autoserv| Avg bitops-3bit-bits-in-byte: 3.200000ms\n13:23:35 INFO | autoserv| Sd bitops-3bit-bits-in-byte: 0.421637ms\n13:23:35 INFO | autoserv| RESULT bitops-bits-in-byte: bitops-bits-in-byte= [9,9,9,9,9,9,9,9,9,10] ms\n13:23:35 INFO | autoserv| Avg bitops-bits-in-byte: 9.100000ms\n13:23:35 INFO | autoserv| Sd bitops-bits-in-byte: 0.316228ms\n13:23:35 INFO | autoserv| RESULT bitops-bitwise-and: bitops-bitwise-and= [8,8,7,9,8,9,8,8,9,10] ms\n13:23:35 INFO | autoserv| Avg bitops-bitwise-and: 8.400000ms\n13:23:35 INFO | autoserv| Sd bitops-bitwise-and: 0.843274ms\n13:23:35 INFO | autoserv| RESULT bitops-nsieve-bits: bitops-nsieve-bits= [9,9,9,9,9,9,9,11,11,9] ms\n13:23:35 INFO | autoserv| Avg bitops-nsieve-bits: 9.400000ms\n13:23:35 INFO | autoserv| Sd bitops-nsieve-bits: 0.843274ms\n13:23:35 INFO | autoserv| RESULT controlflow-recursive: controlflow-recursive= [5,5,5,4,4,4,5,4,4,4] ms\n13:23:35 INFO | autoserv| Avg controlflow-recursive: 4.400000ms\n13:23:35 INFO | autoserv| Sd controlflow-recursive: 0.516398ms\n13:23:35 INFO | autoserv| RESULT crypto-aes: crypto-aes= [14,16,15,16,15,14,17,14,15,16] ms\n13:23:35 INFO | autoserv| Avg crypto-aes: 15.200000ms\n13:23:35 INFO | autoserv| Sd crypto-aes: 1.032796ms\n13:23:35 INFO | autoserv| RESULT crypto-md5: crypto-md5= [10,11,11,11,10,10,11,10,10,11] ms\n13:23:35 INFO | autoserv| Avg crypto-md5: 10.500000ms\n13:23:35 INFO | autoserv| Sd crypto-md5: 0.527046ms\n13:23:35 INFO | autoserv| RESULT crypto-sha1: crypto-sha1= [11,11,12,12,12,12,12,10,13,11] ms\n13:23:35 INFO | autoserv| Avg crypto-sha1: 11.600000ms\n13:23:35 INFO | autoserv| Sd crypto-sha1: 0.843274ms\n13:23:35 INFO | autoserv| RESULT date-format-tofte: date-format-tofte= [28,25,25,26,26,27,26,28,27,25] ms\n13:23:35 INFO | autoserv| Avg date-format-tofte: 26.300000ms\n13:23:35 INFO | 
autoserv| Sd date-format-tofte: 1.159502ms\n13:23:35 INFO | autoserv| RESULT date-format-xparb: date-format-xparb= [21,22,21,21,21,20,20,20,21,22] ms\n13:23:35 INFO | autoserv| Avg date-format-xparb: 20.900000ms\n13:23:35 INFO | autoserv| Sd date-format-xparb: 0.737865ms\n13:23:35 INFO | autoserv| RESULT math-cordic: math-cordic= [8,8,8,9,9,9,9,9,9,9] ms\n13:23:35 INFO | autoserv| Avg math-cordic: 8.700000ms\n13:23:35 INFO | autoserv| Sd math-cordic: 0.483046ms\n13:23:35 INFO | autoserv| RESULT math-partial-sums: math-partial-sums= [22,22,22,21,23,20,20,23,25,22] ms\n13:23:35 INFO | autoserv| Avg math-partial-sums: 22.000000ms\n13:23:35 INFO | autoserv| Sd math-partial-sums: 1.490712ms\n13:23:35 INFO | autoserv| RESULT math-spectral-norm: math-spectral-norm= [6,7,6,7,7,6,7,6,7,7] ms\n13:23:35 INFO | autoserv| Avg math-spectral-norm: 6.600000ms\n13:23:35 INFO | autoserv| Sd math-spectral-norm: 0.516398ms\n13:23:35 INFO | autoserv| RESULT regexp-dna: regexp-dna= [16,16,17,16,16,16,16,16,17,16] ms\n13:23:35 INFO | autoserv| Avg regexp-dna: 16.200000ms\n13:23:35 INFO | autoserv| Sd regexp-dna: 0.421637ms\n13:23:35 INFO | autoserv| RESULT string-base64: string-base64= [17,16,16,16,17,16,16,16,14,16] ms\n13:23:35 INFO | autoserv| Avg string-base64: 16.000000ms\n13:23:35 INFO | autoserv| Sd string-base64: 0.816497ms\n13:23:35 INFO | autoserv| RESULT string-fasta: string-fasta= [23,22,23,24,23,23,23,25,23,23] ms\n13:23:35 INFO | autoserv| Avg string-fasta: 23.200000ms\n13:23:35 INFO | autoserv| Sd string-fasta: 0.788811ms\n13:23:35 INFO | autoserv| RESULT string-tagcloud: string-tagcloud= [53,52,54,53,53,52,51,54,53,53] ms\n13:23:35 INFO | autoserv| Avg string-tagcloud: 52.800000ms\n13:23:35 INFO | autoserv| Sd string-tagcloud: 0.918937ms\n13:23:35 INFO | autoserv| RESULT string-unpack-code: string-unpack-code= [46,47,46,48,47,46,46,47,47,47] ms\n13:23:35 INFO | autoserv| Avg string-unpack-code: 46.700000ms\n13:23:35 INFO | autoserv| Sd string-unpack-code: 
0.674949ms\n13:23:35 INFO | autoserv| RESULT string-validate-input: string-validate-input= [18,20,19,19,19,19,19,21,19,20] ms\n13:23:35 INFO | autoserv| Avg string-validate-input: 19.300000ms\n13:23:35 INFO | autoserv| Sd string-validate-input: 0.823273ms\n13:23:35 INFO | autoserv| RESULT telemetry_page_measurement_results: num_failed= 0 count\n13:23:35 INFO | autoserv| RESULT telemetry_page_measurement_results: num_errored= 0 count\n13:23:35 INFO | autoserv| \n13:23:35 INFO | autoserv| View result at file:///tmp/chrome_root/src/tools/perf/results.html\n13:23:35 INFO | autoserv| \n13:23:35 INFO | autoserv| stderr:\n13:23:35 INFO | autoserv| No job_repo_url for <remote host: chromeos2-row2-rack4-host11.cros>\n13:23:35 INFO | autoserv| Executing /tmp/sysinfo/autoserv-MxOMOw/bin/autotest /tmp/sysinfo/autoserv-MxOMOw/control phase 0\n13:23:36 INFO | autoserv| Entered autotestd_monitor.\n13:23:36 INFO | autoserv| Finished launching tail subprocesses.\n13:23:36 INFO | autoserv| Finished waiting on autotestd to start.\n13:23:37 INFO | autoserv| START\t----\t----\ttimestamp=1401301417\tlocaltime=May 28 11:23:37\n13:23:37 INFO | autoserv| GOOD\t----\tsysinfo.iteration.after\ttimestamp=1401301417\tlocaltime=May 28 11:23:37\n13:23:37 INFO | autoserv| END GOOD\t----\t----\ttimestamp=1401301417\tlocaltime=May 28 11:23:37\n13:23:37 INFO | autoserv| Got lock of exit_code_file.\n13:23:37 INFO | autoserv| Released lock of exit_code_file and closed it.\n13:23:39 INFO | autoserv| Killing child processes.\n13:23:39 INFO | autoserv| Client complete\n13:23:39 INFO | autoserv| No job_repo_url for <remote host: chromeos2-row2-rack4-host11.cros>\n13:23:40 INFO | autoserv| Executing /tmp/sysinfo/autoserv-MxOMOw/bin/autotest /tmp/sysinfo/autoserv-MxOMOw/control phase 0\n13:23:40 INFO | autoserv| Entered autotestd_monitor.\n13:23:40 INFO | autoserv| Finished launching tail subprocesses.\n13:23:40 INFO | autoserv| Finished waiting on autotestd to start.\n13:23:40 INFO | autoserv| 
START\t----\t----\ttimestamp=1401301420\tlocaltime=May 28 11:23:40\n13:23:40 INFO | autoserv| GOOD\t----\tsysinfo.after\ttimestamp=1401301420\tlocaltime=May 28 11:23:40\n13:23:40 INFO | autoserv| END GOOD\t----\t----\ttimestamp=1401301420\tlocaltime=May 28 11:23:40\n13:23:40 INFO | autoserv| Got lock of exit_code_file.\n13:23:40 INFO | autoserv| Released lock of exit_code_file and closed it.\n13:23:42 INFO | autoserv| Killing child processes.\n13:23:42 INFO | autoserv| Client complete\n13:23:44 INFO | autoserv| GOOD\ttelemetry_Crosperf\ttelemetry_Crosperf\ttimestamp=1401301424\tlocaltime=May 28 11:23:44\tcompleted successfully\n13:23:44 INFO | autoserv| END GOOD\ttelemetry_Crosperf\ttelemetry_Crosperf\ttimestamp=1401301424\tlocaltime=May 28 11:23:44\n13:23:44 INFO | autoserv| Finished processing control file\n13:23:44 INFO | autoserv| Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_UyjlWMssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/tmp/tmpCvMigR -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=300 -l root -p 22 chromeos2-row2-rack4-host11.cros'\n13:23:45 INFO | autoserv| Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_w_KGTassh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 chromeos2-row2-rack4-host11.cros'\n-----------------------------------------------------------------------------------------\n/tmp/test_that_results_zZZfQa/results-1-telemetry_Crosperf [ PASSED ]\n/tmp/test_that_results_zZZfQa/results-1-telemetry_Crosperf/telemetry_Crosperf [ PASSED ]\n-----------------------------------------------------------------------------------------\nTotal PASS: 2/2 (100%)\n\n13:23:47 INFO | Finished running tests. 
Results can be found in /tmp/test_that_results_zZZfQa\n"
+p0
+.S'INFO:root:Identity added: /tmp/test_that_results_PPRMIh/testing_rsa (/tmp/test_that_results_PPRMIh/testing_rsa)\nINFO:root:Identity added: /tmp/test_that_results_zZZfQa/testing_rsa (/tmp/test_that_results_zZZfQa/testing_rsa)\n'
+p0
+.I0
+. \ No newline at end of file
diff --git a/crosperf/test_cache/test_puretelemetry_input/machine.txt b/crosperf/test_cache/test_puretelemetry_input/machine.txt
new file mode 100644
index 00000000..9bd78434
--- /dev/null
+++ b/crosperf/test_cache/test_puretelemetry_input/machine.txt
@@ -0,0 +1 @@
+processor : 0vendor_id : GenuineIntelcpu family : 6model : 42model name : Intel(R) Celeron(R) CPU 867 @ 1.30GHzstepping : 7microcode : 0x25cache size : 2048 KBphysical id : 0siblings : 2core id : 0cpu cores : 2apicid : 0initial apicid : 0fpu : yesfpu_exception : yescpuid level : 13wp : yesflags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic popcnt tsc_deadline_timer xsave lahf_lm arat epb xsaveopt pln pts dts tpr_shadow vnmi flexpriority ept vpidclflush size : 64cache_alignment : 64address sizes : 36 bits physical, 48 bits virtualpower management:processor : 1vendor_id : GenuineIntelcpu family : 6model : 42model name : Intel(R) Celeron(R) CPU 867 @ 1.30GHzstepping : 7microcode : 0x25cache size : 2048 KBphysical id : 0siblings : 2core id : 1cpu cores : 2apicid : 2initial apicid : 2fpu : yesfpu_exception : yescpuid level : 13wp : yesflags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx rdtscp lm constant_tsc arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic popcnt tsc_deadline_timer xsave lahf_lm arat epb xsaveopt pln pts dts tpr_shadow vnmi flexpriority ept vpidclflush size : 64cache_alignment : 64address sizes : 36 bits physical, 48 bits virtualpower management: 4194304 \ No newline at end of file
diff --git a/crosperf/test_cache/test_puretelemetry_input/results.txt b/crosperf/test_cache/test_puretelemetry_input/results.txt
new file mode 100644
index 00000000..497d1cf3
--- /dev/null
+++ b/crosperf/test_cache/test_puretelemetry_input/results.txt
@@ -0,0 +1,6 @@
+S'page_name,3d-cube (ms),3d-morph (ms),3d-raytrace (ms),Total (ms),access-binary-trees (ms),access-fannkuch (ms),access-nbody (ms),access-nsieve (ms),bitops-3bit-bits-in-byte (ms),bitops-bits-in-byte (ms),bitops-bitwise-and (ms),bitops-nsieve-bits (ms),controlflow-recursive (ms),crypto-aes (ms),crypto-md5 (ms),crypto-sha1 (ms),date-format-tofte (ms),date-format-xparb (ms),math-cordic (ms),math-partial-sums (ms),math-spectral-norm (ms),regexp-dna (ms),string-base64 (ms),string-fasta (ms),string-tagcloud (ms),string-unpack-code (ms),string-validate-input (ms)\r\nhttp://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html,42.7,50.2,28.7,656.5,7.3,26.3,6.9,8.6,3.5,9.8,8.8,9.3,5.3,19.2,10.8,12.4,31.2,138.1,11.4,32.8,6.3,16.1,17.5,36.3,47.2,45.0,24.8\r\n'
+p0
+.S''
+p0
+.I0
+. \ No newline at end of file
diff --git a/crosperf/unittest_keyval_file.txt b/crosperf/unittest_keyval_file.txt
new file mode 100644
index 00000000..cc76398e
--- /dev/null
+++ b/crosperf/unittest_keyval_file.txt
@@ -0,0 +1,20 @@
+{"description": "Box2D", "graph": "Box2D", "higher_is_better": true, "units": "score", "value": 4775}
+{"description": "CodeLoad", "graph": "CodeLoad", "higher_is_better": true, "units": "score", "value": 6271}
+{"description": "Crypto", "graph": "Crypto", "higher_is_better": true, "units": "score", "value": 8737}
+{"description": "DeltaBlue", "graph": "DeltaBlue", "higher_is_better": true, "units": "score", "value": 14401}
+{"description": "EarleyBoyer", "graph": "EarleyBoyer", "higher_is_better": true, "units": "score", "value": 14340}
+{"description": "Gameboy", "graph": "Gameboy", "higher_is_better": true, "units": "score", "value": 9901}
+{"description": "Mandreel", "graph": "Mandreel", "higher_is_better": true, "units": "score", "value": 6620}
+{"description": "MandreelLatency", "graph": "MandreelLatency", "higher_is_better": true, "units": "score", "value": 5188}
+{"description": "NavierStokes", "graph": "NavierStokes", "higher_is_better": true, "units": "score", "value": 9815}
+{"description": "PdfJS", "graph": "PdfJS", "higher_is_better": true, "units": "score", "value": 6455}
+{"description": "RayTrace", "graph": "RayTrace", "higher_is_better": true, "units": "score", "value": 16600}
+{"description": "RegExp", "graph": "RegExp", "higher_is_better": true, "units": "score", "value": 1765}
+{"description": "Richards", "graph": "Richards", "higher_is_better": true, "units": "score", "value": 10358}
+{"description": "Splay", "graph": "Splay", "higher_is_better": true, "units": "score", "value": 4425}
+{"description": "SplayLatency", "graph": "SplayLatency", "higher_is_better": true, "units": "score", "value": 7653}
+{"description": "Typescript", "graph": "Typescript", "higher_is_better": true, "units": "score", "value": 9815}
+{"description": "zlib", "graph": "zlib", "higher_is_better": true, "units": "score", "value": 16094}
+{"description": "Score", "graph": "Total", "higher_is_better": true, "units": "score", "value": 7918}
+{"description": "num_failed", "graph": "telemetry_page_measurement_results", "higher_is_better": true, "units": "count", "value": 0}
+{"description": "num_errored", "graph": "telemetry_page_measurement_results", "higher_is_better": true, "units": "count", "value": 0}