author    | android-build-team Robot <android-build-team-robot@google.com> | 2020-04-28 20:24:56 +0000
committer | android-build-team Robot <android-build-team-robot@google.com> | 2020-04-28 20:24:56 +0000
commit    | 656c7c0fa0d9b776c019f5423e01a9a28979d2bd (patch)
tree      | 220a95d346d58fa5090d8033991bcfef2ad03d99 /crosperf/benchmark_run_unittest.py
parent    | 091406baac88deeb2ef9c71d5047bd9daf3915ef (diff)
parent    | ccead650bb8e050400f55dc44b60f989b1f8e117 (diff)
download  | toolchain-utils-656c7c0fa0d9b776c019f5423e01a9a28979d2bd.tar.gz
Snap for 6439596 from ccead650bb8e050400f55dc44b60f989b1f8e117 to qt-aml-tzdata-release
Change-Id: I3464301d9b41d48e11dfd361a411d60a1bd5a429
Diffstat (limited to 'crosperf/benchmark_run_unittest.py')
-rwxr-xr-x | crosperf/benchmark_run_unittest.py | 85
1 file changed, 34 insertions(+), 51 deletions(-)
diff --git a/crosperf/benchmark_run_unittest.py b/crosperf/benchmark_run_unittest.py
index 51b287cf..74757ac2 100755
--- a/crosperf/benchmark_run_unittest.py
+++ b/crosperf/benchmark_run_unittest.py
@@ -1,22 +1,20 @@
 #!/usr/bin/env python2
-# -*- coding: utf-8 -*-
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Testing of benchmark_run."""
 
 from __future__ import print_function
 
+import mock
 import unittest
 import inspect
 
-import mock
+from cros_utils import logger
 
 import benchmark_run
-from cros_utils import logger
 from suite_runner import MockSuiteRunner
 from suite_runner import SuiteRunner
 from label import MockLabel
@@ -50,10 +48,8 @@ class BenchmarkRunTest(unittest.TestCase):
     self.test_label = MockLabel(
         'test1',
-        'build',
         'image1',
         'autotest_dir',
-        'debug_dir',
         '/tmp/test_benchmark_run',
         'x86-alex',
         'chromeos2-row1-rack4-host9.cros',
@@ -61,8 +57,7 @@ class BenchmarkRunTest(unittest.TestCase):
         cache_dir='',
         cache_only=False,
         log_level='average',
-        compiler='gcc',
-        skylab=False)
+        compiler='gcc')
     self.test_cache_conditions = [
         CacheConditions.CACHE_FILE_EXISTS, CacheConditions.CHECKSUMS_MATCH
     ]
@@ -75,10 +70,8 @@ class BenchmarkRunTest(unittest.TestCase):
   def testDryRun(self):
     my_label = MockLabel(
         'test1',
-        'build',
         'image1',
         'autotest_dir',
-        'debug_dir',
         '/tmp/test_benchmark_run',
         'x86-alex',
         'chromeos2-row1-rack4-host9.cros',
@@ -86,8 +79,7 @@ class BenchmarkRunTest(unittest.TestCase):
         cache_dir='',
         cache_only=False,
         log_level='average',
-        compiler='gcc',
-        skylab=False)
+        compiler='gcc')
 
     logging_level = 'average'
     m = MockMachineManager('/tmp/chromeos_root', 0, logging_level, '')
@@ -100,16 +92,8 @@ class BenchmarkRunTest(unittest.TestCase):
         False,  # rm_chroot_tmp
         '',  # perf_args
         suite='telemetry_Crosperf')  # suite
-    dut_conf = {
-        'cooldown_time': 5,
-        'cooldown_temp': 45,
-        'governor': 'powersave',
-        'cpu_usage': 'big_only',
-        'cpu_freq_pct': 80,
-    }
     b = benchmark_run.MockBenchmarkRun('test run', bench, my_label, 1, [], m,
-                                       logger.GetLogger(), logging_level, '',
-                                       dut_conf)
+                                       logger.GetLogger(), logging_level, '')
     b.cache = MockResultsCache()
     b.suite_runner = MockSuiteRunner()
     b.start()
@@ -118,8 +102,7 @@ class BenchmarkRunTest(unittest.TestCase):
     # since the last time this test was updated:
     args_list = [
         'self', 'name', 'benchmark', 'label', 'iteration', 'cache_conditions',
-        'machine_manager', 'logger_to_use', 'log_level', 'share_cache',
-        'dut_config', 'enable_aslr'
+        'machine_manager', 'logger_to_use', 'log_level', 'share_cache'
     ]
     arg_spec = inspect.getargspec(benchmark_run.BenchmarkRun.__init__)
     self.assertEqual(len(arg_spec.args), len(args_list))
@@ -137,30 +120,30 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     def MockLogOutput(msg, print_to_console=False):
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       del print_to_console
       self.log_output.append(msg)
 
     def MockLogError(msg, print_to_console=False):
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       del print_to_console
       self.log_error.append(msg)
 
     def MockRecordStatus(msg):
-      """Helper function for test_run."""
+      'Helper function for test_run.'
      self.status.append(msg)
 
    def FakeReadCache():
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       br.cache = mock.Mock(spec=ResultsCache)
       self.called_ReadCache = True
       return 0
 
     def FakeReadCacheSucceed():
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       br.cache = mock.Mock(spec=ResultsCache)
       br.result = mock.Mock(spec=Result)
       br.result.out = 'result.out stuff'
@@ -170,29 +153,29 @@ class BenchmarkRunTest(unittest.TestCase):
       return 0
 
     def FakeReadCacheException():
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       raise RuntimeError('This is an exception test; it is supposed to happen')
 
     def FakeAcquireMachine():
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       mock_machine = MockCrosMachine('chromeos1-row3-rack5-host7.cros',
                                      'chromeos', 'average')
       return mock_machine
 
     def FakeRunTest(_machine):
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       mock_result = mock.Mock(spec=Result)
       mock_result.retval = 0
       return mock_result
 
     def FakeRunTestFail(_machine):
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       mock_result = mock.Mock(spec=Result)
       mock_result.retval = 1
       return mock_result
 
     def ResetTestValues():
-      """Helper function for test_run."""
+      'Helper function for test_run.'
       self.log_output = []
       self.log_error = []
       self.status = []
@@ -277,14 +260,14 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     def GetLastEventPassed():
-      """Helper function for test_terminate_pass"""
+      'Helper function for test_terminate_pass'
       return benchmark_run.STATUS_SUCCEEDED
 
     def RecordStub(status):
-      """Helper function for test_terminate_pass"""
+      'Helper function for test_terminate_pass'
       self.status = status
 
     self.status = benchmark_run.STATUS_SUCCEEDED
@@ -304,14 +287,14 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     def GetLastEventFailed():
-      """Helper function for test_terminate_fail"""
+      'Helper function for test_terminate_fail'
       return benchmark_run.STATUS_FAILED
 
     def RecordStub(status):
-      """Helper function for test_terminate_fail"""
+      'Helper function for test_terminate_fail'
       self.status = status
 
     self.status = benchmark_run.STATUS_SUCCEEDED
@@ -331,7 +314,7 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     br.terminated = True
     self.assertRaises(Exception, br.AcquireMachine)
@@ -348,10 +331,10 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     def MockLogError(err_msg):
-      """Helper function for test_get_extra_autotest_args"""
+      'Helper function for test_get_extra_autotest_args'
       self.err_msg = err_msg
 
     self.mock_logger.LogError = MockLogError
@@ -363,8 +346,8 @@ class BenchmarkRunTest(unittest.TestCase):
     result = br.GetExtraAutotestArgs()
     self.assertEqual(
         result,
-        '--profiler=custom_perf --profiler_args=\'perf_options="record -a -e '
-        'cycles"\'')
+        "--profiler=custom_perf --profiler_args='perf_options=\"record -a -e "
+        "cycles\"'")
 
     self.test_benchmark.suite = 'telemetry'
     result = br.GetExtraAutotestArgs()
@@ -387,7 +370,7 @@ class BenchmarkRunTest(unittest.TestCase):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     self.status = []
@@ -402,9 +385,9 @@ class BenchmarkRunTest(unittest.TestCase):
     br.RunTest(mock_machine)
 
     self.assertTrue(br.run_completed)
-    self.assertEqual(
-        self.status,
-        [benchmark_run.STATUS_IMAGING, benchmark_run.STATUS_RUNNING])
+    self.assertEqual(self.status, [
+        benchmark_run.STATUS_IMAGING, benchmark_run.STATUS_RUNNING
+    ])
 
     self.assertEqual(br.machine_manager.ImageMachine.call_count, 1)
     br.machine_manager.ImageMachine.assert_called_with(mock_machine,
@@ -416,13 +399,13 @@ class BenchmarkRunTest(unittest.TestCase):
     self.assertEqual(mock_result.call_count, 1)
     mock_result.assert_called_with(
         self.mock_logger, 'average', self.test_label, None, "{'Score':100}", '',
-        0, 'page_cycler.netsim.top_10', 'telemetry_Crosperf', '')
+        0, 'page_cycler.netsim.top_10', 'telemetry_Crosperf')
 
   def test_set_cache_conditions(self):
     br = benchmark_run.BenchmarkRun(
         'test_run', self.test_benchmark, self.test_label, 1,
         self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
-        'average', '', {})
+        'average', '')
 
     phony_cache_conditions = [123, 456, True, False]
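
Note on the pattern touched by the hunk at @@ -118,8 +102,7 @@: the test pins the exact argument names of BenchmarkRun.__init__ and compares them against the live signature via inspect, so a constructor change (such as the dut_config and enable_aslr parameters dropped here) cannot land without a deliberate test update. Below is a minimal, self-contained sketch of that guard; the Widget class is hypothetical, and inspect.getfullargspec stands in for the deprecated inspect.getargspec used in this Python 2 file (getargspec was removed in Python 3.11).

import inspect
import unittest


class Widget(object):
  """Hypothetical stand-in for a class like benchmark_run.BenchmarkRun."""

  def __init__(self, name, label, iteration, log_level, share_cache):
    self.name = name
    self.label = label
    self.iteration = iteration
    self.log_level = log_level
    self.share_cache = share_cache


class WidgetSignatureTest(unittest.TestCase):
  """Fails whenever Widget.__init__ gains or loses a parameter."""

  def test_init_signature(self):
    # Pinned copy of the expected argument names; update it (and every
    # test that constructs Widget) whenever the constructor changes.
    args_list = [
        'self', 'name', 'label', 'iteration', 'log_level', 'share_cache'
    ]
    arg_spec = inspect.getfullargspec(Widget.__init__)
    # Length check catches added/removed parameters; the membership
    # check catches renames, independent of argument order.
    self.assertEqual(len(arg_spec.args), len(args_list))
    for arg in arg_spec.args:
      self.assertIn(arg, args_list)


if __name__ == '__main__':
  unittest.main()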