about summary refs log tree commit diff
path: root/crosperf
diff options
context:
space:
mode:
author	cmtice <cmtice@google.com>	2014-05-08 10:56:54 -0700
committer	chrome-internal-fetch <chrome-internal-fetch@google.com>	2014-05-14 22:51:08 +0000
commit	87ae9a0635ad5cb7606e2703ed60b3013920e727 (patch)
tree	bf29ef66baf81fbf8447de3bfbec2572dd4b5529 /crosperf
parent	2250df9b2fb7ffe523b4f90e06b0d7602b83876f (diff)
download	toolchain-utils-87ae9a0635ad5cb7606e2703ed60b3013920e727.tar.gz
Update & complete benchmark_run_unittest.py
Add tests for all the methods in BenchmarkRun. BUG=None TEST=Ran unittest. Change-Id: I9046f010ce0e48ba8d897585fec2f95732df9cd3 Reviewed-on: https://chrome-internal-review.googlesource.com/162988 Reviewed-by: Yunlian Jiang <yunlian@google.com> Commit-Queue: Caroline Tice <cmtice@google.com> Tested-by: Caroline Tice <cmtice@google.com>
Diffstat (limited to 'crosperf')
-rw-r--r--	crosperf/benchmark_run.py	4
-rwxr-xr-x	crosperf/benchmark_run_unittest.py	368
2 files changed, 368 insertions, 4 deletions
diff --git a/crosperf/benchmark_run.py b/crosperf/benchmark_run.py
index da4b7d29..ec39b403 100644
--- a/crosperf/benchmark_run.py
+++ b/crosperf/benchmark_run.py
@@ -159,10 +159,12 @@ class BenchmarkRun(threading.Thread):
def _GetExtraAutotestArgs(self):
if self.benchmark.perf_args and self.benchmark.suite == "telemetry":
- self._logger.LogError("Telemetry benchmark does not support profiler.")
+ self._logger.LogError("Telemetry does not support profiler.")
+ self.benchmark.perf_args = ""
if self.benchmark.perf_args and self.benchmark.suite == "test_that":
self._logger.LogError("test_that does not support profiler.")
+ self.benchmark.perf_args = ""
if self.benchmark.perf_args:
perf_args_list = self.benchmark.perf_args.split(" ")
diff --git a/crosperf/benchmark_run_unittest.py b/crosperf/benchmark_run_unittest.py
index 0e7c92ff..a42e4598 100755
--- a/crosperf/benchmark_run_unittest.py
+++ b/crosperf/benchmark_run_unittest.py
@@ -6,25 +6,59 @@
"""Testing of benchmark_run."""
+import mock
import unittest
+import inspect
from utils import logger
+import benchmark_run
+
from suite_runner import MockSuiteRunner
-from benchmark_run import MockBenchmarkRun
+from suite_runner import SuiteRunner
from label import MockLabel
from benchmark import Benchmark
from machine_manager import MockMachineManager
+from machine_manager import MachineManager
+from machine_manager import MockCrosMachine
from results_cache import MockResultsCache
+from results_cache import CacheConditions
+from results_cache import Result
+from results_cache import ResultsCache
class BenchmarkRunTest(unittest.TestCase):
+ """
+ Unit tests for the BenchmarkRun class and all of its methods.
+ """
+
+ def setUp(self):
+ self.test_benchmark = Benchmark("page_cycler.netsim.top_10", # name
+ "page_cycler.netsim.top_10", # test_name
+ "", # test_args
+ 1, # iterations
+ False, # rm_chroot_tmp
+ "", # perf_args
+ suite="telemetry_Crosperf") # suite
+
+ self.test_label = MockLabel("test1", "image1", "/tmp/test_benchmark_run",
+ "x86-alex", "chromeos2-row1-rack4-host9.cros",
+ image_args="",
+ cache_dir="")
+
+ self.test_cache_conditions = [CacheConditions.CACHE_FILE_EXISTS,
+ CacheConditions.CHECKSUMS_MATCH]
+
+ self.mock_logger = logger.GetLogger(log_dir="", mock=True)
+
+ self.mock_machine_manager = mock.Mock(spec=MachineManager)
+
def testDryRun(self):
my_label = MockLabel("test1", "image1", "/tmp/test_benchmark_run",
"x86-alex", "chromeos2-row1-rack4-host9.cros",
image_args="",
cache_dir="")
- logging_level="average"
+ logging_level = "average"
m = MockMachineManager("/tmp/chromeos_root", 0, logging_level)
m.AddMachine("chromeos2-row1-rack4-host9.cros")
bench = Benchmark("page_cycler.netsim.top_10", # name
@@ -34,7 +68,7 @@ class BenchmarkRunTest(unittest.TestCase):
False, # rm_chroot_tmp
"", # perf_args
suite="telemetry_Crosperf") # suite
- b = MockBenchmarkRun("test run",
+ b = benchmark_run.MockBenchmarkRun("test run",
bench,
my_label,
1,
@@ -47,6 +81,334 @@ class BenchmarkRunTest(unittest.TestCase):
b.suite_runner = MockSuiteRunner()
b.start()
+ # Make sure the arguments to BenchmarkRun.__init__ have not changed
+ # since the last time this test was updated:
+ args_list = ['self', 'name', 'benchmark', 'label', 'iteration',
+ 'cache_conditions', 'machine_manager', 'logger_to_use',
+ 'log_level', 'share_users']
+ arg_spec = inspect.getargspec(benchmark_run.BenchmarkRun.__init__)
+ self.assertEqual(len(arg_spec.args), len(args_list))
+ self.assertEqual (arg_spec.args, args_list)
+
+
+ def test_init(self):
+ # Nothing really worth testing here; just field assignments.
+ pass
+
+ def test_read_cache(self):
+ # Nothing really worth testing here, either.
+ pass
+
+ def test_run(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ def MockLogOutput(msg, print_to_console=False):
+ "Helper function for test_run."
+ self.log_output.append(msg)
+
+ def MockLogError(msg, print_to_console=False):
+ "Helper function for test_run."
+ self.log_error.append(msg)
+
+ def MockRecordStatus(msg):
+ "Helper function for test_run."
+ self.status.append(msg)
+
+ def FakeReadCache():
+ "Helper function for test_run."
+ br.cache = mock.Mock(spec=ResultsCache)
+ self.called_ReadCache = True
+ return 0
+
+ def FakeReadCacheSucceed():
+ "Helper function for test_run."
+ br.cache = mock.Mock(spec=ResultsCache)
+ br.result = mock.Mock(spec=Result)
+ br.result.out = "result.out stuff"
+ br.result.err = "result.err stuff"
+ br.result.retval = 0
+ self.called_ReadCache = True
+ return 0
+
+ def FakeReadCacheException():
+ "Helper function for test_run."
+ raise Exception("This is an exception test; it is supposed to happen")
+
+ def FakeAcquireMachine():
+ "Helper function for test_run."
+ mock_machine = MockCrosMachine ('chromeos1-row3-rack5-host7.cros',
+ 'chromeos', 'average')
+ return mock_machine
+
+ def FakeRunTest(_machine):
+ "Helper function for test_run."
+ mock_result = mock.Mock(spec=Result)
+ mock_result.retval = 0
+ return mock_result
+
+ def FakeRunTestFail(_machine):
+ "Helper function for test_run."
+ mock_result = mock.Mock(spec=Result)
+ mock_result.retval = 1
+ return mock_result
+
+ def ResetTestValues():
+ "Helper function for test_run."
+ self.log_output = []
+ self.log_error = []
+ self.status = []
+ br.result = None
+ self.called_ReadCache = False
+
+ # Assign all the fake functions to the appropriate objects.
+ br._logger.LogOutput = MockLogOutput
+ br._logger.LogError = MockLogError
+ br.timeline.Record = MockRecordStatus
+ br.ReadCache = FakeReadCache
+ br.RunTest = FakeRunTest
+ br.AcquireMachine = FakeAcquireMachine
+
+ # First test: No cache hit, all goes well.
+ ResetTestValues()
+ br.run()
+ self.assertTrue (self.called_ReadCache)
+ self.assertEqual (self.log_output,
+ ['test_run: No cache hit.',
+ 'Releasing machine: chromeos1-row3-rack5-host7.cros',
+ 'Released machine: chromeos1-row3-rack5-host7.cros'])
+ self.assertEqual (len(self.log_error), 0)
+ self.assertEqual (self.status, ['WAITING', 'SUCCEEDED'])
+
+ # Second test: No cached result found; test run was "terminated" for some
+ # reason.
+ ResetTestValues()
+ br.terminated = True
+ br.run()
+ self.assertTrue (self.called_ReadCache)
+ self.assertEqual (self.log_output,
+ ['test_run: No cache hit.',
+ 'Releasing machine: chromeos1-row3-rack5-host7.cros',
+ 'Released machine: chromeos1-row3-rack5-host7.cros'])
+ self.assertEqual (len(self.log_error), 0)
+ self.assertEqual (self.status, ['WAITING'])
+
+ # Third test. No cached result found; RunTest failed for some reason.
+ ResetTestValues()
+ br.terminated = False
+ br.RunTest = FakeRunTestFail
+ br.run()
+ self.assertTrue (self.called_ReadCache)
+ self.assertEqual (self.log_output,
+ ['test_run: No cache hit.',
+ 'Releasing machine: chromeos1-row3-rack5-host7.cros',
+ 'Released machine: chromeos1-row3-rack5-host7.cros'])
+ self.assertEqual (len(self.log_error), 0)
+ self.assertEqual (self.status, ['WAITING', 'FAILED'])
+
+ # Fourth test: ReadCache found a cached result.
+ ResetTestValues()
+ br.RunTest = FakeRunTest
+ br.ReadCache = FakeReadCacheSucceed
+ br.run()
+ self.assertTrue (self.called_ReadCache)
+ self.assertEqual (self.log_output,
+ ['test_run: Cache hit.',
+ 'result.out stuff',
+ 'Releasing machine: chromeos1-row3-rack5-host7.cros',
+ 'Released machine: chromeos1-row3-rack5-host7.cros'])
+ self.assertEqual (self.log_error, ['result.err stuff'])
+ self.assertEqual (self.status, ['SUCCEEDED'])
+
+ # Fifth test: ReadCache generates an exception; does the try/finally block
+ # work?
+ ResetTestValues()
+ br.ReadCache = FakeReadCacheException
+ br.machine = FakeAcquireMachine()
+ br.run()
+ self.assertEqual (self.log_output,
+ ['Releasing machine: chromeos1-row3-rack5-host7.cros',
+ 'Released machine: chromeos1-row3-rack5-host7.cros'])
+ self.assertEqual (self.log_error,
+ ["Benchmark run: 'test_run' failed: This is an exception test; it is supposed to happen"])
+ self.assertEqual (self.status, ['FAILED'])
+
+
+ def test_terminate_pass(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ def GetLastEventPassed():
+ "Helper function for test_terminate_pass"
+ return benchmark_run.STATUS_SUCCEEDED
+
+ def RecordStub(status):
+ "Helper function for test_terminate_pass"
+ self.status = status
+
+ self.status = benchmark_run.STATUS_SUCCEEDED
+ self.assertFalse (br.terminated)
+ self.assertFalse (br.suite_runner._ct.IsTerminated())
+
+ br.timeline.GetLastEvent = GetLastEventPassed
+ br.timeline.Record = RecordStub
+
+ br.Terminate()
+
+ self.assertTrue (br.terminated)
+ self.assertTrue (br.suite_runner._ct.IsTerminated())
+ self.assertEqual (self.status, benchmark_run.STATUS_FAILED)
+
+
+
+ def test_terminate_fail(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ def GetLastEventFailed():
+ "Helper function for test_terminate_fail"
+ return benchmark_run.STATUS_FAILED
+
+ def RecordStub(status):
+ "Helper function for test_terminate_fail"
+ self.status = status
+
+ self.status = benchmark_run.STATUS_SUCCEEDED
+ self.assertFalse (br.terminated)
+ self.assertFalse (br.suite_runner._ct.IsTerminated())
+
+ br.timeline.GetLastEvent = GetLastEventFailed
+ br.timeline.Record = RecordStub
+
+ br.Terminate()
+
+ self.assertTrue (br.terminated)
+ self.assertTrue (br.suite_runner._ct.IsTerminated())
+ self.assertEqual (self.status, benchmark_run.STATUS_SUCCEEDED)
+
+
+ def test_acquire_machine(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+
+ br.terminated = True
+ self.assertRaises (Exception, br.AcquireMachine)
+
+ br.terminated = False
+ mock_machine = MockCrosMachine ('chromeos1-row3-rack5-host7.cros',
+ 'chromeos', 'average')
+ self.mock_machine_manager.AcquireMachine.return_value = mock_machine
+
+ machine = br.AcquireMachine()
+ self.assertEqual (machine.name, 'chromeos1-row3-rack5-host7.cros')
+
+
+ def test_get_extra_autotest_args(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ def MockLogError(err_msg):
+ "Helper function for test_get_extra_autotest_args"
+ self.err_msg = err_msg
+
+ self.mock_logger.LogError = MockLogError
+
+ result = br._GetExtraAutotestArgs()
+ self.assertEqual(result, "")
+
+ self.test_benchmark.perf_args = "record -e cycles"
+ result = br._GetExtraAutotestArgs()
+ self.assertEqual(result,
+"--profiler=custom_perf --profiler_args='perf_options=\"record -a -e cycles\"'")
+
+ self.test_benchmark.suite = "telemetry"
+ result = br._GetExtraAutotestArgs()
+ self.assertEqual(result, "")
+ self.assertEqual(self.err_msg, "Telemetry does not support profiler.")
+
+ self.test_benchmark.perf_args = "record -e cycles"
+ self.test_benchmark.suite = "test_that"
+ result = br._GetExtraAutotestArgs()
+ self.assertEqual(result, "")
+ self.assertEqual(self.err_msg, "test_that does not support profiler.")
+
+ self.test_benchmark.perf_args = "junk args"
+ self.test_benchmark.suite = "telemetry_Crosperf"
+ self.assertRaises(Exception, br._GetExtraAutotestArgs)
+
+
+ @mock.patch.object(SuiteRunner, 'Run')
+ @mock.patch.object(Result, 'CreateFromRun')
+ def test_run_test(self, mock_result, mock_runner):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ self.status = []
+ def MockRecord(status):
+ self.status.append(status)
+
+ br.timeline.Record = MockRecord
+ mock_machine = MockCrosMachine ('chromeos1-row3-rack5-host7.cros',
+ 'chromeos', 'average')
+ mock_runner.return_value = [0, "{'Score':100}", ""]
+
+ br.RunTest(mock_machine)
+
+ self.assertTrue(br.run_completed)
+ self.assertEqual (self.status, [ benchmark_run.STATUS_IMAGING,
+ benchmark_run.STATUS_RUNNING])
+
+ self.assertEqual (br.machine_manager.ImageMachine.call_count, 1)
+ br.machine_manager.ImageMachine.assert_called_with (mock_machine,
+ self.test_label)
+ self.assertEqual (mock_runner.call_count, 1)
+ mock_runner.assert_called_with (mock_machine.name, br.label,
+ br.benchmark, "", br.profiler_args)
+
+ self.assertEqual (mock_result.call_count, 1)
+ mock_result.assert_called_with (self.mock_logger, 'average',
+ self.test_label, "{'Score':100}",
+ "", 0, False, 'page_cycler.netsim.top_10',
+ 'telemetry_Crosperf')
+
+
+
+ def test_set_cache_conditions(self):
+ br = benchmark_run.BenchmarkRun("test_run", self.test_benchmark,
+ self.test_label, 1, self.test_cache_conditions,
+ self.mock_machine_manager,
+ self.mock_logger,
+ "average", "")
+
+ phony_cache_conditions = [ 123, 456, True, False ]
+
+ self.assertEqual(br.cache_conditions, self.test_cache_conditions)
+
+ br.SetCacheConditions (phony_cache_conditions)
+ self.assertEqual(br.cache_conditions, phony_cache_conditions)
+
+ br.SetCacheConditions(self.test_cache_conditions)
+ self.assertEqual(br.cache_conditions, self.test_cache_conditions)
+
if __name__ == "__main__":
unittest.main()