summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGopinath Elanchezhian <gelanchezhian@google.com>2024-03-26 13:43:57 -0700
committerGopinath Elanchezhian <gelanchezhian@google.com>2024-03-27 13:17:35 -0700
commit01013f3febfb22915493def49460284f58c65b63 (patch)
tree60f3817dfd4d3bc347c9076a03f24ec4730aa91a
parent48dde924d2d53a2d90a223a2f5e3f0b5c84277ba (diff)
downloadplatform_testing-01013f3febfb22915493def49460284f58c65b63.tar.gz
Add power test host runner.
Files are moved from the internal projects. Bug: b/331241555 Test: PowerRunnerTests, InstrumentationResultProtoParserTest Change-Id: If9ae0370ee7f906790d66a56d878aaf2d76270f6
-rw-r--r--host_runners/power/Android.bp25
-rw-r--r--host_runners/power/src/com/android/power/PowerRunner.java246
-rw-r--r--host_runners/power/tests/Android.bp27
-rw-r--r--host_runners/power/tests/src/com/android/power/PowerRunnerTests.java188
-rw-r--r--host_runners/utils/Android.bp23
-rw-r--r--host_runners/utils/src/com/android/runner/utils/InstrumentationResultProtoParser.java207
-rw-r--r--host_runners/utils/src/com/android/runner/utils/NohupCommandHelper.java101
-rw-r--r--host_runners/utils/tests/Android.bp27
-rw-r--r--host_runners/utils/tests/src/com/android/runner/utils/InstrumentationResultProtoParserTest.java1234
9 files changed, 2078 insertions, 0 deletions
diff --git a/host_runners/power/Android.bp b/host_runners/power/Android.bp
new file mode 100644
index 000000000..b8c7b9e7a
--- /dev/null
+++ b/host_runners/power/Android.bp
@@ -0,0 +1,25 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_test_host {
+ name: "PowerHostRunner",
+ team: "trendy_team_android_platform_performance_testing",
+ srcs: ["src/**/*.java"],
+ static_libs: ["host_runner_utils"],
+ test_suites: ["general-tests"],
+}
diff --git a/host_runners/power/src/com/android/power/PowerRunner.java b/host_runners/power/src/com/android/power/PowerRunner.java
new file mode 100644
index 000000000..c937700cf
--- /dev/null
+++ b/host_runners/power/src/com/android/power/PowerRunner.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.power;
+
+import com.android.ddmlib.IDevice;
+import com.android.ddmlib.testrunner.ITestRunListener;
+import com.android.runner.utils.InstrumentationResultProtoParser;
+import com.android.runner.utils.NohupCommandHelper;
+import com.android.tradefed.config.Option;
+import com.android.tradefed.config.OptionClass;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.invoker.TestInformation;
+import com.android.tradefed.log.LogUtil;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.result.ddmlib.TestRunToTestInvocationForwarder;
+import com.android.tradefed.testtype.InstrumentationTest;
+import com.android.tradefed.util.FileUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A Power runner that runs an instrumentation test package on given device in a disconnected mode.
+ * Stores the instrumentation status output into a proto file on the device. After execution, pulls
+ * the proto file from the device, parses its output, and runs the collectors and post processors.
+ */
+@OptionClass(alias = "power-runner")
+public class PowerRunner extends InstrumentationTest {
+
+ protected ITestInvocationListener mListener;
+ protected TestInformation mTestInfo;
+ private String mSDcardPath = null;
+ private String mRunName = "PowerTest";
+
+ public static final String INSTRUMENTATION_RESULTS_FILE_PATH = "protos";
+
+ @Option(
+ name = "power-instrumentation-arg",
+ description = "Additional instrumentation arguments to provide.",
+ requiredForRerun = true)
+ private final Map<String, String> mPowerInstrArgMap = new HashMap<String, String>();
+
+ @Option(
+ name = "max-wait-time-for-device-to-be-offline",
+ description =
+ "Maximum time a host should wait for a device to become offline after"
+ + " instrumentation command is run in nohup mode")
+ private long mMaxWaitTimeForDeviceToBeOffline = 120000;
+
+ @Option(
+ name = "max-wait-time-for-device-to-be-online",
+ description =
+ "This is the max timeout, the host will wait for the device to "
+ + "finish the test and reconnect. The device may connect back to host"
+ + " before this max time")
+ private long mMaxWaitTimeForDeviceToBeOnline = 480000;
+
+ /** {@inheritDoc} */
+ @Override
+ public void run(TestInformation testInfo, ITestInvocationListener listener)
+ throws DeviceNotAvailableException {
+ LogUtil.CLog.i("Starting run method of " + this.getClass().getSimpleName());
+ if (getDevice() == null) {
+ throw new IllegalArgumentException("Device has not been set");
+ }
+ setUp();
+
+ mListener = listener;
+ mTestInfo = testInfo;
+ String makeInstrDirCommand = String.format("mkdir %s", INSTRUMENTATION_RESULTS_FILE_PATH);
+ getDevice().executeShellCommand(makeInstrDirCommand);
+ String instrCmd = prepareInstrumentationCommand();
+ LogUtil.CLog.i("Command to run test in nohup mode prepared: " + instrCmd);
+ NohupCommandHelper.executeAdbNohupCommand(getDevice(), instrCmd);
+
+ waitForDeviceToBeDisconnected();
+ // In the meantime test will run in disconnected mode
+ waitForDeviceToBeConnected();
+
+ // stop cable_breaker watchdog after the test run
+ getDevice().executeShellCommand("cable_breaker -a end -w -");
+ LogUtil.CLog.i("Cable Breaker watchdog turned off");
+ parseInstrumentationResults();
+ }
+
+ /** Waits for the device to be disconnected from host. */
+ public void waitForDeviceToBeDisconnected() {
+ LogUtil.CLog.i(
+ "Waiting for device "
+ + getDevice().getIDevice().getSerialNumber()
+ + " to be disconnected for "
+ + mMaxWaitTimeForDeviceToBeOffline
+ + " ms");
+
+ // wait for device to be disconnected
+ getDevice().waitForDeviceNotAvailable(mMaxWaitTimeForDeviceToBeOffline);
+ if (getDevice().getIDevice().isOnline()) {
+ String message =
+ "Device "
+ + getDevice().getIDevice().getSerialNumber()
+ + " not disconnected from host after waiting for "
+ + mMaxWaitTimeForDeviceToBeOffline;
+ LogUtil.CLog.e(message);
+ throw new RuntimeException(message);
+ }
+ LogUtil.CLog.i("Device " + getDevice().getIDevice().getSerialNumber() + " disconnected");
+ }
+
+ /** Waits for the device to connect back to host */
+ public void waitForDeviceToBeConnected() throws DeviceNotAvailableException {
+ LogUtil.CLog.i(
+ "Waiting for device "
+ + getDevice().getIDevice().getSerialNumber()
+ + " to connect back to host, max wait time is "
+ + mMaxWaitTimeForDeviceToBeOnline
+ + " ms");
+ getDevice().waitForDeviceOnline(mMaxWaitTimeForDeviceToBeOnline);
+ if (getDevice().getIDevice().isOffline()) {
+ String message =
+ "Device "
+ + getDevice().getIDevice().getSerialNumber()
+ + " not connected back to host after waiting for "
+ + mMaxWaitTimeForDeviceToBeOnline;
+ LogUtil.CLog.e(message);
+ throw new RuntimeException(message);
+ }
+ LogUtil.CLog.i(
+ "Device "
+ + getDevice().getIDevice().getSerialNumber()
+ + " connected back to host after test completion");
+ }
+
+ private String prepareInstrumentationCommand() {
+ List<String> command = new ArrayList<String>();
+ command.add("am instrument -w -r");
+ command.add("-f " + INSTRUMENTATION_RESULTS_FILE_PATH + "/output.proto --no-logcat");
+ command.add("-w -r -e class");
+ command.add(getClassName());
+ for (Map.Entry<String, String> argEntry : mPowerInstrArgMap.entrySet()) {
+ command.add("-e");
+ command.add(argEntry.getKey());
+ command.add(argEntry.getValue());
+ }
+ // cable_breaker accepts timeout in secs, hence convert ms to secs
+ long timeToKeepDeviceDisconnected = (mMaxWaitTimeForDeviceToBeOffline / 1000);
+ command.add("-e time-to-keep-device-disconnected");
+ command.add(String.valueOf(timeToKeepDeviceDisconnected));
+
+ command.add(getPackageName() + "/" + getRunnerName());
+
+ // concatenate command tokens with spaces in between them
+ String builtCommand = String.join(" ", command);
+ return builtCommand;
+ }
+
+ /**
+ * Parse the instrumentation proto output and invoke the host side listeners for further
+ * collection and post-processing.
+ *
+ * @throws DeviceNotAvailableException
+ */
+ public void parseInstrumentationResults() throws DeviceNotAvailableException {
+ File tmpDestDir = null;
+ try {
+ try {
+ tmpDestDir = FileUtil.createTempDir("power-tests-tmp-results");
+ } catch (IOException e) {
+ throw new RuntimeException(
+ "Unable to create the local folder in the host"
+ + " to store the instrumentation results.");
+ }
+ mSDcardPath = getDevice().getMountPoint(IDevice.MNT_EXTERNAL_STORAGE);
+ if (getDevice()
+ .pullDir(
+ String.format("%s/%s", mSDcardPath, INSTRUMENTATION_RESULTS_FILE_PATH),
+ tmpDestDir)) {
+ File[] files = tmpDestDir.listFiles();
+ if (files.length == 0) {
+ throw new RuntimeException(
+ String.format(
+ "Instrumentation results proto file not found under"
+ + " %s/%s in the device",
+ mSDcardPath, INSTRUMENTATION_RESULTS_FILE_PATH));
+ }
+ if (files.length > 1) {
+ throw new RuntimeException(
+ "More than one instrumentation result proto file found.");
+ }
+
+ parseProtoFile(mRunName, files[0]);
+ }
+ } finally {
+ FileUtil.recursiveDelete(tmpDestDir);
+ }
+ }
+
+ public void parseProtoFile(String runName, File file) {
+ TestRunToTestInvocationForwarder runToInvocation =
+ new TestRunToTestInvocationForwarder(mListener);
+ List<ITestRunListener> runListeners = Collections.singletonList(runToInvocation);
+ InstrumentationResultProtoParser protoParser =
+ new InstrumentationResultProtoParser(runName, runListeners);
+ try {
+ protoParser.processProtoFile(file);
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to process the instrumentation proto file.");
+ }
+ }
+
+ private void deleteTestFile(String filePath) throws DeviceNotAvailableException {
+ if (getDevice().doesFileExist(filePath)) {
+ getDevice().executeShellCommand(String.format("rm -rf %s", filePath));
+ }
+ }
+
+ protected void deleteTestFiles() throws DeviceNotAvailableException {
+ deleteTestFile(String.format("%s/%s", mSDcardPath, NohupCommandHelper.NOHUP_LOG));
+ deleteTestFile(String.format("%s/%s", mSDcardPath, INSTRUMENTATION_RESULTS_FILE_PATH));
+ }
+
+ public void setUp() throws DeviceNotAvailableException {
+ mSDcardPath = getDevice().getMountPoint(IDevice.MNT_EXTERNAL_STORAGE);
+ // Clean previous test files if exist
+ deleteTestFiles();
+ LogUtil.CLog.i("Deleted existing test files on device if existed");
+ }
+}
diff --git a/host_runners/power/tests/Android.bp b/host_runners/power/tests/Android.bp
new file mode 100644
index 000000000..7081d11e9
--- /dev/null
+++ b/host_runners/power/tests/Android.bp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_library_host {
+ name: "PowerHostRunnerTests",
+ srcs: ["src/**/*.java"],
+ static_libs: [
+ "PowerHostRunner",
+ "mockito",
+ "objenesis",
+ ],
+}
diff --git a/host_runners/power/tests/src/com/android/power/PowerRunnerTests.java b/host_runners/power/tests/src/com/android/power/PowerRunnerTests.java
new file mode 100644
index 000000000..280b992e9
--- /dev/null
+++ b/host_runners/power/tests/src/com/android/power/PowerRunnerTests.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.power;
+
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.spy;
+
+import com.android.ddmlib.AdbCommandRejectedException;
+import com.android.ddmlib.IDevice;
+import com.android.ddmlib.SyncException;
+import com.android.ddmlib.TimeoutException;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.invoker.InvocationContext;
+import com.android.tradefed.invoker.TestInformation;
+import com.android.tradefed.result.ITestInvocationListener;
+
+import com.android.power.PowerRunner;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class PowerRunnerTests {
+ private ITestDevice mDevice;
+ private IDevice mIDevice;
+ private TestInformation mTestInformation;
+ private ITestInvocationListener mITestInvocationListener;
+ private PowerRunner mPowerRunner;
+
+ @Before
+ public void setUp()
+ throws DeviceNotAvailableException,
+ IOException,
+ AdbCommandRejectedException,
+ SyncException,
+ TimeoutException {
+ mDevice = Mockito.mock(ITestDevice.class);
+ mIDevice = Mockito.mock(IDevice.class);
+ mITestInvocationListener = Mockito.mock(ITestInvocationListener.class);
+
+ IInvocationContext context = new InvocationContext();
+ context.addAllocatedDevice("device", mDevice);
+ mTestInformation = TestInformation.newBuilder().setInvocationContext(context).build();
+ Mockito.when(mDevice.getIDevice()).thenReturn(mIDevice);
+ Mockito.when(mDevice.waitForDeviceNotAvailable(Mockito.anyLong())).thenReturn(true);
+ Mockito.when(mDevice.waitForDeviceAvailable(Mockito.anyLong())).thenReturn(true);
+ Mockito.when(mDevice.doesFileExist(Mockito.anyString())).thenReturn(true);
+ Mockito.when(mDevice.executeAdbCommand(Mockito.anyString())).thenReturn("Success");
+ Mockito.when(mDevice.executeShellCommand(Mockito.anyString())).thenReturn("Success");
+
+ Mockito.when(mIDevice.getMountPoint(Mockito.anyString())).thenReturn("mockedFilePath");
+ Mockito.when(mIDevice.getSerialNumber()).thenReturn("mockedSerialNumber");
+
+ Mockito.when(mDevice.pullDir(Mockito.anyString(), Mockito.any(File.class)))
+ .thenAnswer(
+ input -> {
+ File destDir = input.getArgument(1);
+ File file1 =
+ new File(
+ destDir.getAbsoluteFile()
+ + File.separator
+ + "filename1.proto");
+
+ file1.createNewFile();
+ return true;
+ });
+
+ mPowerRunner =
+ spy(
+ new PowerRunner() {
+ @Override
+ public ITestDevice getDevice() {
+ return mDevice;
+ }
+
+ protected Map<String, String> getAllInstrumentationArgs() {
+ HashMap<String, String> map = new HashMap<>();
+ map.put("k1", "v1");
+ map.put("k2", "v2");
+ return map;
+ }
+
+ @Override
+ public String getPackageName() {
+ return "testPackageName";
+ }
+
+ @Override
+ public String getRunnerName() {
+ return "testRunnerName";
+ }
+
+ @Override
+ public String getClassName() {
+ return "testClassName";
+ }
+ });
+ }
+
+ /** Positive workflow test: When device is connected and test run is successful */
+ @Test
+ public void testRunWhenDeviceIsOnline() throws DeviceNotAvailableException, IOException {
+
+ mPowerRunner.run(mTestInformation, mITestInvocationListener);
+
+ Mockito.verify(mDevice, Mockito.times(3)).getMountPoint(Mockito.anyString());
+ Mockito.verify(mDevice, Mockito.times(4)).executeShellCommand(Mockito.anyString());
+ Mockito.verify(mDevice, Mockito.times(3)).doesFileExist(Mockito.anyString());
+ Mockito.verify(mIDevice, Mockito.times(4)).getSerialNumber();
+ Mockito.verify(mDevice, Mockito.times(1))
+ .pullDir(Mockito.anyString(), Mockito.any(File.class));
+ Mockito.verify(mPowerRunner, Mockito.times(1)).setUp();
+ Mockito.verify(mPowerRunner, Mockito.times(1)).waitForDeviceToBeDisconnected();
+ Mockito.verify(mPowerRunner, Mockito.times(1)).waitForDeviceToBeConnected();
+ Mockito.verify(mPowerRunner, Mockito.times(1)).parseInstrumentationResults();
+ Mockito.verify(mPowerRunner, Mockito.times(1))
+ .parseProtoFile(Mockito.anyString(), Mockito.any(File.class));
+ }
+
+ /** Negative test: When device fails to disconnect after the test run is triggered */
+ @Test
+ public void testRunWhenDeviceIsNotDisconnectedAfterTestStart()
+ throws DeviceNotAvailableException, IOException {
+ Mockito.when(mIDevice.isOnline()).thenReturn(true);
+ try {
+ mPowerRunner.run(mTestInformation, mITestInvocationListener);
+ fail();
+ } catch (RuntimeException e) {
+ Assert.assertTrue(
+ e.getMessage()
+ .contains(" not disconnected from host after waiting for 120000"));
+ }
+ }
+
+ /** Negative test: When device fails to connect back to host after the test run is completed */
+ @Test
+ public void testRunWhenDeviceIsNotConnectedBackAfterTestComplete()
+ throws DeviceNotAvailableException, IOException {
+ Mockito.when(mIDevice.isOnline()).thenReturn(false);
+ Mockito.when(mIDevice.isOffline()).thenReturn(true);
+ try {
+ mPowerRunner.run(mTestInformation, mITestInvocationListener);
+ fail();
+ } catch (RuntimeException e) {
+ Assert.assertTrue(
+ e.getMessage().contains("not connected back to host after waiting for"));
+ }
+ }
+
+ /** Negative workflow test: When proto file is not available on the device after the test run */
+ @Test
+ public void testParseInstrumentationResultsWhenNoProtoFileIsPresent()
+ throws DeviceNotAvailableException, IOException {
+ // Just return true and don't create any mock proto file so that there will not be any file
+ Mockito.when(mDevice.pullDir(Mockito.anyString(), Mockito.any(File.class)))
+ .thenReturn(true);
+
+ try {
+ mPowerRunner.run(mTestInformation, mITestInvocationListener);
+ fail();
+ } catch (RuntimeException e) {
+ Assert.assertTrue(
+ e.getMessage().contains("Instrumentation results proto file not found under"));
+ }
+ }
+}
diff --git a/host_runners/utils/Android.bp b/host_runners/utils/Android.bp
new file mode 100644
index 000000000..6e5feb6f9
--- /dev/null
+++ b/host_runners/utils/Android.bp
@@ -0,0 +1,23 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_library_host {
+ name: "host_runner_utils",
+ srcs: ["src/**/*.java"],
+ static_libs: ["tradefed"],
+}
diff --git a/host_runners/utils/src/com/android/runner/utils/InstrumentationResultProtoParser.java b/host_runners/utils/src/com/android/runner/utils/InstrumentationResultProtoParser.java
new file mode 100644
index 000000000..94e070a41
--- /dev/null
+++ b/host_runners/utils/src/com/android/runner/utils/InstrumentationResultProtoParser.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.runner.utils;
+
+import com.android.commands.am.InstrumentationData.ResultsBundle;
+import com.android.commands.am.InstrumentationData.ResultsBundleEntry;
+import com.android.commands.am.InstrumentationData.Session;
+import com.android.commands.am.InstrumentationData.SessionStatus;
+import com.android.commands.am.InstrumentationData.TestStatus;
+import com.android.ddmlib.IShellOutputReceiver;
+import com.android.ddmlib.testrunner.ITestRunListener;
+import com.android.ddmlib.testrunner.InstrumentationResultParser;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Parses the instrumentation result proto collected during instrumentation test run and informs
+ * ITestRunListener of the results.
+ */
+public class InstrumentationResultProtoParser implements IShellOutputReceiver {
+
+ /** Error message supplied when no test result file is found. */
+ public static final String NO_TEST_RESULTS_FILE =
+ "No instrumentation proto test" + " results file found";
+
+ /** Error message supplied when no test results are received from test run. */
+ public static final String NO_TEST_RESULTS_MSG = "No test results";
+
+    /** Error message supplied when the test results file cannot be parsed. */
+ public static final String INVALID_TEST_RESULTS_FILE =
+ "Invalid instrumentation proto" + " test results file";
+
+ private static final String INSTRUMENTATION_STATUS_FORMAT = "INSTRUMENTATION_STATUS: %s=%s";
+ private static final String INSTRUMENTATION_STATUS_CODE_FORMAT =
+ "INSTRUMENTATION_STATUS_CODE: %d";
+ private static final String INSTRUMENTATION_RESULT_FORMAT = "INSTRUMENTATION_RESULT: %s=%s";
+ private static final String INSTRUMENTATION_CODE_FORMAT = "INSTRUMENTATION_CODE: %d";
+
+ private InstrumentationResultParser parser;
+
+ public InstrumentationResultProtoParser(
+ String runName, Collection<ITestRunListener> listeners) {
+ parser = new InstrumentationResultParser(runName, listeners);
+ }
+
+ /**
+ * Process the instrumentation result proto file collected during the instrumentation test run.
+     * The instrumentation proto file consists of test statuses and the instrumentation session status. This
+ * method will be used only when complete instrumentation results proto file is available for
+ * parsing.
+ *
+ * @param protoFile that contains the test status and instrumentation session results.
+ * @throws IOException
+ */
+ public void processProtoFile(File protoFile) throws IOException {
+
+        // Report test run failures in case of a null or empty proto file.
+ if (protoFile == null) {
+ parser.handleTestRunFailed(NO_TEST_RESULTS_FILE);
+ return;
+ }
+ if (protoFile.length() == 0) {
+ parser.handleTestRunFailed(NO_TEST_RESULTS_MSG);
+ return;
+ }
+
+ // Read the input proto file
+ byte[] bytesArray = new byte[(int) protoFile.length()];
+ FileInputStream fis = new FileInputStream(protoFile);
+ fis.read(bytesArray);
+ fis.close();
+
+ try {
+ // Parse the proto file.
+ Session instrumentSession = Session.parseFrom(bytesArray);
+
+ // Process multiple test status.
+ List<TestStatus> multipleTestStatus = instrumentSession.getTestStatusList();
+ for (TestStatus teststatus : multipleTestStatus) {
+ processTestStatus(teststatus);
+ }
+
+ // Process instrumentation session status.
+ SessionStatus sessionStatus = instrumentSession.getSessionStatus();
+ if (sessionStatus.isInitialized()) {
+ processSessionStatus(sessionStatus);
+ }
+ } catch (InvalidProtocolBufferException ex) {
+ parser.handleTestRunFailed(INVALID_TEST_RESULTS_FILE);
+ }
+ parser.done();
+ }
+
+ /**
+ * Preprocess the single TestStatus proto message which includes the test info or test results
+     * and result code into shell output format for further processing by
+ * InstrumentationResultParser.
+ *
+ * @param testStatus The {@link TestStatus} holding the current test info collected during the
+ * test.
+ */
+ public void processTestStatus(TestStatus testStatus) {
+ // Process the test results.
+ ResultsBundle results = testStatus.getResults();
+ List<String> preProcessedLines = new LinkedList<>();
+ for (ResultsBundleEntry entry : results.getEntriesList()) {
+ String currentKey = entry.getKey();
+ String currentValue = null;
+ if (entry.hasValueString()) {
+ currentValue = entry.getValueString().trim();
+ } else if (entry.hasValueInt()) {
+ currentValue = String.valueOf(entry.getValueInt());
+ }
+ preProcessedLines.add(
+ String.format(INSTRUMENTATION_STATUS_FORMAT, currentKey, currentValue));
+ }
+ preProcessedLines.add(
+ String.format(INSTRUMENTATION_STATUS_CODE_FORMAT, testStatus.getResultCode()));
+ parser.processNewLines(preProcessedLines.toArray(new String[preProcessedLines.size()]));
+ }
+
+ /**
+ * Preprocess the instrumentation session status which includes the instrumentation test results
+ * and the session status code to shell output format for further processing by
+ * InstrumentationResultParser.
+ *
+ * @param sessionStatus The {@link SessionStatus} holding the current instrumentation session
+ * info collected during the test run.
+ */
+ public void processSessionStatus(SessionStatus sessionStatus) {
+
+ List<String> preProcessedLines = new LinkedList<>();
+ ResultsBundle results = sessionStatus.getResults();
+ for (ResultsBundleEntry entry : results.getEntriesList()) {
+ String currentKey = entry.getKey();
+ String currentValue = "";
+ if (entry.hasValueString()) {
+ currentValue = entry.getValueString();
+ String lines[] = currentValue.split("\\r?\\n");
+ int lineCount = 1;
+ for (String line : lines) {
+ if (lineCount == 1) {
+ // Only first line should have the Result code prefix.
+ preProcessedLines.add(
+ String.format(INSTRUMENTATION_RESULT_FORMAT, currentKey, line));
+ lineCount++;
+ continue;
+ }
+ preProcessedLines.add(line);
+ }
+ } else if (entry.hasValueInt()) {
+ currentValue = String.valueOf(entry.getValueInt());
+ preProcessedLines.add(
+ String.format(INSTRUMENTATION_RESULT_FORMAT, currentKey, currentValue));
+ }
+ }
+ if (results.isInitialized()) {
+ preProcessedLines.add(
+ String.format(INSTRUMENTATION_CODE_FORMAT, sessionStatus.getResultCode()));
+ }
+
+ parser.processNewLines(preProcessedLines.toArray(new String[preProcessedLines.size()]));
+ }
+
+ /* (non-Javadoc)
+ * @see com.android.ddmlib.IShellOutputReceiver#addOutput(byte[], int, int)
+ */
+ @Override
+ public void addOutput(byte[] protoData, int bytes, int length) {
+ // TODO : Process the streaming proto instrumentation results.
+ }
+
+ /* (non-Javadoc)
+ * @see com.android.ddmlib.IShellOutputReceiver#flush()
+ */
+ @Override
+ public void flush() {}
+
+ /* (non-Javadoc)
+ * @see com.android.ddmlib.IShellOutputReceiver#isCancelled()
+ */
+ @Override
+ public boolean isCancelled() {
+ return false;
+ }
+}
diff --git a/host_runners/utils/src/com/android/runner/utils/NohupCommandHelper.java b/host_runners/utils/src/com/android/runner/utils/NohupCommandHelper.java
new file mode 100644
index 000000000..e66b905d4
--- /dev/null
+++ b/host_runners/utils/src/com/android/runner/utils/NohupCommandHelper.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.runner.utils;
+
+import com.android.ddmlib.IDevice;
+import com.android.tradefed.device.CollectingOutputReceiver;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+
+import java.util.concurrent.TimeUnit;
+import java.io.File;
+
+/**
+ * Helper class to execute async adb commands using nohup.
+ *
+ *
+ *
+ * <p>If a process needs to keep running even after USB is disconnected, use this helper.
+ */
+public class NohupCommandHelper {
+
+ private static final int DEFAULT_MAX_RETRY_ATTEMPTS = 0;
+ public static final String NOHUP_LOG = "nohup.log";
+
+ private static final int DEFAULT_TIMEOUT = 120; // 2 minutes
+
+ /** Helper method to execute adb command with nohup */
+ public static void executeAdbNohupCommand(ITestDevice device, String cmd, int timeout)
+ throws DeviceNotAvailableException {
+
+ String logPath =
+ String.format(
+ "%s/%s", device.getMountPoint(IDevice.MNT_EXTERNAL_STORAGE), NOHUP_LOG);
+ File out = new File(logPath);
+ if (!device.doesFileExist(out.getParent())) {
+ throw new IllegalArgumentException("Output log's directory doesn't exist.");
+ }
+
+ StringBuilder builder = new StringBuilder();
+
+ builder.append("nohup");
+ builder.append(" ");
+ builder.append(cmd);
+ builder.append(" ");
+
+ // Re-route stdout to a log.
+ builder.append(String.format(">> %s", logPath));
+ builder.append(" ");
+
+ // Re-route errors to stdout.
+ builder.append("2>&1");
+
+ String finalCommand = builder.toString();
+
+ new Thread(
+ new Runnable() {
+ @Override
+ public void run() {
+ try {
+ CLog.d(
+ "About to run async command on device %s: %s",
+ device.getSerialNumber(), finalCommand);
+
+ device.executeShellCommand(
+ finalCommand,
+ /* doing nothing with the output */
+ new CollectingOutputReceiver(),
+ timeout,
+ TimeUnit.SECONDS,
+ DEFAULT_MAX_RETRY_ATTEMPTS);
+ } catch (DeviceNotAvailableException e) {
+ CLog.e(
+ "Device became not available while running: %s",
+ finalCommand);
+ CLog.e(e);
+ }
+ }
+ })
+ .start();
+ }
+
+ public static void executeAdbNohupCommand(ITestDevice device, String cmd)
+ throws DeviceNotAvailableException {
+ executeAdbNohupCommand(device, cmd, DEFAULT_TIMEOUT);
+ }
+}
diff --git a/host_runners/utils/tests/Android.bp b/host_runners/utils/tests/Android.bp
new file mode 100644
index 000000000..cdd9b922b
--- /dev/null
+++ b/host_runners/utils/tests/Android.bp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_library_host {
+ name: "host_runner_utils_test",
+ srcs: ["src/**/*.java"],
+ static_libs: [
+ "host_runner_utils",
+ "mockito",
+ "objenesis",
+ ],
+}
diff --git a/host_runners/utils/tests/src/com/android/runner/utils/InstrumentationResultProtoParserTest.java b/host_runners/utils/tests/src/com/android/runner/utils/InstrumentationResultProtoParserTest.java
new file mode 100644
index 000000000..0062ca47e
--- /dev/null
+++ b/host_runners/utils/tests/src/com/android/runner/utils/InstrumentationResultProtoParserTest.java
@@ -0,0 +1,1234 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.runner.utils;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
+import com.android.commands.am.InstrumentationData.ResultsBundle;
+import com.android.commands.am.InstrumentationData.ResultsBundleEntry;
+import com.android.commands.am.InstrumentationData.Session;
+import com.android.commands.am.InstrumentationData.SessionStatus;
+import com.android.commands.am.InstrumentationData.SessionStatusCode;
+import com.android.commands.am.InstrumentationData.TestStatus;
+import com.android.ddmlib.testrunner.ITestRunListener;
+import com.android.ddmlib.testrunner.TestIdentifier;
+
+import com.google.common.truth.Truth;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.ArgumentCaptor;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/** Unit tests for {@link InstrumentationResultProtoParser}. */
+@RunWith(JUnit4.class)
+public class InstrumentationResultProtoParserTest {
+
+ // Parser under test; recreated for every test case in setUp().
+ private InstrumentationResultProtoParser mParser;
+ @Mock ITestRunListener mMockListener;
+
+ private static final String RUN_KEY = "testing";
+ private static final String CLASS_NAME_1 = "class_1";
+ private static final String METHOD_NAME_1 = "method_1";
+ private static final String CLASS_NAME_2 = "class_2";
+ private static final String METHOD_NAME_2 = "method_2";
+ private static final String TEST_FAILURE_MESSAGE_1 = "java.lang.AssertionError: No App";
+ private static final String RUN_FAILURE_MESSAGE = "Unable to find instrumentation info:";
+ private static final String TEST_COMPLETED_STATUS_1 =
+ "Instrumentation reported numtests=2 but only ran 0";
+ private static final String TEST_COMPLETED_STATUS_2 =
+ "Instrumentation reported numtests=2 but only ran 1";
+ private static final String INCOMPLETE_TEST_ERR_MSG_PREFIX =
+ "Test failed to run" + " to completion";
+ private static final String FATAL_EXCEPTION_MSG = "Fatal exception when running tests";
+
+ // Proto file consumed by the current test case; deleted in tearDown() when present.
+ private File protoTestFile = null;
+
+ /** Creates a fresh parser wired to the single mock run listener. */
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+
+ List<ITestRunListener> runListeners = new ArrayList<>();
+
+ runListeners.add(mMockListener);
+ mParser = new InstrumentationResultProtoParser(RUN_KEY, runListeners);
+ }
+
+ // Sample one test success instrumentation proto file in a test run.
+
+ // result_code: 1
+ // results {
+ // entries {
+ // key: "class"
+ // value_string: "android.platform.test.scenario.clock.OpenAppMicrobenchmark"
+ // }
+ // entries {
+ // key: "current"
+ // value_int: 1
+ // }
+ // entries {
+ // key: "id"
+ // value_string: "AndroidJUnitRunner"
+ // }
+ // entries {
+ // key: "numtests"
+ // value_int: 1
+ // }
+ // entries {
+ // key: "stream"
+ // value_string: "\nandroid.platform.test.scenario.clock.OpenAppMicrobenchmark:"
+ // }
+ // entries {
+ // key: "test"
+ // value_string: "testOpen"
+ // }
+ // }
+ // result_code: 2
+ // results {
+ // entries {
+ // key: "cold_startup_com.google.android.deskclock"
+ // value_string: "626"
+ // }
+ // }
+ //
+ // results {
+ // entries {
+ // key: "class"
+ // value_string: "android.platform.test.scenario.clock.OpenAppMicrobenchmark"
+ // }
+ // entries {
+ // key: "current"
+ // value_int: 1
+ // }
+ // entries {
+ // key: "id"
+ // value_string: "AndroidJUnitRunner"
+ // }
+ // entries {
+ // key: "numtests"
+ // value_int: 1
+ // }
+ // entries {
+ // key: "stream"
+ // value_string: "."
+ // }
+ // entries {
+ // key: "test"
+ // value_string: "testOpen"
+ // }
+ // }
+ //
+ // result_code: -1
+ // results {
+ // entries {
+ // key: "stream"
+ // value_string: "\n\nTime: 27.013\n\nOK (1 test)\n\n"
+ // }
+ // entries {
+ // key: "total_cpu_usage"
+ // value_string: "39584"
+ // }
+ // }
+
+ /**
+ * Test for the null input instrumentation results proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testNullProtoFile() throws IOException {
+ protoTestFile = null;
+
+ mParser.processProtoFile(protoTestFile);
+
+ // A missing file must be reported as a run failure with zero tests.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener)
+ .testRunFailed(Mockito.eq(InstrumentationResultProtoParser.NO_TEST_RESULTS_FILE));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ // NOTE(review): these re-verifications duplicate the in-order checks above and pin the
+ // invocation count to exactly one per callback.
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener)
+ .testRunFailed(Mockito.eq(InstrumentationResultProtoParser.NO_TEST_RESULTS_FILE));
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+ }
+
+ /**
+ * Test for the empty input instrumentation results proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testEmptyProtoFile() throws IOException {
+ protoTestFile = File.createTempFile("tmp", ".pb");
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener)
+ .testRunFailed(Mockito.eq(InstrumentationResultProtoParser.NO_TEST_RESULTS_MSG));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener)
+ .testRunFailed(Mockito.eq(InstrumentationResultProtoParser.NO_TEST_RESULTS_MSG));
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+ }
+
+ /**
+ * Test for the invalid input instrumentation results proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testInvalidResultsProtoFile() throws IOException {
+ protoTestFile = File.createTempFile("tmp", ".pb");
+ FileOutputStream fout = new FileOutputStream(protoTestFile);
+ // A single arbitrary byte ('A') so the file cannot parse as a results proto.
+ fout.write(65);
+ fout.close();
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener)
+ .testRunFailed(
+ Mockito.eq(InstrumentationResultProtoParser.INVALID_TEST_RESULTS_FILE));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener)
+ .testRunFailed(
+ Mockito.eq(InstrumentationResultProtoParser.INVALID_TEST_RESULTS_FILE));
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+ }
+
+ /**
+ * Test for the no test results in input instrumentation results proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testNoTestResults() throws IOException {
+
+ protoTestFile = buildNoTestResultsProtoFile();
+
+ mParser.processProtoFile(protoTestFile);
+
+ // 27013 ms corresponds to the "Time: 27.013" stream entry in the session proto.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+ }
+
+ /**
+ * Test for one test success results in input instrumentation results proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestSuccessWithMetrics() throws IOException {
+ protoTestFile = buildSingleTestMetricSuccessProtoFile();
+
+ TestIdentifier td = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ ArgumentCaptor<Map<String, String>> captureTestMetrics = ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td);
+ verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ // Verify the test metrics
+ assertEquals("626", captureTestMetrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTestMetrics.getValue().get("metric_key2"));
+ }
+
+ /**
+ * Test for one test success result with multiple listeners in instrumentation results proto
+ * file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestSuccessWithMultipleListeners() throws IOException {
+
+ // Replace the default single-listener parser: every registered listener must
+ // observe the same callback sequence with the same payloads.
+ List<ITestRunListener> runListeners = new ArrayList<>();
+ ITestRunListener mMockListener1 = mock(ITestRunListener.class);
+ ITestRunListener mMockListener2 = mock(ITestRunListener.class);
+ runListeners.add(mMockListener1);
+ runListeners.add(mMockListener2);
+
+ mParser = new InstrumentationResultProtoParser(RUN_KEY, runListeners);
+
+ protoTestFile = buildSingleTestMetricSuccessProtoFile();
+
+ TestIdentifier td = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener1, mMockListener2);
+ inOrder.verify(mMockListener1).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener2).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener1).testStarted(td);
+ inOrder.verify(mMockListener2).testStarted(td);
+ inOrder.verify(mMockListener1).testEnded(Mockito.eq(td), Mockito.any(Map.class));
+ inOrder.verify(mMockListener2).testEnded(Mockito.eq(td), Mockito.any(Map.class));
+ inOrder.verify(mMockListener1).testRunEnded(27013, Collections.emptyMap());
+ inOrder.verify(mMockListener2).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener1).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener1).testStarted(td);
+ ArgumentCaptor<Map<String, String>> captureListener1Metrics =
+ ArgumentCaptor.forClass(Map.class);
+ verify(mMockListener1).testEnded(Mockito.eq(td), captureListener1Metrics.capture());
+ verify(mMockListener1).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener2).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener2).testStarted(td);
+ ArgumentCaptor<Map<String, String>> captureListener2Metrics =
+ ArgumentCaptor.forClass(Map.class);
+ verify(mMockListener2).testEnded(Mockito.eq(td), captureListener2Metrics.capture());
+ verify(mMockListener2).testRunEnded(27013, Collections.emptyMap());
+
+ // Verify the test metrics
+ assertEquals("626", captureListener1Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureListener1Metrics.getValue().get("metric_key2"));
+
+ // Verify the test metrics
+ assertEquals("626", captureListener2Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureListener2Metrics.getValue().get("metric_key2"));
+ }
+
+ /**
+ * Test for test run with the metrics.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneRunSuccessWithMetrics() throws IOException {
+ protoTestFile = buildRunMetricSuccessProtoFile();
+
+ TestIdentifier td = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ ArgumentCaptor<Map<String, String>> captureRunMetrics = ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // Run-level metrics arrive via testRunEnded; the test itself carries none.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td);
+ inOrder.verify(mMockListener).testEnded(td, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td);
+ verify(mMockListener).testEnded(td, Collections.emptyMap());
+ verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ // Verify run metrics
+ assertEquals("39584", captureRunMetrics.getValue().get("run_metric_key"));
+ }
+
+ /**
+ * Test for test metrics and test run metrics in instrumentation proto file.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestAndRunSuccessWithMetrics() throws IOException {
+ protoTestFile = buildTestAndRunMetricSuccessProtoFile();
+
+ TestIdentifier td = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ ArgumentCaptor<Map<String, String>> captureTestMetrics = ArgumentCaptor.forClass(Map.class);
+ ArgumentCaptor<Map<String, String>> captureRunMetrics = ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ inOrder.verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td);
+ verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ // Verify the test metrics
+ assertEquals("626", captureTestMetrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTestMetrics.getValue().get("metric_key2"));
+
+ // Verify run metrics
+ assertEquals("39584", captureRunMetrics.getValue().get("run_metric_key"));
+ }
+
+ /**
+ * Test for multiple test success with metrics.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testMultipleTestSuccessWithMetrics() throws IOException {
+ protoTestFile = buildMultipleTestAndRunMetricSuccessProtoFile();
+
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ TestIdentifier td2 = new TestIdentifier(CLASS_NAME_2, METHOD_NAME_2);
+
+ ArgumentCaptor<Map<String, String>> captureTest1Metrics =
+ ArgumentCaptor.forClass(Map.class);
+ ArgumentCaptor<Map<String, String>> captureTest2Metrics =
+ ArgumentCaptor.forClass(Map.class);
+ ArgumentCaptor<Map<String, String>> captureRunMetrics = ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // Both tests must complete in order, each delivering its own metrics.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ inOrder.verify(mMockListener).testStarted(td2);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td2), captureTest2Metrics.capture());
+ inOrder.verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ verify(mMockListener).testStarted(td2);
+ verify(mMockListener).testEnded(Mockito.eq(td2), captureTest2Metrics.capture());
+ verify(mMockListener).testRunEnded(Mockito.eq(27013L), captureRunMetrics.capture());
+
+ // Verify the test1 and test2 metrics
+ assertEquals("626", captureTest1Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTest1Metrics.getValue().get("metric_key2"));
+ assertEquals("626", captureTest2Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTest2Metrics.getValue().get("metric_key2"));
+
+ // Verify run metrics
+ assertEquals("39584", captureRunMetrics.getValue().get("run_metric_key"));
+ }
+
+ /**
+ * Test for one test failure.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestFailure() throws IOException {
+ protoTestFile = buildSingleTestFailureProtoFile();
+
+ TestIdentifier td = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ ArgumentCaptor<Map<String, String>> captureTestMetrics = ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // A failed test still reports testEnded (with its metrics) after testFailed.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td);
+ inOrder.verify(mMockListener)
+ .testFailed(Mockito.eq(td), Mockito.eq(TEST_FAILURE_MESSAGE_1));
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td);
+ verify(mMockListener).testFailed(Mockito.eq(td), Mockito.eq(TEST_FAILURE_MESSAGE_1));
+ verify(mMockListener).testEnded(Mockito.eq(td), captureTestMetrics.capture());
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ // Verify the test metrics
+ assertEquals("626", captureTestMetrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTestMetrics.getValue().get("metric_key2"));
+ }
+
+ /**
+ * Test for one test pass and one test failure.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestPassOneTestFailure() throws IOException {
+ protoTestFile = buildOneTestPassOneTestFailProtoFile();
+
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ TestIdentifier td2 = new TestIdentifier(CLASS_NAME_2, METHOD_NAME_2);
+
+ ArgumentCaptor<Map<String, String>> captureTest1Metrics =
+ ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ inOrder.verify(mMockListener).testStarted(td2);
+ inOrder.verify(mMockListener)
+ .testFailed(Mockito.eq(td2), Mockito.eq(TEST_FAILURE_MESSAGE_1));
+ inOrder.verify(mMockListener).testEnded(td2, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ verify(mMockListener).testStarted(td2);
+ verify(mMockListener).testFailed(Mockito.eq(td2), Mockito.eq(TEST_FAILURE_MESSAGE_1));
+ verify(mMockListener).testEnded(td2, Collections.emptyMap());
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ // Verify the test metrics
+ assertEquals("626", captureTest1Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTest1Metrics.getValue().get("metric_key2"));
+ }
+
+ /**
+ * Test for all tests incomplete in a test run.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testAllTestsIncomplete() throws IOException {
+ protoTestFile = buildTestsIncompleteProtoFile();
+ ArgumentCaptor<String> testOutputErrorMessage = ArgumentCaptor.forClass(String.class);
+ ArgumentCaptor<String> runOutputErrorMessage = ArgumentCaptor.forClass(String.class);
+
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // The started-but-unfinished test is failed, and the whole run is failed with
+ // elapsed time 0 since no run-complete status was seen.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testFailed(Mockito.eq(td1), Mockito.any(String.class));
+ inOrder.verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunFailed(Mockito.any(String.class));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testFailed(Mockito.eq(td1), testOutputErrorMessage.capture());
+ verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ verify(mMockListener).testRunFailed(runOutputErrorMessage.capture());
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ Truth.assertThat(testOutputErrorMessage.getValue())
+ .contains(INCOMPLETE_TEST_ERR_MSG_PREFIX);
+ // NOTE(review): disabled assertions — confirm whether the parser includes the
+ // "numtests" completion status in these messages before re-enabling.
+ // Truth.assertThat(testOutputErrorMessage.getValue()).contains(TEST_COMPLETED_STATUS_1);
+ // Truth.assertThat(runOutputErrorMessage.getValue()).contains(TEST_COMPLETED_STATUS_1);
+ }
+
+ /**
+ * Test for one test complete and another test partial status.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testPartialTestsIncomplete() throws IOException {
+ protoTestFile = buildPartialTestsIncompleteProtoFile();
+
+ ArgumentCaptor<String> testOutputErrorMessage = ArgumentCaptor.forClass(String.class);
+ ArgumentCaptor<String> runOutputErrorMessage = ArgumentCaptor.forClass(String.class);
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ TestIdentifier td2 = new TestIdentifier(CLASS_NAME_2, METHOD_NAME_2);
+ ArgumentCaptor<Map<String, String>> captureTest1Metrics =
+ ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // First test completes normally with metrics; second is flagged incomplete.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td1), Mockito.any(Map.class));
+ inOrder.verify(mMockListener).testStarted(td2);
+ inOrder.verify(mMockListener).testFailed(Mockito.eq(td2), Mockito.any(String.class));
+ inOrder.verify(mMockListener).testEnded(td2, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunFailed(Mockito.any(String.class));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ verify(mMockListener).testStarted(td2);
+ verify(mMockListener).testFailed(Mockito.eq(td2), testOutputErrorMessage.capture());
+ verify(mMockListener).testEnded(td2, Collections.emptyMap());
+ verify(mMockListener).testRunFailed(runOutputErrorMessage.capture());
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ assertEquals("626", captureTest1Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTest1Metrics.getValue().get("metric_key2"));
+ Truth.assertThat(testOutputErrorMessage.getValue())
+ .contains(INCOMPLETE_TEST_ERR_MSG_PREFIX);
+ // NOTE(review): disabled assertions — see testAllTestsIncomplete.
+ // Truth.assertThat(testOutputErrorMessage.getValue()).contains(TEST_COMPLETED_STATUS_2);
+ // Truth.assertThat(runOutputErrorMessage.getValue()).contains(TEST_COMPLETED_STATUS_2);
+ }
+
+ /**
+ * Test 1 test completed, 1 test not started from two expected tests in a test run.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testOneTestNotStarted() throws IOException {
+ protoTestFile = buildOneTestNotStarted();
+ ArgumentCaptor<String> runOutputErrorMessage = ArgumentCaptor.forClass(String.class);
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+ ArgumentCaptor<Map<String, String>> captureTest1Metrics =
+ ArgumentCaptor.forClass(Map.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // Only the run fails: the never-started test produces no test-level callbacks.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testEnded(Mockito.eq(td1), Mockito.any(Map.class));
+ inOrder.verify(mMockListener).testRunFailed(Mockito.any(String.class));
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 2);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testEnded(Mockito.eq(td1), captureTest1Metrics.capture());
+ verify(mMockListener).testRunFailed(runOutputErrorMessage.capture());
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ assertEquals("626", captureTest1Metrics.getValue().get("metric_key1"));
+ assertEquals("1", captureTest1Metrics.getValue().get("metric_key2"));
+ // NOTE(review): disabled assertion — see testAllTestsIncomplete.
+ // Truth.assertThat(runOutputErrorMessage.getValue()).contains(TEST_COMPLETED_STATUS_2);
+ }
+
+ /**
+ * Test for no time stamp parsing error when the time stamp parsing is not enforced.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testTimeStampMissingNotEnforced() throws IOException {
+ protoTestFile = buildInvalidTimeStampResultsProto(false);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // No testRunFailed expected: a missing timestamp is tolerated in this mode.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+ }
+
+ /**
+ * Tests parsing the fatal error output of an instrumentation invoked with "-e log true". Since
+ * it is log only, it will not report directly the failure, but the stream should still be
+ * populated.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testDirectFailure() throws IOException {
+ protoTestFile = buildValidTimeStampWithFatalExceptionResultsProto();
+
+ ArgumentCaptor<String> capture = ArgumentCaptor.forClass(String.class);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ inOrder.verify(mMockListener).testRunFailed(capture.capture());
+ inOrder.verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 0);
+ verify(mMockListener).testRunFailed(capture.capture());
+ verify(mMockListener).testRunEnded(0, Collections.emptyMap());
+
+ // The run-failure message must carry the exception text from the proto stream.
+ String failure = capture.getValue();
+ assertTrue(failure.contains("java.lang.RuntimeException: it failed super fast."));
+ }
+
+ /**
+ * Tests for ignore test status from the proto output.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testIgnoreProtoResult() throws IOException {
+ protoTestFile = buildTestIgnoredResultsProto();
+
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+
+ mParser.processProtoFile(protoTestFile);
+
+ // Ignored tests still produce the started/ended pair around testIgnored.
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener).testIgnored(td1);
+ inOrder.verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener).testIgnored(td1);
+ verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+ }
+
+ /**
+ * Tests for assumption failure test status from the proto output.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void testAssumptionProtoResult() throws IOException {
+ protoTestFile = buildTestAssumptionResultsProto();
+
+ TestIdentifier td1 = new TestIdentifier(CLASS_NAME_1, METHOD_NAME_1);
+
+ mParser.processProtoFile(protoTestFile);
+
+ InOrder inOrder = Mockito.inOrder(mMockListener);
+ inOrder.verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ inOrder.verify(mMockListener).testStarted(td1);
+ inOrder.verify(mMockListener)
+ .testAssumptionFailure(
+ Mockito.eq(td1),
+ Mockito.startsWith(
+ "org.junit.AssumptionViolatedException:"
+ + " got: <false>, expected: is <true>"));
+ inOrder.verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ inOrder.verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+
+ verify(mMockListener).testRunStarted(RUN_KEY, 1);
+ verify(mMockListener).testStarted(td1);
+ verify(mMockListener)
+ .testAssumptionFailure(
+ Mockito.eq(td1),
+ Mockito.startsWith(
+ "org.junit.AssumptionViolatedException:"
+ + " got: <false>, expected: is <true>"));
+ verify(mMockListener).testEnded(td1, Collections.emptyMap());
+ verify(mMockListener).testRunEnded(27013, Collections.emptyMap());
+ }
+
+ /** Deletes the proto file created by the current test case, if any. */
+ @After
+ public void tearDown() {
+ if (protoTestFile != null && protoTestFile.exists()) {
+ protoTestFile.delete();
+ }
+ }
+
+ private File buildNoTestResultsProtoFile() throws IOException {
+ Session sessionProto =
+ Session.newBuilder().setSessionStatus(getSessionStatusProto(false, false)).build();
+ File protoFile = File.createTempFile("tmp", ".pb");
+ sessionProto.writeTo(new FileOutputStream(protoFile));
+ return protoFile;
+ }
+
+ private File buildSingleTestMetricSuccessProtoFile() throws IOException {
+ List<TestStatus> testStatusList = new LinkedList<TestStatus>();
+ // Test start
+ testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+ // Test Metric
+ testStatusList.add(getTestStatusProto(true));
+ // Test End
+ testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, false, false));
+ Session sessionProto =
+ Session.newBuilder()
+ .addAllTestStatus(testStatusList)
+ .setSessionStatus(getSessionStatusProto(false, false))
+ .build();
+ File protoFile = File.createTempFile("tmp", ".pb");
+ sessionProto.writeTo(new FileOutputStream(protoFile));
+ return protoFile;
+ }
+
+ private File buildRunMetricSuccessProtoFile() throws IOException {
+ List<TestStatus> testStatusList = new LinkedList<TestStatus>();
+ // Test start.
+ testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+ // Test status without metrics.
+ testStatusList.add(getTestStatusProto(false));
+ // Test End.
+ testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, false, false));
+ // Session with metrics.
+ Session sessionProto =
+ Session.newBuilder()
+ .addAllTestStatus(testStatusList)
+ .setSessionStatus(getSessionStatusProto(true, false))
+ .build();
+ File protoFile = File.createTempFile("tmp", ".pb");
+ sessionProto.writeTo(new FileOutputStream(protoFile));
+ return protoFile;
+ }
+
+    /**
+     * Builds a proto result file for a single passing test that has both test-level metrics
+     * (on the test status) and run-level metrics (on the session status).
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildTestAndRunMetricSuccessProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // Test start.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+        // Test status with metrics (the previous comment said "without" — the arg is true).
+        testStatusList.add(getTestStatusProto(true));
+        // Test end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, false, false));
+        // Session with metrics.
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(true, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file for two passing tests, each with test-level metrics, plus
+     * run-level metrics on the session status.
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildMultipleTestAndRunMetricSuccessProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // First test: start, status with metrics, end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, true, false));
+        testStatusList.add(getTestStatusProto(true));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, false, false));
+        // Second test: start, status with metrics, end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_2, METHOD_NAME_2, 2, 2, true, false));
+        testStatusList.add(getTestStatusProto(true));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_2, METHOD_NAME_2, 2, 2, false, false));
+        // Session with metrics.
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(true, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file for a single failed test (test end is reported with the
+     * failure result code). The session status carries no run-level metrics.
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildSingleTestFailureProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // Test start.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+        // Test status with metrics.
+        testStatusList.add(getTestStatusProto(true));
+        // Test end, marked as a failure.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, false, true));
+        // Session without run-level metrics.
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(false, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file with two tests: the first passes (with test metrics), the
+     * second fails. The session status carries no run-level metrics.
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildOneTestPassOneTestFailProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // First test: start, status with metrics, successful end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, true, false));
+        testStatusList.add(getTestStatusProto(true));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, false, false));
+        // Second test: start, status without metrics, failed end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_2, METHOD_NAME_2, 2, 2, true, false));
+        testStatusList.add(getTestStatusProto(false));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_2, METHOD_NAME_2, 2, 2, false, true));
+        // Session without run-level metrics.
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(false, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file where the run is incomplete: one test started but there is
+     * no test end and no session status (simulates a crashed/aborted instrumentation run).
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildTestsIncompleteProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // Test start only; the run never completes.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, true, false));
+
+        // Session proto without a session status.
+        Session sessionProto = Session.newBuilder().addAllTestStatus(testStatusList).build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file where the first test completes but the second only starts,
+     * and there is no session status (simulates a run aborted mid-way).
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildPartialTestsIncompleteProtoFile() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // First test completes: start, status with metrics, end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, true, false));
+        testStatusList.add(getTestStatusProto(true));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, false, false));
+        // Second test starts but never ends.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_2, METHOD_NAME_2, 2, 2, true, false));
+
+        // Session proto without a session status.
+        Session sessionProto = Session.newBuilder().addAllTestStatus(testStatusList).build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file where only one of two expected tests ran to completion and
+     * the second never started; there is no session status.
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildOneTestNotStarted() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // First (and only) test completes: start, status with metrics, end.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, true, false));
+        testStatusList.add(getTestStatusProto(true));
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 2, false, false));
+
+        // Session proto without a session status; the second test is absent entirely.
+        Session sessionProto = Session.newBuilder().addAllTestStatus(testStatusList).build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file whose session "stream" output lacks a valid "Time:" stamp.
+     *
+     * @param isWithStack if true the stream contains a fatal-exception message with details;
+     *     otherwise the stream is empty.
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildInvalidTimeStampResultsProto(boolean isWithStack) throws IOException {
+        List<ResultsBundleEntry> entryList = new LinkedList<>();
+
+        if (isWithStack) {
+            entryList.add(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("stream")
+                            .setValueString(
+                                    FATAL_EXCEPTION_MSG
+                                            + " java.lang.IllegalArgumentException: Ambiguous"
+                                            + " arguments: cannot provide both test package and"
+                                            + " test class(es) to run")
+                            .build());
+        } else {
+            entryList.add(
+                    ResultsBundleEntry.newBuilder().setKey("stream").setValueString("").build());
+        }
+
+        SessionStatus sessionStatus =
+                SessionStatus.newBuilder()
+                        .setResultCode(-1)
+                        .setStatusCode(SessionStatusCode.SESSION_FINISHED)
+                        .setResults(ResultsBundle.newBuilder().addAllEntries(entryList).build())
+                        .build();
+
+        // Session with only a session status (no per-test statuses).
+        Session sessionProto = Session.newBuilder().setSessionStatus(sessionStatus).build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file whose session "stream" output has a valid "Time:" stamp but
+     * also reports a fatal exception with a stack trace.
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildValidTimeStampWithFatalExceptionResultsProto() throws IOException {
+        List<ResultsBundleEntry> entryList = new LinkedList<>();
+
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("stream")
+                        .setValueString(
+                                FATAL_EXCEPTION_MSG
+                                        + "Time: 0 \n"
+                                        + "1) Fatal exception when running tests"
+                                        + "java.lang.RuntimeException: it failed super fast."
+                                        + "at stackstack")
+                        .build());
+
+        SessionStatus sessionStatus =
+                SessionStatus.newBuilder()
+                        .setResultCode(-1)
+                        .setStatusCode(SessionStatusCode.SESSION_FINISHED)
+                        .setResults(ResultsBundle.newBuilder().addAllEntries(entryList).build())
+                        .build();
+
+        // Session with only a session status (no per-test statuses).
+        Session sessionProto = Session.newBuilder().setSessionStatus(sessionStatus).build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file for a single test reported as ignored (result code -3).
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildTestIgnoredResultsProto() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+        // Test start.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+
+        // Test-ignored status result.
+        List<ResultsBundleEntry> entryList = new LinkedList<>();
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("class")
+                        .setValueString(CLASS_NAME_1)
+                        .build());
+        entryList.add(ResultsBundleEntry.newBuilder().setKey("current").setValueInt(1).build());
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("id")
+                        .setValueString("AndroidJUnitRunner")
+                        .build());
+        entryList.add(ResultsBundleEntry.newBuilder().setKey("numtests").setValueInt(1).build());
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("test")
+                        .setValueString(METHOD_NAME_1)
+                        .build());
+
+        // Result code -3 marks the test as ignored.
+        testStatusList.add(
+                TestStatus.newBuilder()
+                        .setResultCode(-3)
+                        .setResults(ResultsBundle.newBuilder().addAllEntries(entryList).build())
+                        .build());
+
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(false, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Builds a proto result file for a single test that ends with an assumption failure
+     * (result code -4, with an {@code AssumptionViolatedException} stack entry).
+     *
+     * @return temp file containing the serialized {@link Session} proto.
+     */
+    private File buildTestAssumptionResultsProto() throws IOException {
+        List<TestStatus> testStatusList = new LinkedList<>();
+
+        // Test start.
+        testStatusList.add(getTestInfoProto(CLASS_NAME_1, METHOD_NAME_1, 1, 1, true, false));
+
+        // Assumption-failure status result.
+        List<ResultsBundleEntry> entryList = new LinkedList<>();
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("class")
+                        .setValueString(CLASS_NAME_1)
+                        .build());
+        entryList.add(ResultsBundleEntry.newBuilder().setKey("current").setValueInt(1).build());
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("id")
+                        .setValueString("AndroidJUnitRunner")
+                        .build());
+        entryList.add(ResultsBundleEntry.newBuilder().setKey("numtests").setValueInt(1).build());
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("test")
+                        .setValueString(METHOD_NAME_1)
+                        .build());
+        entryList.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("stack")
+                        .setValueString(
+                                "org.junit.AssumptionViolatedException: got: <false>, expected: is"
+                                        + " <true>")
+                        .build());
+        // Result code -4 marks an assumption failure.
+        testStatusList.add(
+                TestStatus.newBuilder()
+                        .setResultCode(-4)
+                        .setResults(ResultsBundle.newBuilder().addAllEntries(entryList).build())
+                        .build());
+
+        Session sessionProto =
+                Session.newBuilder()
+                        .addAllTestStatus(testStatusList)
+                        .setSessionStatus(getSessionStatusProto(false, false))
+                        .build();
+        File protoFile = File.createTempFile("tmp", ".pb");
+        // Close the stream explicitly; writeTo() does not close it (fd leak otherwise).
+        try (FileOutputStream out = new FileOutputStream(protoFile)) {
+            sessionProto.writeTo(out);
+        }
+        return protoFile;
+    }
+
+    /**
+     * Creates a {@link TestStatus} proto message representing a test start, a successful test
+     * end, or a failed test end.
+     *
+     * @param className class name where the test method is.
+     * @param methodName method name currently running.
+     * @param current current number of the test.
+     * @param numTests total number of tests.
+     * @param isStart true for the start of the test; otherwise treated as the end of the test.
+     * @param isFailure true if the test failed (takes precedence over {@code isStart}).
+     * @return the assembled {@link TestStatus} message.
+     */
+    private TestStatus getTestInfoProto(
+            String className,
+            String methodName,
+            int current,
+            int numTests,
+            boolean isStart,
+            boolean isFailure) {
+        List<ResultsBundleEntry> entries = new LinkedList<ResultsBundleEntry>();
+        entries.add(
+                ResultsBundleEntry.newBuilder().setKey("class").setValueString(className).build());
+        entries.add(
+                ResultsBundleEntry.newBuilder().setKey("current").setValueInt(current).build());
+        entries.add(
+                ResultsBundleEntry.newBuilder()
+                        .setKey("id")
+                        .setValueString("AndroidJUnitRunner")
+                        .build());
+        entries.add(
+                ResultsBundleEntry.newBuilder().setKey("numtests").setValueInt(numTests).build());
+        entries.add(
+                ResultsBundleEntry.newBuilder().setKey("test").setValueString(methodName).build());
+
+        if (isFailure) {
+            // Failures carry the failure message in both "stack" and "stream" entries.
+            entries.add(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("stack")
+                            .setValueString(TEST_FAILURE_MESSAGE_1)
+                            .build());
+            entries.add(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("stream")
+                            .setValueString(TEST_FAILURE_MESSAGE_1)
+                            .build());
+            // Test failure is reported with result code -2.
+            return TestStatus.newBuilder()
+                    .setResultCode(-2)
+                    .setResults(ResultsBundle.newBuilder().addAllEntries(entries).build())
+                    .build();
+        }
+
+        entries.add(
+                ResultsBundleEntry.newBuilder().setKey("stream").setValueString("\nabc:").build());
+
+        TestStatus.Builder status =
+                TestStatus.newBuilder()
+                        .setResults(ResultsBundle.newBuilder().addAllEntries(entries).build());
+        if (isStart) {
+            // Test start is reported with result code 1; test end leaves the code unset.
+            status.setResultCode(1);
+        }
+        return status.build();
+    }
+
+    /**
+     * Creates an in-progress {@link TestStatus} message, optionally carrying test metrics.
+     *
+     * @param isWithMetrics if false, no metric entries are added.
+     * @return the assembled {@link TestStatus} message with result code 2 (in progress).
+     */
+    private TestStatus getTestStatusProto(boolean isWithMetrics) {
+        ResultsBundle.Builder results = ResultsBundle.newBuilder();
+        if (isWithMetrics) {
+            results.addEntries(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("metric_key1")
+                            .setValueString("626")
+                            .build());
+            results.addEntries(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("metric_key2")
+                            .setValueString("1")
+                            .build());
+        }
+
+        // Result code 2 marks the metric status as in progress.
+        return TestStatus.newBuilder().setResultCode(2).setResults(results.build()).build();
+    }
+
+    /**
+     * Creates the final {@link SessionStatus} message for the proto result file.
+     *
+     * @param isWithMetrics if true, a run-level metric entry is added (success case only).
+     * @param isFailure if true, the status carries a run failure message instead of the
+     *     normal stream output.
+     * @return the assembled {@link SessionStatus} with SESSION_FINISHED status code.
+     */
+    private SessionStatus getSessionStatusProto(boolean isWithMetrics, boolean isFailure) {
+        ResultsBundle.Builder results = ResultsBundle.newBuilder();
+
+        if (isFailure) {
+            // Run failure: an "Error" message plus the reporting component id.
+            results.addEntries(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("Error")
+                            .setValueString(RUN_FAILURE_MESSAGE)
+                            .build());
+            results.addEntries(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("id")
+                            .setValueString("ActivityManagerService")
+                            .build());
+        } else {
+            // Normal completion stream with a valid "Time:" stamp.
+            results.addEntries(
+                    ResultsBundleEntry.newBuilder()
+                            .setKey("stream")
+                            .setValueString("\n\nTime: 27.013\n\nOK (1 test)\n\n")
+                            .build());
+            if (isWithMetrics) {
+                results.addEntries(
+                        ResultsBundleEntry.newBuilder()
+                                .setKey("run_metric_key")
+                                .setValueString("39584")
+                                .build());
+            }
+        }
+
+        // Both success and failure use result code -1 with SESSION_FINISHED.
+        return SessionStatus.newBuilder()
+                .setResultCode(-1)
+                .setStatusCode(SessionStatusCode.SESSION_FINISHED)
+                .setResults(results.build())
+                .build();
+    }
+}