aboutsummaryrefslogtreecommitdiff
path: root/talk/app/webrtc/androidtests/src/org
diff options
context:
space:
mode:
Diffstat (limited to 'talk/app/webrtc/androidtests/src/org')
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java29
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java180
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java152
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java6
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java117
-rw-r--r--talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java214
6 files changed, 622 insertions, 76 deletions
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
index 1c01ffa0b8..63c05fb616 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -28,7 +28,6 @@ package org.webrtc;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
-import android.opengl.Matrix;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
@@ -36,9 +35,6 @@ import android.test.suitebuilder.annotation.SmallTest;
import java.nio.ByteBuffer;
import java.util.Random;
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLContext;
-
public final class GlRectDrawerTest extends ActivityTestCase {
// Resolution of the test image.
private static final int WIDTH = 16;
@@ -46,7 +42,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Seed for random pixel creation.
private static final int SEED = 42;
// When comparing pixels, allow some slack for float arithmetic and integer rounding.
- private static final float MAX_DIFF = 1.0f;
+ private static final float MAX_DIFF = 1.5f;
private static float normalizedByte(byte b) {
return (b & 0xFF) / 255.0f;
@@ -100,7 +96,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testRgbRendering() {
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
@@ -119,7 +115,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the RGB frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -137,7 +133,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
@SmallTest
public void testYuvRendering() {
// Create EGL base with a pixel buffer as display output.
- EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
@@ -166,7 +162,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the YUV frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix());
+ drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -231,8 +227,9 @@ public final class GlRectDrawerTest extends ActivityTestCase {
private final int rgbTexture;
public StubOesTextureProducer(
- EGLContext sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+ EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
+ int height) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
surfaceTexture.setDefaultBufferSize(width, height);
eglBase.createSurface(surfaceTexture);
assertEquals(eglBase.surfaceWidth(), width);
@@ -253,7 +250,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
// Draw the RGB data onto the SurfaceTexture.
- drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+ drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
eglBase.swapBuffers();
}
@@ -266,14 +263,14 @@ public final class GlRectDrawerTest extends ActivityTestCase {
}
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
// Create resources for generating OES textures.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
- eglBase.getContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+ eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
final SurfaceTextureHelperTest.MockTextureListener listener =
new SurfaceTextureHelperTest.MockTextureListener();
surfaceTextureHelper.setListener(listener);
@@ -291,7 +288,7 @@ public final class GlRectDrawerTest extends ActivityTestCase {
// Draw the OES texture on the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
new file mode 100644
index 0000000000..b1ec5dda0e
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -0,0 +1,180 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
+
+import java.nio.ByteBuffer;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
+ final static String TAG = "MediaCodecVideoEncoderTest";
+
+ @SmallTest
+ public static void testInitializeUsingByteBuffer() {
+ if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+ Log.i(TAG,
+ "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+ return;
+ }
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+ encoder.release();
+ }
+
+ @SmallTest
+ public static void testInitilizeUsingTextures() {
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+ return;
+ }
+ EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ eglBase.getEglBaseContext()));
+ encoder.release();
+ eglBase.release();
+ }
+
+ @SmallTest
+ public static void testInitializeUsingByteBufferReInitilizeUsingTextures() {
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+ return;
+ }
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ null));
+ encoder.release();
+ EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+ eglBase.getEglBaseContext()));
+ encoder.release();
+ eglBase.release();
+ }
+
+ @SmallTest
+ public static void testEncoderUsingByteBuffer() throws InterruptedException {
+ if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+ Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
+ return;
+ }
+
+ final int width = 640;
+ final int height = 480;
+ final int min_size = width * height * 3 / 2;
+ final long presentationTimestampUs = 2;
+
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
+ ByteBuffer[] inputBuffers = encoder.getInputBuffers();
+ assertNotNull(inputBuffers);
+ assertTrue(min_size <= inputBuffers[0].capacity());
+
+ int bufferIndex;
+ do {
+ Thread.sleep(10);
+ bufferIndex = encoder.dequeueInputBuffer();
+ } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
+
+ assertTrue(bufferIndex >= 0);
+ assertTrue(bufferIndex < inputBuffers.length);
+ assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
+
+ OutputBufferInfo info;
+ do {
+ info = encoder.dequeueOutputBuffer();
+ Thread.sleep(10);
+ } while (info == null);
+ assertTrue(info.index >= 0);
+ assertEquals(presentationTimestampUs, info.presentationTimestampUs);
+ assertTrue(info.buffer.capacity() > 0);
+ encoder.releaseOutputBuffer(info.index);
+
+ encoder.release();
+ }
+
+ @SmallTest
+ public static void testEncoderUsingTextures() throws InterruptedException {
+ if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+ Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+ return;
+ }
+
+ final int width = 640;
+ final int height = 480;
+ final long presentationTs = 2;
+
+ final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglOesBase.createDummyPbufferSurface();
+ eglOesBase.makeCurrent();
+ int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+  // TODO(perkj): This test is weak since we don't fill the texture with valid data with correct
+  // width and height and verify the encoded data. Fill the OES texture and figure out a way to
+  // verify that the output makes sense.
+
+ MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+ assertTrue(encoder.initEncode(
+ MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+ eglOesBase.getEglBaseContext()));
+ assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+ presentationTs));
+ GlUtil.checkNoGLES2Error("encodeTexture");
+
+ // It should be Ok to delete the texture after calling encodeTexture.
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+ OutputBufferInfo info = encoder.dequeueOutputBuffer();
+ while (info == null) {
+ info = encoder.dequeueOutputBuffer();
+ Thread.sleep(20);
+ }
+ assertTrue(info.index != -1);
+ assertTrue(info.buffer.capacity() > 0);
+ assertEquals(presentationTs, info.presentationTimestampUs);
+ encoder.releaseOutputBuffer(info.index);
+
+ encoder.release();
+ eglOesBase.release();
+ }
+}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
index 882fde1875..9e0164d4b8 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -37,8 +37,6 @@ import android.test.suitebuilder.annotation.SmallTest;
import java.nio.ByteBuffer;
-import javax.microedition.khronos.egl.EGL10;
-
public final class SurfaceTextureHelperTest extends ActivityTestCase {
/**
* Mock texture listener with blocking wait functionality.
@@ -99,6 +97,14 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
}
}
+ /** Assert that two integers are close, with difference at most
+ * {@code threshold}. */
+ public static void assertClose(int threshold, int expected, int actual) {
+ if (Math.abs(expected - actual) <= threshold)
+ return;
+ failNotEquals("Not close enough, threshold " + threshold, expected, actual);
+ }
+
/**
* Test normal use by receiving three uniform texture frames. Texture frames are returned as early
* as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
@@ -109,20 +115,21 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
final GlRectDrawer drawer = new GlRectDrawer();
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase =
+ EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@@ -142,7 +149,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Wait for an OES texture to arrive and draw it onto the pixel buffer.
listener.waitForNewFrame();
eglBase.makeCurrent();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
surfaceTextureHelper.returnTextureFrame();
@@ -176,19 +183,20 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(eglBase.getContext());
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(eglBase.getContext(), EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase =
+ EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
@@ -212,7 +220,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Draw the pending texture frame onto the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
- drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+ drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
drawer.release();
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
@@ -240,11 +248,11 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
public static void testDisconnect() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ SurfaceTextureHelper.create(null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.setListener(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
- final EglBase eglBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
@@ -276,7 +284,7 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
@SmallTest
public static void testDisconnectImmediately() {
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT);
+ SurfaceTextureHelper.create(null);
surfaceTextureHelper.disconnect();
}
@@ -292,14 +300,14 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
- SurfaceTextureHelper.create(EGL10.EGL_NO_CONTEXT, handler);
+ SurfaceTextureHelper.create(null, handler);
// Create a mock listener and expect frames to be delivered on |thread|.
final MockTextureListener listener = new MockTextureListener(thread);
surfaceTextureHelper.setListener(listener);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the
// SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
- final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PLAIN);
+ final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglOesBase.makeCurrent();
// Draw a frame onto the SurfaceTexture.
@@ -313,7 +321,119 @@ public final class SurfaceTextureHelperTest extends ActivityTestCase {
// Return the frame from this thread.
surfaceTextureHelper.returnTextureFrame();
+ surfaceTextureHelper.disconnect(handler);
+ }
+
+ /**
+ * Test use SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
+ * received on a thread separate from the test thread and returned after disconnect.
+ */
+ @MediumTest
+ public static void testLateReturnFrameOnSeparateThread() throws InterruptedException {
+ final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(null, handler);
+ // Create a mock listener and expect frames to be delivered on |thread|.
+ final MockTextureListener listener = new MockTextureListener(thread);
+ surfaceTextureHelper.setListener(listener);
+
+ // Create resources for stubbing an OES texture producer. |eglOesBase| has the
+ // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglOesBase.makeCurrent();
+ // Draw a frame onto the SurfaceTexture.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ surfaceTextureHelper.disconnect(handler);
+
+ surfaceTextureHelper.returnTextureFrame();
+ }
+
+ @MediumTest
+ public static void testTexturetoYUV() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
+ // |surfaceTextureHelper| as the target EGLSurface.
+
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ final int red[] = new int[] {79, 144, 185};
+ final int green[] = new int[] {66, 210, 162};
+ final int blue[] = new int[] {161, 117, 158};
+
+ final int ref_y[] = new int[] {81, 180, 168};
+ final int ref_u[] = new int[] {173, 93, 122};
+ final int ref_v[] = new int[] {127, 103, 140};
+
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ // Memory layout: Lines are 16 bytes. First 16 lines are
+ // the Y data. These are followed by 8 lines with 8 bytes of U
+ // data on the left and 8 bytes of V data on the right.
+ //
+ // Offset
+ // 0 YYYYYYYY YYYYYYYY
+ // 16 YYYYYYYY YYYYYYYY
+ // ...
+ // 240 YYYYYYYY YYYYYYYY
+ // 256 UUUUUUUU VVVVVVVV
+ // 272 UUUUUUUU VVVVVVVV
+ // ...
+ // 368 UUUUUUUU VVVVVVVV
+ // 384 buffer end
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
+ surfaceTextureHelper.textureToYUV(buffer, width, height, width,
+ listener.oesTextureId, listener.transformMatrix);
+
+ surfaceTextureHelper.returnTextureFrame();
+
+ // Allow off-by-one differences due to different rounding.
+ while (buffer.position() < width*height) {
+ assertClose(1, buffer.get() & 0xff, ref_y[i]);
+ }
+ while (buffer.hasRemaining()) {
+ if (buffer.position() % width < width/2)
+ assertClose(1, buffer.get() & 0xff, ref_u[i]);
+ else
+ assertClose(1, buffer.get() & 0xff, ref_v[i]);
+ }
+ }
+
surfaceTextureHelper.disconnect();
- thread.quitSafely();
+ eglBase.release();
}
}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
index 47fe780124..341c632b58 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -36,8 +36,6 @@ import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
-import javax.microedition.khronos.egl.EGL10;
-
public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
/**
* List with all possible scaling types.
@@ -111,7 +109,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
}
// Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
- surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+ surfaceViewRenderer.init((EglBase.Context) null, null);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
@@ -134,7 +132,7 @@ public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
public void testFrame1280x720() {
final SurfaceViewRenderer surfaceViewRenderer =
new SurfaceViewRenderer(getInstrumentation().getContext());
- surfaceViewRenderer.init(EGL10.EGL_NO_CONTEXT, null);
+ surfaceViewRenderer.init((EglBase.Context) null, null);
    // Test different rotation degrees, but same rotated size.
for (int rotationDegree : new int[] {0, 90, 180, 270}) {
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
index dbbe5963cd..1b97201a0a 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -29,7 +29,6 @@ package org.webrtc;
import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
-import android.util.Log;
import android.util.Size;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -37,8 +36,6 @@ import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import java.util.HashSet;
import java.util.Set;
-import javax.microedition.khronos.egl.EGL10;
-
@SuppressWarnings("deprecation")
public class VideoCapturerAndroidTest extends ActivityTestCase {
static final String TAG = "VideoCapturerAndroidTest";
@@ -87,8 +84,10 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testCreateAndReleaseUsingTextures() {
+ EglBase eglBase = EglBase.create();
VideoCapturerAndroidTestFixtures.release(
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT));
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext()));
+ eglBase.release();
}
@SmallTest
@@ -108,12 +107,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
+ public void testStartVideoCapturerUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+ eglBase.release();
}
@SmallTest
@@ -151,11 +151,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
- VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+ eglBase.release();
}
@MediumTest
@@ -179,12 +181,14 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@MediumTest
public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
- EGL10.EGL_NO_CONTEXT);
+ eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
getInstrumentation().getContext());
+ eglBase.release();
}
@SmallTest
@@ -195,11 +199,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
- public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
- VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+ eglBase.release();
}
@SmallTest
@@ -215,13 +221,50 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
getInstrumentation().getContext());
+ eglBase.release();
+ }
+
+ @SmallTest
+  // This tests that an error is reported if the camera is already opened
+ // when VideoCapturerAndroid is started.
+ public void testStartWhileCameraAlreadyOpened() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpen(
+ capturer, getInstrumentation().getContext());
+ }
+
+ @SmallTest
+  // This tests that VideoCapturerAndroid can be started, even if the camera is already opened
+ // if the camera is closed while VideoCapturerAndroid is re-trying to start.
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndCloseCamera(
+ capturer, getInstrumentation().getContext());
+ }
+
+ @SmallTest
+  // This tests that VideoCapturerAndroid.stop can be called while VideoCapturerAndroid is
+ // re-trying to start.
+ public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create(deviceName, null);
+ VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndStop(
+ capturer, getInstrumentation().getContext());
}
+
+
@SmallTest
// This test what happens if buffers are returned after the capturer have
// been stopped and restarted. It does not test or use the C++ layer.
@@ -235,11 +278,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@SmallTest
public void testReturnBufferLateUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
String deviceName = CameraEnumerationAndroid.getDeviceName(0);
VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create(deviceName, null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
getInstrumentation().getContext());
+ eglBase.release();
}
@MediumTest
@@ -251,11 +296,45 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
}
- // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@MediumTest
- public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+ public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
final VideoCapturerAndroid capturer =
- VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+ eglBase.release();
+ }
+
+ @MediumTest
+ // This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not
+ // returned to the capturer.
+ public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
+ VideoCapturerAndroidTestFixtures.createCameraEvents();
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
+ eglBase.getEglBaseContext());
+ VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
+ cameraEvents, getInstrumentation().getContext());
+ eglBase.release();
+ }
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutput() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ }
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutputUsingTextures() throws InterruptedException {
+ EglBase eglBase = EglBase.create();
+ VideoCapturerAndroid capturer =
+ VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ eglBase.release();
}
}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index 11b3ce98a0..0b42e33785 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -29,6 +29,7 @@ package org.webrtc;
import android.content.Context;
import android.hardware.Camera;
+import org.webrtc.VideoCapturerAndroidTestFixtures;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
@@ -42,16 +43,32 @@ public class VideoCapturerAndroidTestFixtures {
static class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private Object frameLock = 0;
+ private int width = 0;
+ private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
++framesRendered;
+ width = frame.rotatedWidth();
+ height = frame.rotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
+ public int frameWidth() {
+ synchronized (frameLock) {
+ return width;
+ }
+ }
+
+ public int frameHeight() {
+ synchronized (frameLock) {
+ return height;
+ }
+ }
+
public int WaitForNextFrameToRender() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@@ -102,11 +119,11 @@ public class VideoCapturerAndroidTestFixtures {
}
@Override
- public void onByteBufferFrameCaptured(byte[] frame, int length, int width, int height,
- int rotation, long timeStamp) {
+ public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
+ long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
- frameSize = length;
+ frameSize = frame.length;
frameWidth = width;
frameHeight = height;
timestamps.add(timeStamp);
@@ -115,7 +132,8 @@ public class VideoCapturerAndroidTestFixtures {
}
@Override
public void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timeStamp) {
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timeStamp) {
synchronized (frameLock) {
++framesCaptured;
frameWidth = width;
@@ -174,9 +192,20 @@ public class VideoCapturerAndroidTestFixtures {
VideoCapturerAndroid.CameraEventsHandler {
public boolean onCameraOpeningCalled;
public boolean onFirstFrameAvailableCalled;
+ public final Object onCameraFreezedLock = new Object();
+ private String onCameraFreezedDescription;
@Override
- public void onCameraError(String errorDescription) { }
+ public void onCameraError(String errorDescription) {
+ }
+
+ @Override
+ public void onCameraFreezed(String errorDescription) {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedDescription = errorDescription;
+ onCameraFreezedLock.notifyAll();
+ }
+ }
@Override
public void onCameraOpening(int cameraId) {
@@ -190,6 +219,13 @@ public class VideoCapturerAndroidTestFixtures {
@Override
public void onCameraClosed() { }
+
+ public String WaitForCameraFreezed() throws InterruptedException {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedLock.wait();
+ return onCameraFreezedDescription;
+ }
+ }
}
static public CameraEvents createCameraEvents() {
@@ -275,8 +311,8 @@ public class VideoCapturerAndroidTestFixtures {
assertTrue(observer.WaitForCapturerToStart());
observer.WaitForNextCapturedFrame();
capturer.stopCapture();
- for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
capturer.dispose();
@@ -296,9 +332,10 @@ public class VideoCapturerAndroidTestFixtures {
// Make sure camera is started and then stop it.
assertTrue(observer.WaitForCapturerToStart());
capturer.stopCapture();
- for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
+
// We can't change |capturer| at this point, but we should not crash.
capturer.switchCamera(null);
capturer.onOutputFormatRequest(640, 480, 15);
@@ -357,17 +394,90 @@ public class VideoCapturerAndroidTestFixtures {
if (capturer.isCapturingToTexture()) {
assertEquals(0, observer.frameSize());
} else {
- assertEquals(format.frameSize(), observer.frameSize());
+ assertTrue(format.frameSize() <= observer.frameSize());
}
capturer.stopCapture();
- for (long timestamp : observer.getCopyAndResetListOftimeStamps()) {
- capturer.returnBuffer(timestamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
}
capturer.dispose();
assertTrue(capturer.isReleased());
}
+ static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturer.getCameraThreadHandler().post(new Runnable() {
+ @Override public void run() {
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ }
+
+ static public void startWhileCameraIsAlreadyOpen(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+
+ if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+ // The first opened camera client will be evicted.
+ assertTrue(observer.WaitForCapturerToStart());
+ capturer.stopCapture();
+ } else {
+ assertFalse(observer.WaitForCapturerToStart());
+ }
+
+ capturer.dispose();
+ camera.release();
+ }
+
+ static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ waitUntilIdle(capturer);
+
+ camera.release();
+
+ // Make sure camera is started and first frame is received and then stop it.
+ assertTrue(observer.WaitForCapturerToStart());
+ observer.WaitForNextCapturedFrame();
+ capturer.stopCapture();
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
+ }
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ }
+
+ static public void startWhileCameraIsAlreadyOpenAndStop(
+ VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+ Camera camera = Camera.open(capturer.getCurrentCameraId());
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ capturer.stopCapture();
+ capturer.dispose();
+ assertTrue(capturer.isReleased());
+ camera.release();
+ }
+
static public void returnBufferLate(VideoCapturerAndroid capturer,
Context appContext) throws InterruptedException {
FakeCapturerObserver observer = new FakeCapturerObserver();
@@ -387,9 +497,8 @@ public class VideoCapturerAndroidTestFixtures {
capturer.startCapture(format.width, format.height, format.maxFramerate,
appContext, observer);
observer.WaitForCapturerToStart();
-
- for (Long timeStamp : listOftimestamps) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
observer.WaitForNextCapturedFrame();
@@ -397,9 +506,10 @@ public class VideoCapturerAndroidTestFixtures {
listOftimestamps = observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
- for (Long timeStamp : listOftimestamps) {
- capturer.returnBuffer(timeStamp);
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
}
+
capturer.dispose();
assertTrue(capturer.isReleased());
}
@@ -410,6 +520,7 @@ public class VideoCapturerAndroidTestFixtures {
final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
final VideoTrack track = factory.createVideoTrack("dummy", source);
final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+
track.addRenderer(new VideoRenderer(renderer));
// Wait for at least one frame that has not been returned.
assertFalse(renderer.waitForPendingFrames().isEmpty());
@@ -420,9 +531,7 @@ public class VideoCapturerAndroidTestFixtures {
track.dispose();
source.dispose();
factory.dispose();
-
- // The pending frames should keep the JNI parts and |capturer| alive.
- assertFalse(capturer.isReleased());
+ assertTrue(capturer.isReleased());
// Return the frame(s), on a different thread out of spite.
final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
@@ -436,8 +545,71 @@ public class VideoCapturerAndroidTestFixtures {
});
returnThread.start();
returnThread.join();
+ }
+
+ static public void cameraFreezedEventOnBufferStarvationUsingTextures(
+ VideoCapturerAndroid capturer,
+ CameraEvents events, Context appContext) throws InterruptedException {
+ assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());
- // Check that frames have successfully returned. This will cause |capturer| to be released.
+ final List<CaptureFormat> formats = capturer.getSupportedFormats();
+ final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+ final FakeCapturerObserver observer = new FakeCapturerObserver();
+ capturer.startCapture(format.width, format.height, format.maxFramerate,
+ appContext, observer);
+ // Make sure camera is started.
+ assertTrue(observer.WaitForCapturerToStart());
+ // Since we don't return the buffer, we should get a starvation message if we are
+ // capturing to a texture.
+ assertEquals("Camera failure. Client must return video buffers.",
+ events.WaitForCameraFreezed());
+
+ capturer.stopCapture();
+ if (capturer.isCapturingToTexture()) {
+ capturer.surfaceHelper.returnTextureFrame();
+ }
+
+ capturer.dispose();
assertTrue(capturer.isReleased());
}
+
+ static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+ RendererCallbacks renderer = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(renderer));
+ assertTrue(renderer.WaitForNextFrameToRender() > 0);
+
+ final int startWidth = renderer.frameWidth();
+ final int startHeight = renderer.frameHeight();
+ final int frameRate = 30;
+ final int scaledWidth = startWidth / 2;
+ final int scaledHeight = startHeight / 2;
+
+ // Request the captured frames to be scaled.
+ capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
+
+ boolean gotExpectedResolution = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ renderer.WaitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedResolution = (renderer.frameWidth() == scaledWidth
+ && renderer.frameHeight() == scaledHeight);
+ } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+ source.stop();
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+
+ assertTrue(gotExpectedResolution);
+ }
+
}