author     Android Chromium Automerger <chromium-automerger@android>  2014-09-16 12:34:40 +0000
committer  Android Chromium Automerger <chromium-automerger@android>  2014-09-16 12:34:40 +0000
commit     5ff815fa882d6fc28261f401b7e9ce8594e243d0
tree       c7a57cf6ec9e17117bfeaede910b72afa3895288
parent     2ed33ddf445fd4c2b01069590cf255605724bb26
parent     1f59bcb2ae6b867fb2f52ff4654b137f98b30536
Merge third_party/libjingle/source/talk from https://chromium.googlesource.com/external/webrtc/trunk/talk.git at 1f59bcb2ae6b867fb2f52ff4654b137f98b30536
This commit was generated by merge_from_chromium.py.

Change-Id: Ic6f021ebece4c960a234811c11205565441e01dc
-rw-r--r--  app/webrtc/java/android/org/webrtc/VideoRendererGui.java        | 273
-rw-r--r--  app/webrtc/java/jni/peerconnection_jni.cc                       | 596
-rw-r--r--  app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java      | 261
-rw-r--r--  app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java      |  37
-rw-r--r--  app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java       |   6
-rw-r--r--  app/webrtc/java/src/org/webrtc/VideoRenderer.java               |  68
-rw-r--r--  app/webrtc/peerconnection_unittest.cc                           |  10
-rw-r--r--  app/webrtc/statstypes.h                                         |   1
-rw-r--r--  examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java |  11
9 files changed, 992 insertions(+), 271 deletions(-)
diff --git a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index 439f942..c3bf7a0 100644
--- a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -37,6 +37,10 @@ import java.util.concurrent.LinkedBlockingQueue;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLContext;
+import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
@@ -54,6 +58,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private static VideoRendererGui instance = null;
private static final String TAG = "VideoRendererGui";
private GLSurfaceView surface;
+ private static EGLContext eglContext = null;
// Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
// If true then for every newly created yuv image renderer createTexture()
// should be called. The variable is accessed on multiple threads and
@@ -61,7 +66,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private boolean onSurfaceCreatedCalled;
// List of yuv renderers.
private ArrayList<YuvImageRenderer> yuvImageRenderers;
- private int program;
+ private int yuvProgram;
+ private int oesProgram;
private final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n" +
@@ -73,7 +79,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
" interp_tc = in_tc;\n" +
"}\n";
- private final String FRAGMENT_SHADER_STRING =
+ private final String YUV_FRAGMENT_SHADER_STRING =
"precision mediump float;\n" +
"varying vec2 interp_tc;\n" +
"\n" +
@@ -91,6 +97,19 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
" y + 1.77 * u, 1);\n" +
"}\n";
+
+ private static final String OES_FRAGMENT_SHADER_STRING =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 interp_tc;\n" +
+ "\n" +
+ "uniform samplerExternalOES oes_tex;\n" +
+ "\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n" +
+ "}\n";
+
+
private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface;
// Create an OpenGL ES 2.0 context.
@@ -124,23 +143,46 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return buffer;
}
- // Compile & attach a |type| shader specified by |source| to |program|.
- private static void addShaderTo(
- int type, String source, int program) {
+ private int loadShader(int shaderType, String source) {
int[] result = new int[] {
GLES20.GL_FALSE
};
- int shader = GLES20.glCreateShader(type);
+ int shader = GLES20.glCreateShader(shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
- abortUnless(result[0] == GLES20.GL_TRUE,
- GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
- GLES20.glAttachShader(program, shader);
- GLES20.glDeleteShader(shader);
+ if (result[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":" +
+ GLES20.glGetShaderInfoLog(shader));
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ checkNoGLES2Error();
+ return shader;
+}
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ int program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {
+ GLES20.GL_FALSE
+ };
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: " +
+ GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
checkNoGLES2Error();
- }
+ return program;
+}
/**
* Class used to display stream of YUV420 frames at particular location
@@ -149,9 +191,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/
private static class YuvImageRenderer implements VideoRenderer.Callbacks {
private GLSurfaceView surface;
- private int program;
+ private int id;
+ private int yuvProgram;
+ private int oesProgram;
private FloatBuffer textureVertices;
private int[] yuvTextures = { -1, -1, -1 };
+ private int oesTexture = -1;
+ private float[] stMatrix = new float[16];
// Render frame queue - accessed by two threads. renderFrame() call does
// an offer (writing I420Frame to render) and early-returns (recording
@@ -159,8 +205,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// copies frame to texture and then removes it from a queue using poll().
LinkedBlockingQueue<I420Frame> frameToRenderQueue;
// Local copy of incoming video frame.
- private I420Frame frameToRender;
- // Flag if renderFrame() was ever called
+ private I420Frame yuvFrameToRender;
+ private I420Frame textureFrameToRender;
+ // Type of video frame used for recent frame rendering.
+ private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
+ private RendererType rendererType;
+ // Flag if renderFrame() was ever called.
boolean seenFrame;
// Total number of video frames received in renderFrame() call.
private int framesReceived;
@@ -174,7 +224,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Time in ns spent in draw() function.
private long drawTimeNs;
// Time in ns spent in renderFrame() function - including copying frame
- // data to rendering planes
+ // data to rendering planes.
private long copyTimeNs;
// Texture Coordinates mapping the entire texture.
@@ -184,10 +234,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
});
private YuvImageRenderer(
- GLSurfaceView surface,
+ GLSurfaceView surface, int id,
int x, int y, int width, int height) {
- Log.v(TAG, "YuvImageRenderer.Create");
+ Log.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
+ this.id = id;
frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
// Create texture vertices.
float xLeft = (x - 50) / 50.0f;
@@ -203,11 +254,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
textureVertices = directNativeFloatBuffer(textureVeticesFloat);
}
- private void createTextures(int program) {
- Log.v(TAG, " YuvImageRenderer.createTextures");
- this.program = program;
+ private void createTextures(int yuvProgram, int oesProgram) {
+ Log.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
+ Thread.currentThread().getId());
+ this.yuvProgram = yuvProgram;
+ this.oesProgram = oesProgram;
- // Generate 3 texture ids for Y/U/V and place them into |textures|.
+ // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
GLES20.glGenTextures(3, yuvTextures, 0);
for (int i = 0; i < 3; i++) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@@ -227,38 +280,76 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
private void draw() {
- long now = System.nanoTime();
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
}
+ long now = System.nanoTime();
+
I420Frame frameFromQueue;
synchronized (frameToRenderQueue) {
frameFromQueue = frameToRenderQueue.peek();
if (frameFromQueue != null && startTimeNs == -1) {
startTimeNs = now;
}
- for (int i = 0; i < 3; ++i) {
- int w = (i == 0) ? frameToRender.width : frameToRender.width / 2;
- int h = (i == 0) ? frameToRender.height : frameToRender.height / 2;
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+
+ if (rendererType == RendererType.RENDERER_YUV) {
+ // YUV textures rendering.
+ GLES20.glUseProgram(yuvProgram);
+
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ if (frameFromQueue != null) {
+ int w = (i == 0) ?
+ frameFromQueue.width : frameFromQueue.width / 2;
+ int h = (i == 0) ?
+ frameFromQueue.height : frameFromQueue.height / 2;
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
+ w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
+ frameFromQueue.yuvPlanes[i]);
+ }
+ }
+ } else {
+ // External texture rendering.
+ GLES20.glUseProgram(oesProgram);
+
if (frameFromQueue != null) {
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
- w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
- frameFromQueue.yuvPlanes[i]);
+ oesTexture = frameFromQueue.textureId;
+ if (frameFromQueue.textureObject instanceof SurfaceTexture) {
+ SurfaceTexture surfaceTexture =
+ (SurfaceTexture) frameFromQueue.textureObject;
+ surfaceTexture.updateTexImage();
+ surfaceTexture.getTransformMatrix(stMatrix);
+ }
}
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTexture);
}
+
if (frameFromQueue != null) {
frameToRenderQueue.poll();
}
}
- int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
+
+ if (rendererType == RendererType.RENDERER_YUV) {
+ GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "y_tex"), 0);
+ GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "u_tex"), 1);
+ GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "v_tex"), 2);
+ }
+
+ int posLocation = GLES20.glGetAttribLocation(yuvProgram, "in_pos");
+ if (posLocation == -1) {
+ throw new RuntimeException("Could not get attrib location for in_pos");
+ }
GLES20.glEnableVertexAttribArray(posLocation);
GLES20.glVertexAttribPointer(
posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);
- int texLocation = GLES20.glGetAttribLocation(program, "in_tc");
+ int texLocation = GLES20.glGetAttribLocation(yuvProgram, "in_tc");
+ if (texLocation == -1) {
+ throw new RuntimeException("Could not get attrib location for in_tc");
+ }
GLES20.glEnableVertexAttribArray(texLocation);
GLES20.glVertexAttribPointer(
texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
@@ -273,7 +364,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
if (frameFromQueue != null) {
framesRendered++;
drawTimeNs += (System.nanoTime() - now);
- if ((framesRendered % 150) == 0) {
+ if ((framesRendered % 90) == 0) {
logStatistics();
}
}
@@ -281,12 +372,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private void logStatistics() {
long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
- Log.v(TAG, "Frames received: " + framesReceived + ". Dropped: " +
- framesDropped + ". Rendered: " + framesRendered);
+ Log.d(TAG, "ID: " + id + ". Type: " + rendererType +
+ ". Frames received: " + framesReceived +
+ ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
- Log.v(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
+ Log.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
- Log.v(TAG, "Draw time: " +
+ Log.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
}
@@ -294,16 +386,18 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void setSize(final int width, final int height) {
- Log.v(TAG, "YuvImageRenderer.setSize: " + width + " x " + height);
+ Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
+ width + " x " + height);
int[] strides = { width, width / 2, width / 2 };
// Frame re-allocation need to be synchronized with copying
// frame to textures in draw() function to avoid re-allocating
// the frame while it is being copied.
synchronized (frameToRenderQueue) {
- // Clear rendering queue
+ // Clear rendering queue.
frameToRenderQueue.poll();
- // Re-allocate / allocate the frame
- frameToRender = new I420Frame(width, height, strides, null);
+ // Re-allocate / allocate the frame.
+ yuvFrameToRender = new I420Frame(width, height, strides, null);
+ textureFrameToRender = new I420Frame(width, height, null, -1);
}
}
@@ -311,24 +405,26 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public synchronized void renderFrame(I420Frame frame) {
long now = System.nanoTime();
framesReceived++;
- // Check input frame parameters.
- if (!(frame.yuvStrides[0] == frame.width &&
- frame.yuvStrides[1] == frame.width / 2 &&
- frame.yuvStrides[2] == frame.width / 2)) {
- Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
- frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
- return;
- }
// Skip rendering of this frame if setSize() was not called.
- if (frameToRender == null) {
+ if (yuvFrameToRender == null || textureFrameToRender == null) {
framesDropped++;
return;
}
- // Check incoming frame dimensions
- if (frame.width != frameToRender.width ||
- frame.height != frameToRender.height) {
- throw new RuntimeException("Wrong frame size " +
- frame.width + " x " + frame.height);
+ // Check input frame parameters.
+ if (frame.yuvFrame) {
+ if (!(frame.yuvStrides[0] == frame.width &&
+ frame.yuvStrides[1] == frame.width / 2 &&
+ frame.yuvStrides[2] == frame.width / 2)) {
+ Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
+ frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+ return;
+ }
+ // Check incoming frame dimensions.
+ if (frame.width != yuvFrameToRender.width ||
+ frame.height != yuvFrameToRender.height) {
+ throw new RuntimeException("Wrong frame size " +
+ frame.width + " x " + frame.height);
+ }
}
if (frameToRenderQueue.size() > 0) {
@@ -336,20 +432,36 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
framesDropped++;
return;
}
- frameToRender.copyFrom(frame);
+
+ // Create a local copy of the frame.
+ if (frame.yuvFrame) {
+ yuvFrameToRender.copyFrom(frame);
+ rendererType = RendererType.RENDERER_YUV;
+ frameToRenderQueue.offer(yuvFrameToRender);
+ } else {
+ textureFrameToRender.copyFrom(frame);
+ rendererType = RendererType.RENDERER_TEXTURE;
+ frameToRenderQueue.offer(textureFrameToRender);
+ }
copyTimeNs += (System.nanoTime() - now);
- frameToRenderQueue.offer(frameToRender);
seenFrame = true;
+
+ // Request rendering.
surface.requestRender();
}
+
}
/** Passes GLSurfaceView to video renderer. */
public static void setView(GLSurfaceView surface) {
- Log.v(TAG, "VideoRendererGui.setView");
+ Log.d(TAG, "VideoRendererGui.setView");
instance = new VideoRendererGui(surface);
}
+ public static EGLContext getEGLContext() {
+ return eglContext;
+ }
+
/**
* Creates VideoRenderer with top left corner at (x, y) and resolution
* (width, height). All parameters are in percentage of screen resolution.
@@ -360,6 +472,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return new VideoRenderer(javaGuiRenderer);
}
+ public static VideoRenderer.Callbacks createGuiRenderer(
+ int x, int y, int width, int height) {
+ return create(x, y, width, height);
+ }
+
/**
* Creates VideoRenderer.Callbacks with top left corner at (x, y) and
* resolution (width, height). All parameters are in percentage of
@@ -379,7 +496,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
"Attempt to create yuv renderer before setting GLSurfaceView");
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
- instance.surface, x, y, width, height);
+ instance.surface, instance.yuvImageRenderers.size(),
+ x, y, width, height);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@@ -388,7 +506,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() {
public void run() {
- yuvImageRenderer.createTextures(instance.program);
+ yuvImageRenderer.createTextures(
+ instance.yuvProgram, instance.oesProgram);
countDownLatch.countDown();
}
});
@@ -407,41 +526,31 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
- Log.v(TAG, "VideoRendererGui.onSurfaceCreated");
-
- // Create program.
- program = GLES20.glCreateProgram();
- addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program);
- addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program);
-
- GLES20.glLinkProgram(program);
- int[] result = new int[] {
- GLES20.GL_FALSE
- };
- result[0] = GLES20.GL_FALSE;
- GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
- abortUnless(result[0] == GLES20.GL_TRUE,
- GLES20.glGetProgramInfoLog(program));
- GLES20.glUseProgram(program);
+ Log.d(TAG, "VideoRendererGui.onSurfaceCreated");
+ // Store render EGL context
+ eglContext = EGL14.eglGetCurrentContext();
+ Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
- GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
- GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
- GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);
+ // Create YUV and OES programs.
+ yuvProgram = createProgram(VERTEX_SHADER_STRING,
+ YUV_FRAGMENT_SHADER_STRING);
+ oesProgram = createProgram(VERTEX_SHADER_STRING,
+ OES_FRAGMENT_SHADER_STRING);
synchronized (yuvImageRenderers) {
// Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
- yuvImageRenderer.createTextures(program);
+ yuvImageRenderer.createTextures(yuvProgram, oesProgram);
}
onSurfaceCreatedCalled = true;
}
checkNoGLES2Error();
- GLES20.glClearColor(0.0f, 0.0f, 0.3f, 1.0f);
+ GLES20.glClearColor(0.0f, 0.3f, 0.1f, 1.0f);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
- Log.v(TAG, "VideoRendererGui.onSurfaceChanged: " +
+ Log.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " ");
GLES20.glViewport(0, 0, width, height);
}
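The hunks above turn VideoRendererGui into a dual-path renderer: createGuiRenderer() hands out VideoRenderer.Callbacks, and getEGLContext() exposes the GL context captured in onSurfaceCreated() so the hardware decoder can share it for SurfaceTexture/OES output. A minimal usage sketch follows; the activity class, layout handling, and renderer placement are illustrative assumptions, not part of this change.

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;

// Illustrative only; class name and view setup are assumptions.
public class VideoCallActivity extends Activity {
  private VideoRenderer.Callbacks remoteRender;
  private VideoRenderer.Callbacks localRender;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    GLSurfaceView videoView = new GLSurfaceView(this);
    setContentView(videoView);

    // Hand the view to the renderer; shader programs and textures are created
    // later, on the GL thread, in onSurfaceCreated().
    VideoRendererGui.setView(videoView);

    // Positions and sizes are percentages of the view: (x, y, width, height).
    remoteRender = VideoRendererGui.createGuiRenderer(0, 0, 100, 100);
    localRender = VideoRendererGui.createGuiRenderer(70, 5, 25, 25);

    // VideoRendererGui.getEGLContext() stays null until onSurfaceCreated() has
    // run; once set, it is the context the MediaCodec decoder path shares so
    // decoded frames can arrive as SurfaceTexture-backed OES textures instead
    // of I420 byte buffers.
  }
}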
diff --git a/app/webrtc/java/jni/peerconnection_jni.cc b/app/webrtc/java/jni/peerconnection_jni.cc
index bfb5564..83a80a0 100644
--- a/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/app/webrtc/java/jni/peerconnection_jni.cc
@@ -81,20 +81,25 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/ssladapter.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/voice_engine/include/voe_base.h"
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+#include <android/log.h>
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::LogcatTraceContext;
using webrtc::RTPFragmentationHeader;
+using webrtc::TextureVideoFrame;
+using webrtc::TickTime;
using webrtc::VideoCodec;
#endif
@@ -112,6 +117,7 @@ using webrtc::DataChannelInit;
using webrtc::DataChannelInterface;
using webrtc::DataChannelObserver;
using webrtc::IceCandidateInterface;
+using webrtc::NativeHandle;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
using webrtc::MediaStreamInterface;
@@ -152,6 +158,12 @@ static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+#endif
+
+
// Return thread ID as a string.
static std::string GetThreadId() {
char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
@@ -257,10 +269,13 @@ class ClassReferenceHolder {
LoadClass(jni, "org/webrtc/DataChannel$Init");
LoadClass(jni, "org/webrtc/DataChannel$State");
LoadClass(jni, "org/webrtc/IceCandidate");
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+ LoadClass(jni, "android/graphics/SurfaceTexture");
+ LoadClass(jni, "android/opengl/EGLContext");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
#endif
LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/MediaStream");
@@ -349,14 +364,14 @@ jclass FindClass(JNIEnv* jni, const char* name) {
jclass GetObjectClass(JNIEnv* jni, jobject object) {
jclass c = jni->GetObjectClass(object);
CHECK_EXCEPTION(jni) << "error during GetObjectClass";
- CHECK(c);
+ CHECK(c) << "GetObjectClass returned NULL";
return c;
}
jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
jobject o = jni->GetObjectField(object, id);
CHECK_EXCEPTION(jni) << "error during GetObjectField";
- CHECK(o);
+ CHECK(o) << "GetObjectField returned NULL";
return o;
}
@@ -1054,6 +1069,38 @@ class VideoRendererWrapper : public VideoRendererInterface {
scoped_ptr<cricket::VideoRenderer> renderer_;
};
+// Wrapper for texture object in TextureVideoFrame.
+class NativeHandleImpl : public NativeHandle {
+ public:
+ NativeHandleImpl() :
+ ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
+ virtual ~NativeHandleImpl() {}
+ virtual int32_t AddRef() {
+ return ++ref_count_;
+ }
+ virtual int32_t Release() {
+ return --ref_count_;
+ }
+ virtual void* GetHandle() {
+ return texture_object_;
+ }
+ int GetTextureId() {
+ return texture_id_;
+ }
+ void SetTextureObject(void *texture_object, int texture_id) {
+ texture_object_ = reinterpret_cast<jobject>(texture_object);
+ texture_id_ = texture_id;
+ }
+ int32_t ref_count() {
+ return ref_count_;
+ }
+
+ private:
+ int32_t ref_count_;
+ jobject texture_object_;
+ int32_t texture_id_;
+};
+
// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
// instance.
class JavaVideoRendererWrapper : public VideoRendererInterface {
@@ -1067,8 +1114,11 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
"(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
j_frame_class_(jni,
FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
- j_frame_ctor_id_(GetMethodID(
+ j_i420_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
+ j_texture_frame_ctor_id_(GetMethodID(
+ jni, *j_frame_class_, "<init>",
+ "(IILjava/lang/Object;I)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni);
}
@@ -1083,14 +1133,20 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
ScopedLocalRefFrame local_ref_frame(jni());
- jobject j_frame = CricketToJavaFrame(frame);
- jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
- CHECK_EXCEPTION(jni());
+ if (frame->GetNativeHandle() != NULL) {
+ jobject j_frame = CricketToJavaTextureFrame(frame);
+ jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+ CHECK_EXCEPTION(jni());
+ } else {
+ jobject j_frame = CricketToJavaI420Frame(frame);
+ jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+ CHECK_EXCEPTION(jni());
+ }
}
private:
// Return a VideoRenderer.I420Frame referring to the data in |frame|.
- jobject CricketToJavaFrame(const cricket::VideoFrame* frame) {
+ jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->GetYPitch();
@@ -1109,10 +1165,21 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jni()->SetObjectArrayElement(planes, 1, u_buffer);
jni()->SetObjectArrayElement(planes, 2, v_buffer);
return jni()->NewObject(
- *j_frame_class_, j_frame_ctor_id_,
+ *j_frame_class_, j_i420_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(), strides, planes);
}
+ // Return a VideoRenderer.I420Frame referring texture object in |frame|.
+ jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+ NativeHandleImpl* handle =
+ reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
+ jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
+ int texture_id = handle->GetTextureId();
+ return jni()->NewObject(
+ *j_frame_class_, j_texture_frame_ctor_id_,
+ frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
+ }
+
JNIEnv* jni() {
return AttachCurrentThreadIfNeeded();
}
@@ -1121,16 +1188,16 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jmethodID j_set_size_id_;
jmethodID j_render_frame_id_;
ScopedGlobalRef<jclass> j_frame_class_;
- jmethodID j_frame_ctor_id_;
+ jmethodID j_i420_frame_ctor_id_;
+ jmethodID j_texture_frame_ctor_id_;
ScopedGlobalRef<jclass> j_byte_buffer_class_;
};
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
// into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
// from this file.
-#include <android/log.h>
//#define TRACK_BUFFER_TIMING
#define TAG "MediaCodecVideo"
#ifdef TRACK_BUFFER_TIMING
@@ -1141,6 +1208,9 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+// Set to false to switch HW video decoder back to byte buffer output.
+#define HW_DECODER_USE_SURFACE true
+
// Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE {
@@ -1156,6 +1226,14 @@ enum COLOR_FORMATTYPE {
// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 500 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+
+static int64_t GetCurrentTimeMs() {
+ return TickTime::Now().Ticks() / 1000000LL;
+}
// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
@@ -1256,11 +1334,20 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
int last_set_fps_; // Last-requested frame rate.
- int frames_received_; // Number of frames received by encoder.
- int frames_dropped_; // Number of frames dropped by encoder.
- int frames_in_queue_; // Number of frames in encoder queue.
- int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
- int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
+ int64_t current_timestamp_us_; // Current frame timestamps in us.
+ int frames_received_; // Number of frames received by encoder.
+ int frames_dropped_; // Number of frames dropped by encoder.
+ int frames_in_queue_; // Number of frames in encoder queue.
+ int64_t start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+ int current_encoding_time_ms_; // Overall encoding time in the current second
+ int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
+ int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
+ std::vector<int32_t> timestamps_; // Video frames timestamp queue.
+ std::vector<int64_t> render_times_ms_; // Video frames render time queue.
+ std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
+ // encoder input.
// Frame size in bytes fed to MediaCodec.
int yuv_size_;
// True only when between a callback_->Encoded() call return a positive value
@@ -1427,7 +1514,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
- ALOGD("InitEncodeOnCodecThread %d x %d", width, height);
+ ALOGD("InitEncodeOnCodecThread %d x %d. Fps: %d", width, height, fps);
if (width == 0) {
width = width_;
@@ -1444,8 +1531,16 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
frames_received_ = 0;
frames_dropped_ = 0;
frames_in_queue_ = 0;
+ current_timestamp_us_ = 0;
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_encoding_time_ms_ = 0;
last_input_timestamp_ms_ = -1;
last_output_timestamp_ms_ = -1;
+ timestamps_.clear();
+ render_times_ms_.clear();
+ frame_rtc_times_ms_.clear();
// We enforce no extra stride/padding in the format creation step.
jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
jni->CallObjectMethod(*j_media_codec_video_encoder_,
@@ -1505,23 +1600,23 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
if (drop_next_input_frame_) {
+ ALOGV("Encoder drop frame - failed callback.");
drop_next_input_frame_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
CHECK(frame_types->size() == 1) << "Unexpected stream count";
- bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
-
CHECK(frame.width() == width_) << "Unexpected resolution change";
CHECK(frame.height() == height_) << "Unexpected resolution change";
+ bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
+
// Check if we accumulated too many frames in encoder input buffers
- // so the encoder latency exceeds 100ms and drop frame if so.
- if (frames_in_queue_ > 0 && last_input_timestamp_ms_ > 0 &&
- last_output_timestamp_ms_ > 0) {
+ // or the encoder latency exceeds 70 ms and drop frame if so.
+ if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
int encoder_latency_ms = last_input_timestamp_ms_ -
last_output_timestamp_ms_;
- if (encoder_latency_ms > 100) {
+ if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
ALOGV("Drop frame - encoder is behind by %d ms. Q size: %d",
encoder_latency_ms, frames_in_queue_);
frames_dropped_++;
@@ -1534,7 +1629,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
CHECK_EXCEPTION(jni);
if (j_input_buffer_index == -1) {
// Video codec falls behind - no input buffer available.
- ALOGV("Drop frame - no input buffers available");
+ ALOGV("Encoder drop frame - no input buffers available");
frames_dropped_++;
return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
}
@@ -1544,7 +1639,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
- frames_received_, j_input_buffer_index, frame.render_time_ms());
+ frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);
jobject j_input_buffer = input_buffers_[j_input_buffer_index];
uint8* yuv_buffer =
@@ -1552,21 +1647,30 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
CHECK_EXCEPTION(jni);
CHECK(yuv_buffer) << "Indirect buffer??";
CHECK(!libyuv::ConvertFromI420(
- frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
- frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
- frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
- yuv_buffer, width_, width_, height_, encoder_fourcc_))
+ frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+ frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+ frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
+ yuv_buffer, width_,
+ width_, height_,
+ encoder_fourcc_))
<< "ConvertFromI420 failed";
- jlong timestamp_us = frame.render_time_ms() * 1000;
- last_input_timestamp_ms_ = frame.render_time_ms();
+ last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
frames_in_queue_++;
+
+ // Save input image timestamps for later output
+ timestamps_.push_back(frame.timestamp());
+ render_times_ms_.push_back(frame.render_time_ms());
+ frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_method_,
key_frame,
j_input_buffer_index,
yuv_size_,
- timestamp_us);
+ current_timestamp_us_);
CHECK_EXCEPTION(jni);
+ current_timestamp_us_ += 1000000 / last_set_fps_;
+
if (!encode_status || !DeliverPendingOutputs(jni)) {
ResetCodec();
return WEBRTC_VIDEO_CODEC_ERROR;
@@ -1610,12 +1714,16 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
}
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
- last_set_bitrate_kbps_ = new_bit_rate;
- last_set_fps_ = frame_rate;
+ if (new_bit_rate > 0) {
+ last_set_bitrate_kbps_ = new_bit_rate;
+ }
+ if (frame_rate > 0) {
+ last_set_fps_ = frame_rate;
+ }
bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_set_rates_method_,
- new_bit_rate,
- frame_rate);
+ last_set_bitrate_kbps_,
+ last_set_fps_);
CHECK_EXCEPTION(jni);
if (!ret) {
ResetCodec();
@@ -1665,8 +1773,9 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
CHECK_EXCEPTION(jni);
- if (IsNull(jni, j_output_buffer_info))
+ if (IsNull(jni, j_output_buffer_info)) {
break;
+ }
int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info);
@@ -1675,31 +1784,62 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
return false;
}
- jlong capture_time_ms =
+ // Get frame timestamps from a queue.
+ last_output_timestamp_ms_ =
GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
1000;
- last_output_timestamp_ms_ = capture_time_ms;
+ int32_t timestamp = timestamps_.front();
+ timestamps_.erase(timestamps_.begin());
+ int64_t render_time_ms = render_times_ms_.front();
+ render_times_ms_.erase(render_times_ms_.begin());
+ int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
+ frame_rtc_times_ms_.front();
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
frames_in_queue_--;
- ALOGV("Encoder got output buffer # %d. TS: %lld. Latency: %lld",
- output_buffer_index, last_output_timestamp_ms_,
- last_input_timestamp_ms_ - last_output_timestamp_ms_);
+ // Extract payload and key frame flag.
int32_t callback_status = 0;
+ jobject j_output_buffer =
+ GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+ bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+ size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+ uint8* payload = reinterpret_cast<uint8_t*>(
+ jni->GetDirectBufferAddress(j_output_buffer));
+ CHECK_EXCEPTION(jni);
+
+ ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
+ " EncTime: %lld",
+ output_buffer_index, payload_size, last_output_timestamp_ms_,
+ last_input_timestamp_ms_ - last_output_timestamp_ms_,
+ frame_encoding_time_ms);
+
+ // Calculate and print encoding statistics - every 3 seconds.
+ current_frames_++;
+ current_bytes_ += payload_size;
+ current_encoding_time_ms_ += frame_encoding_time_ms;
+ int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+ if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+ current_frames_ > 0) {
+ ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
+ " encTime: %d for last %d ms",
+ current_bytes_ * 8 / statistic_time_ms,
+ last_set_bitrate_kbps_,
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
+ current_encoding_time_ms_ / current_frames_, statistic_time_ms);
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_= 0;
+ current_encoding_time_ms_ = 0;
+ }
+
+ // Callback - return encoded frame.
if (callback_) {
- jobject j_output_buffer =
- GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
- bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
- size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
- uint8* payload = reinterpret_cast<uint8_t*>(
- jni->GetDirectBufferAddress(j_output_buffer));
- CHECK_EXCEPTION(jni);
scoped_ptr<webrtc::EncodedImage> image(
new webrtc::EncodedImage(payload, payload_size, payload_size));
image->_encodedWidth = width_;
image->_encodedHeight = height_;
- // Convert capture time to 90 kHz RTP timestamp.
- image->_timeStamp = static_cast<uint32_t>(90 * capture_time_ms);
- image->capture_time_ms_ = capture_time_ms;
+ image->_timeStamp = timestamp;
+ image->capture_time_ms_ = render_time_ms;
image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
image->_completeFrame = true;
@@ -1722,6 +1862,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
callback_status = callback_->Encoded(*image, &info, &header);
}
+ // Return output buffer back to the encoder.
bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_release_output_buffer_method_,
output_buffer_index);
@@ -1731,10 +1872,11 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
return false;
}
- if (callback_status > 0)
+ if (callback_status > 0) {
drop_next_input_frame_ = true;
// Theoretically could handle callback_status<0 here, but unclear what that
// would mean for us.
+ }
}
return true;
@@ -1809,6 +1951,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
explicit MediaCodecVideoDecoder(JNIEnv* jni);
virtual ~MediaCodecVideoDecoder();
+ static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
+
virtual int32_t InitDecode(const VideoCodec* codecSettings,
int32_t numberOfCores) OVERRIDE;
@@ -1834,13 +1978,29 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int32_t InitDecodeOnCodecThread();
int32_t ReleaseOnCodecThread();
int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
+
bool key_frame_required_;
bool inited_;
+ bool use_surface_;
VideoCodec codec_;
I420VideoFrame decoded_image_;
+ NativeHandleImpl native_handle_;
DecodedImageCallback* callback_;
- int frames_received_; // Number of frames received by decoder.
+ int frames_received_; // Number of frames received by decoder.
+ int frames_decoded_; // Number of frames decoded by decoder
+ int64_t start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+ int current_decoding_time_ms_; // Overall decoding time in the current second
+ uint32_t max_pending_frames_; // Maximum number of pending input frames
+ std::vector<int32_t> timestamps_;
+ std::vector<int64_t> ntp_times_ms_;
+ std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
+ // decoder input.
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -1853,6 +2013,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_queue_input_buffer_method_;
jmethodID j_dequeue_output_buffer_method_;
jmethodID j_release_output_buffer_method_;
+ // MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
jfieldID j_output_buffers_field_;
jfieldID j_color_format_field_;
@@ -1860,14 +2021,38 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
+ jfieldID j_surface_texture_field_;
+ jfieldID j_textureID_field_;
+ // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
+ jfieldID j_info_index_field_;
+ jfieldID j_info_offset_field_;
+ jfieldID j_info_size_field_;
+ jfieldID j_info_presentation_timestamp_us_field_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
+ jobject surface_texture_;
+
+ // Render EGL context.
+ static jobject render_egl_context_;
};
+jobject MediaCodecVideoDecoder::render_egl_context_ = NULL;
+
+int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
+ jobject render_egl_context) {
+ if (render_egl_context_) {
+ jni->DeleteGlobalRef(render_egl_context_);
+ }
+ render_egl_context_ = jni->NewGlobalRef(render_egl_context);
+ ALOGD("VideoDecoder EGL context set");
+ return 0;
+}
+
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
key_frame_required_(true),
inited_(false),
+ use_surface_(HW_DECODER_USE_SURFACE),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -1883,9 +2068,9 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
- j_init_decode_method_ = GetMethodID(jni,
- *j_media_codec_video_decoder_class_,
- "initDecode", "(II)Z");
+ j_init_decode_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "initDecode",
+ "(IIZLandroid/opengl/EGLContext;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
@@ -1893,9 +2078,10 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
j_dequeue_output_buffer_method_ = GetMethodID(
- jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", "()I");
+ jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
j_release_output_buffer_method_ = GetMethodID(
- jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z");
+ jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
@@ -1913,6 +2099,22 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+ j_textureID_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "textureID", "I");
+ j_surface_texture_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
+ "Landroid/graphics/SurfaceTexture;");
+
+ jclass j_decoder_output_buffer_info_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
+ j_info_index_field_ = GetFieldID(
+ jni, j_decoder_output_buffer_info_class, "index", "I");
+ j_info_offset_field_ = GetFieldID(
+ jni, j_decoder_output_buffer_info_class, "offset", "I");
+ j_info_size_field_ = GetFieldID(
+ jni, j_decoder_output_buffer_info_class, "size", "I");
+ j_info_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
memset(&codec_, 0, sizeof(codec_));
@@ -1940,6 +2142,7 @@ int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
// Always start with a complete key frame.
key_frame_required_ = true;
frames_received_ = 0;
+ frames_decoded_ = 0;
// Call Java init.
return codec_thread_->Invoke<int32_t>(
@@ -1950,28 +2153,50 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
- ALOGD("InitDecodeOnCodecThread: %d x %d. FPS: %d",
+ ALOGD("InitDecodeOnCodecThread: %d x %d. fps: %d",
codec_.width, codec_.height, codec_.maxFramerate);
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
j_init_decode_method_,
codec_.width,
- codec_.height);
+ codec_.height,
+ use_surface_,
+ render_egl_context_);
CHECK_EXCEPTION(jni);
- if (!success)
+ if (!success) {
return WEBRTC_VIDEO_CODEC_ERROR;
+ }
inited_ = true;
+ max_pending_frames_ = 0;
+ if (use_surface_) {
+ max_pending_frames_ = 1;
+ }
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ timestamps_.clear();
+ ntp_times_ms_.clear();
+ frame_rtc_times_ms_.clear();
+
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
size_t num_input_buffers = jni->GetArrayLength(input_buffers);
-
input_buffers_.resize(num_input_buffers);
for (size_t i = 0; i < num_input_buffers; ++i) {
input_buffers_[i] =
jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
CHECK_EXCEPTION(jni);
}
+
+ if (use_surface_) {
+ jobject surface_texture = GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
+ surface_texture_ = jni->NewGlobalRef(surface_texture);
+ }
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -1981,15 +2206,29 @@ int32_t MediaCodecVideoDecoder::Release() {
}
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
- if (!inited_)
+ if (!inited_) {
return WEBRTC_VIDEO_CODEC_OK;
+ }
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
ScopedLocalRefFrame local_ref_frame(jni);
- for (size_t i = 0; i < input_buffers_.size(); ++i)
+ for (size_t i = 0; i < input_buffers_.size(); i++) {
jni->DeleteGlobalRef(input_buffers_[i]);
+ }
input_buffers_.clear();
+ if (use_surface_) {
+ // Before deleting texture object make sure it is no longer referenced
+ // by any TextureVideoFrame.
+ int32_t waitTimeoutUs = 3000000; // 3 second wait
+ while (waitTimeoutUs > 0 && native_handle_.ref_count() > 0) {
+ ALOGD("Current Texture RefCnt: %d", native_handle_.ref_count());
+ usleep(30000);
+ waitTimeoutUs -= 30000;
+ }
+ ALOGD("TextureRefCnt: %d", native_handle_.ref_count());
+ jni->DeleteGlobalRef(surface_texture_);
+ }
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
CHECK_EXCEPTION(jni);
inited_ = false;
@@ -2052,6 +2291,21 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
+ // Try to drain the decoder and wait until output is not too
+ // much behind the input.
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGV("Wait for output...");
+ if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+ Reset();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGE("Output buffer dequeue timeout");
+ Reset();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
// Get input buffer.
int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
j_dequeue_input_buffer_method_);
@@ -2075,10 +2329,17 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
- ALOGV("Decode frame # %d. Buffer # %d. Size: %d",
+ ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
frames_received_, j_input_buffer_index, inputImage._length);
memcpy(buffer, inputImage._buffer, inputImage._length);
+ // Save input image timestamps for later output.
+ frames_received_++;
+ current_bytes_ += inputImage._length;
+ timestamps_.push_back(inputImage._timeStamp);
+ ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
+ frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
// Feed input to decoder.
jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
@@ -2093,26 +2354,57 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
return WEBRTC_VIDEO_CODEC_ERROR;
}
- // Get output index.
- int j_output_buffer_index =
- jni->CallIntMethod(*j_media_codec_video_decoder_,
- j_dequeue_output_buffer_method_);
- CHECK_EXCEPTION(jni);
- if (j_output_buffer_index < 0) {
- ALOGE("dequeueOutputBuffer error");
+ // Try to drain the decoder
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE("DeliverPendingOutputs error");
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+ JNIEnv* jni, int dequeue_timeout_us) {
+ if (frames_received_ <= frames_decoded_) {
+ // No need to query for output buffers - decoder is drained.
+ return true;
+ }
+ // Get decoder output.
+ jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
+ *j_media_codec_video_decoder_,
+ j_dequeue_output_buffer_method_,
+ dequeue_timeout_us);
+
+ CHECK_EXCEPTION(jni);
+ if (IsNull(jni, j_decoder_output_buffer_info)) {
+ return true;
+ }
+
+ // Extract data from Java DecoderOutputBufferInfo.
+ int output_buffer_index =
+ GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
+ if (output_buffer_index < 0) {
+ ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
+ Reset();
+ return false;
+ }
+ int output_buffer_offset =
+ GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
+ int output_buffer_size =
+ GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
+ CHECK_EXCEPTION(jni);
+
// Extract data from Java ByteBuffer.
jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
jobject output_buffer =
- jni->GetObjectArrayElement(output_buffers, j_output_buffer_index);
- buffer_capacity = jni->GetDirectBufferCapacity(output_buffer);
+ jni->GetObjectArrayElement(output_buffers, output_buffer_index);
uint8_t* payload =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(output_buffer));
CHECK_EXCEPTION(jni);
+ payload += output_buffer_offset;
+ // Get decoded video frame properties.
int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
j_color_format_field_);
int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
@@ -2120,52 +2412,100 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
j_slice_height_field_);
- if (buffer_capacity < width * height * 3 / 2) {
- ALOGE("Insufficient output buffer capacity: %d", buffer_capacity);
+ int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_textureID_field_);
+ if (!use_surface_ && output_buffer_size < width * height * 3 / 2) {
+ ALOGE("Insufficient output buffer size: %d", output_buffer_size);
Reset();
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- ALOGV("Decoder got output buffer %d x %d. %d x %d. Color: 0x%x. Size: %d",
- width, height, stride, slice_height, color_format, buffer_capacity);
-
- if (color_format == COLOR_FormatYUV420Planar) {
- decoded_image_.CreateFrame(
- stride * slice_height, payload,
- (stride * slice_height) / 4, payload + (stride * slice_height),
- (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
- width, height,
- stride, stride / 2, stride / 2);
- } else {
- // All other supported formats are nv12.
- decoded_image_.CreateEmptyFrame(width, height, width, width / 2, width / 2);
- libyuv::NV12ToI420(
- payload, stride,
- payload + stride * slice_height, stride,
- decoded_image_.buffer(webrtc::kYPlane),
- decoded_image_.stride(webrtc::kYPlane),
- decoded_image_.buffer(webrtc::kUPlane),
- decoded_image_.stride(webrtc::kUPlane),
- decoded_image_.buffer(webrtc::kVPlane),
- decoded_image_.stride(webrtc::kVPlane),
- width, height);
+ return false;
+ }
+
+ // Get frame timestamps from a queue.
+ int32_t timestamp = timestamps_.front();
+ timestamps_.erase(timestamps_.begin());
+ int64_t ntp_time_ms = ntp_times_ms_.front();
+ ntp_times_ms_.erase(ntp_times_ms_.begin());
+ int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
+ frame_rtc_times_ms_.front();
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+
+ ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
+ " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
+ color_format, output_buffer_size, frame_decoding_time_ms);
+
+ // Create yuv420 frame.
+ if (!use_surface_) {
+ if (color_format == COLOR_FormatYUV420Planar) {
+ decoded_image_.CreateFrame(
+ stride * slice_height, payload,
+ (stride * slice_height) / 4, payload + (stride * slice_height),
+ (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
+ width, height,
+ stride, stride / 2, stride / 2);
+ } else {
+ // All other supported formats are nv12.
+ decoded_image_.CreateEmptyFrame(width, height, width,
+ width / 2, width / 2);
+ libyuv::NV12ToI420(
+ payload, stride,
+ payload + stride * slice_height, stride,
+ decoded_image_.buffer(webrtc::kYPlane),
+ decoded_image_.stride(webrtc::kYPlane),
+ decoded_image_.buffer(webrtc::kUPlane),
+ decoded_image_.stride(webrtc::kUPlane),
+ decoded_image_.buffer(webrtc::kVPlane),
+ decoded_image_.stride(webrtc::kVPlane),
+ width, height);
+ }
}
// Return output buffer back to codec.
- success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
+ bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
j_release_output_buffer_method_,
- j_output_buffer_index);
+ output_buffer_index,
+ use_surface_);
CHECK_EXCEPTION(jni);
if (!success) {
ALOGE("releaseOutputBuffer error");
Reset();
- return WEBRTC_VIDEO_CODEC_ERROR;
+ return false;
+ }
+
+ // Calculate and print decoding statistics - every 3 seconds.
+ frames_decoded_++;
+ current_frames_++;
+ current_decoding_time_ms_ += frame_decoding_time_ms;
+ int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+ if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+ current_frames_ > 0) {
+ ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
+ current_bytes_ * 8 / statistic_time_ms,
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
+ current_decoding_time_ms_ / current_frames_, statistic_time_ms);
+ start_time_ms_ = GetCurrentTimeMs();
+ current_frames_ = 0;
+ current_bytes_= 0;
+ current_decoding_time_ms_ = 0;
+ }
+
+ // Callback - output decoded frame.
+ int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
+ if (use_surface_) {
+ native_handle_.SetTextureObject(surface_texture_, texture_id);
+ TextureVideoFrame texture_image(
+ &native_handle_, width, height, timestamp, 0);
+ texture_image.set_ntp_time_ms(ntp_time_ms);
+ callback_status = callback_->Decoded(texture_image);
+ } else {
+ decoded_image_.set_timestamp(timestamp);
+ decoded_image_.set_ntp_time_ms(ntp_time_ms);
+ callback_status = callback_->Decoded(decoded_image_);
+ }
+ if (callback_status > 0) {
+ ALOGE("callback error");
}
- // Callback.
- decoded_image_.set_timestamp(inputImage._timeStamp);
- decoded_image_.set_ntp_time_ms(inputImage.ntp_time_ms_);
- frames_received_++;
- return callback_->Decoded(decoded_image_);
+ return true;
}
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
@@ -2183,6 +2523,19 @@ int32_t MediaCodecVideoDecoder::Reset() {
}
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (!inited_) {
+ return;
+ }
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ CHECK(!msg->message_id) << "Unexpected message!";
+ CHECK(!msg->pdata) << "Unexpected message!";
+ CheckOnCodecThread();
+
+ DeliverPendingOutputs(jni, 0);
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
class MediaCodecVideoDecoderFactory
@@ -2226,7 +2579,7 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
delete decoder;
}
-#endif // ANDROID
+#endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
} // anonymous namespace
@@ -2403,13 +2756,20 @@ JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
JNIEnv* jni, jclass, jobject context,
- jboolean initialize_audio, jboolean initialize_video) {
+ jboolean initialize_audio, jboolean initialize_video,
+ jobject render_egl_context) {
CHECK(g_jvm) << "JNI_OnLoad failed to run?";
bool failure = false;
+ if (!factory_static_initialized) {
+ if (initialize_video)
+ failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
+ if (initialize_audio)
+ failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
+ factory_static_initialized = true;
+ }
if (initialize_video)
- failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
- if (initialize_audio)
- failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
+ failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni,
+ render_egl_context);
return !failure;
}
#endif // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
@@ -2456,7 +2816,7 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
<< "Failed to start threads";
scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
encoder_factory.reset(new MediaCodecVideoEncoderFactory());
decoder_factory.reset(new MediaCodecVideoDecoderFactory());
#endif
diff --git a/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index a6a059e..fd78d27 100644
--- a/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -27,14 +27,24 @@
package org.webrtc;
+import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
-import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
+import android.view.Surface;
+
import java.nio.ByteBuffer;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
@@ -49,7 +59,7 @@ class MediaCodecVideoDecoder {
private static final String TAG = "MediaCodecVideoDecoder";
- private static final int DEQUEUE_TIMEOUT = 1000000; // 1 sec timeout.
+ private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
@@ -74,12 +84,21 @@ class MediaCodecVideoDecoder {
private int height;
private int stride;
private int sliceHeight;
+ private boolean useSurface;
+ private int textureID = -1;
+ private SurfaceTexture surfaceTexture = null;
+ private Surface surface = null;
+ private float[] stMatrix = new float[16];
+ private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
private MediaCodecVideoDecoder() { }
// Helper struct for findVp8HwDecoder() below.
private static class DecoderProperties {
- DecoderProperties(String codecName, int colorFormat) {
+ public DecoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
@@ -107,26 +126,32 @@ class MediaCodecVideoDecoder {
continue; // No VP8 support in this codec; try the next one.
}
Log.d(TAG, "Found candidate decoder " + name);
+
+      // Check if this is a supported HW decoder.
+ boolean supportedCodec = false;
+ for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+ if (name.startsWith(hwCodecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+      // Check if the codec supports either yuv420 or nv12.
CodecCapabilities capabilities =
info.getCapabilitiesForType(VP8_MIME_TYPE);
for (int colorFormat : capabilities.colorFormats) {
Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
-
- // Check if this is supported HW decoder
- for (String hwCodecPrefix : supportedHwCodecPrefixes) {
- if (!name.startsWith(hwCodecPrefix)) {
- continue;
- }
- // Check if codec supports either yuv420 or nv12
- for (int supportedColorFormat : supportedColorList) {
- for (int codecColorFormat : capabilities.colorFormats) {
- if (codecColorFormat == supportedColorFormat) {
- // Found supported HW VP8 decoder
- Log.d(TAG, "Found target decoder " + name +
- ". Color: 0x" + Integer.toHexString(codecColorFormat));
- return new DecoderProperties(name, codecColorFormat);
- }
+ for (int supportedColorFormat : supportedColorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW VP8 decoder.
+ Log.d(TAG, "Found target decoder " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new DecoderProperties(name, codecColorFormat);
}
}
}
@@ -146,31 +171,166 @@ class MediaCodecVideoDecoder {
}
}
- private boolean initDecode(int width, int height) {
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ Log.e(TAG, msg + ": EGL Error: 0x" + Integer.toHexString(error));
+ throw new RuntimeException(
+ msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ private void checkGlError(String msg) {
+ int error;
+ if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, msg + ": GL Error: 0x" + Integer.toHexString(error));
+ throw new RuntimeException(
+          msg + ": GL Error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ private void eglSetup(EGLContext sharedContext, int width, int height) {
+ Log.d(TAG, "EGL setup");
+ if (sharedContext == null) {
+ sharedContext = EGL14.EGL_NO_CONTEXT;
+ }
+ eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new RuntimeException("Unable to initialize EGL14");
+ }
+
+ // Configure EGL for pbuffer and OpenGL ES 2.0.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0,
+ configs.length, numConfigs, 0)) {
+ throw new RuntimeException("Unable to find RGB888 EGL config");
+ }
+
+ // Configure context for OpenGL ES 2.0.
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedContext,
+ attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (eglContext == null) {
+ throw new RuntimeException("Null EGL context");
+ }
+
+ // Create a pbuffer surface.
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE
+ };
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, configs[0],
+ surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("EGL surface was null");
+ }
+ }
+
+ private void eglRelease() {
+ Log.d(TAG, "EGL release");
+ if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ }
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+
+ private void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ private boolean initDecode(int width, int height, boolean useSurface,
+ EGLContext sharedContext) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
+ if (useSurface && sharedContext == null) {
+ throw new RuntimeException("No shared EGL context.");
+ }
DecoderProperties properties = findVp8HwDecoder();
if (properties == null) {
throw new RuntimeException("Cannot find HW VP8 decoder");
}
Log.d(TAG, "Java initDecode: " + width + " x " + height +
- ". Color: 0x" + Integer.toHexString(properties.colorFormat));
+ ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
+        ". Use Surface: " + useSurface);
+ if (sharedContext != null) {
+ Log.d(TAG, "Decoder shared EGL Context: " + sharedContext);
+ }
mediaCodecThread = Thread.currentThread();
try {
+ Surface decodeSurface = null;
this.width = width;
this.height = height;
+ this.useSurface = useSurface;
stride = width;
sliceHeight = height;
+
+ if (useSurface) {
+ // Create shared EGL context.
+ eglSetup(sharedContext, width, height);
+ makeCurrent();
+
+        // Create output surface.
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ checkGlError("glGenTextures");
+ textureID = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
+        checkGlError("glBindTexture textureID");
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+ GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+ GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+ GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+ GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ Log.d(TAG, "Video decoder TextureID = " + textureID);
+ surfaceTexture = new SurfaceTexture(textureID);
+ surface = new Surface(surfaceTexture);
+ decodeSurface = surface;
+ }
+
MediaFormat format =
MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
- format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ if (!useSurface) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ }
Log.d(TAG, " Format: " + format);
mediaCodec = MediaCodec.createByCodecName(properties.codecName);
if (mediaCodec == null) {
return false;
}
- mediaCodec.configure(format, null, null, 0);
+ mediaCodec.configure(format, decodeSurface, null, 0);
mediaCodec.start();
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
@@ -195,6 +355,19 @@ class MediaCodecVideoDecoder {
}
mediaCodec = null;
mediaCodecThread = null;
+ if (useSurface) {
+ surface.release();
+ surface = null;
+ surfaceTexture = null;
+ if (textureID >= 0) {
+ int[] textures = new int[1];
+ textures[0] = textureID;
+ Log.d(TAG, "Delete video decoder TextureID " + textureID);
+ GLES20.glDeleteTextures(1, textures, 0);
+ checkGlError("glDeleteTextures");
+ }
+ eglRelease();
+ }
}
// Dequeue an input buffer and return its index, -1 if no input buffer is
@@ -202,7 +375,7 @@ class MediaCodecVideoDecoder {
private int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
- return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+ return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
} catch (IllegalStateException e) {
-      Log.e(TAG, "dequeueIntputBuffer failed", e);
+      Log.e(TAG, "dequeueInputBuffer failed", e);
return -2;
@@ -224,23 +397,40 @@ class MediaCodecVideoDecoder {
}
}
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecoderOutputBufferInfo {
+ public DecoderOutputBufferInfo(
+ int index, int offset, int size, long presentationTimestampUs) {
+ this.index = index;
+ this.offset = offset;
+ this.size = size;
+ this.presentationTimestampUs = presentationTimestampUs;
+ }
+
+ private final int index;
+ private final int offset;
+ private final int size;
+ private final long presentationTimestampUs;
+ }
+
-  // Dequeue and return an output buffer index, -1 if no output
-  // buffer available or -2 if error happened.
+  // Dequeue and return a DecoderOutputBufferInfo, or null if no output
+  // buffer is available within the timeout. An index of -1 in the returned
+  // info signals an error.
- private int dequeueOutputBuffer() {
+ private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
- int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+ int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
+ Log.d(TAG, "Output buffers changed: " + outputBuffers.length);
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = mediaCodec.getOutputFormat();
Log.d(TAG, "Format changed: " + format.toString());
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
- if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Log.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
// Check if new color space is supported.
@@ -253,7 +443,7 @@ class MediaCodecVideoDecoder {
}
if (!validColorFormat) {
-            Log.e(TAG, "Non supported color format");
+            Log.e(TAG, "Unsupported color format");
- return -2;
+ return new DecoderOutputBufferInfo(-1, 0, 0, -1);
}
}
if (format.containsKey("stride")) {
@@ -267,21 +457,28 @@ class MediaCodecVideoDecoder {
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
}
- result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+ result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
}
- return result;
+ if (result >= 0) {
+ return new DecoderOutputBufferInfo(result, info.offset, info.size,
+ info.presentationTimeUs);
+ }
+ return null;
} catch (IllegalStateException e) {
Log.e(TAG, "dequeueOutputBuffer failed", e);
- return -2;
+ return new DecoderOutputBufferInfo(-1, 0, 0, -1);
}
}
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
- private boolean releaseOutputBuffer(int index) {
+ private boolean releaseOutputBuffer(int index, boolean render) {
checkOnMediaCodecThread();
try {
- mediaCodec.releaseOutputBuffer(index, false);
+ if (!useSurface) {
+ render = false;
+ }
+ mediaCodec.releaseOutputBuffer(index, render);
return true;
} catch (IllegalStateException e) {
Log.e(TAG, "releaseOutputBuffer failed", e);
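Taken together, the MediaCodecVideoDecoder.java changes above let the decoder render straight into a SurfaceTexture instead of copying YUV buffers out. Below is a minimal, self-contained sketch of that same render-to-texture pattern using only the public MediaCodec API; the class name, helper methods, and the 500 ms input timeout mirror the patch but are otherwise illustrative assumptions, not code from this CL.

    import android.graphics.SurfaceTexture;
    import android.media.MediaCodec;
    import android.media.MediaFormat;
    import android.view.Surface;

    import java.nio.ByteBuffer;

    // Sketch only: mirrors initDecode(useSurface = true) plus
    // dequeueOutputBuffer()/releaseOutputBuffer(index, render) above.
    class DecodeToTextureSketch {
      private MediaCodec decoder;
      private SurfaceTexture surfaceTexture;
      private Surface surface;

      // |textureId| is assumed to be an OES texture created on an EGL context
      // shared with the renderer, as eglSetup() arranges in the patch.
      void start(int textureId, int width, int height) throws Exception {
        surfaceTexture = new SurfaceTexture(textureId);
        surface = new Surface(surfaceTexture);
        MediaFormat format =
            MediaFormat.createVideoFormat("video/x-vnd.on2.vp8", width, height);
        decoder = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
        // No color format is set when decoding to a Surface; the codec writes
        // frames directly into the SurfaceTexture backing the Surface.
        decoder.configure(format, surface, null, 0);
        decoder.start();
      }

      // Feed one encoded VP8 frame and render any ready output to the texture.
      void decodeFrame(ByteBuffer encodedFrame, long presentationTimeUs) {
        int inputIndex = decoder.dequeueInputBuffer(500000 /* 500 ms */);
        if (inputIndex >= 0) {
          int size = encodedFrame.remaining();
          ByteBuffer input = decoder.getInputBuffers()[inputIndex];
          input.clear();
          input.put(encodedFrame);
          decoder.queueInputBuffer(inputIndex, 0, size, presentationTimeUs, 0);
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outputIndex = decoder.dequeueOutputBuffer(info, 0 /* no wait */);
        if (outputIndex >= 0) {
          // render == true pushes the frame into the Surface, which is what
          // releaseOutputBuffer(index, render) forwards when useSurface is set.
          decoder.releaseOutputBuffer(outputIndex, true /* render */);
          // The GL renderer later calls surfaceTexture.updateTexImage() on its
          // own thread before sampling the external texture.
        }
      }
    }

Unlike the patch, this sketch picks a decoder by MIME type; the patch keeps findVp8HwDecoder() so only codecs whose names match supportedHwCodecPrefixes are used.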
diff --git a/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 45b8d6a..659422d 100644
--- a/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -78,7 +78,7 @@ class MediaCodecVideoEncoder {
// Helper struct for findVp8HwEncoder() below.
private static class EncoderProperties {
- EncoderProperties(String codecName, int colorFormat) {
+ public EncoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
@@ -106,26 +106,33 @@ class MediaCodecVideoEncoder {
continue; // No VP8 support in this codec; try the next one.
}
Log.d(TAG, "Found candidate encoder " + name);
+
+      // Check if this is a supported HW encoder.
+ boolean supportedCodec = false;
+ for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+ if (name.startsWith(hwCodecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
CodecCapabilities capabilities =
info.getCapabilitiesForType(VP8_MIME_TYPE);
for (int colorFormat : capabilities.colorFormats) {
Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
- // Check if this is supported HW encoder
- for (String hwCodecPrefix : supportedHwCodecPrefixes) {
- if (!name.startsWith(hwCodecPrefix)) {
- continue;
- }
- // Check if codec supports either yuv420 or nv12
- for (int supportedColorFormat : supportedColorList) {
- for (int codecColorFormat : capabilities.colorFormats) {
- if (codecColorFormat == supportedColorFormat) {
- // Found supported HW VP8 encoder
- Log.d(TAG, "Found target encoder " + name +
- ". Color: 0x" + Integer.toHexString(codecColorFormat));
- return new EncoderProperties(name, codecColorFormat);
- }
+      // Check if the codec supports either yuv420 or nv12.
+ for (int supportedColorFormat : supportedColorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW VP8 encoder.
+ Log.d(TAG, "Found target encoder " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new EncoderProperties(name, codecColorFormat);
}
}
}
diff --git a/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
index 441f37b..f9f96e7 100644
--- a/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
@@ -46,8 +46,12 @@ public class PeerConnectionFactory {
// Callers may specify either |initializeAudio| or |initializeVideo| as false
// to skip initializing the respective engine (and avoid the need for the
// respective permissions).
+  // |renderEGLContext| can be provided to support HW video decoding to
+  // texture, and will be used to create a shared EGL context on the video
+  // decoding thread.
public static native boolean initializeAndroidGlobals(
- Object context, boolean initializeAudio, boolean initializeVideo);
+ Object context, boolean initializeAudio, boolean initializeVideo,
+ Object renderEGLContext);
public PeerConnectionFactory() {
nativeFactory = nativeCreatePeerConnectionFactory();
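With the extra renderEGLContext argument, application code is expected to hand the renderer's EGL context to initializeAndroidGlobals, as the AppRTCDemoActivity change further down does. A hedged sketch, where InitSketch and appContext are illustrative names, not part of the patch:

    import android.content.Context;

    import org.webrtc.PeerConnectionFactory;
    import org.webrtc.VideoRendererGui;

    // Sketch only: mirrors the call this CL moves into
    // AppRTCDemoActivity.onIceServers().
    class InitSketch {
      static PeerConnectionFactory createFactory(Context appContext) {
        boolean ok = PeerConnectionFactory.initializeAndroidGlobals(
            appContext,                         // application context
            true,                               // initializeAudio
            true,                               // initializeVideo
            VideoRendererGui.getEGLContext());  // context for HW decode to texture
        if (!ok) {
          throw new RuntimeException("Failed to initializeAndroidGlobals");
        }
        return new PeerConnectionFactory();
      }
    }

Calling this more than once is now tolerated on the native side: the factory_static_initialized guard in peerconnection_jni.cc initializes the voice and video engines only on the first call, while the decoder's EGL context is registered on every call.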
diff --git a/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 4cc341a..27ad80e 100644
--- a/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -44,6 +44,9 @@ public class VideoRenderer {
public final int height;
public final int[] yuvStrides;
public final ByteBuffer[] yuvPlanes;
+ public final boolean yuvFrame;
+ public Object textureObject;
+ public int textureId;
/**
* Construct a frame of the given dimensions with the specified planar
@@ -62,25 +65,72 @@ public class VideoRenderer {
yuvPlanes[2] = ByteBuffer.allocateDirect(yuvStrides[2] * height);
}
this.yuvPlanes = yuvPlanes;
+ this.yuvFrame = true;
+ }
+
+ /**
+   * Construct a texture frame of the given dimensions with data in a
+   * SurfaceTexture.
+ */
+ public I420Frame(
+ int width, int height, Object textureObject, int textureId) {
+ this.width = width;
+ this.height = height;
+ this.yuvStrides = null;
+ this.yuvPlanes = null;
+ this.textureObject = textureObject;
+ this.textureId = textureId;
+ this.yuvFrame = false;
}
/**
* Copy the planes out of |source| into |this| and return |this|. Calling
- * this with mismatched frame dimensions is a programming error and will
- * likely crash.
+ * this with mismatched frame dimensions or frame type is a programming
+ * error and will likely crash.
*/
public I420Frame copyFrom(I420Frame source) {
- if (!Arrays.equals(yuvStrides, source.yuvStrides) ||
- width != source.width || height != source.height) {
- throw new RuntimeException("Mismatched dimensions! Source: " +
+ if (source.yuvFrame && yuvFrame) {
+ if (!Arrays.equals(yuvStrides, source.yuvStrides) ||
+ width != source.width || height != source.height) {
+ throw new RuntimeException("Mismatched dimensions! Source: " +
+ source.toString() + ", destination: " + toString());
+ }
+ copyPlane(source.yuvPlanes[0], yuvPlanes[0]);
+ copyPlane(source.yuvPlanes[1], yuvPlanes[1]);
+ copyPlane(source.yuvPlanes[2], yuvPlanes[2]);
+ return this;
+ } else if (!source.yuvFrame && !yuvFrame) {
+ textureObject = source.textureObject;
+ textureId = source.textureId;
+ return this;
+ } else {
+ throw new RuntimeException("Mismatched frame types! Source: " +
source.toString() + ", destination: " + toString());
}
- copyPlane(source.yuvPlanes[0], yuvPlanes[0]);
- copyPlane(source.yuvPlanes[1], yuvPlanes[1]);
- copyPlane(source.yuvPlanes[2], yuvPlanes[2]);
- return this;
}
+ public I420Frame copyFrom(byte[] yuvData) {
+ if (yuvData.length < width * height * 3 / 2) {
+        throw new RuntimeException("Wrong array size: " + yuvData.length);
+ }
+ if (!yuvFrame) {
+        throw new RuntimeException("Cannot feed yuv data to a texture frame");
+ }
+ int planeSize = width * height;
+ ByteBuffer[] planes = new ByteBuffer[3];
+ planes[0] = ByteBuffer.wrap(yuvData, 0, planeSize);
+ planes[1] = ByteBuffer.wrap(yuvData, planeSize, planeSize / 4);
+ planes[2] = ByteBuffer.wrap(yuvData, planeSize + planeSize / 4,
+ planeSize / 4);
+ for (int i = 0; i < 3; i++) {
+ yuvPlanes[i].position(0);
+ yuvPlanes[i].put(planes[i]);
+ yuvPlanes[i].position(0);
+ yuvPlanes[i].limit(yuvPlanes[i].capacity());
+ }
+ return this;
+ }
+
@Override
public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
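For clarity, here is a small sketch of the two I420Frame flavors introduced above; the dimensions and texture arguments are arbitrary example values and the wrapper class is illustrative, not part of the patch.

    import org.webrtc.VideoRenderer.I420Frame;

    // Sketch only: shows the yuv-frame vs. texture-frame split added above.
    class I420FrameSketch {
      static void demo(Object surfaceTexture, int oesTextureId) {
        // Planar I420 frame: strides are width for Y and width / 2 for U and V.
        // Passing null planes lets the existing constructor allocate the
        // direct ByteBuffers itself.
        int width = 640;
        int height = 480;
        int[] strides = { width, width / 2, width / 2 };
        I420Frame yuvFrame = new I420Frame(width, height, strides, null);

        // copyFrom(byte[]) expects tightly packed I420 data: width * height
        // bytes of Y followed by two width * height / 4 chroma planes.
        byte[] i420 = new byte[width * height * 3 / 2];
        yuvFrame.copyFrom(i420);

        // Texture frame: carries only the SurfaceTexture object and the OES
        // texture id produced by the hardware decoder; no ByteBuffers.
        I420Frame textureFrame =
            new I420Frame(width, height, surfaceTexture, oesTextureId);

        // copyFrom(I420Frame) requires matching frame types; mixing a yuv
        // frame with a texture frame throws a RuntimeException.
        I420Frame textureCopy = new I420Frame(width, height, surfaceTexture, 0);
        textureCopy.copyFrom(textureFrame);
      }
    }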
diff --git a/app/webrtc/peerconnection_unittest.cc b/app/webrtc/peerconnection_unittest.cc
index 402a796..0d3e426 100644
--- a/app/webrtc/peerconnection_unittest.cc
+++ b/app/webrtc/peerconnection_unittest.cc
@@ -1512,9 +1512,8 @@ TEST_F(JsepPeerConnectionP2PTestClient, ReceivedBweStatsCombined) {
LocalP2PTest();
// Run until a non-zero bw is reported.
- EXPECT_TRUE_WAIT(
- receiving_client()->GetAvailableReceivedBandwidthStats() > 40000,
- kMaxWaitForRembMs);
+ EXPECT_TRUE_WAIT(receiving_client()->GetAvailableReceivedBandwidthStats() > 0,
+ kMaxWaitForRembMs);
// Halt video capturers, then run until we have gotten some audio. Following
// REMB should be non-zero.
@@ -1539,9 +1538,8 @@ TEST_F(JsepPeerConnectionP2PTestClient, ReceivedBweStatsNotCombined) {
LocalP2PTest();
// Run until a non-zero bw is reported.
- EXPECT_TRUE_WAIT(
- receiving_client()->GetAvailableReceivedBandwidthStats() > 40000,
- kMaxWaitForRembMs);
+ EXPECT_TRUE_WAIT(receiving_client()->GetAvailableReceivedBandwidthStats() > 0,
+ kMaxWaitForRembMs);
// Halt video capturers, then run until we have gotten some audio. Following
// REMB should be zero.
diff --git a/app/webrtc/statstypes.h b/app/webrtc/statstypes.h
index 8eae1ad..656b83c 100644
--- a/app/webrtc/statstypes.h
+++ b/app/webrtc/statstypes.h
@@ -36,6 +36,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
#include "webrtc/base/stringencode.h"
namespace webrtc {
diff --git a/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java b/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
index 213da7b..468ce22 100644
--- a/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
+++ b/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
@@ -77,7 +77,6 @@ import java.util.regex.Pattern;
public class AppRTCDemoActivity extends Activity
implements AppRTCClient.IceServersObserver {
private static final String TAG = "AppRTCDemoActivity";
- private static boolean factoryStaticInitialized;
private PeerConnectionFactory factory;
private VideoSource videoSource;
private boolean videoSourceStopped;
@@ -133,13 +132,6 @@ public class AppRTCDemoActivity extends Activity
hudView.setVisibility(View.INVISIBLE);
addContentView(hudView, hudLayout);
- if (!factoryStaticInitialized) {
- abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
- this, true, true),
- "Failed to initializeAndroidGlobals");
- factoryStaticInitialized = true;
- }
-
AudioManager audioManager =
((AudioManager) getSystemService(AUDIO_SERVICE));
// TODO(fischman): figure out how to do this Right(tm) and remove the
@@ -282,6 +274,9 @@ public class AppRTCDemoActivity extends Activity
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
+ abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
+ this, true, true, VideoRendererGui.getEGLContext()),
+ "Failed to initializeAndroidGlobals");
factory = new PeerConnectionFactory();
MediaConstraints pcConstraints = appRtcClient.pcConstraints();