aboutsummaryrefslogtreecommitdiff
path: root/talk/app/webrtc/java/src
diff options
context:
space:
mode:
Diffstat (limited to 'talk/app/webrtc/java/src')
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java368
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java221
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/PeerConnection.java13
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java61
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/RtpSender.java25
-rw-r--r--talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java23
6 files changed, 582 insertions, 129 deletions
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 42af9c7fd0..19002f70e1 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -33,23 +33,23 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
import android.os.Build;
+import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.LinkedList;
import java.util.List;
-
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.concurrent.CountDownLatch;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
-// MediaCodec is thread-hostile so this class must be operated on a single
-// thread.
+@SuppressWarnings("deprecation")
public class MediaCodecVideoDecoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
@@ -66,18 +66,26 @@ public class MediaCodecVideoDecoder {
}
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoDecoder runningInstance = null;
+ private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW VP9 decoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Exynos." };
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel." };
@@ -96,13 +104,29 @@ public class MediaCodecVideoDecoder {
private int height;
private int stride;
private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
private boolean useSurface;
- private int textureID = 0;
- private SurfaceTexture surfaceTexture = null;
+
+ // The below variables are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ private int droppedFrames;
private Surface surface = null;
- private EglBase eglBase;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+ // MediaCodec error handler - invoked when critical error happens which may prevent
+ // further use of media codec API. Now it means that one of media codec instances
+ // is hanging and can no longer be used in the next call.
+ public static interface MediaCodecVideoDecoderErrorCallback {
+ void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+ }
- private MediaCodecVideoDecoder() {
+ public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoDecoder.errorCallback = errorCallback;
}
// Helper struct for findVp8Decoder() below.
@@ -120,6 +144,7 @@ public class MediaCodecVideoDecoder {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
+ Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (info.isEncoder()) {
@@ -135,7 +160,7 @@ public class MediaCodecVideoDecoder {
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
- Logging.v(TAG, "Found candidate decoder " + name);
+ Logging.d(TAG, "Found candidate decoder " + name);
// Check if this is supported decoder.
boolean supportedCodec = false;
@@ -166,6 +191,7 @@ public class MediaCodecVideoDecoder {
}
}
}
+ Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
}
@@ -173,6 +199,10 @@ public class MediaCodecVideoDecoder {
return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
}
+ public static boolean isVp9HwSupported() {
+ return findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
+ }
+
public static boolean isH264HwSupported() {
return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
}
@@ -197,17 +227,21 @@ public class MediaCodecVideoDecoder {
}
}
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- useSurface = (sharedContext != null);
+ useSurface = (surfaceTextureHelper != null);
String mime = null;
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
@@ -221,9 +255,6 @@ public class MediaCodecVideoDecoder {
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
- if (sharedContext != null) {
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
- }
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
@@ -233,16 +264,8 @@ public class MediaCodecVideoDecoder {
sliceHeight = height;
if (useSurface) {
- // Create shared EGL context.
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
- eglBase.createDummyPbufferSurface();
- eglBase.makeCurrent();
-
- // Create output surface
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
- Logging.d(TAG, "Video decoder TextureID = " + textureID);
- surfaceTexture = new SurfaceTexture(textureID);
- surface = new Surface(surfaceTexture);
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -261,6 +284,10 @@ public class MediaCodecVideoDecoder {
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
return true;
@@ -271,25 +298,45 @@ public class MediaCodecVideoDecoder {
}
private void release() {
- Logging.d(TAG, "Java releaseDecoder");
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
- try {
- mediaCodec.stop();
- mediaCodec.release();
- } catch (IllegalStateException e) {
- Logging.e(TAG, "release failed", e);
+
+ // Run Mediacodec stop() and release() on separate thread since sometime
+ // Mediacodec.stop() may hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseDecoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseDecoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media decoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+ }
}
+
mediaCodec = null;
mediaCodecThread = null;
runningInstance = null;
if (useSurface) {
surface.release();
surface = null;
- Logging.d(TAG, "Delete video decoder TextureID " + textureID);
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
- textureID = 0;
- eglBase.release();
- eglBase = null;
+ textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
@@ -306,13 +353,15 @@ public class MediaCodecVideoDecoder {
}
}
- private boolean queueInputBuffer(
- int inputBufferIndex, int size, long timestampUs) {
+ private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+ long timeStampMs, long ntpTimeStampMs) {
checkOnMediaCodecThread();
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
- mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
+ decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+ ntpTimeStampMs));
+ mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
return true;
}
catch (IllegalStateException e) {
@@ -321,56 +370,183 @@ public class MediaCodecVideoDecoder {
}
}
- // Helper structs for dequeueOutputBuffer() below.
- private static class DecodedByteBuffer {
- public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+ private static class TimeStamps {
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ }
+ private final long decodeStartTimeMs; // Time when this frame was queued for decoding.
+ private final long timeStampMs; // Not used for decoding in Java; passed through to C++.
+ private final long ntpTimeStampMs; // Not used for decoding in Java; passed through to C++.
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long timeStampMs,
+ long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
- private final long presentationTimestampUs;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame finished decoding.
+ private final long endDecodeTimeMs;
}
+ // Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
- private final long presentationTimestampUs;
+ private final float[] transformMatrix;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ private final long decodeTimeMs;
+ // Interval from when the frame finished decoding until this buffer has been created.
+ // Since there is only one texture, this interval depend on the time from when
+ // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
+ // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timeStampMs,
+ long ntpTimeStampMs, long decodeTimeMs, long frameDelay) {
this.textureID = textureID;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.transformMatrix = transformMatrix;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
}
}
- // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
- // DecodedTexturebuffer depending on |useSurface| configuration.
+ // Poll based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+ // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
+ // onTextureFrameAvailable().
+ private DecodedOutputBuffer bufferToRender;
+ private DecodedTextureBuffer renderedBuffer;
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ public void addBufferToRender(DecodedOutputBuffer buffer) {
+ if (bufferToRender != null) {
+ Logging.e(TAG,
+ "Unexpected addBufferToRender() called while waiting for a texture.");
+ throw new IllegalStateException("Waiting for a texture.");
+ }
+ bufferToRender = buffer;
+ }
+
+ public boolean isWaitingForTexture() {
+ synchronized (newFrameLock) {
+ return bufferToRender != null;
+ }
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+ bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+ bufferToRender = null;
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ DecodedTextureBuffer returnedBuffer = renderedBuffer;
+ renderedBuffer = null;
+ return returnedBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ renderedBuffer = null;
+ }
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
- private Object dequeueOutputBuffer(int dequeueTimeoutUs)
- throws IllegalStateException, MediaCodec.CodecException {
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- return null;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
+ int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+ int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+ height + ". New " + new_width + "*" + new_height);
+ }
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@@ -388,18 +564,76 @@ public class MediaCodecVideoDecoder {
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
default:
- // Output buffer decoded.
- if (useSurface) {
- mediaCodec.releaseOutputBuffer(result, true /* render */);
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
- // frame.
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
- } else {
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
- }
+ hasDecodedFirstFrame = true;
+ TimeStamps timeStamps = decodeStartTimeMs.remove();
+ return new DecodedOutputBuffer(result, info.offset, info.size, timeStamps.timeStampMs,
+ timeStamps.ntpTimeStampMs,
+ SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
+ SystemClock.elapsedRealtime());
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+ // a frame can't be returned.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+ MaybeRenderDecodedTextureBuffer();
+ // Check if there is texture ready now by waiting max |dequeueTimeoutMs|.
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+ if (renderedBuffer != null) {
+ MaybeRenderDecodedTextureBuffer();
+ return renderedBuffer;
+ }
+
+ if ((dequeuedSurfaceOutputBuffers.size()
+ >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+ || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
+ ++droppedFrames;
+ // Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
+ // The oldest frame is owned by |textureListener| and can't be dropped since
+ // mediaCodec.releaseOutputBuffer has already been called.
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+ if (dequeueTimeoutMs > 0) {
+ // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+ // return the one and only texture even if it does not render.
+ // Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ // + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
+ } else {
+ Logging.w(TAG, "Too many output buffers. Dropping frame with TS: "
+ + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
}
+
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+ return new DecodedTextureBuffer(0, null, droppedFrame.timeStampMs,
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+ }
+ return null;
+ }
+
+ private void MaybeRenderDecodedTextureBuffer() {
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+ return;
}
+ // Get the first frame in the queue and render to the decoder output surface.
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+ textureListener.addBufferToRender(buffer);
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
@@ -407,11 +641,11 @@ public class MediaCodecVideoDecoder {
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
- private void returnDecodedByteBuffer(int index)
+ private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
- throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index f3f03c1d20..5c8f9dc77e 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -27,24 +27,29 @@
package org.webrtc;
+import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
+import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
+import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
// This class is an implementation detail of the Java PeerConnection API.
-// MediaCodec is thread-hostile so this class must be operated on a single
-// thread.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
public class MediaCodecVideoEncoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
@@ -60,18 +65,31 @@ public class MediaCodecVideoEncoder {
VIDEO_CODEC_H264
}
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
- // Active running encoder instance. Set in initDecode() (called from native code)
+ // Active running encoder instance. Set in initEncode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoEncoder runningInstance = null;
+ private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers;
+ private EglBase14 eglBase;
+ private int width;
+ private int height;
+ private Surface inputSurface;
+ private GlRectDrawer drawer;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 codecs.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel." };
+ // List of supported HW VP9 encoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom."};
// List of supported HW H.264 codecs.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom." };
@@ -99,13 +117,25 @@ public class MediaCodecVideoEncoder {
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
};
- private int colorFormat;
- // Video encoder type.
+ private static final int[] supportedSurfaceColorList = {
+ CodecCapabilities.COLOR_FormatSurface
+ };
private VideoCodecType type;
+ private int colorFormat; // Used by native code.
+
// SPS and PPS NALs (Config frame) for H.264.
private ByteBuffer configData = null;
- private MediaCodecVideoEncoder() {
+ // MediaCodec error handler - invoked when critical error happens which may prevent
+ // further use of media codec API. Now it means that one of media codec instances
+ // is hanging and can no longer be used in the next call.
+ public static interface MediaCodecVideoEncoderErrorCallback {
+ void onMediaCodecVideoEncoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoEncoder.errorCallback = errorCallback;
}
// Helper struct for findHwEncoder() below.
@@ -119,7 +149,7 @@ public class MediaCodecVideoEncoder {
}
private static EncoderProperties findHwEncoder(
- String mime, String[] supportedHwCodecPrefixes) {
+ String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
// MediaCodec.setParameters is missing for JB and below, so bitrate
// can not be adjusted dynamically.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
@@ -130,8 +160,7 @@ public class MediaCodecVideoEncoder {
if (mime.equals(H264_MIME_TYPE)) {
List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
if (exceptionModels.contains(Build.MODEL)) {
- Logging.w(TAG, "Model: " + Build.MODEL +
- " has black listed H.264 encoder.");
+ Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
return null;
}
}
@@ -170,8 +199,7 @@ public class MediaCodecVideoEncoder {
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
- // Check if codec supports either yuv420 or nv12.
- for (int supportedColorFormat : supportedColorList) {
+ for (int supportedColorFormat : colorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW encoder.
@@ -182,15 +210,34 @@ public class MediaCodecVideoEncoder {
}
}
}
- return null; // No HW VP8 encoder.
+ return null; // No HW encoder.
}
public static boolean isVp8HwSupported() {
- return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+ return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null;
+ }
+
+ public static boolean isVp9HwSupported() {
+ return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null;
}
public static boolean isH264HwSupported() {
- return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+ return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null;
+ }
+
+ public static boolean isVp8HwSupportedUsingTextures() {
+ return findHwEncoder(
+ VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null;
+ }
+
+ public static boolean isVp9HwSupportedUsingTextures() {
+ return findHwEncoder(
+ VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null;
+ }
+
+ public static boolean isH264HwSupportedUsingTextures() {
+ return findHwEncoder(
+ H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null;
}
private void checkOnMediaCodecThread() {
@@ -223,32 +270,43 @@ public class MediaCodecVideoEncoder {
}
}
- // Return the array of input buffers, or null on failure.
- private ByteBuffer[] initEncode(
- VideoCodecType type, int width, int height, int kbps, int fps) {
+ boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+ EglBase14.Context sharedContext) {
+ final boolean useSurface = sharedContext != null;
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
- ". @ " + kbps + " kbps. Fps: " + fps +
- ". Color: 0x" + Integer.toHexString(colorFormat));
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+
+ this.width = width;
+ this.height = height;
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- this.type = type;
EncoderProperties properties = null;
String mime = null;
int keyFrameIntervalSec = 0;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
- properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+ properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 100;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
- properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+ properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+ useSurface ? supportedSurfaceColorList : supportedColorList);
keyFrameIntervalSec = 20;
}
if (properties == null) {
throw new RuntimeException("Can not find HW encoder for " + type);
}
runningInstance = this; // Encoder is now running and can be queried for stack traces.
+ colorFormat = properties.colorFormat;
+ Logging.d(TAG, "Color format: " + colorFormat);
+
mediaCodecThread = Thread.currentThread();
try {
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -259,26 +317,39 @@ public class MediaCodecVideoEncoder {
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
Logging.d(TAG, " Format: " + format);
mediaCodec = createByCodecName(properties.codecName);
+ this.type = type;
if (mediaCodec == null) {
Logging.e(TAG, "Can not create media encoder");
- return null;
+ return false;
}
mediaCodec.configure(
format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurface) {
+ eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ // Create an input surface and keep a reference since we must release the surface when done.
+ inputSurface = mediaCodec.createInputSurface();
+ eglBase.createSurface(inputSurface);
+ drawer = new GlRectDrawer();
+ }
mediaCodec.start();
- colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
- ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
- Logging.d(TAG, "Input buffers: " + inputBuffers.length +
- ". Output buffers: " + outputBuffers.length);
- return inputBuffers;
+ Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
- return null;
+ return false;
}
+ return true;
+ }
+
+ ByteBuffer[] getInputBuffers() {
+ ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+ return inputBuffers;
}
- private boolean encode(
+ boolean encodeBuffer(
boolean isKeyframe, int inputBuffer, int size,
long presentationTimestampUs) {
checkOnMediaCodecThread();
@@ -298,22 +369,82 @@ public class MediaCodecVideoEncoder {
return true;
}
catch (IllegalStateException e) {
- Logging.e(TAG, "encode failed", e);
+ Logging.e(TAG, "encodeBuffer failed", e);
return false;
}
}
- private void release() {
- Logging.d(TAG, "Java releaseEncoder");
+ boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+ long presentationTimestampUs) {
checkOnMediaCodecThread();
try {
- mediaCodec.stop();
- mediaCodec.release();
- } catch (IllegalStateException e) {
- Logging.e(TAG, "release failed", e);
+ if (isKeyframe) {
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ eglBase.makeCurrent();
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
+ eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ return true;
+ }
+ catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return false;
}
+ }
+
+ void release() {
+ Logging.d(TAG, "Java releaseEncoder");
+ checkOnMediaCodecThread();
+
+ // Run MediaCodec.stop() and MediaCodec.release() on a separate thread since
+ // MediaCodec.stop() may sometimes hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseEncoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseEncoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
+ }
+ }
+
mediaCodec = null;
mediaCodecThread = null;
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ if (eglBase != null) {
+ eglBase.release();
+ eglBase = null;
+ }
+ if (inputSurface != null) {
+ inputSurface.release();
+ inputSurface = null;
+ }
runningInstance = null;
Logging.d(TAG, "Java releaseEncoder done");
}
@@ -336,7 +467,7 @@ public class MediaCodecVideoEncoder {
// Dequeue an input buffer and return its index, -1 if no input buffer is
// available, or -2 if the codec is no longer operative.
- private int dequeueInputBuffer() {
+ int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
@@ -347,7 +478,7 @@ public class MediaCodecVideoEncoder {
}
// Helper struct for dequeueOutputBuffer() below.
- private static class OutputBufferInfo {
+ static class OutputBufferInfo {
public OutputBufferInfo(
int index, ByteBuffer buffer,
boolean isKeyFrame, long presentationTimestampUs) {
@@ -357,15 +488,15 @@ public class MediaCodecVideoEncoder {
this.presentationTimestampUs = presentationTimestampUs;
}
- private final int index;
- private final ByteBuffer buffer;
- private final boolean isKeyFrame;
- private final long presentationTimestampUs;
+ public final int index;
+ public final ByteBuffer buffer;
+ public final boolean isKeyFrame;
+ public final long presentationTimestampUs;
}
// Dequeue and return an output buffer, or null if no output is ready. Return
// a fake OutputBufferInfo with index -1 if the codec is no longer operable.
- private OutputBufferInfo dequeueOutputBuffer() {
+ OutputBufferInfo dequeueOutputBuffer() {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@@ -434,7 +565,7 @@ public class MediaCodecVideoEncoder {
// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
- private boolean releaseOutputBuffer(int index) {
+ boolean releaseOutputBuffer(int index) {
checkOnMediaCodecThread();
try {
mediaCodec.releaseOutputBuffer(index, false);
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
index 50023001d7..36cd07595c 100644
--- a/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java
@@ -28,7 +28,6 @@
package org.webrtc;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
@@ -151,6 +150,7 @@ public class PeerConnection {
public int audioJitterBufferMaxPackets;
public boolean audioJitterBufferFastAccelerate;
public int iceConnectionReceivingTimeout;
+ public int iceBackupCandidatePairPingInterval;
public KeyType keyType;
public ContinualGatheringPolicy continualGatheringPolicy;
@@ -163,6 +163,7 @@ public class PeerConnection {
audioJitterBufferMaxPackets = 50;
audioJitterBufferFastAccelerate = false;
iceConnectionReceivingTimeout = -1;
+ iceBackupCandidatePairPingInterval = -1;
keyType = KeyType.ECDSA;
continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
}
@@ -223,6 +224,14 @@ public class PeerConnection {
localStreams.remove(stream);
}
+ public RtpSender createSender(String kind, String stream_id) {
+ RtpSender new_sender = nativeCreateSender(kind, stream_id);
+ if (new_sender != null) {
+ senders.add(new_sender);
+ }
+ return new_sender;
+ }
+
// Note that calling getSenders will dispose of the senders previously
// returned (and same goes for getReceivers).
public List<RtpSender> getSenders() {
@@ -288,6 +297,8 @@ public class PeerConnection {
private native boolean nativeGetStats(
StatsObserver observer, long nativeTrack);
+ private native RtpSender nativeCreateSender(String kind, String stream_id);
+
private native List<RtpSender> nativeGetSenders();
private native List<RtpReceiver> nativeGetReceivers();
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
index 83999ece98..d759c69271 100644
--- a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
@@ -73,6 +73,15 @@ public class PeerConnectionFactory {
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
public static native void initializeFieldTrials(String fieldTrialsInitString);
+ // Internal tracing initialization. Must be called before PeerConnectionFactory is created to
+ // prevent racing with tracing code.
+ public static native void initializeInternalTracer();
+ // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+ // PeerConnectionFactory is gone to prevent races with code performing tracing.
+ public static native void shutdownInternalTracer();
+ // Start/stop internal capturing of internal tracing.
+ public static native boolean startInternalTracingCapture(String tracing_filename);
+ public static native void stopInternalTracingCapture();
public PeerConnectionFactory() {
nativeFactory = nativeCreatePeerConnectionFactory();
@@ -131,12 +140,52 @@ public class PeerConnectionFactory {
nativeFactory, id, source.nativeSource));
}
+ // Starts recording an AEC dump. Ownership of the file is transferred to the
+ // native code. If an AEC dump is already in progress, it will be stopped and
+ // a new one will start using the provided file.
+ public boolean startAecDump(int file_descriptor) {
+ return nativeStartAecDump(nativeFactory, file_descriptor);
+ }
+
+ // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+ // this call will have no effect.
+ public void stopAecDump() {
+ nativeStopAecDump(nativeFactory);
+ }
+
+ // Starts recording an RTC event log. Ownership of the file is transferred to
+ // the native code. If an RTC event log is already being recorded, it will be
+ // stopped and a new one will start using the provided file.
+ public boolean startRtcEventLog(int file_descriptor) {
+ return nativeStartRtcEventLog(nativeFactory, file_descriptor);
+ }
+
+ // Stops recording an RTC event log. If no RTC event log is currently being
+ // recorded, this call will have no effect.
+ public void stopRtcEventLog() {
+ nativeStopRtcEventLog(nativeFactory);
+ }
+
public void setOptions(Options options) {
nativeSetOptions(nativeFactory, options);
}
+ @Deprecated
public void setVideoHwAccelerationOptions(Object renderEGLContext) {
- nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
+ nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext, renderEGLContext);
+ }
+
+ /** Set the EGL context used by HW Video encoding and decoding.
+ *
+ *
+ * @param localEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+ * Must be the same as used by VideoCapturerAndroid and any local
+ * video renderer.
+ * @param remoteEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+ * Must be the same as used by any remote video renderer.
+ */
+ public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+ nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
}
public void dispose() {
@@ -201,10 +250,18 @@ public class PeerConnectionFactory {
private static native long nativeCreateAudioTrack(
long nativeFactory, String id, long nativeSource);
+ private static native boolean nativeStartAecDump(long nativeFactory, int file_descriptor);
+
+ private static native void nativeStopAecDump(long nativeFactory);
+
+ private static native boolean nativeStartRtcEventLog(long nativeFactory, int file_descriptor);
+
+ private static native void nativeStopRtcEventLog(long nativeFactory);
+
public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
- long nativeFactory, Object renderEGLContext);
+ long nativeFactory, Object localEGLContext, Object remoteEGLContext);
private static native void nativeThreadsCallbacks(long nativeFactory);
diff --git a/talk/app/webrtc/java/src/org/webrtc/RtpSender.java b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
index 37357c0657..9ac2e7034f 100644
--- a/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
+++ b/talk/app/webrtc/java/src/org/webrtc/RtpSender.java
@@ -32,6 +32,7 @@ public class RtpSender {
final long nativeRtpSender;
private MediaStreamTrack cachedTrack;
+ private boolean ownsTrack = true;
public RtpSender(long nativeRtpSender) {
this.nativeRtpSender = nativeRtpSender;
@@ -40,14 +41,22 @@ public class RtpSender {
cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
}
- // NOTE: This should not be called with a track that's already used by
- // another RtpSender, because then it would be double-disposed.
- public void setTrack(MediaStreamTrack track) {
- if (cachedTrack != null) {
+ // If |takeOwnership| is true, the RtpSender takes ownership of the track
+ // from the caller, and will auto-dispose of it when no longer needed.
+ // |takeOwnership| should only be used if the caller owns the track; it is
+ // not appropriate when the track is owned by, for example, another RtpSender
+ // or a MediaStream.
+ public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+ if (!nativeSetTrack(nativeRtpSender,
+ (track == null) ? 0 : track.nativeTrack)) {
+ return false;
+ }
+ if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
cachedTrack = track;
- nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack);
+ ownsTrack = takeOwnership;
+ return true;
}
public MediaStreamTrack track() {
@@ -59,14 +68,14 @@ public class RtpSender {
}
public void dispose() {
- if (cachedTrack != null) {
+ if (cachedTrack != null && ownsTrack) {
cachedTrack.dispose();
}
free(nativeRtpSender);
}
- private static native void nativeSetTrack(long nativeRtpSender,
- long nativeTrack);
+ private static native boolean nativeSetTrack(long nativeRtpSender,
+ long nativeTrack);
// This should increment the reference count of the track.
// Will be released in dispose() or setTrack().
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 3c255dd123..2e307fc54b 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -46,7 +46,11 @@ public class VideoRenderer {
public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
- public Object textureObject;
+ // Matrix that transforms standard coordinates to their proper sampling locations in
+ // the texture. This transform compensates for any properties of the video source that
+ // cause it to appear different from a normalized texture. This matrix does not take
+ // |rotationDegree| into account.
+ public final float[] samplingMatrix;
public int textureId;
// Frame pointer in C++.
private long nativeFramePointer;
@@ -70,19 +74,27 @@ public class VideoRenderer {
if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
}
+ // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+ // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+ // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+ // matrix.
+ samplingMatrix = new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
}
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
- I420Frame(
- int width, int height, int rotationDegree,
- Object textureObject, int textureId, long nativeFramePointer) {
+ I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+ long nativeFramePointer) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
- this.textureObject = textureObject;
+ this.samplingMatrix = samplingMatrix;
this.textureId = textureId;
this.yuvFrame = false;
this.rotationDegree = rotationDegree;
@@ -125,7 +137,6 @@ public class VideoRenderer {
*/
public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null;
- frame.textureObject = null;
frame.textureId = 0;
if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer);