Diffstat (limited to 'talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java')
-rw-r--r--  talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java  368
1 file changed, 301 insertions(+), 67 deletions(-)
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 42af9c7fd0..19002f70e1 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -33,23 +33,23 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
import android.os.Build;
+import android.os.SystemClock;
import android.view.Surface;
import org.webrtc.Logging;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.LinkedList;
import java.util.List;
-
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.concurrent.CountDownLatch;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API.
-// MediaCodec is thread-hostile so this class must be operated on a single
-// thread.
+@SuppressWarnings("deprecation")
public class MediaCodecVideoDecoder {
// This class is constructed, operated, and destroyed by its C++ incarnation,
// so the class and its methods have non-public visibility. The API this
@@ -66,18 +66,26 @@ public class MediaCodecVideoDecoder {
}
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec release.
// Active running decoder instance. Set in initDecode() (called from native code)
// and reset to null in release() call.
private static MediaCodecVideoDecoder runningInstance = null;
+ private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
private static final String H264_MIME_TYPE = "video/avc";
// List of supported HW VP8 decoders.
private static final String[] supportedVp8HwCodecPrefixes =
{"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW VP9 decoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Exynos." };
// List of supported HW H.264 decoders.
private static final String[] supportedH264HwCodecPrefixes =
{"OMX.qcom.", "OMX.Intel." };
@@ -96,13 +104,29 @@ public class MediaCodecVideoDecoder {
private int height;
private int stride;
private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
private boolean useSurface;
- private int textureID = 0;
- private SurfaceTexture surfaceTexture = null;
+
+ // The variables below are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ private int droppedFrames;
private Surface surface = null;
- private EglBase eglBase;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+ // MediaCodec error handler - invoked when a critical error happens that may prevent
+ // further use of the MediaCodec API. Currently this means that one of the MediaCodec
+ // instances is hanging and can no longer be used in the next call.
+ public static interface MediaCodecVideoDecoderErrorCallback {
+ void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+ }
- private MediaCodecVideoDecoder() {
+ public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoDecoder.errorCallback = errorCallback;
}
// Helper struct for findVp8Decoder() below.
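For reference, the error callback added in the hunk above could be wired up from application code along the following lines. This is a hypothetical usage sketch: only MediaCodecVideoDecoder.setErrorCallback() and the MediaCodecVideoDecoderErrorCallback interface come from the patch; the monitor class name and fallback strategy are illustrative assumptions.

import org.webrtc.Logging;
import org.webrtc.MediaCodecVideoDecoder;

// Hypothetical application-side monitor for hanging HW decoder instances.
class DecoderErrorMonitor
    implements MediaCodecVideoDecoder.MediaCodecVideoDecoderErrorCallback {
  @Override
  public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
    // A MediaCodec instance hung (e.g. during release); consider falling back to SW decoding.
    Logging.e("DecoderErrorMonitor", "HW decoder critical error count: " + codecErrors);
  }
}

// Registered once during application setup:
//   MediaCodecVideoDecoder.setErrorCallback(new DecoderErrorMonitor());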
@@ -120,6 +144,7 @@ public class MediaCodecVideoDecoder {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return null; // MediaCodec.setParameters is missing.
}
+ Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (info.isEncoder()) {
@@ -135,7 +160,7 @@ public class MediaCodecVideoDecoder {
if (name == null) {
continue; // No HW support in this codec; try the next one.
}
- Logging.v(TAG, "Found candidate decoder " + name);
+ Logging.d(TAG, "Found candidate decoder " + name);
// Check if this is supported decoder.
boolean supportedCodec = false;
@@ -166,6 +191,7 @@ public class MediaCodecVideoDecoder {
}
}
}
+ Logging.d(TAG, "No HW decoder found for mime " + mime);
return null; // No HW decoder.
}
@@ -173,6 +199,10 @@ public class MediaCodecVideoDecoder {
return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
}
+ public static boolean isVp9HwSupported() {
+ return findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
+ }
+
public static boolean isH264HwSupported() {
return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
}
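A small illustration of how the new VP9 capability query fits next to the existing ones when picking a decoder. This sketch is not part of the patch, and the preference order is an assumption.

import org.webrtc.MediaCodecVideoDecoder;

// Hypothetical helper: name of the first codec with HW decode support, or null for SW fallback.
final class HwDecoderPreference {
  static String preferredHwCodec() {
    if (MediaCodecVideoDecoder.isVp9HwSupported()) {
      return "VP9";
    } else if (MediaCodecVideoDecoder.isVp8HwSupported()) {
      return "VP8";
    } else if (MediaCodecVideoDecoder.isH264HwSupported()) {
      return "H264";
    }
    return null; // No HW decoder found; a software decoder will be used instead.
  }
}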
@@ -197,17 +227,21 @@ public class MediaCodecVideoDecoder {
}
}
- // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
- private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
- useSurface = (sharedContext != null);
+ useSurface = (surfaceTextureHelper != null);
String mime = null;
String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
mime = VP8_MIME_TYPE;
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
mime = H264_MIME_TYPE;
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
@@ -221,9 +255,6 @@ public class MediaCodecVideoDecoder {
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface);
- if (sharedContext != null) {
- Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
- }
runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread();
try {
@@ -233,16 +264,8 @@ public class MediaCodecVideoDecoder {
sliceHeight = height;
if (useSurface) {
- // Create shared EGL context.
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
- eglBase.createDummyPbufferSurface();
- eglBase.makeCurrent();
-
- // Create output surface
- textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
- Logging.d(TAG, "Video decoder TextureID = " + textureID);
- surfaceTexture = new SurfaceTexture(textureID);
- surface = new Surface(surfaceTexture);
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
}
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -261,6 +284,10 @@ public class MediaCodecVideoDecoder {
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length);
return true;
@@ -271,25 +298,45 @@ public class MediaCodecVideoDecoder {
}
private void release() {
- Logging.d(TAG, "Java releaseDecoder");
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
checkOnMediaCodecThread();
- try {
- mediaCodec.stop();
- mediaCodec.release();
- } catch (IllegalStateException e) {
- Logging.e(TAG, "release failed", e);
+
+ // Run MediaCodec stop() and release() on a separate thread since sometimes
+ // MediaCodec.stop() may hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseDecoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseDecoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media decoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+ }
}
+
mediaCodec = null;
mediaCodecThread = null;
runningInstance = null;
if (useSurface) {
surface.release();
surface = null;
- Logging.d(TAG, "Delete video decoder TextureID " + textureID);
- GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
- textureID = 0;
- eglBase.release();
- eglBase = null;
+ textureListener.release();
}
Logging.d(TAG, "Java releaseDecoder done");
}
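The release() change above works around MediaCodec.stop() occasionally hanging by running stop()/release() on a helper thread and bounding the wait with MEDIA_CODEC_RELEASE_TIMEOUT_MS. Below is a minimal standalone sketch of that pattern; it uses plain CountDownLatch.await() in place of WebRTC's ThreadUtils.awaitUninterruptibly(), and the class and method names are illustrative.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

final class HangGuard {
  // Runs |possiblyHangingCall| on a helper thread and waits at most |timeoutMs|.
  // Returns false if the call did not finish in time (i.e. it is presumably hanging).
  static boolean runWithTimeout(final Runnable possiblyHangingCall, long timeoutMs) {
    final CountDownLatch done = new CountDownLatch(1);
    new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          possiblyHangingCall.run();
        } finally {
          done.countDown();
        }
      }
    }).start();
    try {
      return done.await(timeoutMs, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // Restore the interrupted status.
      return false; // Treat an interrupted wait as "did not finish".
    }
  }
}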
@@ -306,13 +353,15 @@ public class MediaCodecVideoDecoder {
}
}
- private boolean queueInputBuffer(
- int inputBufferIndex, int size, long timestampUs) {
+ private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+ long timeStampMs, long ntpTimeStampMs) {
checkOnMediaCodecThread();
try {
inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size);
- mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
+ decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+ ntpTimeStampMs));
+ mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
return true;
}
catch (IllegalStateException e) {
@@ -321,56 +370,183 @@ public class MediaCodecVideoDecoder {
}
}
- // Helper structs for dequeueOutputBuffer() below.
- private static class DecodedByteBuffer {
- public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+ private static class TimeStamps {
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ }
+ private final long decodeStartTimeMs; // Time when this frame was queued for decoding.
+ private final long timeStampMs; // Only bookkeeping on the Java side; the value is used in C++.
+ private final long ntpTimeStampMs; // Only bookkeeping on the Java side; the value is used in C++.
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long timeStampMs,
+ long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
this.index = index;
this.offset = offset;
this.size = size;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
}
private final int index;
private final int offset;
private final int size;
- private final long presentationTimestampUs;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame finished decoding.
+ private final long endDecodeTimeMs;
}
+ // Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer {
private final int textureID;
- private final long presentationTimestampUs;
+ private final float[] transformMatrix;
+ private final long timeStampMs;
+ private final long ntpTimeStampMs;
+ private final long decodeTimeMs;
+ // Interval from when the frame finished decoding until this buffer has been created.
+ // Since there is only one texture, this interval depends on the time from when
+ // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+ // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
- public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timeStampMs,
+ long ntpTimeStampMs, long decodeTimeMs, long frameDelay) {
this.textureID = textureID;
- this.presentationTimestampUs = presentationTimestampUs;
+ this.transformMatrix = transformMatrix;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
}
}
- // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
- // DecodedTexturebuffer depending on |useSurface| configuration.
+ // Poll based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+ // |bufferToRender| is non-null while waiting for the transition from addBufferToRender() to
+ // onTextureFrameAvailable().
+ private DecodedOutputBuffer bufferToRender;
+ private DecodedTextureBuffer renderedBuffer;
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.setListener(this);
+ }
+
+ public void addBufferToRender(DecodedOutputBuffer buffer) {
+ if (bufferToRender != null) {
+ Logging.e(TAG,
+ "Unexpected addBufferToRender() called while waiting for a texture.");
+ throw new IllegalStateException("Waiting for a texture.");
+ }
+ bufferToRender = buffer;
+ }
+
+ public boolean isWaitingForTexture() {
+ synchronized (newFrameLock) {
+ return bufferToRender != null;
+ }
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+ bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+ bufferToRender = null;
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+ try {
+ newFrameLock.wait(timeoutMs);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ DecodedTextureBuffer returnedBuffer = renderedBuffer;
+ renderedBuffer = null;
+ return returnedBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.disconnect();
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ renderedBuffer = null;
+ }
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
- private Object dequeueOutputBuffer(int dequeueTimeoutUs)
- throws IllegalStateException, MediaCodec.CodecException {
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
// Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
- final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) {
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- return null;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString());
+ int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+ int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+ throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+ height + ". New " + new_width + "*" + new_height);
+ }
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@@ -388,18 +564,76 @@ public class MediaCodecVideoDecoder {
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
default:
- // Output buffer decoded.
- if (useSurface) {
- mediaCodec.releaseOutputBuffer(result, true /* render */);
- // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
- // frame.
- return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
- } else {
- return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
- }
+ hasDecodedFirstFrame = true;
+ TimeStamps timeStamps = decodeStartTimeMs.remove();
+ return new DecodedOutputBuffer(result, info.offset, info.size, timeStamps.timeStampMs,
+ timeStamps.ntpTimeStampMs,
+ SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
+ SystemClock.elapsedRealtime());
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+ // a frame can't be returned.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+ maybeRenderDecodedTextureBuffer();
+ // Check if there is texture ready now by waiting max |dequeueTimeoutMs|.
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+ if (renderedBuffer != null) {
+ maybeRenderDecodedTextureBuffer();
+ return renderedBuffer;
+ }
+
+ if ((dequeuedSurfaceOutputBuffers.size()
+ >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+ || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
+ ++droppedFrames;
+ // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. The overall oldest
+ // frame is owned by |textureListener| and can't be dropped, since
+ // mediaCodec.releaseOutputBuffer() has already been called for it.
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+ if (dequeueTimeoutMs > 0) {
+ // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+ // return the one and only texture even if it does not render.
+ // Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ // + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
+ } else {
+ Logging.w(TAG, "Too many output buffers. Dropping frame with TS: "
+ + droppedFrame.timeStampMs + ". Total number of dropped frames: " + droppedFrames);
}
+
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+ return new DecodedTextureBuffer(0, null, droppedFrame.timeStampMs,
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+ }
+ return null;
+ }
+
+ private void maybeRenderDecodedTextureBuffer() {
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+ return;
}
+ // Get the first frame in the queue and render to the decoder output surface.
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+ textureListener.addBufferToRender(buffer);
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
}
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
@@ -407,11 +641,11 @@ public class MediaCodecVideoDecoder {
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error.
- private void returnDecodedByteBuffer(int index)
+ private void returnDecodedOutputBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread();
if (useSurface) {
- throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
}
mediaCodec.releaseOutputBuffer(index, false /* render */);
}