From c01c25434ba92f6ea32cdfdcde77ec8278182851 Mon Sep 17 00:00:00 2001
From: Per
Date: Fri, 13 Nov 2015 16:58:26 +0100
Subject: Revert of Android MediaCodecVideoDecoder: Manage lifetime of texture
 frames (patchset #12 id:320001 of https://codereview.webrtc.org/1422963003/ )

Reason for revert:
Causes fallback to SW decoder if a renderer is put in the background.

Original issue's description:
> Patchset 1 is a pure
> revert of "Revert of "Android MediaCodecVideoDecoder: Manage lifetime of texture frames" https://codereview.webrtc.org/1378033003/
>
> Following patchsets move the responsibility of calculating the decode time to Java.
>
> TESTED= Apprtc loopback using H264 and VP8 on N5, N6, N7, S5
>
> Committed: https://crrev.com/9cb8982e64f08d3d630bf7c3d2bcc78c10db88e2
> Cr-Commit-Position: refs/heads/master@{#10597}

TBR=magjed@webrtc.org,glaznev@webrtc.org
NOPRESUBMIT=true
NOTREECHECKS=true

Review URL: https://codereview.webrtc.org/1441363002 .

Cr-Commit-Position: refs/heads/master@{#10637}
---
 .../android/org/webrtc/SurfaceViewRenderer.java    |  19 +-
 .../java/android/org/webrtc/VideoRendererGui.java  |  21 +-
 .../app/webrtc/java/jni/androidmediadecoder_jni.cc | 172 +++++++------
 .../webrtc/java/jni/androidvideocapturer_jni.cc    |  18 +-
 .../app/webrtc/java/jni/androidvideocapturer_jni.h |   4 +-
 talk/app/webrtc/java/jni/classreferenceholder.cc   |   2 +-
 talk/app/webrtc/java/jni/native_handle_impl.cc     |  36 ++-
 talk/app/webrtc/java/jni/native_handle_impl.h      |  37 ++-
 talk/app/webrtc/java/jni/peerconnection_jni.cc     |   8 +-
 .../webrtc/java/jni/surfacetexturehelper_jni.cc    |   2 +-
 .../app/webrtc/java/jni/surfacetexturehelper_jni.h |   2 +-
 .../src/org/webrtc/MediaCodecVideoDecoder.java     | 267 +++++----------------
 .../webrtc/java/src/org/webrtc/VideoRenderer.java  |  23 +-
 13 files changed, 263 insertions(+), 348 deletions(-)
(limited to 'talk/app/webrtc/java')

diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index c4785c9913..d3e6d67290 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -30,7 +30,9 @@ package org.webrtc;
 import android.content.Context;
 import android.content.res.Resources.NotFoundException;
 import android.graphics.Point;
+import android.graphics.SurfaceTexture;
 import android.opengl.GLES20;
+import android.opengl.Matrix;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.util.AttributeSet;
@@ -455,10 +457,25 @@ public class SurfaceViewRenderer extends SurfaceView
     }
     final long startTimeNs = System.nanoTime();
+    final float[] samplingMatrix;
+    if (frame.yuvFrame) {
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = RendererCommon.verticalFlipMatrix();
+    } else {
+      // TODO(magjed): Move updateTexImage() to the video source instead.
+      SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
+      surfaceTexture.updateTexImage();
+      samplingMatrix = new float[16];
+      surfaceTexture.getTransformMatrix(samplingMatrix);
+    }
+
     final float[] texMatrix;
     synchronized (layoutLock) {
       final float[] rotatedSamplingMatrix =
-          RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
+          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
       final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
       texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
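Aside (not part of the patch): the "vertical flip as sampling matrix" mentioned above is just a 4x4 texture-coordinate transform mapping v to 1 - v. A minimal sketch of such a helper, using the column-major float[16] layout that OpenGL and android.opengl.Matrix expect; the same literal values reappear in the VideoRenderer.java hunk at the end of this patch:

    // Column-major 4x4 matrix that flips texture coordinates vertically:
    // u' = u, v' = 1 - v. Used when sampling ByteBuffer frames, whose first
    // element is the top-left pixel rather than OpenGL's bottom-left.
    public static float[] verticalFlipMatrix() {
      return new float[] {
          1,  0, 0, 0,   // column 0
          0, -1, 0, 0,   // column 1: negate v...
          0,  0, 1, 0,   // column 2
          0,  1, 0, 1};  // column 3: ...then add 1, so v' = 1 - v
    }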
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index da6e51bc29..cc2d230dcc 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -38,6 +38,7 @@ import javax.microedition.khronos.opengles.GL10;
 import android.annotation.SuppressLint;
 import android.graphics.Point;
 import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
 import android.opengl.GLES20;
 import android.opengl.GLSurfaceView;
@@ -240,15 +241,29 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
       if (isNewFrame) {
-        rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
-            pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
         if (pendingFrame.yuvFrame) {
           rendererType = RendererType.RENDERER_YUV;
           drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
               pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+          // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+          // top-left corner of the image, but in glTexImage2D() the first element corresponds to
+          // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
+          // sampling matrix.
+          final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
+          rotatedSamplingMatrix =
+              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
         } else {
           rendererType = RendererType.RENDERER_TEXTURE;
-          // External texture rendering. Make a deep copy of the external texture.
+          // External texture rendering. Update texture image to latest and make a deep copy of
+          // the external texture.
+          // TODO(magjed): Move updateTexImage() to the video source instead.
+          final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
+          surfaceTexture.updateTexImage();
+          final float[] samplingMatrix = new float[16];
+          surfaceTexture.getTransformMatrix(samplingMatrix);
+          rotatedSamplingMatrix =
+              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
+
+          // Reallocate offscreen texture if necessary.
           textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
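Aside (not part of the patch): both renderer hunks above rely on the same SurfaceTexture consumer idiom. A minimal sketch of that pattern; it must run on the thread where the consumer's GL context is current, and frame stands in for a VideoRenderer.I420Frame texture frame:

    SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
    surfaceTexture.updateTexImage();  // latch the most recent image into the OES texture
    float[] samplingMatrix = new float[16];
    surfaceTexture.getTransformMatrix(samplingMatrix);
    // Draw the GL_TEXTURE_EXTERNAL_OES texture, sampling through samplingMatrix.

The TODO(magjed) comments flag why this is a stopgap: updateTexImage() always latches the newest frame, so calling it per renderer couples frame pacing to whichever consumer happens to call it first.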
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index e506bc3ab3..3bfad6885f 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,7 +33,6 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
-#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -113,7 +112,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  NativeHandleImpl native_handle_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
@@ -124,6 +123,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   uint32_t max_pending_frames_;  // Maximum number of pending input frames
   std::vector<int32_t> timestamps_;
   std::vector<int64_t> ntp_times_ms_;
+  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
+                                             // decoder input.

   // State that is constant for the lifetime of this object once the ctor
   // returns.
@@ -134,8 +135,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jmethodID j_release_method_;
   jmethodID j_dequeue_input_buffer_method_;
   jmethodID j_queue_input_buffer_method_;
-  jmethodID j_dequeue_byte_buffer_method_;
-  jmethodID j_dequeue_texture_buffer_method_;
+  jmethodID j_dequeue_output_buffer_method_;
   jmethodID j_return_decoded_byte_buffer_method_;
   // MediaCodecVideoDecoder fields.
   jfieldID j_input_buffers_field_;
@@ -145,21 +145,20 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
+  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
-  jfieldID j_texture_id_field_;
-  jfieldID j_transform_matrix_field_;
+  jfieldID j_textureID_field_;
   jfieldID j_texture_presentation_timestamp_us_field_;
-  jfieldID j_texture_decode_time_ms_field_;
-  jfieldID j_texture_frame_delay_ms_field_;
-  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
+  // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
   jfieldID j_info_size_field_;
   jfieldID j_info_presentation_timestamp_us_field_;
-  jfieldID j_byte_buffer_decode_time_ms_field_;

   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
+  jobject surface_texture_;
+  jobject previous_surface_texture_;

   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -173,6 +172,8 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       key_frame_required_(true),
       inited_(false),
       sw_fallback_required_(false),
+      surface_texture_(NULL),
+      previous_surface_texture_(NULL),
      codec_thread_(new Thread()),
      j_media_codec_video_decoder_class_(
          jni,
@@ -191,22 +192,19 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILorg/webrtc/SurfaceTextureHelper;)Z");
+      "IILjavax/microedition/khronos/egl/EGLContext;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
   j_queue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
-  j_dequeue_byte_buffer_method_ = GetMethodID(
+  j_dequeue_output_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
-      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
-  j_dequeue_texture_buffer_method_ = GetMethodID(
-      jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
-      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
+      "(I)Ljava/lang/Object;");
   j_return_decoded_byte_buffer_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_,
-                  "returnDecodedOutputBuffer", "(I)V");
+                  "returnDecodedByteBuffer", "(I)V");

   j_input_buffers_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_,
@@ -224,32 +222,28 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+  j_surface_texture_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
+      "Landroid/graphics/SurfaceTexture;");

-  jclass j_decoded_texture_buffer_class = FindClass(jni,
+  jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
-  j_texture_id_field_ = GetFieldID(
-      jni, j_decoded_texture_buffer_class, "textureID", "I");
-  j_transform_matrix_field_ = GetFieldID(
-      jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
-  j_texture_presentation_timestamp_us_field_ = GetFieldID(
-      jni, j_decoded_texture_buffer_class, "presentationTimestampUs", "J");
-  j_texture_decode_time_ms_field_ = GetFieldID(
-      jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
-  j_texture_frame_delay_ms_field_ = GetFieldID(
-      jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
-
-  jclass j_decoded_output_buffer_class = FindClass(jni,
-      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  j_textureID_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
+  j_texture_presentation_timestamp_us_field_ =
+      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
+                 "presentationTimestampUs", "J");
+
+  jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
   j_info_index_field_ = GetFieldID(
-      jni, j_decoded_output_buffer_class, "index", "I");
+      jni, j_decoder_decoded_byte_buffer_class, "index", "I");
   j_info_offset_field_ = GetFieldID(
-      jni, j_decoded_output_buffer_class, "offset", "I");
+      jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
   j_info_size_field_ = GetFieldID(
-      jni, j_decoded_output_buffer_class, "size", "I");
"size", "I"); + jni, j_decoder_decoded_byte_buffer_class, "size", "I"); j_info_presentation_timestamp_us_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "presentationTimestampUs", "J"); - j_byte_buffer_decode_time_ms_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "decodeTimeMs", "J"); + jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; use_surface_ = (render_egl_context_ != NULL); @@ -261,6 +255,14 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { // Call Release() to ensure no more callbacks to us after we are deleted. Release(); + // Delete global references. + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + if (previous_surface_texture_ != NULL) { + jni->DeleteGlobalRef(previous_surface_texture_); + } + if (surface_texture_ != NULL) { + jni->DeleteGlobalRef(surface_texture_); + } } int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, @@ -311,11 +313,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { frames_received_ = 0; frames_decoded_ = 0; - if (use_surface_) { - surface_texture_helper_ = new rtc::RefCountedObject( - jni, render_egl_context_); - } - jobject j_video_codec_enum = JavaEnumFromIndex( jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); bool success = jni->CallBooleanMethod( @@ -324,8 +321,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { j_video_codec_enum, codec_.width, codec_.height, - use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() - : nullptr); + use_surface_ ? render_egl_context_ : nullptr); if (CheckException(jni) || !success) { ALOGE << "Codec initialization error - fallback to SW codec."; sw_fallback_required_ = true; @@ -352,6 +348,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { current_decoding_time_ms_ = 0; timestamps_.clear(); ntp_times_ms_.clear(); + frame_rtc_times_ms_.clear(); jobjectArray input_buffers = (jobjectArray)GetObjectField( jni, *j_media_codec_video_decoder_, j_input_buffers_field_); @@ -368,6 +365,15 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { } } + if (use_surface_) { + jobject surface_texture = GetObjectField( + jni, *j_media_codec_video_decoder_, j_surface_texture_field_); + if (previous_surface_texture_ != NULL) { + jni->DeleteGlobalRef(previous_surface_texture_); + } + previous_surface_texture_ = surface_texture_; + surface_texture_ = jni->NewGlobalRef(surface_texture); + } codec_thread_->PostDelayed(kMediaCodecPollMs, this); return WEBRTC_VIDEO_CODEC_OK; @@ -393,7 +399,6 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { } input_buffers_.clear(); jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); - surface_texture_helper_ = nullptr; inited_ = false; rtc::MessageQueueManager::Clear(this); if (CheckException(jni)) { @@ -503,7 +508,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( if (frames_received_ > frames_decoded_ + max_pending_frames_) { ALOGV("Received: %d. Decoded: %d. Wait for output...", frames_received_, frames_decoded_); - if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { + if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { ALOGE << "DeliverPendingOutputs error. Frames received: " << frames_received_ << ". 
Frames decoded: " << frames_decoded_; return ProcessHWErrorOnCodecThread(); @@ -548,6 +553,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( current_bytes_ += inputImage._length; timestamps_.push_back(inputImage._timeStamp); ntp_times_ms_.push_back(inputImage.ntp_time_ms_); + frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); // Feed input to decoder. bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, @@ -570,18 +576,16 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( } bool MediaCodecVideoDecoder::DeliverPendingOutputs( - JNIEnv* jni, int dequeue_timeout_ms) { + JNIEnv* jni, int dequeue_timeout_us) { if (frames_received_ <= frames_decoded_) { // No need to query for output buffers - decoder is drained. return true; } // Get decoder output. - jobject j_decoder_output_buffer = - jni->CallObjectMethod(*j_media_codec_video_decoder_, - use_surface_ ? j_dequeue_texture_buffer_method_ - : j_dequeue_byte_buffer_method_, - dequeue_timeout_ms); - + jobject j_decoder_output_buffer = jni->CallObjectMethod( + *j_media_codec_video_decoder_, + j_dequeue_output_buffer_method_, + dequeue_timeout_us); if (CheckException(jni)) { ALOGE << "dequeueOutputBuffer() error"; return false; @@ -601,30 +605,19 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( j_slice_height_field_); rtc::scoped_refptr frame_buffer; - int64_t output_timestamps_ms = 0; - int decode_time_ms = 0; - int64_t frame_delayed_ms = 0; + long output_timestamps_ms = 0; if (use_surface_) { // Extract data from Java DecodedTextureBuffer. const int texture_id = - GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); - if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. - const jfloatArray j_transform_matrix = - reinterpret_cast(GetObjectField( - jni, j_decoder_output_buffer, j_transform_matrix_field_)); - const int64_t timestamp_us = - GetLongField(jni, j_decoder_output_buffer, - j_texture_presentation_timestamp_us_field_); - output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; - decode_time_ms = GetLongField(jni, j_decoder_output_buffer, - j_texture_decode_time_ms_field_); - frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer, - j_texture_frame_delay_ms_field_); - - // Create webrtc::VideoFrameBuffer with native texture handle. - frame_buffer = surface_texture_helper_->CreateTextureFrame( - width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); - } + GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); + const int64_t timestamp_us = + GetLongField(jni, j_decoder_output_buffer, + j_texture_presentation_timestamp_us_field_); + output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; + // Create webrtc::VideoFrameBuffer with native texture handle. + native_handle_.SetTextureObject(surface_texture_, texture_id); + frame_buffer = new rtc::RefCountedObject( + &native_handle_, width, height); } else { // Extract data from Java ByteBuffer and create output yuv420 frame - // for non surface decoding only. 
@@ -637,8 +630,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     const int64_t timestamp_us = GetLongField(
         jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
     output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
-    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
-                                  j_byte_buffer_decode_time_ms_field_);

     if (output_buffer_size < width * height * 3 / 2) {
       ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@@ -696,7 +687,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
                         j_return_decoded_byte_buffer_method_,
                         output_buffer_index);
     if (CheckException(jni)) {
-      ALOGE << "returnDecodedOutputBuffer error";
+      ALOGE << "returnDecodedByteBuffer error";
       return false;
     }
   }
@@ -711,24 +702,26 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
     ntp_times_ms_.erase(ntp_times_ms_.begin());
   }
-
+  int64_t frame_decoding_time_ms = 0;
+  if (frame_rtc_times_ms_.size() > 0) {
+    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+  }
   if (frames_decoded_ < kMaxDecodedLogFrames) {
     ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
         " x " << height << ". " << stride << " x " << slice_height <<
         ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
-        ". DecTime: " << (int)decode_time_ms <<
-        ". DelayTime: " << (int)frame_delayed_ms;
+        ". DecTime: " << (int)frame_decoding_time_ms;
   }

   // Calculate and print decoding statistics - every 3 seconds.
   frames_decoded_++;
   current_frames_++;
-  current_decoding_time_ms_ += decode_time_ms;
+  current_decoding_time_ms_ += frame_decoding_time_ms;
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
-    ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
-        << frames_received_ << ". Bitrate: " <<
+    ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
         (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
         ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) <<
         ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@@ -739,15 +732,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     current_decoding_time_ms_ = 0;
   }

-  // |.IsZeroSize())| returns true when a frame has been dropped.
-  if (!decoded_frame.IsZeroSize()) {
-    // Callback - output decoded frame.
-    const int32_t callback_status =
-        callback_->Decoded(decoded_frame, decode_time_ms);
-    if (callback_status > 0) {
-      ALOGE << "callback error";
-    }
+  // Callback - output decoded frame.
+  const int32_t callback_status = callback_->Decoded(decoded_frame);
+  if (callback_status > 0) {
+    ALOGE << "callback error";
   }
+
   return true;
 }
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 054719a11d..02b9f22015 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -180,10 +180,16 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
       buffer, rotation, timestamp_ns);
 }

-void AndroidVideoCapturerJni::OnTextureFrame(int width,
-                                             int height,
-                                             int64_t timestamp_ns,
-                                             const NativeHandleImpl& handle) {
+void AndroidVideoCapturerJni::OnTextureFrame(
+    int width,
+    int height,
+    int64_t timestamp_ns,
+    const NativeTextureHandleImpl& handle) {
+  // TODO(magjed): Fix this. See bug webrtc:4993.
+  RTC_NOTREACHED()
+      << "The rest of the stack for Android expects the native "
+         "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
+         "NativeTextureHandleImpl";
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
       new rtc::RefCountedObject<AndroidTextureBuffer>(
           width, height, handle,
@@ -228,8 +234,8 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
     jlong j_timestamp) {
   reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
       ->OnTextureFrame(j_width, j_height, j_timestamp,
-                       NativeHandleImpl(jni, j_oes_texture_id,
-                                        j_transform_matrix));
+                       NativeTextureHandleImpl(jni, j_oes_texture_id,
+                                               j_transform_matrix));
 }

 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index 96def5eae3..d1eb3a0ad0 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -39,7 +39,7 @@

 namespace webrtc_jni {

-class NativeHandleImpl;
+class NativeTextureHandleImpl;

 // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
 // The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -61,7 +61,7 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
   void OnMemoryBufferFrame(void* video_frame, int length, int width, int height,
                            int rotation, int64_t timestamp_ns);
   void OnTextureFrame(int width, int height, int64_t timestamp_ns,
-                      const NativeHandleImpl& handle);
+                      const NativeTextureHandleImpl& handle);
   void OnOutputFormatRequest(int width, int height, int fps);

 protected:
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index 13883bedbe..4c836f8252 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -85,7 +85,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
-  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
 #endif
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ed9ad8e891..ac3e0455df 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -31,9 +31,9 @@

 namespace webrtc_jni {

-NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
-                                   jint j_oes_texture_id,
-                                   jfloatArray j_transform_matrix)
+NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
+                                                 jint j_oes_texture_id,
+                                                 jfloatArray j_transform_matrix)
     : oes_texture_id(j_oes_texture_id) {
   RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
   jfloat* transform_matrix_ptr =
@@ -44,10 +44,38 @@ NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
   jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
 }

+NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
+
+void* NativeHandleImpl::GetHandle() {
+  return texture_object_;
+}
+
+int NativeHandleImpl::GetTextureId() {
+  return texture_id_;
+}
+
+void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
+  texture_object_ = reinterpret_cast<jobject>(texture_object);
+  texture_id_ = texture_id;
+}
+
+JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
+                                             int width,
+                                             int height)
+    : NativeHandleBuffer(native_handle, width, height) {}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+JniNativeHandleBuffer::NativeToI420Buffer() {
+  // TODO(pbos): Implement before using this in the encoder pipeline (or
+  // remove the RTC_CHECK() in VideoCapture).
+  RTC_NOTREACHED();
+  return nullptr;
+}
+
 AndroidTextureBuffer::AndroidTextureBuffer(
     int width,
     int height,
-    const NativeHandleImpl& native_handle,
+    const NativeTextureHandleImpl& native_handle,
     const rtc::Callback0<void>& no_longer_used)
     : webrtc::NativeHandleBuffer(&native_handle_, width, height),
       native_handle_(native_handle),
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 16d3d7cdde..dd04bc20b1 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -36,26 +36,51 @@
 namespace webrtc_jni {

 // Wrapper for texture object.
-struct NativeHandleImpl {
-  NativeHandleImpl(JNIEnv* jni,
-                   jint j_oes_texture_id,
-                   jfloatArray j_transform_matrix);
+struct NativeTextureHandleImpl {
+  NativeTextureHandleImpl(JNIEnv* jni,
+                          jint j_oes_texture_id,
+                          jfloatArray j_transform_matrix);

   const int oes_texture_id;
   float sampling_matrix[16];
 };

+// Native handle for SurfaceTexture + texture id.
+class NativeHandleImpl {
+ public:
+  NativeHandleImpl();
+
+  void* GetHandle();
+  int GetTextureId();
+  void SetTextureObject(void* texture_object, int texture_id);
+
+ private:
+  jobject texture_object_;
+  int32_t texture_id_;
+};
+
+class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
+ public:
+  JniNativeHandleBuffer(void* native_handle, int width, int height);
+
+  // TODO(pbos): Override destructor to release native handle, at the moment the
+  // native handle is not released based on refcount.
+
+ private:
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
+};
+
 class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
  public:
   AndroidTextureBuffer(int width,
                        int height,
-                       const NativeHandleImpl& native_handle,
+                       const NativeTextureHandleImpl& native_handle,
                        const rtc::Callback0<void>& no_longer_used);
   ~AndroidTextureBuffer();
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;

  private:
-  NativeHandleImpl native_handle_;
+  NativeTextureHandleImpl native_handle_;
   rtc::Callback0<void> no_longer_used_cb_;
 };
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index 984227de45..e75cd553b6 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -773,7 +773,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
             jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
         j_texture_frame_ctor_id_(GetMethodID(
             jni, *j_frame_class_, "<init>",
-            "(IIII[FJ)V")),
+            "(IIILjava/lang/Object;IJ)V")),
         j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
     CHECK_EXCEPTION(jni);
   }
@@ -829,13 +829,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
   jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
     NativeHandleImpl* handle =
         reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
-    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
-    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
+    int texture_id = handle->GetTextureId();
     return jni()->NewObject(
         *j_frame_class_, j_texture_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+        texture_object, texture_id, javaShallowCopy(frame));
   }

   JNIEnv* jni() {
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index 65c1737268..05f1b23768 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -70,7 +70,7 @@ void SurfaceTextureHelper::ReturnTextureFrame() const {
 rtc::scoped_refptr<webrtc::VideoFrameBuffer>
 SurfaceTextureHelper::CreateTextureFrame(int width, int height,
-    const NativeHandleImpl& native_handle) {
+    const NativeTextureHandleImpl& native_handle) {
   return new rtc::RefCountedObject<AndroidTextureBuffer>(
       width, height, native_handle,
       rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
index 5bd94b5f1b..dc9d2b853d 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -66,7 +66,7 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
       int width,
       int height,
-      const NativeHandleImpl& native_handle);
+      const NativeTextureHandleImpl& native_handle);

 protected:
   ~SurfaceTextureHelper();
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 0696440983..86221c4cd9 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -33,19 +33,19 @@ import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import android.os.Build;
-import android.os.SystemClock;
 import android.view.Surface;

 import org.webrtc.Logging;

 import java.nio.ByteBuffer;
 import java.util.Arrays;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-import java.util.Queue;
-import java.util.concurrent.TimeUnit;
+
+import javax.microedition.khronos.egl.EGLContext;

 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -104,21 +104,14 @@ public class MediaCodecVideoDecoder {
   private int height;
   private int stride;
   private int sliceHeight;
-  private boolean hasDecodedFirstFrame;
-  private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
   private boolean useSurface;
-
-  // The below variables are only used when decoding to a Surface.
-  private TextureListener textureListener;
-  // Max number of output buffers queued before starting to drop decoded frames.
-  private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
-  private int droppedFrames;
-  // |isWaitingForTexture| is true when waiting for the transition:
-  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
-  private boolean isWaitingForTexture;
+  private int textureID = 0;
+  private SurfaceTexture surfaceTexture = null;
   private Surface surface = null;
-  private final Queue<DecodedOutputBuffer>
-      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+  private EglBase eglBase;
+
+  private MediaCodecVideoDecoder() {
+  }

   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
@@ -230,13 +223,12 @@ public class MediaCodecVideoDecoder {
     }
   }

-  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
-  private boolean initDecode(
-      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
+  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (surfaceTextureHelper != null);
+    useSurface = (sharedContext != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -258,6 +250,9 @@ public class MediaCodecVideoDecoder {
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
+    if (sharedContext != null) {
+      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
+    }
     runningInstance = this; // Decoder is now running and can be queried for stack traces.
     mediaCodecThread = Thread.currentThread();
     try {
@@ -267,8 +262,16 @@ public class MediaCodecVideoDecoder {
       sliceHeight = height;

       if (useSurface) {
-        textureListener = new TextureListener(surfaceTextureHelper);
-        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+        // Create shared EGL context.
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+        eglBase.createDummyPbufferSurface();
+        eglBase.makeCurrent();
+
+        // Create output surface
+        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+        Logging.d(TAG, "Video decoder TextureID = " + textureID);
+        surfaceTexture = new SurfaceTexture(textureID);
+        surface = new Surface(surfaceTexture);
       }

       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -287,11 +290,6 @@ public class MediaCodecVideoDecoder {
       colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
       inputBuffers = mediaCodec.getInputBuffers();
-      decodeStartTimeMs.clear();
-      hasDecodedFirstFrame = false;
-      dequeuedSurfaceOutputBuffers.clear();
-      droppedFrames = 0;
-      isWaitingForTexture = false;
       Logging.d(TAG, "Input buffers: " + inputBuffers.length +
           ". Output buffers: " + outputBuffers.length);
       return true;
@@ -302,7 +300,7 @@ public class MediaCodecVideoDecoder {
   }

   private void release() {
-    Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
+    Logging.d(TAG, "Java releaseDecoder");
     checkOnMediaCodecThread();

     // Run Mediacodec stop() and release() on separate thread since sometime
@@ -340,7 +338,11 @@ public class MediaCodecVideoDecoder {
     if (useSurface) {
       surface.release();
       surface = null;
-      textureListener.release();
+      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
+      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
+      textureID = 0;
+      eglBase.release();
+      eglBase = null;
     }
     Logging.d(TAG, "Java releaseDecoder done");
   }
@@ -363,7 +365,6 @@ public class MediaCodecVideoDecoder {
     try {
       inputBuffers[inputBufferIndex].position(0);
       inputBuffers[inputBufferIndex].limit(size);
-      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
       mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
       return true;
     }
   }
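Aside (not part of the patch): queueInputBuffer() above is one half of MediaCodec's buffer-loop contract. A minimal sketch of the input path in the pre-API-21 style this class uses (inputBuffers comes from mediaCodec.getInputBuffers(); TIMEOUT_US, encodedFrame and presentationTimeUs are placeholders):

    int inputBufferIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_US);  // < 0 means try again
    if (inputBufferIndex >= 0) {
      ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
      inputBuffer.position(0);
      int size = encodedFrame.remaining();  // capture before put() consumes the source
      inputBuffer.put(encodedFrame);
      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeUs, 0);
    }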
-  // Helper struct for dequeueOutputBuffer() below.
-  private static class DecodedOutputBuffer {
-    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
-        long decodeTime, long endDecodeTime) {
+  // Helper structs for dequeueOutputBuffer() below.
+  private static class DecodedByteBuffer {
+    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
       this.index = index;
       this.offset = offset;
       this.size = size;
       this.presentationTimestampUs = presentationTimestampUs;
-      this.decodeTimeMs = decodeTime;
-      this.endDecodeTimeMs = endDecodeTime;
     }

     private final int index;
     private final int offset;
     private final int size;
     private final long presentationTimestampUs;
-    // Number of ms it took to decode this frame.
-    private final long decodeTimeMs;
-    // System time when this frame finished decoding.
-    private final long endDecodeTimeMs;
   }

-  // Helper struct for dequeueTextureBuffer() below.
   private static class DecodedTextureBuffer {
     private final int textureID;
-    private final float[] transformMatrix;
     private final long presentationTimestampUs;
-    private final long decodeTimeMs;
-    // Interval from when the frame finished decoding until this buffer has been created.
-    // Since there is only one texture, this interval depend on the time from when
-    // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
-    // so that the texture can be updated with the next decoded frame.
-    private final long frameDelayMs;
-
-    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
-    // that was dropped.
-    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
-        long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
+
+    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
       this.textureID = textureID;
-      this.transformMatrix = transformMatrix;
       this.presentationTimestampUs = presentationTimestampUs;
-      this.decodeTimeMs = decodeTimeMs;
-      this.frameDelayMs = frameDelay;
     }
   }

-  // Poll based texture listener.
-  private static class TextureListener
-      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
-    public static class TextureInfo {
-      private final int textureID;
-      private final float[] transformMatrix;
-
-      TextureInfo(int textureId, float[] transformMatrix) {
-        this.textureID = textureId;
-        this.transformMatrix = transformMatrix;
-      }
-    }
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    private TextureInfo textureInfo;
-    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
-    private final Object newFrameLock = new Object();
-
-    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
-      this.surfaceTextureHelper = surfaceTextureHelper;
-      surfaceTextureHelper.setListener(this);
-    }
-
-    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
-    @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
-      synchronized (newFrameLock) {
-        if (textureInfo != null) {
-          Logging.e(TAG,
-              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
-          throw new IllegalStateException("Already holding a texture.");
-        }
-        // |timestampNs| is always zero on some Android versions.
-        textureInfo = new TextureInfo(oesTextureId, transformMatrix);
-        newFrameLock.notifyAll();
-      }
-    }
-
-    // Dequeues and returns a TextureInfo if available, or null otherwise.
-    public TextureInfo dequeueTextureInfo(int timeoutMs) {
-      synchronized (newFrameLock) {
-        if (textureInfo == null && timeoutMs > 0) {
-          try {
-            newFrameLock.wait(timeoutMs);
-          } catch(InterruptedException e) {
-            // Restore the interrupted status by reinterrupting the thread.
-            Thread.currentThread().interrupt();
-          }
-        }
-        TextureInfo returnedInfo = textureInfo;
-        textureInfo = null;
-        return returnedInfo;
-      }
-    }
-
-    public void release() {
-      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
-      surfaceTextureHelper.disconnect();
-      synchronized (newFrameLock) {
-        if (textureInfo != null) {
-          surfaceTextureHelper.returnTextureFrame();
-          textureInfo = null;
-        }
-      }
-    }
-  }
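Aside (not part of the patch): the TextureListener being deleted above is, at its core, a one-slot wait()/notifyAll() handoff between the GL callback thread and the codec thread. Reduced to its essentials (illustrative only; these names are not from this tree):

    private final Object newFrameLock = new Object();
    private float[] pendingMatrix;  // null means no frame is waiting

    void onFrameAvailable(float[] transformMatrix) {  // producer (GL callback) thread
      synchronized (newFrameLock) {
        pendingMatrix = transformMatrix;
        newFrameLock.notifyAll();
      }
    }

    float[] dequeue(int timeoutMs) {  // consumer (codec) thread; null on timeout
      synchronized (newFrameLock) {
        if (pendingMatrix == null && timeoutMs > 0) {
          try {
            newFrameLock.wait(timeoutMs);
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();  // preserve interrupt status
          }
        }
        float[] result = pendingMatrix;
        pendingMatrix = null;
        return result;
      }
    }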
-  // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
+  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
+  // DecodedTexturebuffer depending on |useSurface| configuration.
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
+      throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
-    if (decodeStartTimeMs.isEmpty()) {
-      return null;
-    }
+
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(
-          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
       switch (result) {
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          return null;
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
           outputBuffers = mediaCodec.getOutputBuffers();
           Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
-          if (hasDecodedFirstFrame) {
-            throw new RuntimeException("Unexpected output buffer change event.");
-          }
           break;
         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
           MediaFormat format = mediaCodec.getOutputFormat();
           Logging.d(TAG, "Decoder format changed: " + format.toString());
-          int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
-          int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
-          if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
-            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
-                height + ". New " + new_width + "*" + new_height);
-          }
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);
-
           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
             Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@@ -540,75 +442,18 @@ public class MediaCodecVideoDecoder {
           stride = Math.max(width, stride);
           sliceHeight = Math.max(height, sliceHeight);
           break;
-        case MediaCodec.INFO_TRY_AGAIN_LATER:
-          return null;
         default:
-          hasDecodedFirstFrame = true;
-          return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
-              SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(),
-              SystemClock.elapsedRealtime());
-      }
-    }
-  }
-
-  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
-  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
-  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
-  // upon codec error.
-  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
-    checkOnMediaCodecThread();
-    if (!useSurface) {
-      throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
-    }
-
-    DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
-    if (outputBuffer != null) {
-      if (dequeuedSurfaceOutputBuffers.size() >= Math.min(
-          MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) {
-        ++droppedFrames;
-        Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
-            + droppedFrames);
-        // Drop the newest frame. Don't drop the oldest since if |isWaitingForTexture|
-        // releaseOutputBuffer has already been called. Dropping the newest frame will lead to a
-        // shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
-        mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
-        return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
-            outputBuffer.decodeTimeMs,
-            SystemClock.elapsedRealtime() - outputBuffer.endDecodeTimeMs);
-      }
-      dequeuedSurfaceOutputBuffers.add(outputBuffer);
-    }
-
-    if (dequeuedSurfaceOutputBuffers.isEmpty()) {
-      return null;
-    }
-
-    if (!isWaitingForTexture) {
-      // Get the first frame in the queue and render to the decoder output surface.
-      mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
-      isWaitingForTexture = true;
-    }
-
-    // We are waiting for a frame to be rendered to the decoder surface.
-    // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
-    // rendered at a time.
-    TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
-    if (info != null) {
-      isWaitingForTexture = false;
-      final DecodedOutputBuffer renderedBuffer = dequeuedSurfaceOutputBuffers.remove();
-      if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
-        // Get the next frame in the queue and render to the decoder output surface.
-        mediaCodec.releaseOutputBuffer(
-            dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
-        isWaitingForTexture = true;
+          // Output buffer decoded.
+          if (useSurface) {
+            mediaCodec.releaseOutputBuffer(result, true /* render */);
+            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
+            // frame.
+            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          } else {
+            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+          }
       }
-
-      return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
-          renderedBuffer.presentationTimestampUs, renderedBuffer.decodeTimeMs,
-          SystemClock.elapsedRealtime() - renderedBuffer.endDecodeTimeMs);
     }
-    return null;
   }

   // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
   // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
   // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
   // MediaCodec.CodecException upon codec error.
-  private void returnDecodedOutputBuffer(int index)
+  private void returnDecodedByteBuffer(int index)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
     if (useSurface) {
-      throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
+      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
     }
     mediaCodec.releaseOutputBuffer(index, false /* render */);
   }
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
index 2e307fc54b..3c255dd123 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java
@@ -46,11 +46,7 @@ public class VideoRenderer {
     public final int[] yuvStrides;
     public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
-    // Matrix that transforms standard coordinates to their proper sampling locations in
-    // the texture. This transform compensates for any properties of the video source that
-    // cause it to appear different from a normalized texture. This matrix does not take
-    // |rotationDegree| into account.
-    public final float[] samplingMatrix;
+    public Object textureObject;
     public int textureId;

     // Frame pointer in C++.
     private long nativeFramePointer;
@@ -74,27 +70,19 @@ public class VideoRenderer {
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
-      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
-      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
-      // matrix.
-      samplingMatrix = new float[] {
-          1, 0, 0, 0,
-          0, -1, 0, 0,
-          0, 0, 1, 0,
-          0, 1, 0, 1};
     }

     /**
     * Construct a texture frame of the given dimensions with data in SurfaceTexture
     */
-    I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
-        long nativeFramePointer) {
+    I420Frame(
+        int width, int height, int rotationDegree,
+        Object textureObject, int textureId, long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
       this.yuvPlanes = null;
-      this.samplingMatrix = samplingMatrix;
+      this.textureObject = textureObject;
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
@@ -137,6 +125,7 @@ public class VideoRenderer {
    */
   public static void renderFrameDone(I420Frame frame) {
     frame.yuvPlanes = null;
+    frame.textureObject = null;
     frame.textureId = 0;
     if (frame.nativeFramePointer != 0) {
       releaseNativeFrame(frame.nativeFramePointer);
--
cgit v1.2.3