Diffstat (limited to 'talk/app/webrtc/java/jni')
-rw-r--r--  talk/app/webrtc/java/jni/androidmediacodeccommon.h    |   2
-rw-r--r--  talk/app/webrtc/java/jni/androidmediadecoder_jni.cc   | 303
-rw-r--r--  talk/app/webrtc/java/jni/androidmediaencoder_jni.cc   | 473
-rw-r--r--  talk/app/webrtc/java/jni/androidmediaencoder_jni.h    |   3
-rw-r--r--  talk/app/webrtc/java/jni/androidvideocapturer_jni.cc  | 100
-rw-r--r--  talk/app/webrtc/java/jni/androidvideocapturer_jni.h   |  18
-rw-r--r--  talk/app/webrtc/java/jni/classreferenceholder.cc      |   5
-rw-r--r--  talk/app/webrtc/java/jni/jni_helpers.cc               |  25
-rw-r--r--  talk/app/webrtc/java/jni/jni_onload.cc                |  55
-rw-r--r--  talk/app/webrtc/java/jni/native_handle_impl.cc        | 163
-rw-r--r--  talk/app/webrtc/java/jni/native_handle_impl.h         |  52
-rw-r--r--  talk/app/webrtc/java/jni/peerconnection_jni.cc        | 169
-rw-r--r--  talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc  |  31
-rw-r--r--  talk/app/webrtc/java/jni/surfacetexturehelper_jni.h   |  18
14 files changed, 941 insertions(+), 476 deletions(-)
diff --git a/talk/app/webrtc/java/jni/androidmediacodeccommon.h b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
index 348a716496..92ea135f12 100644
--- a/talk/app/webrtc/java/jni/androidmediacodeccommon.h
+++ b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
@@ -72,6 +72,8 @@ enum { kMediaCodecTimeoutMs = 1000 };
enum { kMediaCodecStatisticsIntervalMs = 3000 };
// Maximum amount of pending frames for VP8 decoder.
enum { kMaxPendingFramesVp8 = 1 };
+// Maximum amount of pending frames for VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
// Maximum amount of pending frames for H.264 decoder.
enum { kMaxPendingFramesH264 = 30 };
// Maximum amount of decoded frames for which per-frame logging is enabled.
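For context, the decoder changes later in this patch use these constants to bound how far input may run ahead of decoded output. A condensed, standalone sketch of the per-codec selection (mirroring the switch added to InitDecodeOnCodecThread(); constants inlined from the header above):

#include <stdint.h>

// Values from androidmediacodeccommon.h.
enum { kMaxPendingFramesVp8 = 1 };
enum { kMaxPendingFramesVp9 = 1 };
enum { kMaxPendingFramesH264 = 30 };

enum VideoCodecType { kVideoCodecVP8, kVideoCodecVP9, kVideoCodecH264 };

// How many submitted-but-undecoded frames the decoder tolerates before
// Decode() starts draining outputs.
uint32_t MaxPendingFrames(VideoCodecType type) {
  switch (type) {
    case kVideoCodecVP8:  return kMaxPendingFramesVp8;
    case kVideoCodecVP9:  return kMaxPendingFramesVp9;  // Added by this patch.
    case kVideoCodecH264: return kMaxPendingFramesH264;
  }
  return 0;
}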
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index b664f16e2e..c3d287ce0d 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,14 +33,15 @@
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
-#include "webrtc/common_video/interface/i420_buffer_pool.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -62,6 +63,7 @@ using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
namespace webrtc_jni {
@@ -87,9 +89,14 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int32_t Release() override;
int32_t Reset() override;
+
+ bool PrefersLateDecoding() const override { return true; }
+
// rtc::MessageHandler implementation.
void OnMessage(rtc::Message* msg) override;
+ const char* ImplementationName() const override;
+
private:
// CHECK-fail if not running on |codec_thread_|.
void CheckOnCodecThread();
@@ -105,13 +112,17 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
// Type of video codec.
VideoCodecType codecType_;
+ // Render EGL context - owned by factory, should not be allocated/destroyed
+ // by VideoDecoder.
+ jobject render_egl_context_;
+
bool key_frame_required_;
bool inited_;
bool sw_fallback_required_;
bool use_surface_;
VideoCodec codec_;
webrtc::I420BufferPool decoded_frame_pool_;
- NativeHandleImpl native_handle_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder.
@@ -120,10 +131,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int current_bytes_; // Encoded bytes in the current statistics interval.
int current_decoding_time_ms_; // Overall decoding time in the current second
uint32_t max_pending_frames_; // Maximum number of pending input frames
- std::vector<int32_t> timestamps_;
- std::vector<int64_t> ntp_times_ms_;
- std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
- // decoder input.
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -134,7 +141,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_;
- jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
@@ -144,24 +152,23 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
- jfieldID j_surface_texture_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields.
- jfieldID j_textureID_field_;
- jfieldID j_texture_presentation_timestamp_us_field_;
- // MediaCodecVideoDecoder.DecodedByteBuffer fields.
+ jfieldID j_texture_id_field_;
+ jfieldID j_transform_matrix_field_;
+ jfieldID j_texture_timestamp_ms_field_;
+ jfieldID j_texture_ntp_timestamp_ms_field_;
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_frame_delay_ms_field_;
+ // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
jfieldID j_info_size_field_;
- jfieldID j_info_presentation_timestamp_us_field_;
+ jfieldID j_info_timestamp_ms_field_;
+ jfieldID j_info_ntp_timestamp_ms_field_;
+ jfieldID j_byte_buffer_decode_time_ms_field_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
- jobject surface_texture_;
- jobject previous_surface_texture_;
-
- // Render EGL context - owned by factory, should not be allocated/destroyed
- // by VideoDecoder.
- jobject render_egl_context_;
};
MediaCodecVideoDecoder::MediaCodecVideoDecoder(
@@ -171,8 +178,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
key_frame_required_(true),
inited_(false),
sw_fallback_required_(false),
- surface_texture_(NULL),
- previous_surface_texture_(NULL),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -191,19 +196,22 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
- "IILjavax/microedition/khronos/egl/EGLContext;)Z");
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID(
- jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
- j_dequeue_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+ j_dequeue_byte_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
- "(I)Ljava/lang/Object;");
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_,
- "returnDecodedByteBuffer", "(I)V");
+ "returnDecodedOutputBuffer", "(I)V");
j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
@@ -221,28 +229,36 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
- j_surface_texture_field_ = GetFieldID(
- jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
- "Landroid/graphics/SurfaceTexture;");
- jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+ jclass j_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- j_textureID_field_ = GetFieldID(
- jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
- j_texture_presentation_timestamp_us_field_ =
- GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
- "presentationTimestampUs", "J");
-
- jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
- "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ j_texture_id_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "textureID", "I");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+ j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_frame_delay_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+ jclass j_decoded_output_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
j_info_index_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "index", "I");
+ jni, j_decoded_output_buffer_class, "index", "I");
j_info_offset_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
+ jni, j_decoded_output_buffer_class, "offset", "I");
j_info_size_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "size", "I");
- j_info_presentation_timestamp_us_field_ = GetFieldID(
- jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
+ jni, j_decoded_output_buffer_class, "size", "I");
+ j_info_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+ j_info_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+ j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = (render_egl_context_ != NULL);
@@ -254,14 +270,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
// Call Release() to ensure no more callbacks to us after we are deleted.
Release();
- // Delete global references.
- JNIEnv* jni = AttachCurrentThreadIfNeeded();
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- if (surface_texture_ != NULL) {
- jni->DeleteGlobalRef(surface_texture_);
- }
}
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -312,6 +320,21 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
frames_received_ = 0;
frames_decoded_ = 0;
+ jobject java_surface_texture_helper_ = nullptr;
+ if (use_surface_) {
+ java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ GetStaticMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "create",
+ "(Lorg/webrtc/EglBase$Context;)"
+ "Lorg/webrtc/SurfaceTextureHelper;"),
+ render_egl_context_);
+ RTC_CHECK(java_surface_texture_helper_ != nullptr);
+ surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, java_surface_texture_helper_);
+ }
+
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod(
@@ -320,7 +343,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
j_video_codec_enum,
codec_.width,
codec_.height,
- use_surface_ ? render_egl_context_ : nullptr);
+ java_surface_texture_helper_);
if (CheckException(jni) || !success) {
ALOGE << "Codec initialization error - fallback to SW codec.";
sw_fallback_required_ = true;
@@ -332,6 +355,9 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
case kVideoCodecVP8:
max_pending_frames_ = kMaxPendingFramesVp8;
break;
+ case kVideoCodecVP9:
+ max_pending_frames_ = kMaxPendingFramesVp9;
+ break;
case kVideoCodecH264:
max_pending_frames_ = kMaxPendingFramesH264;
break;
@@ -342,9 +368,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
current_frames_ = 0;
current_bytes_ = 0;
current_decoding_time_ms_ = 0;
- timestamps_.clear();
- ntp_times_ms_.clear();
- frame_rtc_times_ms_.clear();
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
@@ -361,15 +384,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
}
- if (use_surface_) {
- jobject surface_texture = GetObjectField(
- jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- previous_surface_texture_ = surface_texture_;
- surface_texture_ = jni->NewGlobalRef(surface_texture);
- }
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
@@ -395,6 +409,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
}
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
inited_ = false;
rtc::MessageQueueManager::Clear(this);
if (CheckException(jni)) {
@@ -501,19 +516,21 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Try to drain the decoder and wait until output is not too
// much behind the input.
- if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+  const int64_t drain_start = GetCurrentTimeMs();
+ while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+ (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
ALOGV("Received: %d. Decoded: %d. Wait for output...",
frames_received_, frames_decoded_);
- if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
ALOGE << "DeliverPendingOutputs error. Frames received: " <<
frames_received_ << ". Frames decoded: " << frames_decoded_;
return ProcessHWErrorOnCodecThread();
}
- if (frames_received_ > frames_decoded_ + max_pending_frames_) {
- ALOGE << "Output buffer dequeue timeout. Frames received: " <<
- frames_received_ << ". Frames decoded: " << frames_decoded_;
- return ProcessHWErrorOnCodecThread();
- }
+ }
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
}
// Get input buffer.
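The hunk above replaces a single long-timeout dequeue with short polls under an overall deadline, so a stalled codec is detected without blocking the codec thread for the whole timeout in one call. A minimal standalone sketch of that pattern (the std::function parameters are stand-ins for the members and methods the patch actually uses):

#include <stdint.h>
#include <functional>

// Poll for decoder output in short slices until the backlog clears or
// |timeout_ms| elapses overall; mirrors the loop in DecodeOnCodecThread().
bool DrainUntilCaughtUp(const std::function<int64_t()>& now_ms,
                        const std::function<bool()>& deliver_pending_outputs,
                        const std::function<bool()>& backlog_too_large,
                        int64_t timeout_ms) {
  const int64_t start_ms = now_ms();
  while (backlog_too_large() && now_ms() - start_ms < timeout_ms) {
    if (!deliver_pending_outputs())
      return false;  // Hard codec error; the caller resets or falls back.
  }
  return !backlog_too_large();  // false here means a dequeue timeout.
}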
@@ -535,11 +552,14 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
" is bigger than buffer size " << buffer_capacity;
return ProcessHWErrorOnCodecThread();
}
- jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
+ jlong presentation_timestamp_us =
+ (frames_received_ * 1000000) / codec_.maxFramerate;
if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame in # " << frames_received_ << ". Type: "
<< inputImage._frameType << ". Buffer # " <<
- j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000)
+ j_input_buffer_index << ". pTS: "
+ << (int)(presentation_timestamp_us / 1000)
+ << ". TS: " << inputImage._timeStamp
<< ". Size: " << inputImage._length;
}
memcpy(buffer, inputImage._buffer, inputImage._length);
@@ -547,16 +567,16 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
- timestamps_.push_back(inputImage._timeStamp);
- ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
- frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder.
- bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
- j_queue_input_buffer_method_,
- j_input_buffer_index,
- inputImage._length,
- timestamp_us);
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_queue_input_buffer_method_,
+ j_input_buffer_index,
+ inputImage._length,
+ presentation_timestamp_us,
+      static_cast<int64_t>(inputImage._timeStamp),
+ inputImage.ntp_time_ms_);
if (CheckException(jni) || !success) {
ALOGE << "queueInputBuffer error";
return ProcessHWErrorOnCodecThread();
@@ -572,16 +592,18 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
}
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
- JNIEnv* jni, int dequeue_timeout_us) {
+ JNIEnv* jni, int dequeue_timeout_ms) {
if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained.
return true;
}
// Get decoder output.
- jobject j_decoder_output_buffer = jni->CallObjectMethod(
- *j_media_codec_video_decoder_,
- j_dequeue_output_buffer_method_,
- dequeue_timeout_us);
+ jobject j_decoder_output_buffer =
+ jni->CallObjectMethod(*j_media_codec_video_decoder_,
+ use_surface_ ? j_dequeue_texture_buffer_method_
+ : j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms);
+
if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error";
return false;
@@ -601,19 +623,35 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_slice_height_field_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
- long output_timestamps_ms = 0;
+ int64_t output_timestamps_ms = 0;
+ int64_t output_ntp_timestamps_ms = 0;
+ int decode_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
if (use_surface_) {
// Extract data from Java DecodedTextureBuffer.
const int texture_id =
- GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
- const int64_t timestamp_us =
- GetLongField(jni, j_decoder_output_buffer,
- j_texture_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
- // Create webrtc::VideoFrameBuffer with native texture handle.
- native_handle_.SetTextureObject(surface_texture_, texture_id);
- frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
- &native_handle_, width, height);
+ GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+ if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_timestamp_ms_field_);
+ output_ntp_timestamps_ms =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_texture_ntp_timestamp_ms_field_);
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_decode_time_ms_field_);
+ frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_frame_delay_ms_field_);
+
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+ }
} else {
// Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only.
@@ -623,9 +661,14 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
const int output_buffer_size =
GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
- const int64_t timestamp_us = GetLongField(
- jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_info_timestamp_ms_field_);
+ output_ntp_timestamps_ms =
+ GetLongField(jni, j_decoder_output_buffer,
+ j_info_ntp_timestamp_ms_field_);
+
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_byte_buffer_decode_time_ms_field_);
if (output_buffer_size < width * height * 3 / 2) {
ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@@ -683,41 +726,31 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_return_decoded_byte_buffer_method_,
output_buffer_index);
if (CheckException(jni)) {
- ALOGE << "returnDecodedByteBuffer error";
+ ALOGE << "returnDecodedOutputBuffer error";
return false;
}
}
VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+ decoded_frame.set_timestamp(output_timestamps_ms);
+ decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
- // Get frame timestamps from a queue.
- if (timestamps_.size() > 0) {
- decoded_frame.set_timestamp(timestamps_.front());
- timestamps_.erase(timestamps_.begin());
- }
- if (ntp_times_ms_.size() > 0) {
- decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
- ntp_times_ms_.erase(ntp_times_ms_.begin());
- }
- int64_t frame_decoding_time_ms = 0;
- if (frame_rtc_times_ms_.size() > 0) {
- frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
- frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
- }
if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
" x " << height << ". " << stride << " x " << slice_height <<
- ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
- ". DecTime: " << (int)frame_decoding_time_ms;
+ ". Color: " << color_format << ". TS:" << decoded_frame.timestamp() <<
+ ". DecTime: " << (int)decode_time_ms <<
+ ". DelayTime: " << (int)frame_delayed_ms;
}
// Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++;
current_frames_++;
- current_decoding_time_ms_ += frame_decoding_time_ms;
+ current_decoding_time_ms_ += decode_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
- ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+ ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
+ << frames_received_ << ". Bitrate: " <<
(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@@ -728,12 +761,15 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
current_decoding_time_ms_ = 0;
}
- // Callback - output decoded frame.
- const int32_t callback_status = callback_->Decoded(decoded_frame);
- if (callback_status > 0) {
- ALOGE << "callback error";
+  // |decoded_frame.IsZeroSize()| returns true when a frame has been dropped.
+ if (!decoded_frame.IsZeroSize()) {
+ // Callback - output decoded frame.
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
}
-
return true;
}
@@ -790,6 +826,17 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
supported_codec_types_.push_back(kVideoCodecVP8);
}
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp9_hw_supported = false;
+ }
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP9);
+ }
+
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
@@ -825,7 +872,7 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
render_egl_context_ = NULL;
} else {
jclass j_egl_context_class =
- FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
+ FindClass(jni, "org/webrtc/EglBase$Context");
if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
ALOGE << "Wrong EGL Context.";
jni->DeleteGlobalRef(render_egl_context_);
@@ -841,7 +888,7 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
VideoCodecType type) {
if (supported_codec_types_.empty()) {
- ALOGE << "No HW video decoder for type " << (int)type;
+ ALOGW << "No HW video decoder for type " << (int)type;
return NULL;
}
for (VideoCodecType codec_type : supported_codec_types_) {
@@ -851,7 +898,7 @@ webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
AttachCurrentThreadIfNeeded(), type, render_egl_context_);
}
}
- ALOGE << "Can not find HW video decoder for type " << (int)type;
+ ALOGW << "Can not find HW video decoder for type " << (int)type;
return NULL;
}
@@ -861,5 +908,9 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
delete decoder;
}
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
} // namespace webrtc_jni
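For reference, the surface path in InitDecodeOnCodecThread() above obtains the Java SurfaceTextureHelper through its static create() factory. The same call written against the raw JNI API rather than this file's FindClass/GetStaticMethodID helper wrappers (a sketch, with error handling reduced to null checks):

#include <jni.h>

// Invokes org.webrtc.SurfaceTextureHelper.create(EglBase.Context) and
// returns a local reference to the helper, or nullptr on failure.
jobject CreateSurfaceTextureHelper(JNIEnv* jni, jobject egl_context) {
  jclass helper_class = jni->FindClass("org/webrtc/SurfaceTextureHelper");
  if (helper_class == nullptr)
    return nullptr;  // A ClassNotFoundException is pending.
  jmethodID create = jni->GetStaticMethodID(
      helper_class, "create",
      "(Lorg/webrtc/EglBase$Context;)Lorg/webrtc/SurfaceTextureHelper;");
  if (create == nullptr)
    return nullptr;  // A NoSuchMethodError is pending.
  return jni->CallStaticObjectMethod(helper_class, create, egl_context);
}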
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index ac349e7faf..64831c3174 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -29,14 +29,16 @@
#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
-#include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -56,6 +58,7 @@ using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
namespace webrtc_jni {
@@ -79,7 +82,9 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
public rtc::MessageHandler {
public:
virtual ~MediaCodecVideoEncoder();
- explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
+ MediaCodecVideoEncoder(JNIEnv* jni,
+ VideoCodecType codecType,
+ jobject egl_context);
// webrtc::VideoEncoder implementation. Everything trampolines to
// |codec_thread_| for execution.
@@ -103,13 +108,18 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int GetTargetFramerate() override;
+ bool SupportsNativeHandle() const override { return true; }
+ const char* ImplementationName() const override;
+
private:
// CHECK-fail if not running on |codec_thread_|.
void CheckOnCodecThread();
- // Release() and InitEncode() in an attempt to restore the codec to an
+ private:
+ // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+ // InitEncodeOnCodecThread() in an attempt to restore the codec to an
// operable state. Necessary after all manner of OMX-layer errors.
- void ResetCodec();
+ bool ResetCodecOnCodecThread();
// Implementation of webrtc::VideoEncoder methods above, all running on the
// codec thread exclusively.
@@ -117,10 +127,20 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// If width==0 then this is assumed to be a re-initialization and the
// previously-current values are reused instead of the passed parameters
// (makes it easier to reason about thread-safety).
- int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+ bool use_surface);
+ // Reconfigure to match |frame| in width, height. Also reconfigures the
+ // encoder if |frame| is a texture/byte buffer and the encoder is initialized
+ // for byte buffer/texture. Returns false if reconfiguring fails.
+ bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
int32_t EncodeOnCodecThread(
const webrtc::VideoFrame& input_image,
const std::vector<webrtc::FrameType>* frame_types);
+ bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+ bool EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame);
+
int32_t RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback);
int32_t ReleaseOnCodecThread();
@@ -150,11 +170,14 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// State that is constant for the lifetime of this object once the ctor
// returns.
scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ rtc::ThreadChecker codec_thread_checker_;
ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
jmethodID j_init_encode_method_;
+ jmethodID j_get_input_buffers_method_;
jmethodID j_dequeue_input_buffer_method_;
- jmethodID j_encode_method_;
+ jmethodID j_encode_buffer_method_;
+ jmethodID j_encode_texture_method_;
jmethodID j_release_method_;
jmethodID j_set_rates_method_;
jmethodID j_dequeue_output_buffer_method_;
@@ -170,6 +193,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int width_; // Frame width in pixels.
int height_; // Frame height in pixels.
bool inited_;
+ bool use_surface_;
uint16_t picture_id_;
enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
@@ -205,6 +229,16 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// H264 bitstream parser, used to extract QP from encoded bitstreams.
webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+ // VP9 variables to populate codec specific structure.
+ webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ uint8_t tl0_pic_idx_;
+ size_t gof_idx_;
+
+ // EGL context - owned by factory, should not be allocated/destroyed
+ // by MediaCodecVideoEncoder.
+ jobject egl_context_;
};
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
@@ -213,11 +247,9 @@ MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
}
MediaCodecVideoEncoder::MediaCodecVideoEncoder(
- JNIEnv* jni, VideoCodecType codecType) :
+ JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
codecType_(codecType),
callback_(NULL),
- inited_(false),
- picture_id_(0),
codec_thread_(new Thread()),
j_media_codec_video_encoder_class_(
jni,
@@ -228,7 +260,11 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
GetMethodID(jni,
*j_media_codec_video_encoder_class_,
"<init>",
- "()V"))) {
+ "()V"))),
+ inited_(false),
+ use_surface_(false),
+ picture_id_(0),
+ egl_context_(egl_context) {
ScopedLocalRefFrame local_ref_frame(jni);
// It would be nice to avoid spinning up a new thread per MediaCodec, and
// instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
@@ -239,19 +275,27 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
// thread.
codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
-
+ codec_thread_checker_.DetachFromThread();
jclass j_output_buffer_info_class =
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
j_init_encode_method_ = GetMethodID(
jni,
*j_media_codec_video_encoder_class_,
"initEncode",
- "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
- "[Ljava/nio/ByteBuffer;");
+ "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+ "IIIILorg/webrtc/EglBase14$Context;)Z");
+ j_get_input_buffers_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "getInputBuffers",
+ "()[Ljava/nio/ByteBuffer;");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
- j_encode_method_ = GetMethodID(
- jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+ j_encode_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+ j_encode_texture_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+ "(ZI[FJ)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
j_set_rates_method_ = GetMethodID(
@@ -275,6 +319,7 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
j_info_presentation_timestamp_us_field_ = GetFieldID(
jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+ srand(time(NULL));
AllowBlockingCalls();
}
@@ -295,8 +340,8 @@ int32_t MediaCodecVideoEncoder::InitEncode(
<< codecType_;
ALOGD << "InitEncode request";
- scale_ = webrtc::field_trial::FindFullName(
- "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
+ scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
+ "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
if (scale_) {
if (codecType_ == kVideoCodecVP8) {
@@ -331,7 +376,8 @@ int32_t MediaCodecVideoEncoder::InitEncode(
codec_settings->width,
codec_settings->height,
codec_settings->startBitrate,
- codec_settings->maxFramerate));
+ codec_settings->maxFramerate,
+ false /* use_surface */));
}
int32_t MediaCodecVideoEncoder::Encode(
@@ -374,6 +420,7 @@ int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
}
void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
@@ -381,7 +428,6 @@ void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
// functor), so expect no ID/data.
RTC_CHECK(!msg->message_id) << "Unexpected message!";
RTC_CHECK(!msg->pdata) << "Unexpected message!";
- CheckOnCodecThread();
if (!inited_) {
return;
}
@@ -393,26 +439,24 @@ void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
-void MediaCodecVideoEncoder::CheckOnCodecThread() {
- RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
- << "Running on wrong thread!";
-}
-
-void MediaCodecVideoEncoder::ResetCodec() {
- ALOGE << "ResetCodec";
- if (Release() != WEBRTC_VIDEO_CODEC_OK ||
- codec_thread_->Invoke<int32_t>(Bind(
- &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
- width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ ALOGE << "ResetOnCodecThread";
+ if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+ InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+ WEBRTC_VIDEO_CODEC_OK) {
// TODO(fischman): wouldn't it be nice if there was a way to gracefully
// degrade to a SW encoder at this point? There isn't one AFAICT :(
// https://code.google.com/p/webrtc/issues/detail?id=2920
+ return false;
}
+ return true;
}
int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
- int width, int height, int kbps, int fps) {
- CheckOnCodecThread();
+ int width, int height, int kbps, int fps, bool use_surface) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
@@ -448,52 +492,63 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
render_times_ms_.clear();
frame_rtc_times_ms_.clear();
drop_next_input_frame_ = false;
+ use_surface_ = use_surface;
picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+ gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+ tl0_pic_idx_ = static_cast<uint8_t>(rand());
+ gof_idx_ = 0;
+
// We enforce no extra stride/padding in the format creation step.
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
- jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
- jni->CallObjectMethod(*j_media_codec_video_encoder_,
- j_init_encode_method_,
- j_video_codec_enum,
- width_,
- height_,
- kbps,
- fps));
- CHECK_EXCEPTION(jni);
- if (IsNull(jni, input_buffers)) {
+ const bool encode_status = jni->CallBooleanMethod(
+ *j_media_codec_video_encoder_, j_init_encode_method_,
+ j_video_codec_enum, width, height, kbps, fps,
+ (use_surface ? egl_context_ : nullptr));
+ if (!encode_status) {
+ ALOGE << "Failed to configure encoder.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ CHECK_EXCEPTION(jni);
- inited_ = true;
- switch (GetIntField(jni, *j_media_codec_video_encoder_,
- j_color_format_field_)) {
- case COLOR_FormatYUV420Planar:
- encoder_fourcc_ = libyuv::FOURCC_YU12;
- break;
- case COLOR_FormatYUV420SemiPlanar:
- case COLOR_QCOM_FormatYUV420SemiPlanar:
- case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
- encoder_fourcc_ = libyuv::FOURCC_NV12;
- break;
- default:
- LOG(LS_ERROR) << "Wrong color format.";
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
- size_t num_input_buffers = jni->GetArrayLength(input_buffers);
- RTC_CHECK(input_buffers_.empty())
- << "Unexpected double InitEncode without Release";
- input_buffers_.resize(num_input_buffers);
- for (size_t i = 0; i < num_input_buffers; ++i) {
- input_buffers_[i] =
- jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
- int64_t yuv_buffer_capacity =
- jni->GetDirectBufferCapacity(input_buffers_[i]);
+ if (!use_surface) {
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+ jni->CallObjectMethod(*j_media_codec_video_encoder_,
+ j_get_input_buffers_method_));
CHECK_EXCEPTION(jni);
- RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ if (IsNull(jni, input_buffers)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ switch (GetIntField(jni, *j_media_codec_video_encoder_,
+ j_color_format_field_)) {
+ case COLOR_FormatYUV420Planar:
+ encoder_fourcc_ = libyuv::FOURCC_YU12;
+ break;
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ encoder_fourcc_ = libyuv::FOURCC_NV12;
+ break;
+ default:
+ LOG(LS_ERROR) << "Wrong color format.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ RTC_CHECK(input_buffers_.empty())
+ << "Unexpected double InitEncode without Release";
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ int64_t yuv_buffer_capacity =
+ jni->GetDirectBufferCapacity(input_buffers_[i]);
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ }
}
- CHECK_EXCEPTION(jni);
+ inited_ = true;
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -501,40 +556,53 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
const webrtc::VideoFrame& frame,
const std::vector<webrtc::FrameType>* frame_types) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
+
frames_received_++;
if (!DeliverPendingOutputs(jni)) {
- ResetCodec();
- // Continue as if everything's fine.
+ if (!ResetCodecOnCodecThread())
+ return WEBRTC_VIDEO_CODEC_ERROR;
}
if (drop_next_input_frame_) {
- ALOGV("Encoder drop frame - failed callback.");
+ ALOGW << "Encoder drop frame - failed callback.";
drop_next_input_frame_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
- // Check framerate before spatial resolution change.
- if (scale_)
- quality_scaler_.OnEncodeFrame(frame);
- const VideoFrame& input_frame =
- scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
+ VideoFrame input_frame = frame;
+ if (scale_) {
+ // Check framerate before spatial resolution change.
+ quality_scaler_.OnEncodeFrame(frame);
+ const webrtc::QualityScaler::Resolution scaled_resolution =
+ quality_scaler_.GetScaledResolution();
+ if (scaled_resolution.width != frame.width() ||
+ scaled_resolution.height != frame.height()) {
+ if (frame.native_handle() != nullptr) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+ static_cast<AndroidTextureBuffer*>(
+ frame.video_frame_buffer().get())->ScaleAndRotate(
+ scaled_resolution.width,
+ scaled_resolution.height,
+ webrtc::kVideoRotation_0));
+ input_frame.set_video_frame_buffer(scaled_buffer);
+ } else {
+ input_frame = quality_scaler_.GetScaledFrame(frame);
+ }
+ }
+ }
- if (input_frame.width() != width_ || input_frame.height() != height_) {
- ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
- " to " << input_frame.width() << " x " << input_frame.height();
- width_ = input_frame.width();
- height_ = input_frame.height();
- ResetCodec();
- return WEBRTC_VIDEO_CODEC_OK;
+ if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+ ALOGE << "Failed to reconfigure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
}
// Check if we accumulated too many frames in encoder input buffers
@@ -552,65 +620,138 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
}
- int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
- j_dequeue_input_buffer_method_);
- CHECK_EXCEPTION(jni);
- if (j_input_buffer_index == -1) {
- // Video codec falls behind - no input buffer available.
- ALOGV("Encoder drop frame - no input buffers available");
- frames_dropped_++;
- // Report dropped frame to quality_scaler_.
- OnDroppedFrame();
- return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
- }
- if (j_input_buffer_index == -2) {
- ResetCodec();
+ const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+ bool encode_status = true;
+ if (!input_frame.native_handle()) {
+ int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+ j_dequeue_input_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (j_input_buffer_index == -1) {
+ // Video codec falls behind - no input buffer available.
+ ALOGW << "Encoder drop frame - no input buffers available";
+ frames_dropped_++;
+ // Report dropped frame to quality_scaler_.
+ OnDroppedFrame();
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
+ }
+ if (j_input_buffer_index == -2) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+ j_input_buffer_index);
+ } else {
+ encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+ }
+
+ if (!encode_status) {
+ ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
+ ResetCodecOnCodecThread();
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ last_input_timestamp_ms_ =
+ current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+ frames_in_queue_++;
+
+ // Save input image timestamps for later output
+ timestamps_.push_back(input_frame.timestamp());
+ render_times_ms_.push_back(input_frame.render_time_ms());
+ frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+ if (!DeliverPendingOutputs(jni)) {
+ ALOGE << "Failed deliver pending outputs.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+ const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+ const bool is_texture_frame = frame.native_handle() != nullptr;
+ const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+ const bool reconfigure_due_to_size =
+ frame.width() != width_ || frame.height() != height_;
+
+ if (reconfigure_due_to_format) {
+ ALOGD << "Reconfigure encoder due to format change. "
+ << (use_surface_ ?
+ "Reconfiguring to encode from byte buffer." :
+ "Reconfiguring to encode from texture.");
+ }
+ if (reconfigure_due_to_size) {
+ ALOGD << "Reconfigure encoder due to frame resolution change from "
+ << width_ << " x " << height_ << " to " << frame.width() << " x "
+ << frame.height();
+ width_ = frame.width();
+ height_ = frame.height();
+ }
+
+ if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+ return true;
+
+ ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+ WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface_);
+
ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
- jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+ jobject j_input_buffer = input_buffers_[input_buffer_index];
uint8_t* yuv_buffer =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
CHECK_EXCEPTION(jni);
RTC_CHECK(yuv_buffer) << "Indirect buffer??";
RTC_CHECK(!libyuv::ConvertFromI420(
- input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane),
- input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane),
- input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane),
+ frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+ frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+ frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
yuv_buffer, width_, width_, height_, encoder_fourcc_))
<< "ConvertFromI420 failed";
- last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
- frames_in_queue_++;
- // Save input image timestamps for later output
- timestamps_.push_back(input_frame.timestamp());
- render_times_ms_.push_back(input_frame.render_time_ms());
- frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
-
- bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
- j_encode_method_,
+ j_encode_buffer_method_,
key_frame,
- j_input_buffer_index,
+ input_buffer_index,
yuv_size_,
current_timestamp_us_);
CHECK_EXCEPTION(jni);
- current_timestamp_us_ += 1000000 / last_set_fps_;
+ return encode_status;
+}
- if (!encode_status || !DeliverPendingOutputs(jni)) {
- ResetCodec();
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(use_surface_);
+ NativeHandleImpl* handle =
+ static_cast<NativeHandleImpl*>(frame.native_handle());
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
- return WEBRTC_VIDEO_CODEC_OK;
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_texture_method_,
+ key_frame,
+ handle->oes_texture_id,
+ sampling_matrix,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
}
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
callback_ = callback;
@@ -618,10 +759,10 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
}
int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
if (!inited_) {
return WEBRTC_VIDEO_CODEC_OK;
}
- CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
frames_received_ << ". Encoded: " << frames_encoded_ <<
@@ -634,13 +775,14 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
CHECK_EXCEPTION(jni);
rtc::MessageQueueManager::Clear(this);
inited_ = false;
+ use_surface_ = false;
ALOGD << "EncoderReleaseOnCodecThread done.";
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
uint32_t frame_rate) {
- CheckOnCodecThread();
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
if (last_set_bitrate_kbps_ == new_bit_rate &&
last_set_fps_ == frame_rate) {
return WEBRTC_VIDEO_CODEC_OK;
@@ -659,7 +801,7 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
last_set_fps_);
CHECK_EXCEPTION(jni);
if (!ret) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
@@ -691,6 +833,7 @@ jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
}
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
while (true) {
jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
@@ -702,7 +845,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info);
if (output_buffer_index == -1) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
@@ -786,19 +929,42 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
info.codecSpecific.VP8.layerSync = false;
info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
- picture_id_ = (picture_id_ + 1) & 0x7FFF;
+ } else if (codecType_ == kVideoCodecVP9) {
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+ info.codecSpecific.VP9.picture_id = picture_id_;
+        info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+        info.codecSpecific.VP9.flexible_mode = false;
+        info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+ info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = width_;
+ info.codecSpecific.VP9.height[0] = height_;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
}
+ picture_id_ = (picture_id_ + 1) & 0x7FFF;
// Generate a header describing a single fragment.
webrtc::RTPFragmentationHeader header;
memset(&header, 0, sizeof(header));
- if (codecType_ == kVideoCodecVP8) {
+ if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
header.VerifyAndAllocateFragmentationHeader(1);
header.fragmentationOffset[0] = 0;
header.fragmentationLength[0] = image->_length;
header.fragmentationPlType[0] = 0;
header.fragmentationTimeDiff[0] = 0;
- if (scale_) {
+ if (codecType_ == kVideoCodecVP8 && scale_) {
int qp;
if (webrtc::vp8::GetQp(payload, payload_size, &qp))
quality_scaler_.ReportQP(qp);
@@ -829,7 +995,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
<< " " << image->_buffer[2] << " " << image->_buffer[3]
<< " " << image->_buffer[4] << " " << image->_buffer[5];
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
scPositions[scPositionsLength] = payload_size;
@@ -852,7 +1018,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
output_buffer_index);
CHECK_EXCEPTION(jni);
if (!success) {
- ResetCodec();
+ ResetCodecOnCodecThread();
return false;
}
@@ -907,7 +1073,12 @@ int MediaCodecVideoEncoder::GetTargetFramerate() {
return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
}
-MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
+ : egl_context_(nullptr) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
@@ -923,6 +1094,16 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
}
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_encoder_class,
GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
@@ -936,9 +1117,37 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject render_egl_context) {
+ ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+ if (egl_context_) {
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = NULL;
+ }
+ if (!IsNull(jni, render_egl_context)) {
+ egl_context_ = jni->NewGlobalRef(render_egl_context);
+ if (CheckException(jni)) {
+ ALOGE << "error calling NewGlobalRef for EGL Context.";
+ egl_context_ = NULL;
+ } else {
+ jclass j_egl_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+ if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) {
+ ALOGE << "Wrong EGL Context.";
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = NULL;
+ }
+ }
+ }
+ if (egl_context_ == NULL) {
+ ALOGW << "NULL VideoDecoder EGL context - HW surface encoding is disabled.";
+ }
+}
+
webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
VideoCodecType type) {
if (supported_codecs_.empty()) {
+ ALOGW << "No HW video encoder for type " << (int)type;
return NULL;
}
for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
@@ -946,9 +1155,11 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
if (it->type == type) {
ALOGD << "Create HW video encoder for type " << (int)type <<
" (" << it->name << ").";
- return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+ return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+ egl_context_);
}
}
+ ALOGW << "Can not find HW video encoder for type " << (int)type;
return NULL;
}
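The new MaybeReconfigureEncoderOnCodecThread() above folds two triggers into one release-and-reinitialize path: the input switching between byte buffers and textures, and a change of resolution. Its decision reduces to a few comparisons (a simplified sketch; parameter names are illustrative, not the member names from the patch):

// Returns true if the encoder must be released and re-initialized for the
// incoming frame; mirrors the checks in MaybeReconfigureEncoderOnCodecThread().
bool NeedsReconfigure(bool frame_is_texture,
                      int frame_width,
                      int frame_height,
                      bool encoder_uses_surface,
                      int encoder_width,
                      int encoder_height) {
  const bool format_changed = frame_is_texture != encoder_uses_surface;
  const bool size_changed =
      frame_width != encoder_width || frame_height != encoder_height;
  return format_changed || size_changed;
}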
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
index ff124aa146..8ff8164c3b 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
@@ -43,6 +43,8 @@ class MediaCodecVideoEncoderFactory
MediaCodecVideoEncoderFactory();
virtual ~MediaCodecVideoEncoderFactory();
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
// WebRtcVideoEncoderFactory implementation.
webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
override;
@@ -50,6 +52,7 @@ class MediaCodecVideoEncoderFactory
void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
private:
+ jobject egl_context_;
// Empty if platform support is lacking, const after ctor returns.
std::vector<VideoCodec> supported_codecs_;
};
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 02b9f22015..8813c89de4 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -29,8 +29,9 @@
#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
#include "webrtc/base/bind.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc_jni {
@@ -47,15 +48,19 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
return 0;
}
-AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
- jobject j_video_capturer)
- : j_capturer_global_(jni, j_video_capturer),
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+ JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper)
+ : j_video_capturer_(jni, j_video_capturer),
j_video_capturer_class_(
jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
j_observer_class_(
jni,
FindClass(jni,
"org/webrtc/VideoCapturerAndroid$NativeObserver")),
+ surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, j_surface_texture_helper)),
capturer_(nullptr) {
LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
thread_checker_.DetachFromThread();
@@ -64,7 +69,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
jni()->CallVoidMethod(
- *j_capturer_global_,
+ *j_video_capturer_,
GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
}
@@ -90,7 +95,7 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
jni(), *j_video_capturer_class_, "startCapture",
"(IIILandroid/content/Context;"
"Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
- jni()->CallVoidMethod(*j_capturer_global_,
+ jni()->CallVoidMethod(*j_video_capturer_,
m, width, height,
framerate,
application_context_,
@@ -109,7 +114,7 @@ void AndroidVideoCapturerJni::Stop() {
}
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"stopCapture", "()V");
- jni()->CallVoidMethod(*j_capturer_global_, m);
+ jni()->CallVoidMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
}
@@ -127,19 +132,12 @@ void AndroidVideoCapturerJni::AsyncCapturerInvoke(
invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
}
-void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
- jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
- "returnBuffer", "(J)V");
- jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
- CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
-}
-
std::string AndroidVideoCapturerJni::GetSupportedFormats() {
jmethodID m =
GetMethodID(jni(), *j_video_capturer_class_,
"getSupportedFormatsAsJson", "()Ljava/lang/String;");
jstring j_json_caps =
- (jstring) jni()->CallObjectMethod(*j_capturer_global_, m);
+ (jstring) jni()->CallObjectMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
return JavaToStdString(jni(), j_json_caps);
}
@@ -158,46 +156,33 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
int rotation,
int64_t timestamp_ns) {
const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
- // Android guarantees that the stride is a multiple of 16.
- // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
- int y_stride;
- int uv_stride;
- webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
- const uint8_t* v_plane = y_plane + y_stride * height;
- const uint8_t* u_plane =
- v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
-
- // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
- // VideoFrameBuffer destructor.
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
- width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
- uv_stride,
- rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
- timestamp_ns)));
+ const uint8_t* vu_plane = y_plane + width * height;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ buffer_pool_.CreateBuffer(width, height);
+ libyuv::NV21ToI420(
+ y_plane, width,
+ vu_plane, width,
+ buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ width, height);
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
buffer, rotation, timestamp_ns);
}
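
OnMemoryBufferFrame() now assumes a tightly packed NV21 frame from the Java camera callback: a full-resolution Y plane followed immediately by a half-height plane of interleaved V/U pairs, so |width| serves as the stride for both planes. A standalone sketch of the same conversion, assuming even dimensions and a caller-owned webrtc::I420BufferPool (the helper name is hypothetical):

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> Nv21ToPooledI420(
      webrtc::I420BufferPool* pool, const uint8_t* nv21, int width, int height) {
    const uint8_t* y_plane = nv21;                    // width * height bytes
    const uint8_t* vu_plane = nv21 + width * height;  // interleaved V/U rows
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
        pool->CreateBuffer(width, height);
    libyuv::NV21ToI420(
        y_plane, width, vu_plane, width,
        buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
        buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
        buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
        width, height);
    return buffer;
  }
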
-void AndroidVideoCapturerJni::OnTextureFrame(
- int width,
- int height,
- int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle) {
- // TODO(magjed): Fix this. See bug webrtc:4993.
- RTC_NOTREACHED()
- << "The rest of the stack for Android expects the native "
- "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
- "NativeTextureHandleImpl";
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, handle,
- rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
- timestamp_ns)));
+ surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
- buffer, 0, timestamp_ns);
+ buffer, rotation, timestamp_ns);
}
void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
@@ -216,13 +201,6 @@ JOW(void,
jint width, jint height, jint rotation, jlong timestamp) {
jboolean is_copy = true;
jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
- // If this is a copy of the original frame, it means that the memory
- // is not direct memory and thus VideoCapturerAndroid does not guarantee
- // that the memory is valid when we have released |j_frame|.
- // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
- // remove this check.
- RTC_CHECK(!is_copy)
- << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
@@ -231,11 +209,11 @@ JOW(void,
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
(JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
jint j_oes_texture_id, jfloatArray j_transform_matrix,
- jlong j_timestamp) {
+ jint j_rotation, jlong j_timestamp) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
- ->OnTextureFrame(j_width, j_height, j_timestamp,
- NativeTextureHandleImpl(jni, j_oes_texture_id,
- j_transform_matrix));
+ ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
@@ -254,9 +232,11 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
}
JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
- (JNIEnv* jni, jclass, jobject j_video_capturer) {
+ (JNIEnv* jni, jclass,
+ jobject j_video_capturer, jobject j_surface_texture_helper) {
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_surface_texture_helper);
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
// Caller takes ownership of the cricket::VideoCapturer* pointer.
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index d1eb3a0ad0..89ecacb3a5 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -36,10 +36,12 @@
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
namespace webrtc_jni {
-class NativeTextureHandleImpl;
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -48,7 +50,9 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
public:
static int SetAndroidObjects(JNIEnv* jni, jobject application_context);
- AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper);
void Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) override;
@@ -60,15 +64,14 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
void OnCapturerStarted(bool success);
void OnMemoryBufferFrame(void* video_frame, int length, int width,
int height, int rotation, int64_t timestamp_ns);
- void OnTextureFrame(int width, int height, int64_t timestamp_ns,
- const NativeTextureHandleImpl& handle);
+ void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+ const NativeHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps);
protected:
~AndroidVideoCapturerJni();
private:
- void ReturnBuffer(int64_t time_stamp);
JNIEnv* jni();
// To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
@@ -85,10 +88,13 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
void (webrtc::AndroidVideoCapturer::*method)(Args...),
typename Identity<Args>::type... args);
- const ScopedGlobalRef<jobject> j_capturer_global_;
+ const ScopedGlobalRef<jobject> j_video_capturer_;
const ScopedGlobalRef<jclass> j_video_capturer_class_;
const ScopedGlobalRef<jclass> j_observer_class_;
+ // Used on the Java thread running the camera.
+ webrtc::I420BufferPool buffer_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
rtc::ThreadChecker thread_checker_;
// |capturer| is guaranteed to be a valid pointer between a call to
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index 4c836f8252..5fe8ec707c 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -72,20 +72,21 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/IceCandidate");
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
LoadClass(jni, "android/graphics/SurfaceTexture");
- LoadClass(jni, "javax/microedition/khronos/egl/EGLContext");
LoadClass(jni, "org/webrtc/CameraEnumerator");
LoadClass(jni, "org/webrtc/Camera2Enumerator");
LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
LoadClass(jni, "org/webrtc/EglBase");
+ LoadClass(jni, "org/webrtc/EglBase$Context");
+ LoadClass(jni, "org/webrtc/EglBase14$Context");
LoadClass(jni, "org/webrtc/NetworkMonitor");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
- LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
#endif
diff --git a/talk/app/webrtc/java/jni/jni_helpers.cc b/talk/app/webrtc/java/jni/jni_helpers.cc
index 755698e379..3a7ff21e77 100644
--- a/talk/app/webrtc/java/jni/jni_helpers.cc
+++ b/talk/app/webrtc/java/jni/jni_helpers.cc
@@ -1,4 +1,3 @@
-
/*
* libjingle
* Copyright 2015 Google Inc.
@@ -33,8 +32,6 @@
#include <sys/syscall.h>
#include <unistd.h>
-#include "unicode/unistr.h"
-
namespace webrtc_jni {
static JavaVM* g_jvm = nullptr;
@@ -46,8 +43,6 @@ static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;
-using icu::UnicodeString;
-
JavaVM *GetJVM() {
RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
return g_jvm;
@@ -232,22 +227,20 @@ bool IsNull(JNIEnv* jni, jobject obj) {
// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
- UnicodeString ustr(UnicodeString::fromUTF8(native));
- jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
- CHECK_EXCEPTION(jni) << "error during NewString";
+ jstring jstr = jni->NewStringUTF(native.c_str());
+ CHECK_EXCEPTION(jni) << "error during NewStringUTF";
return jstr;
}
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
- const jchar* jchars = jni->GetStringChars(j_string, NULL);
- CHECK_EXCEPTION(jni) << "Error during GetStringChars";
- UnicodeString ustr(jchars, jni->GetStringLength(j_string));
- CHECK_EXCEPTION(jni) << "Error during GetStringLength";
- jni->ReleaseStringChars(j_string, jchars);
- CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
- std::string ret;
- return ustr.toUTF8String(ret);
+ const char* chars = jni->GetStringUTFChars(j_string, NULL);
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+ std::string str(chars, jni->GetStringUTFLength(j_string));
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+ jni->ReleaseStringUTFChars(j_string, chars);
+ CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+ return str;
}
// Return the (singleton) Java Enum object corresponding to |index|;
diff --git a/talk/app/webrtc/java/jni/jni_onload.cc b/talk/app/webrtc/java/jni/jni_onload.cc
new file mode 100644
index 0000000000..9664ecdca6
--- /dev/null
+++ b/talk/app/webrtc/java/jni/jni_onload.cc
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ac3e0455df..1757184154 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -27,14 +27,65 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/logging.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+ // Texture coordinates are in the range 0 to 1. The transformation of the last
+ // row in each rotation matrix is needed for proper translation, e.g., to
+ // mirror x, we don't replace x by -x, but by 1-x.
+ switch (rotation) {
+ case webrtc::kVideoRotation_0:
+ break;
+ case webrtc::kVideoRotation_90: {
+ const float ROTATE_90[16] =
+ { a[4], a[5], a[6], a[7],
+ -a[0], -a[1], -a[2], -a[3],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+ memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+ } break;
+ case webrtc::kVideoRotation_180: {
+ const float ROTATE_180[16] =
+ { -a[0], -a[1], -a[2], -a[3],
+ -a[4], -a[5], -a[6], -a[7],
+ a[8], a[9], a[10], a[11],
+ a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+ a[3] + a[7] + a[15]};
+ memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+ }
+ break;
+ case webrtc::kVideoRotation_270: {
+ const float ROTATE_270[16] =
+ { -a[4], -a[5], -a[6], -a[7],
+ a[0], a[1], a[2], a[3],
+ a[8], a[9], a[10], a[11],
+ a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+ memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+ } break;
+ }
+}
+
+} // anonymous namespace
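
The matrices follow the column-major OpenGL/SurfaceTexture convention: a[0..3] is the first column and a[12..14] carries the translation. A worked example, valid inside this translation unit since RotateMatrix() is file-local, starting from the identity sampling matrix:

  float m[16] = {1, 0, 0, 0,
                 0, 1, 0, 0,
                 0, 0, 1, 0,
                 0, 0, 0, 1};
  RotateMatrix(m, webrtc::kVideoRotation_90);
  // m is now {0, 1, 0, 0,  -1, 0, 0, 0,  0, 0, 1, 0,  1, 0, 0, 1},
  // i.e. the sampling point (u, v) maps to (1 - v, u).
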
namespace webrtc_jni {
-NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix)
- : oes_texture_id(j_oes_texture_id) {
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
+ : oes_texture_id(j_oes_texture_id) {
RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
jfloat* transform_matrix_ptr =
jni->GetFloatArrayElements(j_transform_matrix, nullptr);
@@ -44,41 +95,15 @@ NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
}
-NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
-
-void* NativeHandleImpl::GetHandle() {
- return texture_object_;
-}
-
-int NativeHandleImpl::GetTextureId() {
- return texture_id_;
-}
-
-void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
- texture_object_ = reinterpret_cast<jobject>(texture_object);
- texture_id_ = texture_id;
-}
-
-JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
- int width,
- int height)
- : NativeHandleBuffer(native_handle, width, height) {}
-
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-JniNativeHandleBuffer::NativeToI420Buffer() {
- // TODO(pbos): Implement before using this in the encoder pipeline (or
- // remove the RTC_CHECK() in VideoCapture).
- RTC_NOTREACHED();
- return nullptr;
-}
-
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle),
+ surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
@@ -87,9 +112,75 @@ AndroidTextureBuffer::~AndroidTextureBuffer() {
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
AndroidTextureBuffer::NativeToI420Buffer() {
- RTC_NOTREACHED()
- << "AndroidTextureBuffer::NativeToI420Buffer not implemented.";
- return nullptr;
+ int uv_width = (width() + 7) / 8;
+ int stride = 8 * uv_width;
+ int uv_height = (height() + 1) / 2;
+ size_t size = stride * (height() + uv_height);
+ // The data is owned by the frame, and the normal case is that the
+ // data is deleted by the frame's destructor callback.
+ //
+ // TODO(nisse): Use an I420BufferPool. We then need to extend that
+ // class, and I420Buffer, to support our memory layout.
+ rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+ // See SurfaceTextureHelper.java for the required layout.
+ uint8_t* y_data = yuv_data.get();
+ uint8_t* u_data = y_data + height() * stride;
+ uint8_t* v_data = u_data + stride/2;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+ new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+ width(), height(),
+ y_data, stride,
+ u_data, stride,
+ v_data, stride,
+ rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ jmethodID transform_mid = GetMethodID(
+ jni,
+ GetObjectClass(jni, surface_texture_helper_),
+ "textureToYUV",
+ "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+ jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+ // TODO(nisse): Keep java transform matrix around.
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+ native_handle_.sampling_matrix);
+
+ jni->CallVoidMethod(surface_texture_helper_,
+ transform_mid,
+ byte_buffer, width(), height(), stride,
+ native_handle_.oes_texture_id, sampling_matrix);
+ CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+ return copy;
+}
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation) {
+ if (width() == dst_width && height() == dst_height &&
+ rotation == webrtc::kVideoRotation_0) {
+ return this;
+ }
+ int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+ int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+ // Here we use Bind magic to add a reference count to |this| until the newly
+ // created AndroidTextureBuffer is destroyed.
+ rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+ new rtc::RefCountedObject<AndroidTextureBuffer>(
+ rotated_width, rotated_height, native_handle_,
+ surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+ RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+ return buffer;
}
} // namespace webrtc_jni
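
The direct ByteBuffer handed to textureToYUV in NativeToI420Buffer() above must use the layout those pointer computations imply: stride = 8 * ((width + 7) / 8); |height| Y rows of |stride| bytes; then (height + 1) / 2 chroma rows of |stride| bytes each, where the first stride/2 bytes of a row are U and the last stride/2 bytes are V. A hypothetical helper mirroring that arithmetic:

  void PlanePointersForTextureToYUV(uint8_t* data, int width, int height,
                                    uint8_t** y, uint8_t** u, uint8_t** v,
                                    int* stride) {
    *stride = 8 * ((width + 7) / 8);  // stride/2 stays a multiple of 4
    *y = data;
    *u = data + height * *stride;     // chroma region starts after the Y plane
    *v = *u + *stride / 2;            // V shares each chroma row with U
  }
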
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index dd04bc20b1..1d0f601d0d 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -31,56 +31,44 @@
#include <jni.h>
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
namespace webrtc_jni {
// Wrapper for texture object.
-struct NativeTextureHandleImpl {
- NativeTextureHandleImpl(JNIEnv* jni,
- jint j_oes_texture_id,
- jfloatArray j_transform_matrix);
+struct NativeHandleImpl {
+ NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix);
const int oes_texture_id;
float sampling_matrix[16];
};
-// Native handle for SurfaceTexture + texture id.
-class NativeHandleImpl {
- public:
- NativeHandleImpl();
-
- void* GetHandle();
- int GetTextureId();
- void SetTextureObject(void* texture_object, int texture_id);
-
- private:
- jobject texture_object_;
- int32_t texture_id_;
-};
-
-class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
- public:
- JniNativeHandleBuffer(void* native_handle, int width, int height);
-
- // TODO(pbos): Override destructor to release native handle, at the moment the
- // native handle is not released based on refcount.
-
- private:
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
-};
-
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
public:
AndroidTextureBuffer(int width,
int height,
- const NativeTextureHandleImpl& native_handle,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+ rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation);
+
private:
- NativeTextureHandleImpl native_handle_;
+ NativeHandleImpl native_handle_;
+ // Raw object pointer, relying on the caller, i.e.,
+ // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+ // a global reference. TODO(nisse): Make this a reference to the C++
+ // SurfaceTextureHelper instead, but that requires some refactoring
+ // of AndroidVideoCapturerJni.
+ jobject surface_texture_helper_;
rtc::Callback0<void> no_longer_used_cb_;
};
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index e75cd553b6..5ea63f74ae 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -57,6 +57,7 @@
#define JNIEXPORT __attribute__((visibility("default")))
#include <limits>
+#include <utility>
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/jni_helpers.h"
@@ -74,10 +75,11 @@
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/logsinks.h"
-#include "webrtc/base/networkmonitor.h"
#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/system_wrappers/include/field_trial_default.h"
@@ -141,22 +143,6 @@ static bool factory_static_initialized = false;
static bool video_hw_acceleration_enabled = true;
#endif
-extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
- jint ret = InitGlobalJniVariables(jvm);
- if (ret < 0)
- return -1;
-
- RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
- LoadGlobalClassReferenceHolder();
-
- return ret;
-}
-
-extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
- FreeGlobalClassReferenceHolder();
- RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
-}
-
// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
static jobject JavaEnumFromIndex(
@@ -545,7 +531,7 @@ class SdpObserverWrapper : public T {
protected:
// Common implementation for failure of Set & Create types, distinguished by
// |op| being "Set" or "Create".
- void OnFailure(const std::string& op, const std::string& error) {
+ void DoOnFailure(const std::string& op, const std::string& error) {
jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
"(Ljava/lang/String;)V");
jstring j_error_string = JavaStringFromStdString(jni(), error);
@@ -572,7 +558,7 @@ class CreateSdpObserverWrapper
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
- SdpObserverWrapper::OnFailure(std::string("Create"), error);
+ SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
}
};
@@ -585,7 +571,7 @@ class SetSdpObserverWrapper
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
- SdpObserverWrapper::OnFailure(std::string("Set"), error);
+ SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
}
};
@@ -773,7 +759,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
j_texture_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>",
- "(IIILjava/lang/Object;IJ)V")),
+ "(IIII[FJ)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni);
}
@@ -829,13 +815,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle =
reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
- jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
- int texture_id = handle->GetTextureId();
+ jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+ jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(),
static_cast<int>(frame->GetVideoRotation()),
- texture_object, texture_id, javaShallowCopy(frame));
+ handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
}
JNIEnv* jni() {
@@ -1054,6 +1040,32 @@ JOW(void, PeerConnectionFactory_initializeFieldTrials)(
webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
}
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+ JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+ if (!j_event_tracing_filename)
+ return false;
+
+ const char* init_string =
+ jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+ LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+ return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+ JNIEnv* jni, jclass) {
+ rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::ShutdownInternalTracer();
+}
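
These four entry points are thin wrappers over rtc::tracing, so the same capture lifecycle can be driven from native code. A sketch (the output path is an assumption):

  rtc::tracing::SetupInternalTracer();
  if (rtc::tracing::StartInternalCapture("/sdcard/webrtc-trace.json")) {
    // ... run the calls that should be traced ...
    rtc::tracing::StopInternalCapture();
  }
  rtc::tracing::ShutdownInternalTracer();
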
+
// Helper struct for working around the fact that CreatePeerConnectionFactory()
// comes in two flavors: either entirely automagical (constructing its own
// threads and deleting them on teardown, but no external codec factory support)
@@ -1251,6 +1263,46 @@ JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
return (jlong)track.release();
}
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartAecDump(file);
+#else
+ return false;
+#endif
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopAecDump();
+#endif
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartRtcEventLog(file);
+#else
+ return false;
+#endif
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+#if defined(ANDROID)
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopRtcEventLog();
+#endif
+}
+
JOW(void, PeerConnectionFactory_nativeSetOptions)(
JNIEnv* jni, jclass, jlong native_factory, jobject options) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
@@ -1292,21 +1344,35 @@ JOW(void, PeerConnectionFactory_nativeSetOptions)(
}
JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
- JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
+ JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+ jobject remote_egl_context) {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
OwnedFactoryAndThreads* owned_factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+ jclass j_eglbase14_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+
+ MediaCodecVideoEncoderFactory* encoder_factory =
+ static_cast<MediaCodecVideoEncoderFactory*>
+ (owned_factory->encoder_factory());
+ if (encoder_factory &&
+ jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW encoding.";
+ encoder_factory->SetEGLContext(jni, local_egl_context);
+ }
+
MediaCodecVideoDecoderFactory* decoder_factory =
static_cast<MediaCodecVideoDecoderFactory*>
(owned_factory->decoder_factory());
- if (decoder_factory) {
- LOG(LS_INFO) << "Set EGL context for HW acceleration.";
- decoder_factory->SetEGLContext(jni, render_egl_context);
+ if (decoder_factory &&
+ jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW decoding.";
+ decoder_factory->SetEGLContext(jni, remote_egl_context);
}
#endif
}
-
static std::string
GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
jclass enumClass = FindClass(jni, className.c_str());
@@ -1503,6 +1569,9 @@ static void JavaRTCConfigurationToJsepRTCConfiguration(
jfieldID j_ice_connection_receiving_timeout_id =
GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+ jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+ jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
jfieldID j_continual_gathering_policy_id =
GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
"Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
@@ -1524,6 +1593,8 @@ static void JavaRTCConfigurationToJsepRTCConfiguration(
jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
rtc_config->ice_connection_receiving_timeout =
GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+ rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+ jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
rtc_config->continual_gathering_policy =
JavaContinualGatheringPolicyToNativeType(
jni, j_continual_gathering_policy);
@@ -1550,7 +1621,7 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
if (ssl_identity.get()) {
rtc_config.certificates.push_back(
- rtc::RTCCertificate::Create(ssl_identity.Pass()));
+ rtc::RTCCertificate::Create(std::move(ssl_identity)));
LOG(LS_INFO) << "ECDSA certificate created.";
} else {
// Failing to create certificate should not abort peer connection
@@ -1704,6 +1775,29 @@ JOW(void, PeerConnection_nativeRemoveLocalStream)(
reinterpret_cast<MediaStreamInterface*>(native_stream));
}
+JOW(jobject, PeerConnection_nativeCreateSender)(
+ JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ std::string kind = JavaToStdString(jni, j_kind);
+ std::string stream_id = JavaToStdString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ if (!sender.get()) {
+ return nullptr;
+ }
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ return j_sender;
+}
+
JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
jmethodID j_array_list_ctor =
@@ -1723,7 +1817,8 @@ JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
jobject j_sender =
jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
- // Sender is now owned by Java object, and will be freed from there.
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
sender->AddRef();
jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
@@ -1802,6 +1897,7 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
// Since we can't create platform specific java implementations in Java, we
// defer the creation to C land.
#if defined(ANDROID)
+ // TODO(nisse): This case is intended to be deleted.
jclass j_video_capturer_class(
FindClass(jni, "org/webrtc/VideoCapturerAndroid"));
const int camera_id = jni->CallStaticIntMethod(
@@ -1816,8 +1912,13 @@ JOW(jobject, VideoCapturer_nativeCreateVideoCapturer)(
j_video_capturer_class,
GetMethodID(jni, j_video_capturer_class, "<init>", "(I)V"), camera_id);
CHECK_EXCEPTION(jni) << "error during creation of VideoCapturerAndroid";
+ jfieldID helper_fid = GetFieldID(jni, j_video_capturer_class, "surfaceHelper",
+ "Lorg/webrtc/SurfaceTextureHelper;");
+
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer,
+ GetObjectField(jni, j_video_capturer, helper_fid));
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
@@ -2003,11 +2104,11 @@ JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
return result;
}
-JOW(void, RtpSender_nativeSetTrack)(JNIEnv* jni,
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer,
jlong j_track_pointer) {
- reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
}
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index 05f1b23768..3e32b9a6fe 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -35,25 +35,14 @@
namespace webrtc_jni {
-SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
- jobject egl_shared_context)
- : j_surface_texture_helper_class_(
- jni,
- FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
- j_surface_texture_helper_(
- jni,
- jni->CallStaticObjectMethod(
- *j_surface_texture_helper_class_,
- GetStaticMethodID(jni,
- *j_surface_texture_helper_class_,
- "create",
- "(Ljavax/microedition/khronos/egl/EGLContext;)"
- "Lorg/webrtc/SurfaceTextureHelper;"),
- egl_shared_context)),
- j_return_texture_method_(GetMethodID(jni,
- *j_surface_texture_helper_class_,
- "returnTextureFrame",
- "()V")) {
+SurfaceTextureHelper::SurfaceTextureHelper(
+ JNIEnv* jni, jobject surface_texture_helper)
+ : j_surface_texture_helper_(jni, surface_texture_helper),
+ j_return_texture_method_(
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "returnTextureFrame",
+ "()V")) {
CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
}
@@ -70,9 +59,9 @@ void SurfaceTextureHelper::ReturnTextureFrame() const {
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
SurfaceTextureHelper::CreateTextureFrame(int width, int height,
- const NativeTextureHandleImpl& native_handle) {
+ const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, native_handle,
+ width, height, native_handle, *j_surface_texture_helper_,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
}
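
CreateTextureFrame() binds ReturnTextureFrame() as the buffer's release callback, so the Java texture is handed back for reuse exactly when the last reference to the wrapping buffer drops. A lifetime sketch, assuming |helper| is a valid rtc::scoped_refptr<SurfaceTextureHelper> and |handle| came from a captured frame:

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      helper->CreateTextureFrame(width, height, handle);
  // ... hand |buffer| down the pipeline; it may outlive this scope ...
  buffer = nullptr;  // last reference gone -> returnTextureFrame() runs in Java
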
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
index dc9d2b853d..8dde2b54ed 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.h
@@ -35,7 +35,7 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/common_video/interface/video_frame_buffer.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc_jni {
@@ -49,24 +49,19 @@ namespace webrtc_jni {
// destroyed while a VideoFrameBuffer is in use.
// This class is the C++ counterpart of the java class SurfaceTextureHelper.
// Usage:
-// 1. Create an instance of this class.
-// 2. Call GetJavaSurfaceTextureHelper to get the Java SurfaceTextureHelper.
+// 1. Create a Java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
// 3. Register a listener to the Java SurfaceListener and start producing
// new buffers.
-// 3. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
class SurfaceTextureHelper : public rtc::RefCountInterface {
public:
- SurfaceTextureHelper(JNIEnv* jni, jobject shared_egl_context);
-
- // Returns the Java SurfaceTextureHelper.
- jobject GetJavaSurfaceTextureHelper() const {
- return *j_surface_texture_helper_;
- }
+ SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
int width,
int height,
- const NativeTextureHandleImpl& native_handle);
+ const NativeHandleImpl& native_handle);
protected:
~SurfaceTextureHelper();
@@ -75,7 +70,6 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
// May be called on arbitrary thread.
void ReturnTextureFrame() const;
- const ScopedGlobalRef<jclass> j_surface_texture_helper_class_;
const ScopedGlobalRef<jobject> j_surface_texture_helper_;
const jmethodID j_return_texture_method_;
};
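
A sketch of steps 2-4 from the usage comment above; |j_helper| stands for the Java SurfaceTextureHelper created in step 1 (obtained, e.g., via a field lookup as in VideoCapturer_nativeCreateVideoCapturer):

  rtc::scoped_refptr<SurfaceTextureHelper> helper(
      new rtc::RefCountedObject<SurfaceTextureHelper>(jni, j_helper));
  // Step 3 happens on the Java side: register the listener, start capturing.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame =
      helper->CreateTextureFrame(width, height, handle);
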