author    Android Chromium Automerger <chromium-automerger@android>  2014-09-24 16:39:45 +0000
committer Android Chromium Automerger <chromium-automerger@android>  2014-09-24 16:39:45 +0000
commit    bcdb45bfe845ef1beeb4fe1b2e69e5768075f8f4 (patch)
tree      712d0e4eb4769267e4654ad7156bc2014f08699e
parent    c467126e7d86626bef03f660a8193b326c5a8745 (diff)
parent    47740f2c26aea1b3b7830abdcba063a12a61d009 (diff)
Merge third_party/libjingle/source/talk from https://chromium.googlesource.com/external/webrtc/trunk/talk.git at 47740f2c26aea1b3b7830abdcba063a12a61d009
This commit was generated by merge_from_chromium.py.

Change-Id: I716b47abf872803a836ee46221e5d100dfbc3984
-rw-r--r--  app/webrtc/java/android/org/webrtc/VideoRendererGui.java  155
-rw-r--r--  app/webrtc/java/jni/peerconnection_jni.cc                   88
-rw-r--r--  app/webrtc/peerconnection.cc                                11
-rw-r--r--  build/common.gypi                                            1
-rwxr-xr-x  libjingle_tests.gyp                                          5
-rw-r--r--  media/sctp/sctpdataengine_unittest.cc                       11
-rw-r--r--  media/webrtc/fakewebrtcvoiceengine.h                         2
-rw-r--r--  media/webrtc/webrtcmediaengine.cc                            3
-rw-r--r--  media/webrtc/webrtcvideoengine.cc                           60
-rw-r--r--  media/webrtc/webrtcvideoengine.h                            14
-rw-r--r--  media/webrtc/webrtcvideoengine2.cc                         118
-rw-r--r--  media/webrtc/webrtcvideoengine2.h                           41
-rw-r--r--  media/webrtc/webrtcvideoengine2_unittest.cc                 29
-rw-r--r--  media/webrtc/webrtcvideoengine2_unittest.h                  11
-rw-r--r--  media/webrtc/webrtcvoiceengine.cc                            3
-rw-r--r--  session/media/mediamonitor.h                                 3
16 files changed, 373 insertions(+), 182 deletions(-)
diff --git a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index c3bf7a0..af625c0 100644
--- a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -64,10 +64,22 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// should be called. The variable is accessed on multiple threads and
// all accesses are synchronized on yuvImageRenderers' object lock.
private boolean onSurfaceCreatedCalled;
+ private int screenWidth;
+ private int screenHeight;
// List of yuv renderers.
private ArrayList<YuvImageRenderer> yuvImageRenderers;
private int yuvProgram;
private int oesProgram;
+ // Types of video scaling:
+ // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+ // maintaining the aspect ratio (black borders may be displayed).
+ // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+ // maintaining the aspect ratio. Some portion of the video frame may be
+ // clipped.
+ // SCALE_FILL - video frame is scaled to fill the size of the view. Video
+ // aspect ratio is changed if necessary.
+ private static enum ScalingType
+ { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_FILL };
private final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n" +
@@ -194,7 +206,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private int id;
private int yuvProgram;
private int oesProgram;
- private FloatBuffer textureVertices;
private int[] yuvTextures = { -1, -1, -1 };
private int oesTexture = -1;
private float[] stMatrix = new float[16];
@@ -210,6 +221,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private RendererType rendererType;
+ private ScalingType scalingType;
// Flag if renderFrame() was ever called.
boolean seenFrame;
// Total number of video frames received in renderFrame() call.
@@ -226,32 +238,57 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Time in ns spent in renderFrame() function - including copying frame
// data to rendering planes.
private long copyTimeNs;
-
- // Texture Coordinates mapping the entire texture.
- private final FloatBuffer textureCoords = directNativeFloatBuffer(
- new float[] {
- 0, 0, 0, 1, 1, 0, 1, 1
- });
+ // Texture vertices.
+ private float texLeft;
+ private float texRight;
+ private float texTop;
+ private float texBottom;
+ private FloatBuffer textureVertices;
+ // Texture UV coordinate offsets.
+ private float texOffsetU;
+ private float texOffsetV;
+ private FloatBuffer textureCoords;
+ // Flag if texture vertices or coordinates update is needed.
+ private boolean updateTextureProperties;
+ // Viewport dimensions.
+ private int screenWidth;
+ private int screenHeight;
+ // Video dimensions.
+ private int videoWidth;
+ private int videoHeight;
private YuvImageRenderer(
GLSurfaceView surface, int id,
- int x, int y, int width, int height) {
+ int x, int y, int width, int height,
+ ScalingType scalingType) {
Log.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
this.id = id;
+ this.scalingType = scalingType;
frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
// Create texture vertices.
- float xLeft = (x - 50) / 50.0f;
- float yTop = (50 - y) / 50.0f;
- float xRight = Math.min(1.0f, (x + width - 50) / 50.0f);
- float yBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
+ texLeft = (x - 50) / 50.0f;
+ texTop = (50 - y) / 50.0f;
+ texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
+ texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
float textureVeticesFloat[] = new float[] {
- xLeft, yTop,
- xLeft, yBottom,
- xRight, yTop,
- xRight, yBottom
+ texLeft, texTop,
+ texLeft, texBottom,
+ texRight, texTop,
+ texRight, texBottom
};
textureVertices = directNativeFloatBuffer(textureVeticesFloat);
+ // Create texture UV coordinates.
+ texOffsetU = 0;
+ texOffsetV = 0;
+ float textureCoordinatesFloat[] = new float[] {
+ texOffsetU, texOffsetV, // left top
+ texOffsetU, 1.0f - texOffsetV, // left bottom
+ 1.0f - texOffsetU, texOffsetV, // right top
+ 1.0f - texOffsetU, 1.0f - texOffsetV // right bottom
+ };
+ textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
+ updateTextureProperties = false;
}
private void createTextures(int yuvProgram, int oesProgram) {
@@ -279,11 +316,73 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
checkNoGLES2Error();
}
+ private void checkAdjustTextureCoords() {
+ if (!updateTextureProperties ||
+ scalingType == ScalingType.SCALE_FILL) {
+ return;
+ }
+ // Re-calculate texture vertices to preserve video aspect ratio.
+ float texRight = this.texRight;
+ float texLeft = this.texLeft;
+ float texTop = this.texTop;
+ float texBottom = this.texBottom;
+ float displayWidth = (texRight - texLeft) * screenWidth / 2;
+ float displayHeight = (texTop - texBottom) * screenHeight / 2;
+ if (displayWidth > 1 && displayHeight > 1 &&
+ videoWidth > 1 && videoHeight > 1) {
+ float displayAspectRatio = displayWidth / displayHeight;
+ float videoAspectRatio = (float)videoWidth / videoHeight;
+ if (scalingType == ScalingType.SCALE_ASPECT_FIT) {
+ // Need to re-adjust vertices width or height to match video AR.
+ if (displayAspectRatio > videoAspectRatio) {
+ float deltaX = (displayWidth - videoAspectRatio * displayHeight) /
+ instance.screenWidth;
+ texRight -= deltaX;
+ texLeft += deltaX;
+ } else {
+ float deltaY = (displayHeight - displayWidth / videoAspectRatio) /
+ instance.screenHeight;
+ texTop -= deltaY;
+ texBottom += deltaY;
+ }
+ // Re-allocate vertices buffer to adjust to video aspect ratio.
+ float textureVeticesFloat[] = new float[] {
+ texLeft, texTop,
+ texLeft, texBottom,
+ texRight, texTop,
+ texRight, texBottom
+ };
+ textureVertices = directNativeFloatBuffer(textureVeticesFloat);
+ }
+ if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
+ // Need to re-adjust UV coordinates to match display AR.
+ if (displayAspectRatio > videoAspectRatio) {
+ texOffsetV = (1.0f - videoAspectRatio / displayAspectRatio) / 2.0f;
+ } else {
+ texOffsetU = (1.0f - displayAspectRatio / videoAspectRatio) / 2.0f;
+ }
+ // Re-allocate coordinates buffer to adjust to display aspect ratio.
+ float textureCoordinatesFloat[] = new float[] {
+ texOffsetU, texOffsetV, // left top
+ texOffsetU, 1.0f - texOffsetV, // left bottom
+ 1.0f - texOffsetU, texOffsetV, // right top
+ 1.0f - texOffsetU, 1.0f - texOffsetV // right bottom
+ };
+ textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
+ }
+ }
+ updateTextureProperties = false;
+ }
+
private void draw() {
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
}
+ // Check if texture vertices/coordinates adjustment is required when
+ // screen orientation changes or video frame size changes.
+ checkAdjustTextureCoords();
+
long now = System.nanoTime();
I420Frame frameFromQueue;
@@ -364,7 +463,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
if (frameFromQueue != null) {
framesRendered++;
drawTimeNs += (System.nanoTime() - now);
- if ((framesRendered % 90) == 0) {
+ if ((framesRendered % 150) == 0) {
logStatistics();
}
}
@@ -384,10 +483,18 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
+ public void setScreenSize(final int screenWidth, final int screenHeight) {
+ this.screenWidth = screenWidth;
+ this.screenHeight = screenHeight;
+ updateTextureProperties = true;
+ }
+
@Override
public void setSize(final int width, final int height) {
Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
width + " x " + height);
+ videoWidth = width;
+ videoHeight = height;
int[] strides = { width, width / 2, width / 2 };
// Frame re-allocation needs to be synchronized with copying
// frame to textures in draw() function to avoid re-allocating
@@ -398,6 +505,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Re-allocate / allocate the frame.
yuvFrameToRender = new I420Frame(width, height, strides, null);
textureFrameToRender = new I420Frame(width, height, null, -1);
+ updateTextureProperties = true;
}
}
@@ -497,7 +605,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
- x, y, width, height);
+ x, y, width, height, ScalingType.SCALE_ASPECT_FIT);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@@ -508,6 +616,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public void run() {
yuvImageRenderer.createTextures(
instance.yuvProgram, instance.oesProgram);
+ yuvImageRenderer.setScreenSize(
+ instance.screenWidth, instance.screenHeight);
countDownLatch.countDown();
}
});
@@ -545,14 +655,21 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
onSurfaceCreatedCalled = true;
}
checkNoGLES2Error();
- GLES20.glClearColor(0.0f, 0.3f, 0.1f, 1.0f);
+ GLES20.glClearColor(0.0f, 0.0f, 0.1f, 1.0f);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
Log.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " ");
+ screenWidth = width;
+ screenHeight = height;
GLES20.glViewport(0, 0, width, height);
+ synchronized (yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
+ }
+ }
}
@Override
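
Note: the aspect-ratio math in checkAdjustTextureCoords() above is easier to follow outside the GL plumbing. Below is a minimal standalone sketch of just that math (C++ for brevity; names are illustrative and not part of the patch). SCALE_ASPECT_FIT shrinks the render quad symmetrically toward the video aspect ratio (letterboxing/pillarboxing), while SCALE_ASPECT_FILL leaves the quad alone and crops the texture via symmetric UV offsets, matching the texOffsetU/texOffsetV formulas in the diff.

#include <cstdio>

// Per-side shrink, in pixels, applied to the render quad for
// SCALE_ASPECT_FIT (mirrors the deltaX/deltaY computation in
// checkAdjustTextureCoords(), expressed in pixels instead of NDC units).
void AspectFit(float displayW, float displayH, float videoW, float videoH,
               float* dx, float* dy) {
  const float displayAR = displayW / displayH;
  const float videoAR = videoW / videoH;
  *dx = *dy = 0;
  if (displayAR > videoAR)
    *dx = (displayW - videoAR * displayH) / 2;  // pillarbox: shrink width
  else
    *dy = (displayH - displayW / videoAR) / 2;  // letterbox: shrink height
}

// Symmetric UV offsets for SCALE_ASPECT_FILL: the quad stays put and the
// texture is cropped instead (same formulas as texOffsetU/texOffsetV above).
void AspectFill(float displayAR, float videoAR, float* u, float* v) {
  *u = *v = 0;
  if (displayAR > videoAR)
    *v = (1.0f - videoAR / displayAR) / 2.0f;  // crop top/bottom
  else
    *u = (1.0f - displayAR / videoAR) / 2.0f;  // crop left/right
}

int main() {
  float dx, dy, u, v;
  AspectFit(1280, 720, 640, 480, &dx, &dy);      // 16:9 view, 4:3 video
  printf("fit: dx=%.1f dy=%.1f\n", dx, dy);      // dx=160.0: pillarboxed
  AspectFill(1280.0f / 720, 640.0f / 480, &u, &v);
  printf("fill: u=%.3f v=%.3f\n", u, v);         // v=0.125: top/bottom cropped
  return 0;
}

In the patch the FIT deltas are expressed in normalized device coordinates (hence the division by screenWidth/screenHeight); the per-side pixel form above is equivalent.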
diff --git a/app/webrtc/java/jni/peerconnection_jni.cc b/app/webrtc/java/jni/peerconnection_jni.cc
index e0207e4..27f69e4 100644
--- a/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/app/webrtc/java/jni/peerconnection_jni.cc
@@ -1210,9 +1210,6 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
-// Set to false to switch HW video decoder back to byte buffer output.
-#define HW_DECODER_USE_SURFACE true
-
// Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE {
@@ -2054,15 +2051,18 @@ int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
if (render_egl_context_) {
jni->DeleteGlobalRef(render_egl_context_);
}
- render_egl_context_ = jni->NewGlobalRef(render_egl_context);
- ALOGD("VideoDecoder EGL context set");
+ if (IsNull(jni, render_egl_context)) {
+ render_egl_context_ = NULL;
+ } else {
+ render_egl_context_ = jni->NewGlobalRef(render_egl_context);
+ }
+ ALOGD("VideoDecoder EGL context set.");
return 0;
}
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
: key_frame_required_(true),
inited_(false),
- use_surface_(HW_DECODER_USE_SURFACE),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -2127,6 +2127,9 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+ use_surface_ = true;
+ if (render_egl_context_ == NULL)
+ use_surface_ = false;
memset(&codec_, 0, sizeof(codec_));
}
@@ -2393,7 +2396,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
return true;
}
- // Extract data from Java DecoderOutputBufferInfo.
+ // Extract output buffer info from Java DecoderOutputBufferInfo.
int output_buffer_index =
GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
if (output_buffer_index < 0) {
@@ -2407,15 +2410,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
CHECK_EXCEPTION(jni);
- // Extract data from Java ByteBuffer.
- jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
- jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
- jobject output_buffer =
- jni->GetObjectArrayElement(output_buffers, output_buffer_index);
- uint8_t* payload =
- reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(output_buffer));
- CHECK_EXCEPTION(jni);
- payload += output_buffer_offset;
// Get decoded video frame properties.
int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
j_color_format_field_);
@@ -2426,27 +2420,25 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_slice_height_field_);
int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
j_textureID_field_);
- if (!use_surface_ && output_buffer_size < width * height * 3 / 2) {
- ALOGE("Insufficient output buffer size: %d", output_buffer_size);
- Reset();
- return false;
- }
- // Get frame timestamps from a queue.
- int32_t timestamp = timestamps_.front();
- timestamps_.erase(timestamps_.begin());
- int64_t ntp_time_ms = ntp_times_ms_.front();
- ntp_times_ms_.erase(ntp_times_ms_.begin());
- int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
- frame_rtc_times_ms_.front();
- frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
-
- ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
- " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
- color_format, output_buffer_size, frame_decoding_time_ms);
-
- // Create yuv420 frame.
+ // Extract data from Java ByteBuffer and create output yuv420 frame -
+ // for non-surface decoding only.
if (!use_surface_) {
+ if (output_buffer_size < width * height * 3 / 2) {
+ ALOGE("Insufficient output buffer size: %d", output_buffer_size);
+ Reset();
+ return false;
+ }
+ jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+ jobject output_buffer =
+ jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+ output_buffer));
+ CHECK_EXCEPTION(jni);
+ payload += output_buffer_offset;
+
+ // Create yuv420 frame.
if (color_format == COLOR_FormatYUV420Planar) {
decoded_image_.CreateFrame(
stride * slice_height, payload,
@@ -2471,11 +2463,25 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
}
}
+ // Get frame timestamps from a queue.
+ int32_t timestamp = timestamps_.front();
+ timestamps_.erase(timestamps_.begin());
+ int64_t ntp_time_ms = ntp_times_ms_.front();
+ ntp_times_ms_.erase(ntp_times_ms_.begin());
+ int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
+ frame_rtc_times_ms_.front();
+ frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+
+ ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
+ " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
+ color_format, output_buffer_size, frame_decoding_time_ms);
+
// Return output buffer back to codec.
- bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
- j_release_output_buffer_method_,
- output_buffer_index,
- use_surface_);
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_release_output_buffer_method_,
+ output_buffer_index,
+ use_surface_);
CHECK_EXCEPTION(jni);
if (!success) {
ALOGE("releaseOutputBuffer error");
@@ -2686,12 +2692,12 @@ JOW(void, Logging_nativeEnableTracing)(
std::string path = JavaToStdString(jni, j_path);
if (nativeLevels != webrtc::kTraceNone) {
webrtc::Trace::set_level_filter(nativeLevels);
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
if (path != "logcat:") {
#endif
CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
<< "SetTraceFile failed";
-#ifdef ANDROID
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
} else {
// Intentionally leak this to avoid needing to reason about its lifecycle.
// It keeps no state and functions only as a dispatch point.
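
Note: with the HW_DECODER_USE_SURFACE define removed, use_surface_ is now decided at runtime from whether Java supplied a render EGL context. The reordered DeliverPendingOutputs() flow is summarized by this JNI-free sketch (all types and names are illustrative stand-ins, not the real API): buffer extraction and the size check run only on the byte-buffer branch, while timestamp bookkeeping and releasing the output buffer are shared by both branches.

#include <cstdint>
#include <vector>

// Illustrative stand-in for the fields read from DecoderOutputBufferInfo.
struct OutputInfo {
  int width, height;
  int buffer_size;
  const uint8_t* payload;  // only meaningful on the byte-buffer path
};

// Returns false when the caller should Reset() the decoder.
bool DeliverOutput(const OutputInfo& out, bool use_surface,
                   std::vector<int64_t>* timestamps) {
  if (!use_surface) {
    // Byte-buffer path only: validate the I420 payload size before copying.
    if (out.buffer_size < out.width * out.height * 3 / 2)
      return false;
    // ... copy out.payload into the decoded I420 frame here ...
  }
  // Shared path: pop the oldest pending timestamp for this frame.
  if (!timestamps->empty())
    timestamps->erase(timestamps->begin());
  // ... then releaseOutputBuffer(index, use_surface): renders to the
  // surface when use_surface is true, otherwise just recycles the buffer.
  return true;
}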
diff --git a/app/webrtc/peerconnection.cc b/app/webrtc/peerconnection.cc
index 3698381..d939f98 100644
--- a/app/webrtc/peerconnection.cc
+++ b/app/webrtc/peerconnection.cc
@@ -39,6 +39,7 @@
#include "talk/session/media/channelmanager.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringencode.h"
+#include "webrtc/system_wrappers/interface/field_trial.h"
namespace {
@@ -353,10 +354,14 @@ bool PeerConnection::DoInitialize(
cricket::PORTALLOCATOR_ENABLE_SHARED_UFRAG |
cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET;
bool value;
+ // If the IPv6 flag was specified explicitly, don't override it with the
+ // experiment.
if (FindConstraint(
- constraints,
- MediaConstraintsInterface::kEnableIPv6,
- &value, NULL) && value) {
+ constraints, MediaConstraintsInterface::kEnableIPv6, &value, NULL)) {
+ if (value) {
+ portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_IPV6;
+ }
+ } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") ==
+ "Enabled") {
portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_IPV6;
}
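
Note: the precedence introduced here is that an explicit kEnableIPv6 constraint always wins, and the WebRTC-IPv6Default field trial is consulted only when no constraint was given. A self-contained sketch of just that decision (the flag value and helper name are illustrative):

#include <cstdint>
#include <string>

const uint32_t PORTALLOCATOR_ENABLE_IPV6 = 0x40;  // illustrative value

uint32_t ApplyIPv6Flags(bool constraint_found, bool constraint_value,
                        const std::string& ipv6_trial,  // field-trial state
                        uint32_t flags) {
  if (constraint_found) {
    // Explicitly specified: honor it, never override via experiment.
    if (constraint_value)
      flags |= PORTALLOCATOR_ENABLE_IPV6;
  } else if (ipv6_trial == "Enabled") {
    // No constraint given: fall back to the WebRTC-IPv6Default trial.
    flags |= PORTALLOCATOR_ENABLE_IPV6;
  }
  return flags;
}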
diff --git a/build/common.gypi b/build/common.gypi
index 57b21af..7ee4224 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -91,6 +91,7 @@
# LateBindingSymbolTable::TableInfo from
# latebindingsymboltable.cc.def and remove below flag.
'-Wno-address-of-array-temporary',
+ '-Wthread-safety',
],
}],
],
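
Note: -Wthread-safety is what gives teeth to the EXCLUSIVE_LOCKS_REQUIRED annotations added to webrtcvideoengine2.h and mediamonitor.h later in this diff. A self-contained sketch of the mechanism, with the macros spelled out inline so it compiles on its own with clang++ -Wthread-safety -c (the real macros live in webrtc/base/thread_annotations.h; the names echo MediaMonitorT's crit_ and Update()):

#define LOCKABLE __attribute__((lockable))
#define EXCLUSIVE_LOCK_FUNCTION(...) \
  __attribute__((exclusive_lock_function(__VA_ARGS__)))
#define UNLOCK_FUNCTION(...) __attribute__((unlock_function(__VA_ARGS__)))
#define EXCLUSIVE_LOCKS_REQUIRED(...) \
  __attribute__((exclusive_locks_required(__VA_ARGS__)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))

class LOCKABLE Mutex {
 public:
  void Lock() EXCLUSIVE_LOCK_FUNCTION() {}
  void Unlock() UNLOCK_FUNCTION() {}
};

class Monitor {
 public:
  void Poll() {
    crit_.Lock();
    Update();    // OK: the analysis sees crit_ held here.
    crit_.Unlock();
    // Update(); // Uncommenting this is a -Wthread-safety warning:
    //           // calling Update() requires holding 'crit_'.
  }

 private:
  void Update() EXCLUSIVE_LOCKS_REQUIRED(crit_) { ++stats_; }
  Mutex crit_;
  int stats_ GUARDED_BY(crit_) = 0;
};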
diff --git a/libjingle_tests.gyp b/libjingle_tests.gyp
index 45f08a9..44adec3 100755
--- a/libjingle_tests.gyp
+++ b/libjingle_tests.gyp
@@ -153,6 +153,11 @@
],
'conditions': [
['OS=="win"', {
+ 'dependencies': [
+ '<(DEPTH)/net/third_party/nss/ssl.gyp:libssl',
+ '<(DEPTH)/third_party/nss/nss.gyp:nspr',
+ '<(DEPTH)/third_party/nss/nss.gyp:nss',
+ ],
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
diff --git a/media/sctp/sctpdataengine_unittest.cc b/media/sctp/sctpdataengine_unittest.cc
index eedb3cc..fb00650 100644
--- a/media/sctp/sctpdataengine_unittest.cc
+++ b/media/sctp/sctpdataengine_unittest.cc
@@ -45,6 +45,11 @@
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/thread.h"
+#ifdef HAVE_NSS_SSL_H
+// TODO(thorcarpenter): Remove after webrtc switches over to BoringSSL.
+#include "webrtc/base/nssstreamadapter.h"
+#endif // HAVE_NSS_SSL_H
+
enum {
MSG_PACKET = 1,
};
@@ -218,6 +223,12 @@ class SctpDataMediaChannelTest : public testing::Test,
// usrsctp uses the NSS random number generator on non-Android platforms,
// so we need to initialize SSL.
static void SetUpTestCase() {
+#ifdef HAVE_NSS_SSL_H
+ // TODO(thorcarpenter): Remove after webrtc switches over to BoringSSL.
+ if (!rtc::NSSContext::InitializeSSL(NULL)) {
+ LOG(LS_WARNING) << "Unable to initialize NSS.";
+ }
+#endif // HAVE_NSS_SSL_H
rtc::InitializeSSL();
}
diff --git a/media/webrtc/fakewebrtcvoiceengine.h b/media/webrtc/fakewebrtcvoiceengine.h
index c984a01..52a50ff 100644
--- a/media/webrtc/fakewebrtcvoiceengine.h
+++ b/media/webrtc/fakewebrtcvoiceengine.h
@@ -498,8 +498,6 @@ class FakeWebRtcVoiceEngine
WEBRTC_STUB(LastError, ());
WEBRTC_STUB(SetOnHoldStatus, (int, bool, webrtc::OnHoldModes));
WEBRTC_STUB(GetOnHoldStatus, (int, bool&, webrtc::OnHoldModes&));
- WEBRTC_STUB(SetNetEQPlayoutMode, (int, webrtc::NetEqModes));
- WEBRTC_STUB(GetNetEQPlayoutMode, (int, webrtc::NetEqModes&));
// webrtc::VoECodec
WEBRTC_FUNC(NumOfCodecs, ()) {
diff --git a/media/webrtc/webrtcmediaengine.cc b/media/webrtc/webrtcmediaengine.cc
index 67bea0c..cf0fcdf 100644
--- a/media/webrtc/webrtcmediaengine.cc
+++ b/media/webrtc/webrtcmediaengine.cc
@@ -64,6 +64,8 @@ class WebRtcMediaEngine2 :
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
voice_.SetAudioDeviceModule(adm, adm_sc);
+ video_.SetExternalDecoderFactory(decoder_factory);
+ video_.SetExternalEncoderFactory(encoder_factory);
video_.SetVoiceEngine(&voice_);
}
};
@@ -83,7 +85,6 @@ cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory);
}
#endif // WEBRTC_CHROMIUM_BUILD
- // This is just to get a diff to run pulse.
return new cricket::WebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory);
}
diff --git a/media/webrtc/webrtcvideoengine.cc b/media/webrtc/webrtcvideoengine.cc
index 4261ffb..b2533b3 100644
--- a/media/webrtc/webrtcvideoengine.cc
+++ b/media/webrtc/webrtcvideoengine.cc
@@ -1612,12 +1612,6 @@ WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
const bool render = false;
SetRender(render);
- if (voice_channel_) {
- WebRtcVoiceMediaChannel* voice_channel =
- static_cast<WebRtcVoiceMediaChannel*>(voice_channel_);
- voice_channel->SetupSharedBandwidthEstimation(NULL, -1);
- }
-
while (!send_channels_.empty()) {
if (!DeleteSendChannel(send_channels_.begin()->first)) {
LOG(LS_ERROR) << "Unable to delete channel with ssrc key "
@@ -1781,6 +1775,35 @@ bool WebRtcVideoMediaChannel::SetSendCodecs(
return true;
}
+bool WebRtcVideoMediaChannel::MaybeRegisterExternalEncoder(
+ WebRtcVideoChannelSendInfo* send_channel,
+ const webrtc::VideoCodec& codec) {
+ // Codec type not supported or encoder already registered, so
+ // nothing to do.
+ if (!engine()->IsExternalEncoderCodecType(codec.codecType)
+ || send_channel->IsEncoderRegistered(codec.plType)) {
+ return true;
+ }
+
+ webrtc::VideoEncoder* encoder =
+ engine()->CreateExternalEncoder(codec.codecType);
+ if (!encoder) {
+ // No encoder factory, so nothing to do.
+ return true;
+ }
+
+ const int channel_id = send_channel->channel_id();
+ if (engine()->vie()->ext_codec()->RegisterExternalSendCodec(
+ channel_id, codec.plType, encoder, false) != 0) {
+ LOG_RTCERR2(RegisterExternalSendCodec, channel_id, codec.plName);
+ engine()->DestroyExternalEncoder(encoder);
+ return false;
+ }
+
+ send_channel->RegisterEncoder(codec.plType, encoder);
+ return true;
+}
+
bool WebRtcVideoMediaChannel::GetSendCodec(VideoCodec* send_codec) {
if (!send_codec_) {
return false;
@@ -2890,7 +2913,7 @@ bool WebRtcVideoMediaChannel::SetStartSendBandwidth(int bps) {
}
// On success, SetSendCodec() will reset |send_start_bitrate_| to |bps/1000|,
- // by calling MaybeChangeBitrates. That method will also clamp the
+ // by calling SanitizeBitrates. That method will also clamp the
// start bitrate between min and max, consistent with the override behavior
// in SetMaxSendBandwidth.
webrtc::VideoCodec new_codec = *send_codec_;
@@ -3639,21 +3662,7 @@ bool WebRtcVideoMediaChannel::SetSendCodec(
target_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
}
- // Register external encoder if codec type is supported by encoder factory.
- if (engine()->IsExternalEncoderCodecType(codec.codecType) &&
- !send_channel->IsEncoderRegistered(target_codec.plType)) {
- webrtc::VideoEncoder* encoder =
- engine()->CreateExternalEncoder(codec.codecType);
- if (encoder) {
- if (engine()->vie()->ext_codec()->RegisterExternalSendCodec(
- channel_id, target_codec.plType, encoder, false) == 0) {
- send_channel->RegisterEncoder(target_codec.plType, encoder);
- } else {
- LOG_RTCERR2(RegisterExternalSendCodec, channel_id, target_codec.plName);
- engine()->DestroyExternalEncoder(encoder);
- }
- }
- }
+ MaybeRegisterExternalEncoder(send_channel, target_codec);
// Resolution and framerate may vary for different send channels.
const VideoFormat& video_format = send_channel->video_format();
@@ -3665,7 +3674,7 @@ bool WebRtcVideoMediaChannel::SetSendCodec(
<< "for ssrc: " << ssrc << ".";
} else {
StreamParams* send_params = send_channel->stream_params();
- MaybeChangeBitrates(channel_id, &target_codec);
+ SanitizeBitrates(channel_id, &target_codec);
webrtc::VideoCodec current_codec;
if (!engine()->vie()->codec()->GetSendCodec(channel_id, current_codec)) {
// Compare against existing configured send codec.
@@ -3952,7 +3961,7 @@ bool WebRtcVideoMediaChannel::MaybeResetVieSendCodec(
vie_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
vie_codec.codecSpecific.VP8.frameDroppingOn = vp8_frame_dropping;
}
- MaybeChangeBitrates(channel_id, &vie_codec);
+ SanitizeBitrates(channel_id, &vie_codec);
if (engine()->vie()->codec()->SetSendCodec(channel_id, vie_codec) != 0) {
LOG_RTCERR1(SetSendCodec, channel_id);
@@ -3990,7 +3999,7 @@ bool WebRtcVideoMediaChannel::MaybeResetVieSendCodec(
return true;
}
-void WebRtcVideoMediaChannel::MaybeChangeBitrates(
+void WebRtcVideoMediaChannel::SanitizeBitrates(
int channel_id, webrtc::VideoCodec* codec) {
codec->minBitrate = GetBitrate(codec->minBitrate, kMinVideoBitrate);
codec->startBitrate = GetBitrate(codec->startBitrate, kStartVideoBitrate);
@@ -4027,7 +4036,6 @@ void WebRtcVideoMediaChannel::MaybeChangeBitrates(
codec->startBitrate = current_target_bitrate;
}
}
-
}
void WebRtcVideoMediaChannel::OnMessage(rtc::Message* msg) {
diff --git a/media/webrtc/webrtcvideoengine.h b/media/webrtc/webrtcvideoengine.h
index bd8f1f8..6f939d2 100644
--- a/media/webrtc/webrtcvideoengine.h
+++ b/media/webrtc/webrtcvideoengine.h
@@ -310,6 +310,11 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
virtual int SendPacket(int channel, const void* data, int len);
virtual int SendRTCPPacket(int channel, const void* data, int len);
+ // Checks the current bitrate estimate and modifies the bitrates
+ // accordingly, including converting kAutoBandwidth to the correct defaults.
+ virtual void SanitizeBitrates(
+ int channel_id, webrtc::VideoCodec* video_codec);
+ virtual void LogSendCodecChange(const std::string& reason);
bool SetPrimaryAndRtxSsrcs(
int channel_id, int idx, uint32 primary_ssrc,
const StreamParams& send_params);
@@ -348,22 +353,23 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
bool SetSendCodec(const webrtc::VideoCodec& codec);
bool SetSendCodec(WebRtcVideoChannelSendInfo* send_channel,
const webrtc::VideoCodec& codec);
- void LogSendCodecChange(const std::string& reason);
// Prepares the channel with channel id |info->channel_id()| to receive all
// codecs in |receive_codecs_| and start receiving packets.
bool SetReceiveCodecs(WebRtcVideoChannelRecvInfo* info);
// Returns the channel ID that receives the stream with SSRC |ssrc|.
int GetRecvChannelId(uint32 ssrc);
bool MaybeSetRtxSsrc(const StreamParams& sp, int channel_id);
+ // Create and register an external encoder if it's possible to do
+ // so and one isn't already registered.
+ bool MaybeRegisterExternalEncoder(
+ WebRtcVideoChannelSendInfo* send_channel,
+ const webrtc::VideoCodec& codec);
// Given captured video frame size, checks if we need to reset vie send codec.
// |reset| is set to whether resetting has happened on vie or not.
// Returns false on error.
bool MaybeResetVieSendCodec(WebRtcVideoChannelSendInfo* send_channel,
int new_width, int new_height, bool is_screencast,
bool* reset);
- // Checks the current bitrate estimate and modifies the bitrates
- // accordingly, including converting kAutoBandwidth to the correct defaults.
- void MaybeChangeBitrates(int channel_id, webrtc::VideoCodec* video_codec);
// Helper function for starting the sending of media on all channels or
// |channel_id|. Note that these two functions do not change |sending_|.
bool StartSend();
diff --git a/media/webrtc/webrtcvideoengine2.cc b/media/webrtc/webrtcvideoengine2.cc
index a1a9aa4..26e3079 100644
--- a/media/webrtc/webrtcvideoengine2.cc
+++ b/media/webrtc/webrtcvideoengine2.cc
@@ -229,13 +229,9 @@ void WebRtcVideoEncoderFactory2::DestroyVideoEncoderSettings(
if (encoder_settings == NULL) {
return;
}
-
if (_stricmp(codec.name.c_str(), kVp8CodecName) == 0) {
delete reinterpret_cast<webrtc::VideoCodecVP8*>(encoder_settings);
- return;
}
- // We should be able to destroy all encoder settings we've allocated.
- assert(false);
}
bool WebRtcVideoEncoderFactory2::SupportsCodec(const VideoCodec& codec) {
@@ -288,7 +284,9 @@ WebRtcVideoEngine2::WebRtcVideoEngine2()
FOURCC_ANY),
initialized_(false),
cpu_monitor_(new rtc::CpuMonitor(NULL)),
- channel_factory_(NULL) {
+ channel_factory_(NULL),
+ external_decoder_factory_(NULL),
+ external_encoder_factory_(NULL) {
LOG(LS_INFO) << "WebRtcVideoEngine2::WebRtcVideoEngine2()";
rtp_header_extensions_.push_back(
RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension,
@@ -336,14 +334,6 @@ void WebRtcVideoEngine2::Terminate() {
int WebRtcVideoEngine2::GetCapabilities() { return VIDEO_RECV | VIDEO_SEND; }
-bool WebRtcVideoEngine2::SetOptions(const VideoOptions& options) {
- // TODO(pbos): Do we need this? This is a no-op in the existing
- // WebRtcVideoEngine implementation.
- LOG(LS_VERBOSE) << "SetOptions: " << options.ToString();
- // options_ = options;
- return true;
-}
-
bool WebRtcVideoEngine2::SetDefaultEncoderConfig(
const VideoEncoderConfig& config) {
const VideoCodec& codec = config.max_codec;
@@ -405,6 +395,30 @@ void WebRtcVideoEngine2::SetLogging(int min_sev, const char* filter) {
}
}
+void WebRtcVideoEngine2::SetExternalDecoderFactory(
+ WebRtcVideoDecoderFactory* decoder_factory) {
+ external_decoder_factory_ = decoder_factory;
+}
+
+void WebRtcVideoEngine2::SetExternalEncoderFactory(
+ WebRtcVideoEncoderFactory* encoder_factory) {
+ if (external_encoder_factory_ == encoder_factory) {
+ return;
+ }
+ if (external_encoder_factory_) {
+ external_encoder_factory_->RemoveObserver(this);
+ }
+ external_encoder_factory_ = encoder_factory;
+ if (external_encoder_factory_) {
+ external_encoder_factory_->AddObserver(this);
+ }
+
+ // Invoke OnCodecsAvailable() here in case the list of codecs is already
+ // available when the encoder factory is installed. If not, the encoder
+ // factory will invoke the callback later when the codecs become available.
+ OnCodecsAvailable();
+}
+
bool WebRtcVideoEngine2::EnableTimedRender() {
// TODO(pbos): Figure out whether this can be removed.
return true;
@@ -492,6 +506,9 @@ WebRtcVideoEncoderFactory2* WebRtcVideoEngine2::GetVideoEncoderFactory() {
return &default_video_encoder_factory_;
}
+void WebRtcVideoEngine2::OnCodecsAvailable() {
+ // TODO(pbos): Implement.
+}
// Thin map between VideoFrame and an existing webrtc::I420VideoFrame
// to avoid having to copy the rendered VideoFrame prematurely.
// This implementation is only safe to use in a const context and should never
@@ -1364,14 +1381,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
<< frame->GetHeight();
// Lock before copying, can be called concurrently when swapping input source.
rtc::CritScope frame_cs(&frame_lock_);
- if (!muted_) {
- ConvertToI420VideoFrame(*frame, &video_frame_);
- } else {
- // Create a black frame to transmit instead.
- CreateBlackFrame(&video_frame_,
- static_cast<int>(frame->GetWidth()),
- static_cast<int>(frame->GetHeight()));
- }
+ ConvertToI420VideoFrame(*frame, &video_frame_);
+
rtc::CritScope cs(&lock_);
if (stream_ == NULL) {
LOG(LS_WARNING) << "Capturer inputting frames before send codecs are "
@@ -1383,14 +1394,20 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
LOG(LS_VERBOSE) << "VideoFormat 0x0 set, Dropping frame.";
return;
}
+ if (muted_) {
+ // Create a black frame to transmit instead.
+ CreateBlackFrame(&video_frame_,
+ static_cast<int>(frame->GetWidth()),
+ static_cast<int>(frame->GetHeight()));
+ }
// Reconfigure codec if necessary.
SetDimensions(
video_frame_.width(), video_frame_.height(), capturer->IsScreencast());
LOG(LS_VERBOSE) << "SwapFrame: " << video_frame_.width() << "x"
<< video_frame_.height() << " -> (codec) "
- << parameters_.video_streams.back().width << "x"
- << parameters_.video_streams.back().height;
+ << parameters_.encoder_config.streams.back().width << "x"
+ << parameters_.encoder_config.streams.back().height;
stream_->Input()->SwapFrame(&video_frame_);
}
@@ -1446,7 +1463,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoFormat(
<< parameters_.config.rtp.ssrcs[0] << ".";
} else {
// TODO(pbos): Fix me, this only affects the last stream!
- parameters_.video_streams.back().max_framerate =
+ parameters_.encoder_config.streams.back().max_framerate =
VideoFormat::IntervalToFps(format.interval);
SetDimensions(format.width, format.height, false);
}
@@ -1494,7 +1511,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodecAndOptions(
if (video_streams.empty()) {
return;
}
- parameters_.video_streams = video_streams;
+ parameters_.encoder_config.streams = video_streams;
format_ = VideoFormat(codec_settings.codec.width,
codec_settings.codec.height,
VideoFormat::FpsToInterval(30),
@@ -1541,7 +1558,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
int width,
int height,
bool override_max) {
- assert(!parameters_.video_streams.empty());
+ assert(!parameters_.encoder_config.streams.empty());
LOG(LS_VERBOSE) << "SetDimensions: " << width << "x" << height;
VideoCodecSettings codec_settings;
@@ -1554,27 +1571,30 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
height = codec_settings.codec.height;
}
- if (parameters_.video_streams.back().width == width &&
- parameters_.video_streams.back().height == height) {
+ if (parameters_.encoder_config.streams.back().width == width &&
+ parameters_.encoder_config.streams.back().height == height) {
return;
}
- void* encoder_settings = encoder_factory_->CreateVideoEncoderSettings(
- codec_settings.codec, parameters_.options);
+ webrtc::VideoEncoderConfig encoder_config = parameters_.encoder_config;
+ encoder_config.encoder_specific_settings =
+ encoder_factory_->CreateVideoEncoderSettings(codec_settings.codec,
+ parameters_.options);
VideoCodec codec = codec_settings.codec;
codec.width = width;
codec.height = height;
- std::vector<webrtc::VideoStream> video_streams =
- encoder_factory_->CreateVideoStreams(codec,
- parameters_.options,
- parameters_.config.rtp.ssrcs.size());
- bool stream_reconfigured = stream_->ReconfigureVideoEncoder(
- video_streams, encoder_settings);
+ encoder_config.streams = encoder_factory_->CreateVideoStreams(
+ codec, parameters_.options, parameters_.config.rtp.ssrcs.size());
+
+ bool stream_reconfigured = stream_->ReconfigureVideoEncoder(encoder_config);
- encoder_factory_->DestroyVideoEncoderSettings(codec_settings.codec,
- encoder_settings);
+ encoder_factory_->DestroyVideoEncoderSettings(
+ codec_settings.codec,
+ encoder_config.encoder_specific_settings);
+
+ encoder_config.encoder_specific_settings = NULL;
if (!stream_reconfigured) {
LOG(LS_WARNING) << "Failed to reconfigure video encoder for dimensions: "
@@ -1582,7 +1602,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
return;
}
- parameters_.video_streams = video_streams;
+ parameters_.encoder_config = encoder_config;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::Start() {
@@ -1646,9 +1666,9 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
info.input_frame_width = last_captured_frame_format.width;
info.input_frame_height = last_captured_frame_format.height;
info.send_frame_width =
- static_cast<int>(parameters_.video_streams.front().width);
+ static_cast<int>(parameters_.encoder_config.streams.front().width);
info.send_frame_height =
- static_cast<int>(parameters_.video_streams.front().height);
+ static_cast<int>(parameters_.encoder_config.streams.front().height);
}
// TODO(pbos): Support or remove the following stats.
@@ -1665,14 +1685,18 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
VideoCodecSettings codec_settings;
parameters_.codec_settings.Get(&codec_settings);
- void* encoder_settings = encoder_factory_->CreateVideoEncoderSettings(
- codec_settings.codec, parameters_.options);
+ parameters_.encoder_config.encoder_specific_settings =
+ encoder_factory_->CreateVideoEncoderSettings(codec_settings.codec,
+ parameters_.options);
+
+ stream_ = call_->CreateVideoSendStream(parameters_.config,
+ parameters_.encoder_config);
- stream_ = call_->CreateVideoSendStream(
- parameters_.config, parameters_.video_streams, encoder_settings);
+ encoder_factory_->DestroyVideoEncoderSettings(
+ codec_settings.codec,
+ parameters_.encoder_config.encoder_specific_settings);
- encoder_factory_->DestroyVideoEncoderSettings(codec_settings.codec,
- encoder_settings);
+ parameters_.encoder_config.encoder_specific_settings = NULL;
if (sending_) {
stream_->Start();
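
Note: SetDimensions() and RecreateWebRtcStream() above now share one pattern around webrtc::VideoEncoderConfig: attach encoder-specific settings only for the duration of the create/reconfigure call, destroy them right after, and clear the pointer so the stored config never dangles. A sketch of that pattern with stand-in types (illustrative, not the real interfaces):

#include <vector>

// Minimal stand-ins for the webrtc types involved (illustrative only).
struct VideoStream { int width = 0, height = 0, max_framerate = 0; };
struct VideoEncoderConfig {
  std::vector<VideoStream> streams;
  void* encoder_specific_settings = nullptr;
};

// Factory/Stream are any types providing these calls (e.g. the encoder
// settings factory and send stream in the diff above).
template <typename Factory, typename Stream>
bool ReconfigureWithSettings(Factory* factory, Stream* stream,
                             VideoEncoderConfig* config) {
  // 1. Attach settings just for this call.
  config->encoder_specific_settings = factory->CreateSettings();
  // 2. Hand the whole config to the stream.
  const bool ok = stream->ReconfigureVideoEncoder(*config);
  // 3. Destroy the settings and clear the pointer so the stored config
  //    never keeps a stale reference.
  factory->DestroySettings(config->encoder_specific_settings);
  config->encoder_specific_settings = nullptr;
  return ok;
}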
diff --git a/media/webrtc/webrtcvideoengine2.h b/media/webrtc/webrtcvideoengine2.h
index d77afb9..18a80d7 100644
--- a/media/webrtc/webrtcvideoengine2.h
+++ b/media/webrtc/webrtcvideoengine2.h
@@ -34,10 +34,12 @@
#include "talk/media/base/mediaengine.h"
#include "talk/media/webrtc/webrtcvideochannelfactory.h"
+#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
+#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "webrtc/base/cpumonitor.h"
#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
-#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/transport.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_renderer.h"
@@ -65,14 +67,13 @@ class VideoFrame;
class VideoProcessor;
class VideoRenderer;
class VoiceMediaChannel;
-class WebRtcVideoChannel2;
class WebRtcDecoderObserver;
class WebRtcEncoderObserver;
class WebRtcLocalStreamInfo;
class WebRtcRenderAdapter;
+class WebRtcVideoChannel2;
class WebRtcVideoChannelRecvInfo;
class WebRtcVideoChannelSendInfo;
-class WebRtcVideoDecoderFactory;
class WebRtcVoiceEngine;
struct CapturedFrame;
@@ -119,9 +120,8 @@ class WebRtcVideoEncoderFactory2 {
const VideoCodec& codec,
const VideoOptions& options);
- virtual void* CreateVideoEncoderSettings(
- const VideoCodec& codec,
- const VideoOptions& options);
+ virtual void* CreateVideoEncoderSettings(const VideoCodec& codec,
+ const VideoOptions& options);
virtual void DestroyVideoEncoderSettings(const VideoCodec& codec,
void* encoder_settings);
@@ -130,7 +130,8 @@ class WebRtcVideoEncoderFactory2 {
};
// WebRtcVideoEngine2 is used for the new native WebRTC Video API (webrtc:1667).
-class WebRtcVideoEngine2 : public sigslot::has_slots<> {
+class WebRtcVideoEngine2 : public sigslot::has_slots<>,
+ public WebRtcVideoEncoderFactory::Observer {
public:
// Creates the WebRtcVideoEngine2 with internal VideoCaptureModule.
WebRtcVideoEngine2();
@@ -144,7 +145,6 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
void Terminate();
int GetCapabilities();
- bool SetOptions(const VideoOptions& options);
bool SetDefaultEncoderConfig(const VideoEncoderConfig& config);
VideoEncoderConfig GetDefaultEncoderConfig() const;
@@ -154,6 +154,16 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
const std::vector<RtpHeaderExtension>& rtp_header_extensions() const;
void SetLogging(int min_sev, const char* filter);
+ // Set a WebRtcVideoDecoderFactory for external decoding. Video engine does
+ // not take ownership of |decoder_factory|. The caller needs to make sure
+ // that |decoder_factory| outlives the video engine.
+ void SetExternalDecoderFactory(WebRtcVideoDecoderFactory* decoder_factory);
+ // Set a WebRtcVideoEncoderFactory for external encoding. Video engine does
+ // not take ownership of |encoder_factory|. The caller needs to make sure
+ // that |encoder_factory| outlives the video engine.
+ virtual void SetExternalEncoderFactory(
+ WebRtcVideoEncoderFactory* encoder_factory);
+
bool EnableTimedRender();
// This is currently ignored.
sigslot::repeater2<VideoCapturer*, CaptureState> SignalCaptureStateChange;
@@ -175,6 +185,8 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
virtual WebRtcVideoEncoderFactory2* GetVideoEncoderFactory();
private:
+ virtual void OnCodecsAvailable() OVERRIDE;
+
rtc::Thread* worker_thread_;
WebRtcVoiceEngine* voice_engine_;
std::vector<VideoCodec> video_codecs_;
@@ -190,6 +202,9 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
rtc::scoped_ptr<rtc::CpuMonitor> cpu_monitor_;
WebRtcVideoChannelFactory* channel_factory_;
WebRtcVideoEncoderFactory2 default_video_encoder_factory_;
+
+ WebRtcVideoDecoderFactory* external_decoder_factory_;
+ WebRtcVideoEncoderFactory* external_encoder_factory_;
};
class WebRtcVideoChannel2 : public rtc::MessageHandler,
@@ -316,14 +331,16 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
// Sent resolutions + bitrates etc. by the underlying VideoSendStream,
// typically changes when setting a new resolution or reconfiguring
// bitrates.
- std::vector<webrtc::VideoStream> video_streams;
+ webrtc::VideoEncoderConfig encoder_config;
};
void SetCodecAndOptions(const VideoCodecSettings& codec,
- const VideoOptions& options);
- void RecreateWebRtcStream();
+ const VideoOptions& options)
+ EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ void RecreateWebRtcStream() EXCLUSIVE_LOCKS_REQUIRED(lock_);
// When |override_max| is false constrain width/height to codec dimensions.
- void SetDimensions(int width, int height, bool override_max);
+ void SetDimensions(int width, int height, bool override_max)
+ EXCLUSIVE_LOCKS_REQUIRED(lock_);
webrtc::Call* const call_;
WebRtcVideoEncoderFactory2* const encoder_factory_;
diff --git a/media/webrtc/webrtcvideoengine2_unittest.cc b/media/webrtc/webrtcvideoengine2_unittest.cc
index 2178a68..6112c50 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -68,13 +68,10 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
namespace cricket {
FakeVideoSendStream::FakeVideoSendStream(
const webrtc::VideoSendStream::Config& config,
- const std::vector<webrtc::VideoStream>& video_streams,
- const void* encoder_settings)
- : sending_(false),
- config_(config),
- codec_settings_set_(false) {
+ const webrtc::VideoEncoderConfig& encoder_config)
+ : sending_(false), config_(config), codec_settings_set_(false) {
assert(config.encoder_settings.encoder != NULL);
- ReconfigureVideoEncoder(video_streams, encoder_settings);
+ ReconfigureVideoEncoder(encoder_config);
}
webrtc::VideoSendStream::Config FakeVideoSendStream::GetConfig() {
@@ -82,7 +79,7 @@ webrtc::VideoSendStream::Config FakeVideoSendStream::GetConfig() {
}
std::vector<webrtc::VideoStream> FakeVideoSendStream::GetVideoStreams() {
- return video_streams_;
+ return encoder_config_.streams;
}
bool FakeVideoSendStream::IsSending() const {
@@ -104,15 +101,14 @@ webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() const {
}
bool FakeVideoSendStream::ReconfigureVideoEncoder(
- const std::vector<webrtc::VideoStream>& streams,
- const void* encoder_specific) {
- video_streams_ = streams;
- if (encoder_specific != NULL) {
+ const webrtc::VideoEncoderConfig& config) {
+ encoder_config_ = config;
+ if (config.encoder_specific_settings != NULL) {
assert(config_.encoder_settings.payload_name == "VP8");
- vp8_settings_ =
- *reinterpret_cast<const webrtc::VideoCodecVP8*>(encoder_specific);
+ vp8_settings_ = *reinterpret_cast<const webrtc::VideoCodecVP8*>(
+ config.encoder_specific_settings);
}
- codec_settings_set_ = encoder_specific != NULL;
+ codec_settings_set_ = config.encoder_specific_settings != NULL;
return true;
}
@@ -226,10 +222,9 @@ webrtc::Call::NetworkState FakeCall::GetNetworkState() const {
webrtc::VideoSendStream* FakeCall::CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
- const std::vector<webrtc::VideoStream>& video_streams,
- const void* encoder_settings) {
+ const webrtc::VideoEncoderConfig& encoder_config) {
FakeVideoSendStream* fake_stream =
- new FakeVideoSendStream(config, video_streams, encoder_settings);
+ new FakeVideoSendStream(config, encoder_config);
video_send_streams_.push_back(fake_stream);
return fake_stream;
}
diff --git a/media/webrtc/webrtcvideoengine2_unittest.h b/media/webrtc/webrtcvideoengine2_unittest.h
index 5aaa3e3..30f1efb 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/media/webrtc/webrtcvideoengine2_unittest.h
@@ -39,8 +39,7 @@ namespace cricket {
class FakeVideoSendStream : public webrtc::VideoSendStream {
public:
FakeVideoSendStream(const webrtc::VideoSendStream::Config& config,
- const std::vector<webrtc::VideoStream>& video_streams,
- const void* encoder_settings);
+ const webrtc::VideoEncoderConfig& encoder_config);
webrtc::VideoSendStream::Config GetConfig();
std::vector<webrtc::VideoStream> GetVideoStreams();
@@ -51,8 +50,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream {
virtual webrtc::VideoSendStream::Stats GetStats() const OVERRIDE;
virtual bool ReconfigureVideoEncoder(
- const std::vector<webrtc::VideoStream>& streams,
- const void* encoder_specific);
+ const webrtc::VideoEncoderConfig& config) OVERRIDE;
virtual webrtc::VideoSendStreamInput* Input() OVERRIDE;
@@ -61,7 +59,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream {
bool sending_;
webrtc::VideoSendStream::Config config_;
- std::vector<webrtc::VideoStream> video_streams_;
+ webrtc::VideoEncoderConfig encoder_config_;
bool codec_settings_set_;
webrtc::VideoCodecVP8 vp8_settings_;
};
@@ -108,8 +106,7 @@ class FakeCall : public webrtc::Call {
private:
virtual webrtc::VideoSendStream* CreateVideoSendStream(
const webrtc::VideoSendStream::Config& config,
- const std::vector<webrtc::VideoStream>& video_streams,
- const void* encoder_settings) OVERRIDE;
+ const webrtc::VideoEncoderConfig& encoder_config) OVERRIDE;
virtual void DestroyVideoSendStream(
webrtc::VideoSendStream* send_stream) OVERRIDE;
diff --git a/media/webrtc/webrtcvoiceengine.cc b/media/webrtc/webrtcvoiceengine.cc
index 3d5d9ee..a524bad 100644
--- a/media/webrtc/webrtcvoiceengine.cc
+++ b/media/webrtc/webrtcvoiceengine.cc
@@ -1834,8 +1834,7 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() {
LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel "
<< voe_channel();
- ASSERT(shared_bwe_vie_ == NULL);
- ASSERT(shared_bwe_vie_channel_ == -1);
+ SetupSharedBandwidthEstimation(NULL, -1);
// Remove any remaining send streams, the default channel will be deleted
// later.
diff --git a/session/media/mediamonitor.h b/session/media/mediamonitor.h
index d549362..89740a8 100644
--- a/session/media/mediamonitor.h
+++ b/session/media/mediamonitor.h
@@ -34,6 +34,7 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_annotations.h"
namespace cricket {
@@ -77,7 +78,7 @@ class MediaMonitorT : public MediaMonitor {
media_info_.Clear();
media_channel_->GetStats(&media_info_);
}
- virtual void Update() {
+ virtual void Update() EXCLUSIVE_LOCKS_REQUIRED(crit_) {
MI stats(media_info_);
crit_.Leave();
SignalUpdate(media_channel_, stats);