summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndroid Chromium Automerger <chromium-automerger@android>2014-11-06 09:32:01 +0000
committerAndroid Chromium Automerger <chromium-automerger@android>2014-11-06 09:32:01 +0000
commit4a7f10204c64726a266cb4f3e18529f5656e50a8 (patch)
tree2cef7197ddf1b2ce355c390f217612054c9d9e84
parentc55e14ee6a9f788b3f4967bcd44bcf2eab26f1df (diff)
parent7d974c11e23898cd59838c79751b96c45b09ec4b (diff)
downloadtalk-4a7f10204c64726a266cb4f3e18529f5656e50a8.tar.gz
Merge third_party/libjingle/source/talk from https://chromium.googlesource.com/external/webrtc/trunk/talk.git at 7d974c11e23898cd59838c79751b96c45b09ec4b
This commit was generated by merge_from_chromium.py. Change-Id: If28216ffbc16e927cf326c567601f482dd2bf36b
-rw-r--r--app/webrtc/objc/RTCEAGLVideoView+Internal.h36
-rw-r--r--app/webrtc/objc/RTCEAGLVideoView.m58
-rw-r--r--app/webrtc/objc/RTCI420Frame.mm4
-rw-r--r--app/webrtc/objc/RTCMediaStream.mm5
-rw-r--r--app/webrtc/objc/RTCNSGLVideoView.m46
-rw-r--r--app/webrtc/objc/RTCPeerConnection.mm1
-rw-r--r--app/webrtc/objc/RTCVideoRendererAdapter.h (renamed from app/webrtc/objc/RTCVideoRenderer+Internal.h)8
-rw-r--r--app/webrtc/objc/RTCVideoRendererAdapter.mm (renamed from app/webrtc/objc/RTCVideoRenderer.mm)36
-rw-r--r--app/webrtc/objc/RTCVideoTrack+Internal.h4
-rw-r--r--app/webrtc/objc/RTCVideoTrack.mm47
-rw-r--r--app/webrtc/objc/public/RTCEAGLVideoView.h7
-rw-r--r--app/webrtc/objc/public/RTCI420Frame.h2
-rw-r--r--app/webrtc/objc/public/RTCNSGLVideoView.h5
-rw-r--r--app/webrtc/objc/public/RTCVideoRenderer.h27
-rw-r--r--app/webrtc/objc/public/RTCVideoTrack.h9
-rw-r--r--app/webrtc/objctests/RTCPeerConnectionTest.mm13
-rw-r--r--examples/android/AndroidManifest.xml4
-rw-r--r--examples/android/res/values-v21/styles.xml8
-rw-r--r--examples/android/res/values/styles.xml8
-rw-r--r--examples/objc/AppRTCDemo/ios/APPRTCViewController.m21
-rw-r--r--examples/objc/AppRTCDemo/mac/APPRTCViewController.m12
-rwxr-xr-xlibjingle.gyp5
-rw-r--r--media/webrtc/webrtcvideoengine.cc113
-rw-r--r--media/webrtc/webrtcvideoengine.h5
-rw-r--r--media/webrtc/webrtcvideoengine2.cc39
-rw-r--r--media/webrtc/webrtcvideoengine2.h1
-rw-r--r--media/webrtc/webrtcvideoengine2_unittest.cc9
-rw-r--r--media/webrtc/webrtcvideoengine2_unittest.h3
28 files changed, 259 insertions, 277 deletions
diff --git a/app/webrtc/objc/RTCEAGLVideoView+Internal.h b/app/webrtc/objc/RTCEAGLVideoView+Internal.h
deleted file mode 100644
index 10df2e3..0000000
--- a/app/webrtc/objc/RTCEAGLVideoView+Internal.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * libjingle
- * Copyright 2014, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCEAGLVideoView.h"
-#import "RTCVideoRenderer.h"
-
-// TODO(tkchin): Move declaration to implementation file. Exposed here in order
-// to support deprecated methods in RTCVideoRenderer.
-@interface RTCEAGLVideoView (Internal) <RTCVideoRendererDelegate>
-@end
diff --git a/app/webrtc/objc/RTCEAGLVideoView.m b/app/webrtc/objc/RTCEAGLVideoView.m
index faacef6..c55c87e 100644
--- a/app/webrtc/objc/RTCEAGLVideoView.m
+++ b/app/webrtc/objc/RTCEAGLVideoView.m
@@ -29,13 +29,12 @@
#error "This file requires ARC support."
#endif
-#import "RTCEAGLVideoView+Internal.h"
+#import "RTCEAGLVideoView.h"
#import <GLKit/GLKit.h>
+#import "RTCI420Frame.h"
#import "RTCOpenGLVideoRenderer.h"
-#import "RTCVideoRenderer.h"
-#import "RTCVideoTrack.h"
// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
// refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -105,7 +104,6 @@
RTCDisplayLinkTimer* _timer;
GLKView* _glkView;
RTCOpenGLVideoRenderer* _glRenderer;
- RTCVideoRenderer* _videoRenderer;
}
- (instancetype)initWithFrame:(CGRect)frame {
@@ -152,7 +150,6 @@
// GLKViewDelegate method implemented below.
[strongSelf.glkView setNeedsDisplay];
}];
- _videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
[self setupGL];
}
return self;
@@ -168,18 +165,6 @@
[_timer invalidate];
}
-- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
- if (_videoTrack == videoTrack) {
- return;
- }
- [_videoTrack removeRenderer:_videoRenderer];
- self.i420Frame = nil;
- _videoTrack = videoTrack;
- [_videoTrack addRenderer:_videoRenderer];
- // TODO(tkchin): potentially handle changes in track state - e.g. render
- // black if track fails.
-}
-
#pragma mark - UIView
- (void)layoutSubviews {
@@ -197,14 +182,31 @@
[_glRenderer drawFrame:self.i420Frame];
}
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+ __weak RTCEAGLVideoView* weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ RTCEAGLVideoView* strongSelf = weakSelf;
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+ });
+}
+
+- (void)renderFrame:(RTCI420Frame*)frame {
+ self.i420Frame = frame;
+}
+
#pragma mark - Private
- (void)setupGL {
+ self.i420Frame = nil;
[_glRenderer setupGL];
_timer.isPaused = NO;
}
- (void)teardownGL {
+ self.i420Frame = nil;
_timer.isPaused = YES;
[_glkView deleteDrawable];
[_glRenderer teardownGL];
@@ -219,25 +221,3 @@
}
@end
-
-@implementation RTCEAGLVideoView (Internal)
-
-#pragma mark - RTCVideoRendererDelegate
-
-// These methods are called when the video track has frame information to
-// provide. This occurs on non-main thread.
-- (void)renderer:(RTCVideoRenderer*)renderer
- didSetSize:(CGSize)size {
- __weak RTCEAGLVideoView* weakSelf = self;
- dispatch_async(dispatch_get_main_queue(), ^{
- RTCEAGLVideoView* strongSelf = weakSelf;
- [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
- });
-}
-
-- (void)renderer:(RTCVideoRenderer*)renderer
- didReceiveFrame:(RTCI420Frame*)frame {
- self.i420Frame = frame;
-}
-
-@end
diff --git a/app/webrtc/objc/RTCI420Frame.mm b/app/webrtc/objc/RTCI420Frame.mm
index 0b50691..9c394e5 100644
--- a/app/webrtc/objc/RTCI420Frame.mm
+++ b/app/webrtc/objc/RTCI420Frame.mm
@@ -78,6 +78,10 @@
return _videoFrame->GetVPitch();
}
+- (BOOL)makeExclusive {
+ return _videoFrame->MakeExclusive();
+}
+
@end
@implementation RTCI420Frame (Internal)
diff --git a/app/webrtc/objc/RTCMediaStream.mm b/app/webrtc/objc/RTCMediaStream.mm
index 27d20b8..a72508a 100644
--- a/app/webrtc/objc/RTCMediaStream.mm
+++ b/app/webrtc/objc/RTCMediaStream.mm
@@ -71,7 +71,7 @@
}
- (BOOL)addVideoTrack:(RTCVideoTrack*)track {
- if (self.mediaStream->AddTrack(track.videoTrack)) {
+ if (self.mediaStream->AddTrack(track.nativeVideoTrack)) {
[_videoTracks addObject:track];
return YES;
}
@@ -93,7 +93,8 @@
NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track];
NSAssert(index != NSNotFound,
@"|removeAudioTrack| called on unexpected RTCVideoTrack");
- if (index != NSNotFound && self.mediaStream->RemoveTrack(track.videoTrack)) {
+ if (index != NSNotFound &&
+ self.mediaStream->RemoveTrack(track.nativeVideoTrack)) {
[_videoTracks removeObjectAtIndex:index];
return YES;
}
diff --git a/app/webrtc/objc/RTCNSGLVideoView.m b/app/webrtc/objc/RTCNSGLVideoView.m
index 292e792..7aa4a11 100644
--- a/app/webrtc/objc/RTCNSGLVideoView.m
+++ b/app/webrtc/objc/RTCNSGLVideoView.m
@@ -33,10 +33,10 @@
#import <CoreVideo/CVDisplayLink.h>
#import <OpenGL/gl3.h>
+#import "RTCI420Frame.h"
#import "RTCOpenGLVideoRenderer.h"
-#import "RTCVideoRenderer.h"
-@interface RTCNSGLVideoView () <RTCVideoRendererDelegate>
+@interface RTCNSGLVideoView ()
// |i420Frame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCI420Frame* i420Frame;
@@ -57,15 +57,6 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
@implementation RTCNSGLVideoView {
CVDisplayLinkRef _displayLink;
- RTCVideoRenderer* _videoRenderer;
-}
-
-- (instancetype)initWithFrame:(NSRect)frame
- pixelFormat:(NSOpenGLPixelFormat*)format {
- if (self = [super initWithFrame:frame pixelFormat:format]) {
- _videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
- }
- return self;
}
- (void)dealloc {
@@ -109,37 +100,16 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
[super clearGLContext];
}
-- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
- if (_videoTrack == videoTrack) {
- return;
- }
- if (_videoTrack) {
- [_videoTrack removeRenderer:_videoRenderer];
- CVDisplayLinkStop(_displayLink);
- // Clear contents.
- self.i420Frame = nil;
- [self drawFrame];
- }
- _videoTrack = videoTrack;
- if (_videoTrack) {
- [_videoTrack addRenderer:_videoRenderer];
- CVDisplayLinkStart(_displayLink);
- }
-}
-
-#pragma mark - RTCVideoRendererDelegate
+#pragma mark - RTCVideoRenderer
-// These methods are called when the video track has frame information to
-// provide. This occurs on non-main thread.
-- (void)renderer:(RTCVideoRenderer*)renderer
- didSetSize:(CGSize)size {
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate videoView:self didChangeVideoSize:size];
});
}
-- (void)renderer:(RTCVideoRenderer*)renderer
- didReceiveFrame:(RTCI420Frame*)frame {
+- (void)renderFrame:(RTCI420Frame*)frame {
self.i420Frame = frame;
}
@@ -174,9 +144,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
_displayLink, cglContext, cglPixelFormat);
- if (_videoTrack) {
- CVDisplayLinkStart(_displayLink);
- }
+ CVDisplayLinkStart(_displayLink);
}
- (void)teardownDisplayLink {
diff --git a/app/webrtc/objc/RTCPeerConnection.mm b/app/webrtc/objc/RTCPeerConnection.mm
index 925de73..7767f76 100644
--- a/app/webrtc/objc/RTCPeerConnection.mm
+++ b/app/webrtc/objc/RTCPeerConnection.mm
@@ -68,6 +68,7 @@ class RTCCreateSessionDescriptionObserver
[_delegate peerConnection:_peerConnection
didCreateSessionDescription:session
error:nil];
+ delete desc;
}
virtual void OnFailure(const std::string& error) OVERRIDE {
diff --git a/app/webrtc/objc/RTCVideoRenderer+Internal.h b/app/webrtc/objc/RTCVideoRendererAdapter.h
index 22e445c..faf0906 100644
--- a/app/webrtc/objc/RTCVideoRenderer+Internal.h
+++ b/app/webrtc/objc/RTCVideoRendererAdapter.h
@@ -29,8 +29,12 @@
#include "talk/app/webrtc/mediastreaminterface.h"
-@interface RTCVideoRenderer (Internal)
+@interface RTCVideoRendererAdapter : NSObject
-@property(nonatomic, readonly) webrtc::VideoRendererInterface* videoRenderer;
+@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+@property(nonatomic, readonly)
+ webrtc::VideoRendererInterface* nativeVideoRenderer;
+
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer;
@end
diff --git a/app/webrtc/objc/RTCVideoRenderer.mm b/app/webrtc/objc/RTCVideoRendererAdapter.mm
index 4cfe43a..e29faad 100644
--- a/app/webrtc/objc/RTCVideoRenderer.mm
+++ b/app/webrtc/objc/RTCVideoRendererAdapter.mm
@@ -29,50 +29,46 @@
#error "This file requires ARC support."
#endif
-#import "RTCVideoRenderer+Internal.h"
+#import "RTCVideoRendererAdapter.h"
#import "RTCI420Frame+Internal.h"
namespace webrtc {
-class RTCVideoRendererAdapter : public VideoRendererInterface {
+class RTCVideoRendererNativeAdapter : public VideoRendererInterface {
public:
- RTCVideoRendererAdapter(RTCVideoRenderer* renderer) { _renderer = renderer; }
+ RTCVideoRendererNativeAdapter(RTCVideoRendererAdapter* adapter) {
+ _adapter = adapter;
+ }
virtual void SetSize(int width, int height) OVERRIDE {
- [_renderer.delegate renderer:_renderer
- didSetSize:CGSizeMake(width, height)];
+ [_adapter.videoRenderer setSize:CGSizeMake(width, height)];
}
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
- if (!_renderer.delegate) {
- return;
- }
RTCI420Frame* i420Frame = [[RTCI420Frame alloc] initWithVideoFrame:frame];
- [_renderer.delegate renderer:_renderer didReceiveFrame:i420Frame];
+ [_adapter.videoRenderer renderFrame:i420Frame];
}
private:
- __weak RTCVideoRenderer* _renderer;
+ __weak RTCVideoRendererAdapter* _adapter;
};
}
-@implementation RTCVideoRenderer {
- rtc::scoped_ptr<webrtc::RTCVideoRendererAdapter> _adapter;
+@implementation RTCVideoRendererAdapter {
+ id<RTCVideoRenderer> _videoRenderer;
+ rtc::scoped_ptr<webrtc::RTCVideoRendererNativeAdapter> _adapter;
}
-- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer {
+ NSParameterAssert(videoRenderer);
if (self = [super init]) {
- _delegate = delegate;
- _adapter.reset(new webrtc::RTCVideoRendererAdapter(self));
+ _videoRenderer = videoRenderer;
+ _adapter.reset(new webrtc::RTCVideoRendererNativeAdapter(self));
}
return self;
}
-@end
-
-@implementation RTCVideoRenderer (Internal)
-
-- (webrtc::VideoRendererInterface*)videoRenderer {
+- (webrtc::VideoRendererInterface*)nativeVideoRenderer {
return _adapter.get();
}
diff --git a/app/webrtc/objc/RTCVideoTrack+Internal.h b/app/webrtc/objc/RTCVideoTrack+Internal.h
index 03c8f95..5f267ac 100644
--- a/app/webrtc/objc/RTCVideoTrack+Internal.h
+++ b/app/webrtc/objc/RTCVideoTrack+Internal.h
@@ -34,7 +34,7 @@
@interface RTCVideoTrack (Internal)
-@property(nonatomic, assign, readonly)
- rtc::scoped_refptr<webrtc::VideoTrackInterface> videoTrack;
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
@end
diff --git a/app/webrtc/objc/RTCVideoTrack.mm b/app/webrtc/objc/RTCVideoTrack.mm
index beebde0..959bc6d 100644
--- a/app/webrtc/objc/RTCVideoTrack.mm
+++ b/app/webrtc/objc/RTCVideoTrack.mm
@@ -32,46 +32,55 @@
#import "RTCVideoTrack+Internal.h"
#import "RTCMediaStreamTrack+Internal.h"
-#import "RTCVideoRenderer+Internal.h"
+#import "RTCVideoRendererAdapter.h"
@implementation RTCVideoTrack {
- NSMutableArray* _rendererArray;
+ NSMutableArray* _adapters;
}
- (id)initWithMediaTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)
mediaTrack {
if (self = [super initWithMediaTrack:mediaTrack]) {
- _rendererArray = [NSMutableArray array];
+ _adapters = [NSMutableArray array];
}
return self;
}
-- (void)addRenderer:(RTCVideoRenderer*)renderer {
- NSAssert1(![self.renderers containsObject:renderer],
- @"renderers already contains object [%@]",
- [renderer description]);
- [_rendererArray addObject:renderer];
- self.videoTrack->AddRenderer(renderer.videoRenderer);
-}
-
-- (void)removeRenderer:(RTCVideoRenderer*)renderer {
- NSUInteger index = [self.renderers indexOfObjectIdenticalTo:renderer];
- if (index != NSNotFound) {
- [_rendererArray removeObjectAtIndex:index];
- self.videoTrack->RemoveRenderer(renderer.videoRenderer);
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
+ // Make sure we don't have this renderer yet.
+ for (RTCVideoRendererAdapter* adapter in _adapters) {
+ NSParameterAssert(adapter.videoRenderer != renderer);
}
+ // Create a wrapper that provides a native pointer for us.
+ RTCVideoRendererAdapter* adapter =
+ [[RTCVideoRendererAdapter alloc] initWithVideoRenderer:renderer];
+ [_adapters addObject:adapter];
+ self.nativeVideoTrack->AddRenderer(adapter.nativeVideoRenderer);
}
-- (NSArray*)renderers {
- return [_rendererArray copy];
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+ RTCVideoRendererAdapter* adapter = nil;
+ NSUInteger indexToRemove = NSNotFound;
+ for (NSUInteger i = 0; i < _adapters.count; i++) {
+ adapter = _adapters[i];
+ if (adapter.videoRenderer == renderer) {
+ indexToRemove = i;
+ break;
+ }
+ }
+ if (indexToRemove == NSNotFound) {
+ return;
+ }
+ self.nativeVideoTrack->RemoveRenderer(adapter.nativeVideoRenderer);
+ [_adapters removeObjectAtIndex:indexToRemove];
}
@end
@implementation RTCVideoTrack (Internal)
-- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)videoTrack {
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
return static_cast<webrtc::VideoTrackInterface*>(self.mediaTrack.get());
}
diff --git a/app/webrtc/objc/public/RTCEAGLVideoView.h b/app/webrtc/objc/public/RTCEAGLVideoView.h
index c38799e..526175f 100644
--- a/app/webrtc/objc/public/RTCEAGLVideoView.h
+++ b/app/webrtc/objc/public/RTCEAGLVideoView.h
@@ -37,11 +37,10 @@
@end
-@class RTCVideoTrack;
-// RTCEAGLVideoView renders |videoTrack| onto itself using OpenGLES.
-@interface RTCEAGLVideoView : UIView
+// RTCEAGLVideoView is an RTCVideoRenderer which renders i420 frames in its
+// bounds using OpenGLES 2.0.
+@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
-@property(nonatomic, strong) RTCVideoTrack* videoTrack;
@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
@end
diff --git a/app/webrtc/objc/public/RTCI420Frame.h b/app/webrtc/objc/public/RTCI420Frame.h
index 737968c..7a8c4d4 100644
--- a/app/webrtc/objc/public/RTCI420Frame.h
+++ b/app/webrtc/objc/public/RTCI420Frame.h
@@ -43,6 +43,8 @@
@property(nonatomic, readonly) NSInteger uPitch;
@property(nonatomic, readonly) NSInteger vPitch;
+- (BOOL)makeExclusive;
+
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
- (id)init __attribute__((
diff --git a/app/webrtc/objc/public/RTCNSGLVideoView.h b/app/webrtc/objc/public/RTCNSGLVideoView.h
index fd757cb..0af2dc5 100644
--- a/app/webrtc/objc/public/RTCNSGLVideoView.h
+++ b/app/webrtc/objc/public/RTCNSGLVideoView.h
@@ -31,7 +31,7 @@
#import <AppKit/NSOpenGLView.h>
-#import "RTCVideoTrack.h"
+#import "RTCVideoRenderer.h"
@class RTCNSGLVideoView;
@protocol RTCNSGLVideoViewDelegate
@@ -40,9 +40,8 @@
@end
-@interface RTCNSGLVideoView : NSOpenGLView
+@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
-@property(nonatomic, strong) RTCVideoTrack* videoTrack;
@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
@end
diff --git a/app/webrtc/objc/public/RTCVideoRenderer.h b/app/webrtc/objc/public/RTCVideoRenderer.h
index 37977ce..3c2baba 100644
--- a/app/webrtc/objc/public/RTCVideoRenderer.h
+++ b/app/webrtc/objc/public/RTCVideoRenderer.h
@@ -31,34 +31,13 @@
#endif
@class RTCI420Frame;
-@class RTCVideoRenderer;
-// RTCVideoRendererDelegate is a protocol for an object that must be
-// implemented to get messages when rendering.
-@protocol RTCVideoRendererDelegate<NSObject>
+@protocol RTCVideoRenderer<NSObject>
// The size of the frame.
-- (void)renderer:(RTCVideoRenderer*)renderer didSetSize:(CGSize)size;
+- (void)setSize:(CGSize)size;
// The frame to be displayed.
-- (void)renderer:(RTCVideoRenderer*)renderer
- didReceiveFrame:(RTCI420Frame*)frame;
-
-@end
-
-// Interface for rendering VideoFrames from a VideoTrack
-@interface RTCVideoRenderer : NSObject
-
-@property(nonatomic, weak) id<RTCVideoRendererDelegate> delegate;
-
-// Initialize the renderer. Requires a delegate which does the actual drawing
-// of frames.
-- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
-
-#ifndef DOXYGEN_SHOULD_SKIP_THIS
-// Disallow init and don't add to documentation
-- (id)init __attribute__((
- unavailable("init is not a supported initializer for this class.")));
-#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+- (void)renderFrame:(RTCI420Frame*)frame;
@end
diff --git a/app/webrtc/objc/public/RTCVideoTrack.h b/app/webrtc/objc/public/RTCVideoTrack.h
index 291c923..8385b71 100644
--- a/app/webrtc/objc/public/RTCVideoTrack.h
+++ b/app/webrtc/objc/public/RTCVideoTrack.h
@@ -27,19 +27,16 @@
#import "RTCMediaStreamTrack.h"
-@class RTCVideoRenderer;
+@protocol RTCVideoRenderer;
// RTCVideoTrack is an ObjectiveC wrapper for VideoTrackInterface.
@interface RTCVideoTrack : RTCMediaStreamTrack
-// The currently registered renderers.
-@property(nonatomic, strong, readonly) NSArray *renderers;
-
// Register a renderer that will render all frames received on this track.
-- (void)addRenderer:(RTCVideoRenderer *)renderer;
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer;
// Deregister a renderer.
-- (void)removeRenderer:(RTCVideoRenderer *)renderer;
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation
diff --git a/app/webrtc/objctests/RTCPeerConnectionTest.mm b/app/webrtc/objctests/RTCPeerConnectionTest.mm
index 6c5950b..92d3c49 100644
--- a/app/webrtc/objctests/RTCPeerConnectionTest.mm
+++ b/app/webrtc/objctests/RTCPeerConnectionTest.mm
@@ -46,6 +46,16 @@
#error "This file requires ARC support."
#endif
+@interface RTCFakeRenderer : NSObject <RTCVideoRenderer>
+@end
+
+@implementation RTCFakeRenderer
+
+- (void)setSize:(CGSize)size {}
+- (void)renderFrame:(RTCI420Frame*)frame {}
+
+@end
+
@interface RTCPeerConnectionTest : NSObject
// Returns whether the two sessions are of the same type.
@@ -80,8 +90,7 @@
RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel];
RTCVideoTrack* videoTrack =
[factory videoTrackWithID:videoTrackID source:videoSource];
- RTCVideoRenderer* videoRenderer =
- [[RTCVideoRenderer alloc] initWithDelegate:nil];
+ RTCFakeRenderer* videoRenderer = [[RTCFakeRenderer alloc] init];
[videoTrack addRenderer:videoRenderer];
[localMediaStream addVideoTrack:videoTrack];
// Test that removal/re-add works.
diff --git a/examples/android/AndroidManifest.xml b/examples/android/AndroidManifest.xml
index 30fd46c..e24942a 100644
--- a/examples/android/AndroidManifest.xml
+++ b/examples/android/AndroidManifest.xml
@@ -7,7 +7,7 @@
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
- <uses-sdk android:minSdkVersion="13" android:targetSdkVersion="19" />
+ <uses-sdk android:minSdkVersion="13" android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
@@ -43,7 +43,7 @@
android:label="@string/app_name"
android:screenOrientation="fullUser"
android:configChanges="orientation|screenSize"
- android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen">
+ android:theme="@style/AppRTCDemoActivityTheme">
</activity>
</application>
</manifest>
diff --git a/examples/android/res/values-v21/styles.xml b/examples/android/res/values-v21/styles.xml
new file mode 100644
index 0000000..95f1ac6
--- /dev/null
+++ b/examples/android/res/values-v21/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppRTCDemoActivityTheme" parent="android:Theme.Material">
+ <item name="android:windowActionBar">false</item>
+ <item name="android:windowFullscreen">true</item>
+ <item name="android:windowNoTitle">true</item>
+ </style>
+</resources>
diff --git a/examples/android/res/values/styles.xml b/examples/android/res/values/styles.xml
new file mode 100644
index 0000000..7f809a6
--- /dev/null
+++ b/examples/android/res/values/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <style name="AppRTCDemoActivityTheme" parent="android:Theme.Black">
+ <item name="android:windowActionBar">false</item>
+ <item name="android:windowFullscreen">true</item>
+ <item name="android:windowNoTitle">true</item>
+ </style>
+</resources>
diff --git a/examples/objc/AppRTCDemo/ios/APPRTCViewController.m b/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
index 8042762..d8d9714 100644
--- a/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
+++ b/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
@@ -34,6 +34,7 @@
#import <AVFoundation/AVFoundation.h>
#import "APPRTCConnectionManager.h"
#import "RTCEAGLVideoView.h"
+#import "RTCVideoTrack.h"
// Padding space for local video view with its parent.
static CGFloat const kLocalViewPadding = 20;
@@ -47,6 +48,8 @@ static CGFloat const kLocalViewPadding = 20;
@implementation APPRTCViewController {
APPRTCConnectionManager* _connectionManager;
+ RTCVideoTrack* _localVideoTrack;
+ RTCVideoTrack* _remoteVideoTrack;
CGSize _localVideoSize;
CGSize _remoteVideoSize;
}
@@ -101,13 +104,15 @@ static CGFloat const kLocalViewPadding = 20;
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
+ _localVideoTrack = localVideoTrack;
+ [_localVideoTrack addRenderer:self.localVideoView];
self.localVideoView.hidden = NO;
- self.localVideoView.videoTrack = localVideoTrack;
}
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
- self.remoteVideoView.videoTrack = remoteVideoTrack;
+ _remoteVideoTrack = remoteVideoTrack;
+ [_remoteVideoTrack addRenderer:self.remoteVideoView];
}
- (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
@@ -193,8 +198,16 @@ static CGFloat const kLocalViewPadding = 20;
self.instructionsView.hidden = NO;
self.logView.hidden = YES;
self.logView.text = nil;
- self.localVideoView.videoTrack = nil;
- self.remoteVideoView.videoTrack = nil;
+ if (_localVideoTrack) {
+ [_localVideoTrack removeRenderer:self.localVideoView];
+ _localVideoTrack = nil;
+ [self.localVideoView renderFrame:nil];
+ }
+ if (_remoteVideoTrack) {
+ [_remoteVideoTrack removeRenderer:self.remoteVideoView];
+ _remoteVideoTrack = nil;
+ [self.remoteVideoView renderFrame:nil];
+ }
self.blackView.hidden = YES;
}
diff --git a/examples/objc/AppRTCDemo/mac/APPRTCViewController.m b/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
index cf5b836..08acac9 100644
--- a/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
+++ b/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
@@ -30,6 +30,7 @@
#import <AVFoundation/AVFoundation.h>
#import "APPRTCConnectionManager.h"
#import "RTCNSGLVideoView.h"
+#import "RTCVideoTrack.h"
static NSUInteger const kContentWidth = 1280;
static NSUInteger const kContentHeight = 720;
@@ -227,6 +228,8 @@ static NSUInteger const kLogViewHeight = 280;
@implementation APPRTCViewController {
APPRTCConnectionManager* _connectionManager;
+ RTCVideoTrack* _localVideoTrack;
+ RTCVideoTrack* _remoteVideoTrack;
}
- (instancetype)initWithNibName:(NSString*)nibName
@@ -258,12 +261,13 @@ static NSUInteger const kLogViewHeight = 280;
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
- self.mainView.localVideoView.videoTrack = localVideoTrack;
+ _localVideoTrack = localVideoTrack;
}
- (void)connectionManager:(APPRTCConnectionManager*)manager
didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
- self.mainView.remoteVideoView.videoTrack = remoteVideoTrack;
+ _remoteVideoTrack = remoteVideoTrack;
+ [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
}
- (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
@@ -305,7 +309,9 @@ static NSUInteger const kLogViewHeight = 280;
}
- (void)disconnect {
- self.mainView.remoteVideoView.videoTrack = nil;
+ [_remoteVideoTrack removeRenderer:self.mainView.remoteVideoView];
+ _remoteVideoTrack = nil;
+ [self.mainView.remoteVideoView renderFrame:nil];
[_connectionManager disconnect];
}
diff --git a/libjingle.gyp b/libjingle.gyp
index 803eaa3..90d1f41 100755
--- a/libjingle.gyp
+++ b/libjingle.gyp
@@ -204,8 +204,8 @@
'app/webrtc/objc/RTCStatsReport.mm',
'app/webrtc/objc/RTCVideoCapturer+Internal.h',
'app/webrtc/objc/RTCVideoCapturer.mm',
- 'app/webrtc/objc/RTCVideoRenderer+Internal.h',
- 'app/webrtc/objc/RTCVideoRenderer.mm',
+ 'app/webrtc/objc/RTCVideoRendererAdapter.h',
+ 'app/webrtc/objc/RTCVideoRendererAdapter.mm',
'app/webrtc/objc/RTCVideoSource+Internal.h',
'app/webrtc/objc/RTCVideoSource.mm',
'app/webrtc/objc/RTCVideoTrack+Internal.h',
@@ -259,7 +259,6 @@
'conditions': [
['OS=="ios"', {
'sources': [
- 'app/webrtc/objc/RTCEAGLVideoView+Internal.h',
'app/webrtc/objc/RTCEAGLVideoView.m',
'app/webrtc/objc/public/RTCEAGLVideoView.h',
],
diff --git a/media/webrtc/webrtcvideoengine.cc b/media/webrtc/webrtcvideoengine.cc
index 04092f3..a0a8d81 100644
--- a/media/webrtc/webrtcvideoengine.cc
+++ b/media/webrtc/webrtcvideoengine.cc
@@ -1661,10 +1661,12 @@ bool WebRtcVideoMediaChannel::Init() {
}
WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
- const bool send = false;
- SetSend(send);
- const bool render = false;
- SetRender(render);
+ Terminate();
+}
+
+void WebRtcVideoMediaChannel::Terminate() {
+ SetSend(false);
+ SetRender(false);
while (!send_channels_.empty()) {
if (!DeleteSendChannel(send_channels_.begin()->first)) {
@@ -1987,14 +1989,21 @@ bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) {
SetReceiverReportSsrc(sp.first_ssrc());
}
- send_channel->set_stream_params(sp);
-
- // Reset send codec after stream parameters changed.
if (send_codec_) {
- if (!SetSendCodec(send_channel, *send_codec_)) {
+ send_channel->SetAdaptFormat(
+ VideoFormatFromVieCodec(*send_codec_),
+ WebRtcVideoChannelSendInfo::kAdaptFormatTypeCodec);
+
+ VideoSendParams send_params;
+ send_params.codec = *send_codec_;
+ send_params.stream = sp;
+ if (!SetSendParams(send_channel, send_params)) {
return false;
}
- LogSendCodecChange("SetSendStreamFormat()");
+ LogSendCodecChange("AddStream()");
+ } else {
+ // Save the stream params for later, when we have a codec.
+ send_channel->set_stream_params(sp);
}
if (sending_) {
@@ -2990,40 +2999,41 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) {
VideoOptions original = options_;
options_.SetAll(options);
- // Set CPU options for all send channels.
+ // Set CPU options and codec options for all send channels.
for (SendChannelMap::iterator iter = send_channels_.begin();
iter != send_channels_.end(); ++iter) {
WebRtcVideoChannelSendInfo* send_channel = iter->second;
send_channel->ApplyCpuOptions(options_);
- }
-
- if (send_codec_) {
- webrtc::VideoCodec new_codec = *send_codec_;
- bool conference_mode_turned_off = (
- original.conference_mode.IsSet() &&
- options.conference_mode.IsSet() &&
- original.conference_mode.GetWithDefaultIfUnset(false) &&
- !options.conference_mode.GetWithDefaultIfUnset(false));
- if (conference_mode_turned_off) {
- // This is a special case for turning conference mode off.
- // Max bitrate should go back to the default maximum value instead
- // of the current maximum.
- new_codec.maxBitrate = kAutoBandwidth;
- }
+ if (send_codec_) {
+ VideoSendParams send_params = send_channel->send_params();
+
+ bool conference_mode_turned_off = (
+ original.conference_mode.IsSet() &&
+ options.conference_mode.IsSet() &&
+ original.conference_mode.GetWithDefaultIfUnset(false) &&
+ !options.conference_mode.GetWithDefaultIfUnset(false));
+ if (conference_mode_turned_off) {
+ // This is a special case for turning conference mode off.
+ // Max bitrate should go back to the default maximum value instead
+ // of the current maximum.
+ send_params.codec.maxBitrate = kAutoBandwidth;
+ }
- // TODO(pthatcher): Remove this. We don't need 4 ways to set bitrates.
- int new_start_bitrate;
- if (options.video_start_bitrate.Get(&new_start_bitrate)) {
- new_codec.startBitrate = new_start_bitrate;
- }
+ // TODO(pthatcher): Remove this. We don't need 4 ways to set bitrates.
+ int new_start_bitrate;
+ if (options.video_start_bitrate.Get(&new_start_bitrate)) {
+ send_params.codec.startBitrate = new_start_bitrate;
+ }
- if (!SetSendCodec(new_codec)) {
- return false;
+ if (!SetSendParams(send_channel, send_params)) {
+ return false;
+ }
+ LogSendCodecChange("SetOptions()");
}
- LogSendCodecChange("SetOptions()");
}
+
int buffer_latency;
if (Changed(options.buffered_mode_latency,
original.buffered_mode_latency,
@@ -3676,26 +3686,9 @@ bool WebRtcVideoMediaChannel::SetSendCodec(
VideoFormatFromVieCodec(codec),
WebRtcVideoChannelSendInfo::kAdaptFormatTypeCodec);
- MaybeRegisterExternalEncoder(send_channel, codec);
-
VideoSendParams send_params = send_channel->send_params();
send_params.codec = codec;
- if (!SetSendParams(send_channel, send_params)) {
- return false;
- }
-
- // NOTE: SetRtxSendPayloadType must be called after all simulcast SSRCs
- // are configured. Otherwise ssrc's configured after this point will use
- // the primary PT for RTX.
- const int channel_id = send_channel->channel_id();
- if (send_rtx_type_ != -1 &&
- engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
- send_rtx_type_) != 0) {
- LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
- return false;
- }
-
- return true;
+ return SetSendParams(send_channel, send_params);
}
static std::string ToString(webrtc::VideoCodecComplexity complexity) {
@@ -3872,6 +3865,8 @@ bool WebRtcVideoMediaChannel::SetSendParams(
const VideoSendParams& send_params) {
const int channel_id = send_channel->channel_id();
+ MaybeRegisterExternalEncoder(send_channel, send_params.codec);
+
CapturedFrameInfo frame;
send_channel->last_captured_frame_info().Get(&frame);
@@ -3923,10 +3918,18 @@ bool WebRtcVideoMediaChannel::SetSendParams(
}
engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id, true);
- if (send_channel->IsActive()) {
- if (!SetSendSsrcs(channel_id, send_params.stream, codec)) {
- return false;
- }
+ if (!SetSendSsrcs(channel_id, send_params.stream, codec)) {
+ return false;
+ }
+
+ // NOTE: SetRtxSendPayloadType must be called after all SSRCs are
+  // configured. Otherwise SSRCs configured after this point will use
+ // the primary PT for RTX.
+ if (send_rtx_type_ != -1 &&
+ engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
+ send_rtx_type_) != 0) {
+ LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
+ return false;
}
send_channel->set_send_params(send_params);
diff --git a/media/webrtc/webrtcvideoengine.h b/media/webrtc/webrtcvideoengine.h
index cc81ee9..eee82c8 100644
--- a/media/webrtc/webrtcvideoengine.h
+++ b/media/webrtc/webrtcvideoengine.h
@@ -177,6 +177,10 @@ class WebRtcVideoEngine : public sigslot::has_slots<>,
rtc::CpuMonitor* cpu_monitor() { return cpu_monitor_.get(); }
protected:
+ bool initialized() const {
+ return initialized_;
+ }
+
// When a video processor registers with the engine.
// SignalMediaFrame will be invoked for every video frame.
// See videoprocessor.h for param reference.
@@ -326,6 +330,7 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
virtual void OnMessage(rtc::Message* msg) OVERRIDE;
protected:
+ void Terminate();
int GetLastEngineError() { return engine()->GetLastEngineError(); }
// webrtc::Transport:
diff --git a/media/webrtc/webrtcvideoengine2.cc b/media/webrtc/webrtcvideoengine2.cc
index d79f71d..5062fb9 100644
--- a/media/webrtc/webrtcvideoengine2.cc
+++ b/media/webrtc/webrtcvideoengine2.cc
@@ -1202,7 +1202,21 @@ void WebRtcVideoChannel2::FillReceiverStats(VideoMediaInfo* video_media_info) {
void WebRtcVideoChannel2::FillBandwidthEstimationStats(
VideoMediaInfo* video_media_info) {
- // TODO(pbos): Implement.
+ BandwidthEstimationInfo bwe_info;
+ webrtc::Call::Stats stats = call_->GetStats();
+ bwe_info.available_send_bandwidth = stats.send_bandwidth_bps;
+ bwe_info.available_recv_bandwidth = stats.recv_bandwidth_bps;
+ bwe_info.bucket_delay = stats.pacer_delay_ms;
+
+ // Get send stream bitrate stats.
+ rtc::CritScope stream_lock(&stream_crit_);
+ for (std::map<uint32, WebRtcVideoSendStream*>::iterator stream =
+ send_streams_.begin();
+ stream != send_streams_.end();
+ ++stream) {
+ stream->second->FillBandwidthEstimationInfo(&bwe_info);
+ }
+ video_media_info->bw_estimations.push_back(bwe_info);
}
bool WebRtcVideoChannel2::SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
@@ -1842,12 +1856,12 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
info.framerate_input = stats.input_frame_rate;
info.framerate_sent = stats.encode_frame_rate;
- for (std::map<uint32_t, webrtc::StreamStats>::iterator it =
+ for (std::map<uint32_t, webrtc::SsrcStats>::iterator it =
stats.substreams.begin();
it != stats.substreams.end();
++it) {
// TODO(pbos): Wire up additional stats, such as padding bytes.
- webrtc::StreamStats stream_stats = it->second;
+ webrtc::SsrcStats stream_stats = it->second;
info.bytes_sent += stream_stats.rtp_stats.bytes +
stream_stats.rtp_stats.header_bytes +
stream_stats.rtp_stats.padding_bytes;
@@ -1857,7 +1871,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
if (!stats.substreams.empty()) {
// TODO(pbos): Report fraction lost per SSRC.
- webrtc::StreamStats first_stream_stats = stats.substreams.begin()->second;
+ webrtc::SsrcStats first_stream_stats = stats.substreams.begin()->second;
info.fraction_lost =
static_cast<float>(first_stream_stats.rtcp_stats.fraction_lost) /
(1 << 8);
@@ -1884,6 +1898,23 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
return info;
}
+void WebRtcVideoChannel2::WebRtcVideoSendStream::FillBandwidthEstimationInfo(
+ BandwidthEstimationInfo* bwe_info) {
+ rtc::CritScope cs(&lock_);
+ if (stream_ == NULL) {
+ return;
+ }
+ webrtc::VideoSendStream::Stats stats = stream_->GetStats();
+ for (std::map<uint32_t, webrtc::SsrcStats>::iterator it =
+ stats.substreams.begin();
+ it != stats.substreams.end();
+ ++it) {
+ bwe_info->transmit_bitrate += it->second.total_bitrate_bps;
+ bwe_info->retransmit_bitrate += it->second.retransmit_bitrate_bps;
+ }
+ bwe_info->actual_enc_bitrate = stats.media_bitrate_bps;
+}
+
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnCpuResolutionRequest(
CoordinatedVideoAdapter::AdaptRequest adapt_request) {
rtc::CritScope cs(&lock_);
diff --git a/media/webrtc/webrtcvideoengine2.h b/media/webrtc/webrtcvideoengine2.h
index 0b812ef..299ac35 100644
--- a/media/webrtc/webrtcvideoengine2.h
+++ b/media/webrtc/webrtcvideoengine2.h
@@ -315,6 +315,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
void Stop();
VideoSenderInfo GetVideoSenderInfo();
+ void FillBandwidthEstimationInfo(BandwidthEstimationInfo* bwe_info);
void OnCpuResolutionRequest(
CoordinatedVideoAdapter::AdaptRequest adapt_request);
diff --git a/media/webrtc/webrtcvideoengine2_unittest.cc b/media/webrtc/webrtcvideoengine2_unittest.cc
index 0b85723..afea370 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -294,12 +294,9 @@ webrtc::PacketReceiver* FakeCall::Receiver() {
return NULL;
}
-uint32_t FakeCall::SendBitrateEstimate() {
- return 0;
-}
-
-uint32_t FakeCall::ReceiveBitrateEstimate() {
- return 0;
+webrtc::Call::Stats FakeCall::GetStats() const {
+ webrtc::Call::Stats stats;
+ return stats;
}
void FakeCall::SignalNetworkState(webrtc::Call::NetworkState state) {
diff --git a/media/webrtc/webrtcvideoengine2_unittest.h b/media/webrtc/webrtcvideoengine2_unittest.h
index 3b62289..48c4f64 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/media/webrtc/webrtcvideoengine2_unittest.h
@@ -127,8 +127,7 @@ class FakeCall : public webrtc::Call {
webrtc::VideoReceiveStream* receive_stream) OVERRIDE;
virtual webrtc::PacketReceiver* Receiver() OVERRIDE;
- virtual uint32_t SendBitrateEstimate() OVERRIDE;
- virtual uint32_t ReceiveBitrateEstimate() OVERRIDE;
+ virtual webrtc::Call::Stats GetStats() const OVERRIDE;
virtual void SignalNetworkState(webrtc::Call::NetworkState state) OVERRIDE;