author     Android Chromium Automerger <chromium-automerger@android>  2014-05-29 06:57:40 +0000
committer  Android Chromium Automerger <chromium-automerger@android>  2014-05-29 06:57:40 +0000
commit     bf2bd58de66f8063ea4158d8f0fa4395f6b170a1 (patch)
tree       d2028e3cb7a099b511aabab3022004cd03989566
parent     52dfe97e8b722572766b284b5dc019f6c9b4e874 (diff)
parent     ff6b4a8eddca609ad2691b54f443b6f1e9342579 (diff)
Merge third_party/webrtc from https://chromium.googlesource.com/external/webrtc/trunk/webrtc.git at ff6b4a8eddca609ad2691b54f443b6f1e9342579

This commit was generated by merge_from_chromium.py.

Change-Id: I3dceea02410ec9709a6dc1ffab5962cba1821b63
-rw-r--r--  common_audio/signal_processing/complex_fft.c  10
-rw-r--r--  common_audio/signal_processing/real_fft_unittest.cc  2
-rw-r--r--  common_video/i420_video_frame.cc  11
-rw-r--r--  common_video/i420_video_frame_unittest.cc  68
-rw-r--r--  common_video/interface/i420_video_frame.h  10
-rw-r--r--  common_video/interface/texture_video_frame.h  1
-rw-r--r--  common_video/texture_video_frame.cc  5
-rw-r--r--  common_video/texture_video_frame_unittest.cc  23
-rw-r--r--  modules/audio_coding/neteq4/packet_buffer.cc  8
-rw-r--r--  modules/desktop_capture/mouse_cursor_monitor_unittest.cc  5
-rw-r--r--  modules/utility/interface/mock/mock_process_thread.h  29
-rw-r--r--  modules/utility/source/video_frames_queue.cc  7
-rw-r--r--  modules/video_capture/include/mock/mock_video_capture.h  50
-rw-r--r--  modules/video_capture/video_capture.gypi  1
-rw-r--r--  modules/video_render/video_render_frames.cc  7
-rw-r--r--  system_wrappers/interface/clock.h  9
-rw-r--r--  system_wrappers/source/clock.cc  17
-rw-r--r--  video_engine/encoder_state_feedback_unittest.cc  21
-rw-r--r--  video_engine/include/vie_base.h  32
-rw-r--r--  video_engine/mock/mock_vie_frame_provider_base.h  33
-rw-r--r--  video_engine/overuse_frame_detector.cc  28
-rw-r--r--  video_engine/overuse_frame_detector.h  36
-rw-r--r--  video_engine/overuse_frame_detector_unittest.cc  67
-rw-r--r--  video_engine/video_engine_core.gypi  1
-rw-r--r--  video_engine/vie_base_impl.cc  33
-rw-r--r--  video_engine/vie_base_impl.h  2
-rw-r--r--  video_engine/vie_capturer.cc  50
-rw-r--r--  video_engine/vie_capturer.h  10
-rw-r--r--  video_engine/vie_capturer_unittest.cc  263
-rw-r--r--  video_engine/vie_encoder.cc  4
-rw-r--r--  video_engine/vie_remb_unittest.cc  17
-rw-r--r--  webrtc_tests.gypi  9
32 files changed, 686 insertions, 183 deletions
diff --git a/common_audio/signal_processing/complex_fft.c b/common_audio/signal_processing/complex_fft.c
index 2e8eb323..a026b70b 100644
--- a/common_audio/signal_processing/complex_fft.c
+++ b/common_audio/signal_processing/complex_fft.c
@@ -115,18 +115,18 @@ int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode)
j = i + l;
#ifdef WEBRTC_ARCH_ARM_V7
- __asm __volatile(
+ __asm __volatile(
"pkhbt %[frfi_r], %[frfi_even], %[frfi_odd], lsl #16\n\t"
"smlsd %[tr32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
- "smladx %[ti32], %[wri], %[frfi_r], %[cfftrnd]\n\t"
:[frfi_r]"+r"(frfi_r),
- [tr32]"=r"(tr32),
- [ti32]"=r"(ti32)
+ [tr32]"=r"(tr32)
:[frfi_even]"r"((int32_t)frfi[2*j]),
[frfi_odd]"r"((int32_t)frfi[2*j +1]),
[wri]"r"(wri),
[cfftrnd]"r"(CFFTRND)
- );
+ );
+ __asm __volatile("smladx %0, %1, %2, %3\n\t" : "=r"(ti32) :
+ "r"(wri), "r"(frfi_r), "r"(CFFTRND));
#else
tr32 = WEBRTC_SPL_MUL_16_16(wr, frfi[2 * j])
diff --git a/common_audio/signal_processing/real_fft_unittest.cc b/common_audio/signal_processing/real_fft_unittest.cc
index 75472e32..9bd35cd6 100644
--- a/common_audio/signal_processing/real_fft_unittest.cc
+++ b/common_audio/signal_processing/real_fft_unittest.cc
@@ -49,7 +49,7 @@ TEST_F(RealFFTTest, CreateFailsOnBadInput) {
EXPECT_TRUE(fft == NULL);
}
-TEST_F(RealFFTTest, DISABLED_ON_ANDROID(RealAndComplexMatch)) {
+TEST_F(RealFFTTest, RealAndComplexMatch) {
int i = 0;
int j = 0;
int16_t real_fft_time[kTimeDataLength] = {0};
diff --git a/common_video/i420_video_frame.cc b/common_video/i420_video_frame.cc
index 5b9543b6..fdc2bbc2 100644
--- a/common_video/i420_video_frame.cc
+++ b/common_video/i420_video_frame.cc
@@ -10,6 +10,8 @@
#include "webrtc/common_video/interface/i420_video_frame.h"
+#include <string.h>
+
#include <algorithm> // swap
namespace webrtc {
@@ -78,6 +80,15 @@ int I420VideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
return 0;
}
+I420VideoFrame* I420VideoFrame::CloneFrame() const {
+ scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
+ if (new_frame->CopyFrame(*this) == -1) {
+ // CopyFrame failed.
+ return NULL;
+ }
+ return new_frame.release();
+}
+
void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
y_plane_.Swap(videoFrame->y_plane_);
u_plane_.Swap(videoFrame->u_plane_);
diff --git a/common_video/i420_video_frame_unittest.cc b/common_video/i420_video_frame_unittest.cc
index 29578c73..ca01fd0c 100644
--- a/common_video/i420_video_frame_unittest.cc
+++ b/common_video/i420_video_frame_unittest.cc
@@ -19,8 +19,8 @@
namespace webrtc {
-bool EqualFrames(const I420VideoFrame& videoFrame1,
- const I420VideoFrame& videoFrame2);
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
bool EqualFramesExceptSize(const I420VideoFrame& frame1,
const I420VideoFrame& frame2);
int ExpectedSize(int plane_stride, int image_height, PlaneType type);
@@ -122,6 +122,29 @@ TEST(TestI420VideoFrame, CopyFrame) {
EXPECT_TRUE(EqualFrames(frame1, frame2));
}
+TEST(TestI420VideoFrame, CloneFrame) {
+ I420VideoFrame frame1;
+ scoped_ptr<I420VideoFrame> frame2;
+ const int kSizeY = 225;
+ const int kSizeU = 80;
+ const int kSizeV = 80;
+ uint8_t buffer_y[kSizeY];
+ uint8_t buffer_u[kSizeU];
+ uint8_t buffer_v[kSizeV];
+ memset(buffer_y, 16, kSizeY);
+ memset(buffer_u, 8, kSizeU);
+ memset(buffer_v, 4, kSizeV);
+ frame1.CreateFrame(
+ kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v, 20, 20, 20, 10, 10);
+ frame1.set_timestamp(1);
+ frame1.set_ntp_time_ms(2);
+ frame1.set_render_time_ms(3);
+
+ frame2.reset(frame1.CloneFrame());
+ EXPECT_TRUE(frame2.get() != NULL);
+ EXPECT_TRUE(EqualFrames(frame1, *frame2));
+}
+
TEST(TestI420VideoFrame, CopyBuffer) {
I420VideoFrame frame1, frame2;
int width = 15;
@@ -234,29 +257,24 @@ TEST(TestI420VideoFrame, RefCountedInstantiation) {
bool EqualFrames(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
- if (!EqualFramesExceptSize(frame1, frame2))
- return false;
- // Compare allocated memory size.
- bool ret = true;
- ret |= (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane));
- ret |= (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane));
- ret |= (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane));
- return ret;
+ return (EqualFramesExceptSize(frame1, frame2) &&
+ (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+ (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+ (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)));
}
bool EqualFramesExceptSize(const I420VideoFrame& frame1,
const I420VideoFrame& frame2) {
- bool ret = true;
- ret |= (frame1.width() == frame2.width());
- ret |= (frame1.height() == frame2.height());
- ret |= (frame1.stride(kYPlane) == frame2.stride(kYPlane));
- ret |= (frame1.stride(kUPlane) == frame2.stride(kUPlane));
- ret |= (frame1.stride(kVPlane) == frame2.stride(kVPlane));
- ret |= (frame1.timestamp() == frame2.timestamp());
- ret |= (frame1.ntp_time_ms() == frame2.ntp_time_ms());
- ret |= (frame1.render_time_ms() == frame2.render_time_ms());
- if (!ret)
+ if ((frame1.width() != frame2.width()) ||
+ (frame1.height() != frame2.height()) ||
+ (frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
+ (frame1.stride(kUPlane) != frame2.stride(kUPlane)) ||
+ (frame1.stride(kVPlane) != frame2.stride(kVPlane)) ||
+ (frame1.timestamp() != frame2.timestamp()) ||
+ (frame1.ntp_time_ms() != frame2.ntp_time_ms()) ||
+ (frame1.render_time_ms() != frame2.render_time_ms())) {
return false;
+ }
// Memory should be the equal for the minimum of the two sizes.
int size_y = std::min(frame1.allocated_size(kYPlane),
frame2.allocated_size(kYPlane));
@@ -264,13 +282,9 @@ bool EqualFramesExceptSize(const I420VideoFrame& frame1,
frame2.allocated_size(kUPlane));
int size_v = std::min(frame1.allocated_size(kVPlane),
frame2.allocated_size(kVPlane));
- int ret_val = 0;
- ret_val += memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y);
- ret_val += memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u);
- ret_val += memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v);
- if (ret_val == 0)
- return true;
- return false;
+ return (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y) == 0 &&
+ memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u) == 0 &&
+ memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v) == 0);
}
int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
diff --git a/common_video/interface/i420_video_frame.h b/common_video/interface/i420_video_frame.h
index 3f90a8e2..5f7a572b 100644
--- a/common_video/interface/i420_video_frame.h
+++ b/common_video/interface/i420_video_frame.h
@@ -51,13 +51,13 @@ class I420VideoFrame {
// on set dimensions - height and plane stride.
// If required size is bigger than the allocated one, new buffers of adequate
// size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CreateEmptyFrame(int width, int height,
int stride_y, int stride_u, int stride_v);
// CreateFrame: Sets the frame's members and buffers. If required size is
// bigger than allocated one, new buffers of adequate size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
int size_u, const uint8_t* buffer_u,
int size_v, const uint8_t* buffer_v,
@@ -66,9 +66,13 @@ class I420VideoFrame {
// Copy frame: If required size is bigger than allocated one, new buffers of
// adequate size will be allocated.
- // Return value: 0 on success ,-1 on error.
+ // Return value: 0 on success, -1 on error.
virtual int CopyFrame(const I420VideoFrame& videoFrame);
+ // Make a copy of |this|. The caller owns the returned frame.
+ // Return value: a new frame on success, NULL on error.
+ virtual I420VideoFrame* CloneFrame() const;
+
// Swap Frame.
virtual void SwapFrame(I420VideoFrame* videoFrame);
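A minimal usage sketch for the CloneFrame() API declared above; |frame| is a hypothetical existing frame, and the caller owns the returned copy as the header comment states:

    // Deep-copies |frame|; returns NULL if the underlying CopyFrame() fails.
    webrtc::scoped_ptr<webrtc::I420VideoFrame> copy(frame.CloneFrame());
    if (copy.get() == NULL) {
      // Copy failed (e.g. buffer allocation); keep using the original frame.
    }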
diff --git a/common_video/interface/texture_video_frame.h b/common_video/interface/texture_video_frame.h
index e905ea73..2c625ab5 100644
--- a/common_video/interface/texture_video_frame.h
+++ b/common_video/interface/texture_video_frame.h
@@ -49,6 +49,7 @@ class TextureVideoFrame : public I420VideoFrame {
int stride_u,
int stride_v) OVERRIDE;
virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
+ virtual I420VideoFrame* CloneFrame() const OVERRIDE;
virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
virtual uint8_t* buffer(PlaneType type) OVERRIDE;
virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
diff --git a/common_video/texture_video_frame.cc b/common_video/texture_video_frame.cc
index 2dd6cadb..f301d19c 100644
--- a/common_video/texture_video_frame.cc
+++ b/common_video/texture_video_frame.cc
@@ -57,6 +57,11 @@ int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
return -1;
}
+I420VideoFrame* TextureVideoFrame::CloneFrame() const {
+ return new TextureVideoFrame(
+ handle_, width(), height(), timestamp(), render_time_ms());
+}
+
void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
assert(false); // Should not be called.
}
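Note that the texture override above is a shallow clone: the new TextureVideoFrame references the same ref-counted native handle rather than copying pixel data. A sketch of the observable behavior, assuming a hypothetical |texture_frame|:

    webrtc::scoped_ptr<webrtc::I420VideoFrame> clone(texture_frame.CloneFrame());
    // Both frames reference the same underlying texture.
    assert(clone->native_handle() == texture_frame.native_handle());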
diff --git a/common_video/texture_video_frame_unittest.cc b/common_video/texture_video_frame_unittest.cc
index 04e09a67..408f5f61 100644
--- a/common_video/texture_video_frame_unittest.cc
+++ b/common_video/texture_video_frame_unittest.cc
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
namespace webrtc {
@@ -27,6 +28,9 @@ class NativeHandleImpl : public NativeHandle {
int32_t ref_count_;
};
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+
TEST(TestTextureVideoFrame, InitialValues) {
NativeHandleImpl handle;
TextureVideoFrame frame(&handle, 640, 480, 100, 10);
@@ -55,4 +59,21 @@ TEST(TestTextureVideoFrame, RefCount) {
EXPECT_EQ(0, handle.ref_count());
}
+TEST(TestTextureVideoFrame, CloneFrame) {
+ NativeHandleImpl handle;
+ TextureVideoFrame frame1(&handle, 640, 480, 100, 200);
+ scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
+ EXPECT_TRUE(frame2.get() != NULL);
+ EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.native_handle() == frame2.native_handle()) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
} // namespace webrtc
diff --git a/modules/audio_coding/neteq4/packet_buffer.cc b/modules/audio_coding/neteq4/packet_buffer.cc
index bcd0e7b4..da3bdf14 100644
--- a/modules/audio_coding/neteq4/packet_buffer.cc
+++ b/modules/audio_coding/neteq4/packet_buffer.cc
@@ -217,11 +217,11 @@ int PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
int duration;
if (packet->sync_packet) {
duration = last_duration;
+ } else if (packet->primary) {
+ duration =
+ decoder->PacketDuration(packet->payload, packet->payload_length);
} else {
- duration = packet->primary ?
- decoder->PacketDuration(packet->payload, packet->payload_length) :
- decoder->PacketDurationRedundant(packet->payload,
- packet->payload_length);
+ continue;
}
if (duration >= 0) {
last_duration = duration; // Save the most up-to-date (valid) duration.
diff --git a/modules/desktop_capture/mouse_cursor_monitor_unittest.cc b/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
index 7849005f..5e24d45d 100644
--- a/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
+++ b/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
@@ -50,7 +50,10 @@ class MouseCursorMonitorTest : public testing::Test,
// tests. Figure out how to do that without breaking other tests in
// modules_unittests and enable these tests on Mac.
// https://code.google.com/p/webrtc/issues/detail?id=2532
-#if !defined(WEBRTC_MAC)
+//
+// Disabled on Windows due to flake, see:
+// https://code.google.com/p/webrtc/issues/detail?id=3408
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN)
#define MAYBE(x) x
#else
#define MAYBE(x) DISABLED_##x
diff --git a/modules/utility/interface/mock/mock_process_thread.h b/modules/utility/interface/mock/mock_process_thread.h
new file mode 100644
index 00000000..fc0c1fb1
--- /dev/null
+++ b/modules/utility/interface/mock/mock_process_thread.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/interface/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+ MOCK_METHOD0(Start, int32_t());
+ MOCK_METHOD0(Stop, int32_t());
+ MOCK_METHOD1(RegisterModule, int32_t(Module* module));
+ MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
diff --git a/modules/utility/source/video_frames_queue.cc b/modules/utility/source/video_frames_queue.cc
index 63afbe9b..9ade8b51 100644
--- a/modules/utility/source/video_frames_queue.cc
+++ b/modules/utility/source/video_frames_queue.cc
@@ -38,12 +38,7 @@ VideoFramesQueue::~VideoFramesQueue() {
int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
if (newFrame.native_handle() != NULL) {
- _incomingFrames.push_back(new TextureVideoFrame(
- static_cast<NativeHandle*>(newFrame.native_handle()),
- newFrame.width(),
- newFrame.height(),
- newFrame.timestamp(),
- newFrame.render_time_ms()));
+ _incomingFrames.push_back(newFrame.CloneFrame());
return 0;
}
diff --git a/modules/video_capture/include/mock/mock_video_capture.h b/modules/video_capture/include/mock/mock_video_capture.h
new file mode 100644
index 00000000..8ad74a23
--- /dev/null
+++ b/modules/video_capture/include/mock/mock_video_capture.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVideoCaptureModule : public VideoCaptureModule {
+ public:
+ // from Module
+ MOCK_METHOD0(TimeUntilNextProcess, int32_t());
+ MOCK_METHOD0(Process, int32_t());
+
+ // from RefCountedModule
+ MOCK_METHOD0(AddRef, int32_t());
+ MOCK_METHOD0(Release, int32_t());
+
+ // from VideoCaptureModule
+ MOCK_METHOD1(RegisterCaptureDataCallback,
+ void(VideoCaptureDataCallback& dataCallback));
+ MOCK_METHOD0(DeRegisterCaptureDataCallback, void());
+ MOCK_METHOD1(RegisterCaptureCallback, void(VideoCaptureFeedBack& callBack));
+ MOCK_METHOD0(DeRegisterCaptureCallback, void());
+ MOCK_METHOD1(StartCapture, int32_t(const VideoCaptureCapability& capability));
+ MOCK_METHOD0(StopCapture, int32_t());
+ MOCK_CONST_METHOD0(CurrentDeviceName, const char*());
+ MOCK_METHOD0(CaptureStarted, bool());
+ MOCK_METHOD1(CaptureSettings, int32_t(VideoCaptureCapability& settings));
+ MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
+ MOCK_METHOD0(CaptureDelay, int32_t());
+ MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+ MOCK_METHOD1(GetEncodeInterface,
+ VideoCaptureEncodeInterface*(const VideoCodec& codec));
+ MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
+ MOCK_METHOD1(EnableNoPictureAlarm, void(const bool enable));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
diff --git a/modules/video_capture/video_capture.gypi b/modules/video_capture/video_capture.gypi
index 6df062a8..b13b27b3 100644
--- a/modules/video_capture/video_capture.gypi
+++ b/modules/video_capture/video_capture.gypi
@@ -60,6 +60,7 @@
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
+ '-framework CoreVideo',
'-framework QTKit',
],
},
diff --git a/modules/video_render/video_render_frames.cc b/modules/video_render/video_render_frames.cc
index 7025d62a..d790877e 100644
--- a/modules/video_render/video_render_frames.cc
+++ b/modules/video_render/video_render_frames.cc
@@ -55,12 +55,7 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
}
if (new_frame->native_handle() != NULL) {
- incoming_frames_.push_back(new TextureVideoFrame(
- static_cast<NativeHandle*>(new_frame->native_handle()),
- new_frame->width(),
- new_frame->height(),
- new_frame->timestamp(),
- new_frame->render_time_ms()));
+ incoming_frames_.push_back(new_frame->CloneFrame());
return static_cast<int32_t>(incoming_frames_.size());
}
diff --git a/system_wrappers/interface/clock.h b/system_wrappers/interface/clock.h
index ce326913..38ce0d58 100644
--- a/system_wrappers/interface/clock.h
+++ b/system_wrappers/interface/clock.h
@@ -11,10 +11,14 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
#define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_CLOCK_H_
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+class RWLockWrapper;
+
// January 1970, in NTP seconds.
const uint32_t kNtpJan1970 = 2208988800UL;
@@ -51,7 +55,7 @@ class SimulatedClock : public Clock {
public:
explicit SimulatedClock(int64_t initial_time_us);
- virtual ~SimulatedClock() {}
+ virtual ~SimulatedClock();
// Return a timestamp in milliseconds relative to some arbitrary source; the
// source is fixed for this clock.
@@ -73,7 +77,8 @@ class SimulatedClock : public Clock {
void AdvanceTimeMicroseconds(int64_t microseconds);
private:
- int64_t time_us_;
+ int64_t time_us_ GUARDED_BY(lock_);
+ scoped_ptr<RWLockWrapper> lock_;
};
}; // namespace webrtc
diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc
index b101e058..b341a5f0 100644
--- a/system_wrappers/source/clock.cc
+++ b/system_wrappers/source/clock.cc
@@ -20,6 +20,7 @@
#include <time.h>
#endif
+#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
namespace webrtc {
@@ -143,20 +144,27 @@ Clock* Clock::GetRealTimeClock() {
}
SimulatedClock::SimulatedClock(int64_t initial_time_us)
- : time_us_(initial_time_us) {}
+ : time_us_(initial_time_us), lock_(RWLockWrapper::CreateRWLock()) {
+}
+
+SimulatedClock::~SimulatedClock() {
+}
int64_t SimulatedClock::TimeInMilliseconds() {
+ ReadLockScoped synchronize(*lock_);
return (time_us_ + 500) / 1000;
}
int64_t SimulatedClock::TimeInMicroseconds() {
+ ReadLockScoped synchronize(*lock_);
return time_us_;
}
void SimulatedClock::CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
- seconds = (TimeInMilliseconds() / 1000) + kNtpJan1970;
- fractions = (uint32_t)((TimeInMilliseconds() % 1000) *
- kMagicNtpFractionalUnit / 1000);
+ int64_t now_ms = TimeInMilliseconds();
+ seconds = (now_ms / 1000) + kNtpJan1970;
+ fractions =
+ static_cast<uint32_t>((now_ms % 1000) * kMagicNtpFractionalUnit / 1000);
}
int64_t SimulatedClock::CurrentNtpInMilliseconds() {
@@ -168,6 +176,7 @@ void SimulatedClock::AdvanceTimeMilliseconds(int64_t milliseconds) {
}
void SimulatedClock::AdvanceTimeMicroseconds(int64_t microseconds) {
+ WriteLockScoped synchronize(*lock_);
time_us_ += microseconds;
}
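With the reader/writer lock above, a SimulatedClock can be advanced from one thread while code under test reads it from another. A minimal sketch (the thread placement is hypothetical):

    webrtc::SimulatedClock clock(0);  // Initial time: 0 us.
    // Test driver thread: advancing takes the write lock.
    clock.AdvanceTimeMilliseconds(33);
    // Module thread: concurrent reads take the read lock.
    int64_t now_ms = clock.TimeInMilliseconds();  // Yields 33.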
diff --git a/video_engine/encoder_state_feedback_unittest.cc b/video_engine/encoder_state_feedback_unittest.cc
index f85d9897..4e15752f 100644
--- a/video_engine/encoder_state_feedback_unittest.cc
+++ b/video_engine/encoder_state_feedback_unittest.cc
@@ -17,26 +17,17 @@
#include "webrtc/common.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/video_engine/vie_encoder.h"
-namespace webrtc {
+using ::testing::NiceMock;
-// TODO(mflodman) Create a common mock in module utility.
-class TestProcessThread : public ProcessThread {
- public:
- TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
+namespace webrtc {
class MockVieEncoder : public ViEEncoder {
public:
- explicit MockVieEncoder(TestProcessThread* process_thread)
+ explicit MockVieEncoder(ProcessThread* process_thread)
: ViEEncoder(1, 1, 1, config_, *process_thread, NULL) {}
~MockVieEncoder() {}
@@ -55,10 +46,10 @@ class MockVieEncoder : public ViEEncoder {
class VieKeyRequestTest : public ::testing::Test {
protected:
virtual void SetUp() {
- process_thread_.reset(new TestProcessThread());
+ process_thread_.reset(new NiceMock<MockProcessThread>);
encoder_state_feedback_.reset(new EncoderStateFeedback());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
};
diff --git a/video_engine/include/vie_base.h b/video_engine/include/vie_base.h
index a08cbacb..56916ce3 100644
--- a/video_engine/include/vie_base.h
+++ b/video_engine/include/vie_base.h
@@ -109,6 +109,24 @@ struct CpuOveruseOptions {
}
};
+struct CpuOveruseMetrics {
+ CpuOveruseMetrics()
+ : capture_jitter_ms(-1),
+ avg_encode_time_ms(-1),
+ encode_usage_percent(-1),
+ capture_queue_delay_ms_per_s(-1) {}
+
+ int capture_jitter_ms; // The current estimated jitter in ms based on
+ // incoming captured frames.
+ int avg_encode_time_ms; // The average encode time in ms.
+ int encode_usage_percent; // The average encode time divided by the average
+ // time difference between incoming captured frames.
+ int capture_queue_delay_ms_per_s; // The current time delay between an
+ // incoming captured frame until the frame
+ // is being processed. The delay is
+ // expressed in ms delay per second.
+};
+
class WEBRTC_DLLEXPORT VideoEngine {
public:
// Creates a VideoEngine object, which can then be used to acquire sub-APIs.
@@ -193,16 +211,12 @@ class WEBRTC_DLLEXPORT ViEBase {
}
// Gets cpu overuse measures.
- // capture_jitter_ms: The current estimated jitter in ms based on incoming
- // captured frames.
- // avg_encode_time_ms: The average encode time in ms.
- // encode_usage_percent: The average encode time divided by the average time
- // difference between incoming captured frames.
- // capture_queue_delay_ms_per_s: The current time delay between an incoming
- // captured frame until the frame is being
- // processed. The delay is expressed in ms
- // delay per second.
// TODO(asapersson): Remove default implementation.
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics) {
+ return -1;
+ }
+ // TODO(asapersson): Remove this function when libjingle has been updated.
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
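A minimal call sketch for the new struct-based accessor above, assuming |vie_base| was obtained via ViEBase::GetInterface() and |channel| is a valid channel id; per the constructor, unmeasured fields stay at -1:

    webrtc::CpuOveruseMetrics metrics;
    if (vie_base->GetCpuOveruseMetrics(channel, &metrics) == 0) {
      int jitter_ms = metrics.capture_jitter_ms;         // -1 until measured.
      int usage_percent = metrics.encode_usage_percent;  // Ditto.
    }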
diff --git a/video_engine/mock/mock_vie_frame_provider_base.h b/video_engine/mock/mock_vie_frame_provider_base.h
new file mode 100644
index 00000000..d4e17f4d
--- /dev/null
+++ b/video_engine/mock/mock_vie_frame_provider_base.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+
+#include "webrtc/video_engine/vie_frame_provider_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockViEFrameCallback : public ViEFrameCallback {
+ public:
+ MOCK_METHOD4(DeliverFrame,
+ void(int id,
+ I420VideoFrame* video_frame,
+ int num_csrcs,
+ const uint32_t CSRC[kRtpCsrcSize]));
+ MOCK_METHOD2(DelayChanged, void(int id, int frame_delay));
+ MOCK_METHOD3(GetPreferedFrameSettings,
+ int(int* width, int* height, int* frame_rate));
+ MOCK_METHOD1(ProviderDestroyed, void(int id));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
diff --git a/video_engine/overuse_frame_detector.cc b/video_engine/overuse_frame_detector.cc
index 078c89a5..c136130b 100644
--- a/video_engine/overuse_frame_detector.cc
+++ b/video_engine/overuse_frame_detector.cc
@@ -302,29 +302,19 @@ void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
ResetAll(num_pixels_);
}
-int OveruseFrameDetector::CaptureJitterMs() const {
- CriticalSectionScoped cs(crit_.get());
- return static_cast<int>(capture_deltas_.StdDev() + 0.5);
-}
-
-int OveruseFrameDetector::AvgEncodeTimeMs() const {
- CriticalSectionScoped cs(crit_.get());
- return encode_time_->filtered_encode_time_ms();
-}
-
-int OveruseFrameDetector::EncodeUsagePercent() const {
- CriticalSectionScoped cs(crit_.get());
- return encode_usage_->UsageInPercent();
-}
-
-int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const {
+int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->filtered_delay_ms_per_s();
+ return capture_queue_delay_->delay_ms();
}
-int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
+void OveruseFrameDetector::GetCpuOveruseMetrics(
+ CpuOveruseMetrics* metrics) const {
CriticalSectionScoped cs(crit_.get());
- return capture_queue_delay_->delay_ms();
+ metrics->capture_jitter_ms = static_cast<int>(capture_deltas_.StdDev() + 0.5);
+ metrics->avg_encode_time_ms = encode_time_->filtered_encode_time_ms();
+ metrics->encode_usage_percent = encode_usage_->UsageInPercent();
+ metrics->capture_queue_delay_ms_per_s =
+ capture_queue_delay_->filtered_delay_ms_per_s();
}
int32_t OveruseFrameDetector::TimeUntilNextProcess() {
diff --git a/video_engine/overuse_frame_detector.h b/video_engine/overuse_frame_detector.h
index c30bb57d..38b927ba 100644
--- a/video_engine/overuse_frame_detector.h
+++ b/video_engine/overuse_frame_detector.h
@@ -70,25 +70,23 @@ class OveruseFrameDetector : public Module {
void FrameEncoded(int encode_time_ms);
// Accessors.
- // The estimated jitter based on incoming captured frames.
- int CaptureJitterMs() const;
-
- // Running average of reported encode time (FrameEncoded()).
- // Only used for stats.
- int AvgEncodeTimeMs() const;
-
- // The average encode time divided by the average time difference between
- // incoming captured frames.
- // This variable is currently only used for statistics.
- int EncodeUsagePercent() const;
-
- // The current time delay between an incoming captured frame (FrameCaptured())
- // until the frame is being processed (FrameProcessingStarted()).
- // (Note: if a new frame is received before an old frame has been processed,
- // the old frame is skipped).
- // The delay is returned as the delay in ms per second.
- // This variable is currently only used for statistics.
- int AvgCaptureQueueDelayMsPerS() const;
+
+ // Returns CpuOveruseMetrics where
+ // capture_jitter_ms: The estimated jitter based on incoming captured frames.
+ // avg_encode_time_ms: Running average of reported encode time
+ // (FrameEncoded()). Only used for stats.
+ // encode_usage_percent: The average encode time divided by the average time
+ // difference between incoming captured frames.
+ // capture_queue_delay_ms_per_s: The current time delay between an incoming
+ // captured frame (FrameCaptured()) until the
+ // frame is being processed
+ // (FrameProcessingStarted()). (Note: if a new
+ // frame is received before an old frame has
+ // been processed, the old frame is skipped).
+ // The delay is expressed in ms delay per sec.
+ // Only used for stats.
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
+
int CaptureQueueDelayMsPerS() const;
// Implements Module.
diff --git a/video_engine/overuse_frame_detector_unittest.cc b/video_engine/overuse_frame_detector_unittest.cc
index a760fbfb..2d7116f8 100644
--- a/video_engine/overuse_frame_detector_unittest.cc
+++ b/video_engine/overuse_frame_detector_unittest.cc
@@ -118,6 +118,24 @@ class OveruseFrameDetectorTest : public ::testing::Test {
overuse_detector_->Process();
}
+ int CaptureJitterMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.capture_jitter_ms;
+ }
+
+ int AvgEncodeTimeMs() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.avg_encode_time_ms;
+ }
+
+ int EncodeUsagePercent() {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ return metrics.encode_usage_percent;
+ }
+
CpuOveruseOptions options_;
scoped_ptr<SimulatedClock> clock_;
scoped_ptr<MockCpuOveruseObserver> observer_;
@@ -196,49 +214,58 @@ TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
TriggerOveruse(1);
}
+TEST_F(OveruseFrameDetectorTest, GetCpuOveruseMetrics) {
+ CpuOveruseMetrics metrics;
+ overuse_detector_->GetCpuOveruseMetrics(&metrics);
+ EXPECT_GT(metrics.capture_jitter_ms, 0);
+ EXPECT_GT(metrics.avg_encode_time_ms, 0);
+ EXPECT_GT(metrics.encode_usage_percent, 0);
+ EXPECT_GE(metrics.capture_queue_delay_ms_per_s, 0);
+}
+
TEST_F(OveruseFrameDetectorTest, CaptureJitter) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterResolutionChange) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
// Verify reset.
InsertFramesWithInterval(1, kFrameInterval33ms, kWidth, kHeight + 1);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterFrameTimeout) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
InsertFramesWithInterval(
1, options_.frame_timeout_interval_ms, kWidth, kHeight);
- EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_NE(InitialJitter(), CaptureJitterMs());
// Verify reset.
InsertFramesWithInterval(
1, options_.frame_timeout_interval_ms + 1, kWidth, kHeight);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterChangingThreshold) {
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
options_.high_capture_jitter_threshold_ms = 90.0f;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
options_.low_capture_jitter_threshold_ms = 30.0f;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) {
options_.min_frame_samples = 40;
overuse_detector_->SetOptions(options_);
InsertFramesWithInterval(40, kFrameInterval33ms, kWidth, kHeight);
- EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+ EXPECT_EQ(InitialJitter(), CaptureJitterMs());
}
TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
@@ -289,33 +316,33 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
const int kInitialAvgEncodeTimeInMs = 5;
- EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
for (int i = 0; i < 30; i++) {
clock_->AdvanceTimeMilliseconds(33);
overuse_detector_->FrameEncoded(2);
}
- EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
+ EXPECT_EQ(2, AvgEncodeTimeMs());
}
TEST_F(OveruseFrameDetectorTest, InitialEncodeUsage) {
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
const int kEncodeTimeMs = 5;
InsertAndEncodeFramesWithInterval(
1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
- EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(15, EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, EncodeUsageResetAfterChangingThreshold) {
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
options_.high_encode_usage_threshold_percent = 100;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
options_.low_encode_usage_threshold_percent = 20;
overuse_detector_->SetOptions(options_);
- EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+ EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
}
TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithEncodeUsage) {
diff --git a/video_engine/video_engine_core.gypi b/video_engine/video_engine_core.gypi
index 57cdecd8..dfb48b4e 100644
--- a/video_engine/video_engine_core.gypi
+++ b/video_engine/video_engine_core.gypi
@@ -131,6 +131,7 @@
'encoder_state_feedback_unittest.cc',
'overuse_frame_detector_unittest.cc',
'stream_synchronization_unittest.cc',
+ 'vie_capturer_unittest.cc',
'vie_codec_unittest.cc',
'vie_remb_unittest.cc',
],
diff --git a/video_engine/vie_base_impl.cc b/video_engine/vie_base_impl.cc
index f4b87e8f..29fbe7fb 100644
--- a/video_engine/vie_base_impl.cc
+++ b/video_engine/vie_base_impl.cc
@@ -140,10 +140,35 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
if (provider) {
ViECapturer* capturer = is.Capture(provider->Id());
if (capturer) {
- capturer->CpuOveruseMeasures(capture_jitter_ms,
- avg_encode_time_ms,
- encode_usage_percent,
- capture_queue_delay_ms_per_s);
+ CpuOveruseMetrics metrics;
+ capturer->GetCpuOveruseMetrics(&metrics);
+ *capture_jitter_ms = metrics.capture_jitter_ms;
+ *avg_encode_time_ms = metrics.avg_encode_time_ms;
+ *encode_usage_percent = metrics.encode_usage_percent;
+ *capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int ViEBaseImpl::GetCpuOveruseMetrics(int video_channel,
+ CpuOveruseMetrics* metrics) {
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return -1;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ assert(vie_encoder);
+
+ ViEInputManagerScoped is(*(shared_data_.input_manager()));
+ ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+ if (provider) {
+ ViECapturer* capturer = is.Capture(provider->Id());
+ if (capturer) {
+ capturer->GetCpuOveruseMetrics(metrics);
return 0;
}
}
diff --git a/video_engine/vie_base_impl.h b/video_engine/vie_base_impl.h
index 52c888e3..d6a046e6 100644
--- a/video_engine/vie_base_impl.h
+++ b/video_engine/vie_base_impl.h
@@ -35,6 +35,8 @@ class ViEBaseImpl
CpuOveruseObserver* observer);
virtual int SetCpuOveruseOptions(int channel,
const CpuOveruseOptions& options);
+ virtual int GetCpuOveruseMetrics(int channel,
+ CpuOveruseMetrics* metrics);
virtual int CpuOveruseMeasures(int channel,
int* capture_jitter_ms,
int* avg_encode_time_ms,
diff --git a/video_engine/vie_capturer.cc b/video_engine/vie_capturer.cc
index f037dc82..30d66335 100644
--- a/video_engine/vie_capturer.cc
+++ b/video_engine/vie_capturer.cc
@@ -10,6 +10,7 @@
#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/process_thread.h"
@@ -249,15 +250,8 @@ void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
overuse_detector_->SetOptions(options);
}
-void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const {
- *capture_jitter_ms = overuse_detector_->CaptureJitterMs();
- *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
- *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
- *capture_queue_delay_ms_per_s =
- overuse_detector_->AvgCaptureQueueDelayMsPerS();
+void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
+ overuse_detector_->GetCpuOveruseMetrics(metrics);
}
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
@@ -353,11 +347,16 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
- captured_frame_.SwapFrame(&video_frame);
+ if (video_frame.native_handle() != NULL) {
+ captured_frame_.reset(video_frame.CloneFrame());
+ } else {
+ if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
+ captured_frame_.reset(new I420VideoFrame());
+ captured_frame_->SwapFrame(&video_frame);
+ }
capture_event_.Set();
- overuse_detector_->FrameCaptured(captured_frame_.width(),
- captured_frame_.height());
- return;
+ overuse_detector_->FrameCaptured(captured_frame_->width(),
+ captured_frame_->height());
}
void ViECapturer::OnCaptureDelayChanged(const int32_t id,
@@ -480,7 +479,9 @@ bool ViECapturer::ViECaptureProcess() {
deliver_cs_->Enter();
if (SwapCapturedAndDeliverFrameIfAvailable()) {
encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
- DeliverI420Frame(&deliver_frame_);
+ DeliverI420Frame(deliver_frame_.get());
+ if (deliver_frame_->native_handle() != NULL)
+ deliver_frame_.reset(); // Release the texture so it can be reused.
}
deliver_cs_->Leave();
if (current_brightness_level_ != reported_brightness_level_) {
@@ -501,6 +502,11 @@ bool ViECapturer::ViECaptureProcess() {
}
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
+ if (video_frame->native_handle() != NULL) {
+ ViEFrameProviderBase::DeliverFrame(video_frame);
+ return;
+ }
+
// Apply image enhancement and effect filter.
if (deflicker_frame_stats_) {
if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
@@ -615,11 +621,21 @@ void ViECapturer::OnNoPictureAlarm(const int32_t id,
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
CriticalSectionScoped cs(capture_cs_.get());
- if (captured_frame_.IsZeroSize())
+ if (captured_frame_ == NULL)
+ return false;
+
+ if (captured_frame_->native_handle() != NULL) {
+ deliver_frame_.reset(captured_frame_.release());
+ return true;
+ }
+
+ if (captured_frame_->IsZeroSize())
return false;
- deliver_frame_.SwapFrame(&captured_frame_);
- captured_frame_.ResetSize();
+ if (deliver_frame_ == NULL)
+ deliver_frame_.reset(new I420VideoFrame());
+ deliver_frame_->SwapFrame(captured_frame_.get());
+ captured_frame_->ResetSize();
return true;
}
diff --git a/video_engine/vie_capturer.h b/video_engine/vie_capturer.h
index 37f203a8..8e893577 100644
--- a/video_engine/vie_capturer.h
+++ b/video_engine/vie_capturer.h
@@ -108,11 +108,7 @@ class ViECapturer
void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
void SetCpuOveruseOptions(const CpuOveruseOptions& options);
-
- void CpuOveruseMeasures(int* capture_jitter_ms,
- int* avg_encode_time_ms,
- int* encode_usage_percent,
- int* capture_queue_delay_ms_per_s) const;
+ void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
protected:
ViECapturer(int capture_id,
@@ -173,8 +169,8 @@ class ViECapturer
EventWrapper& capture_event_;
EventWrapper& deliver_event_;
- I420VideoFrame captured_frame_;
- I420VideoFrame deliver_frame_;
+ scoped_ptr<I420VideoFrame> captured_frame_;
+ scoped_ptr<I420VideoFrame> deliver_frame_;
// Image processing.
ViEEffectFilter* effect_filter_;
diff --git a/video_engine/vie_capturer_unittest.cc b/video_engine/vie_capturer_unittest.cc
new file mode 100644
index 00000000..edaf13b5
--- /dev/null
+++ b/video_engine/vie_capturer_unittest.cc
@@ -0,0 +1,263 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for ViECapturer.
+
+#include "webrtc/video_engine/vie_capturer.h"
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/ref_count.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+// If an output frame does not arrive in 500ms, the test will fail.
+#define FRAME_TIMEOUT_MS 500
+
+namespace webrtc {
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2);
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2);
+I420VideoFrame* CreateI420VideoFrame(uint8_t length);
+
+class FakeNativeHandle : public NativeHandle {
+ public:
+ FakeNativeHandle() {}
+ virtual ~FakeNativeHandle() {}
+ virtual void* GetHandle() { return NULL; }
+};
+
+class ViECapturerTest : public ::testing::Test {
+ protected:
+ ViECapturerTest()
+ : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
+ mock_process_thread_(new NiceMock<MockProcessThread>),
+ mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
+ data_callback_(NULL),
+ output_frame_event_(EventWrapper::Create()) {
+ }
+
+ virtual void SetUp() {
+ EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
+ .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
+ EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
+ .WillRepeatedly(
+ WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+
+ Config config;
+ vie_capturer_.reset(
+ ViECapturer::CreateViECapture(
+ 0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
+ vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
+ }
+
+ virtual void TearDown() {
+ // ViECapturer accesses |mock_process_thread_| in destructor and should
+ // be deleted first.
+ vie_capturer_.reset();
+ }
+
+ void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
+ data_callback_ = &data_callback;
+ }
+
+ void AddInputFrame(I420VideoFrame* frame) {
+ data_callback_->OnIncomingCapturedFrame(0, *frame);
+ }
+
+ void AddOutputFrame(I420VideoFrame* frame) {
+ if (frame->native_handle() == NULL)
+ output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
+ // Clone the frames because ViECapturer owns the frames.
+ output_frames_.push_back(frame->CloneFrame());
+ output_frame_event_->Set();
+ }
+
+ void WaitOutputFrame() {
+ EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+ }
+
+ scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
+ scoped_ptr<MockProcessThread> mock_process_thread_;
+ scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+
+ // Used to send input capture frames to ViECapturer.
+ VideoCaptureDataCallback* data_callback_;
+
+ scoped_ptr<ViECapturer> vie_capturer_;
+
+ // Input capture frames of ViECapturer.
+ ScopedVector<I420VideoFrame> input_frames_;
+
+ // Indicate an output frame has arrived.
+ scoped_ptr<EventWrapper> output_frame_event_;
+
+ // Output delivered frames of ViECaptuer.
+ ScopedVector<I420VideoFrame> output_frames_;
+
+ // The pointers of Y plane buffers of output frames. This is used to verify
+ // the frame are swapped and not copied.
+ std::vector<uint8_t*> output_frame_ybuffers_;
+};
+
+TEST_F(ViECapturerTest, TestTextureFrames) {
+ const int kNumFrame = 3;
+ for (int i = 0 ; i < kNumFrame; ++i) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestI420Frames) {
+ const int kNumFrame = 4;
+ ScopedVector<I420VideoFrame> copied_input_frames;
+ std::vector<uint8_t*> ybuffer_pointers;
+ for (int i = 0; i < kNumFrame; ++i) {
+ input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
+ ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane));
+ // Copy input frames because the buffer data will be swapped.
+ copied_input_frames.push_back(input_frames_[i]->CloneFrame());
+ AddInputFrame(input_frames_[i]);
+ WaitOutputFrame();
+ }
+
+ EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
+ // Make sure the buffer is swapped and not copied.
+ for (int i = 0; i < kNumFrame; ++i)
+ EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
+ // The pipeline should be filled with frames with allocated buffers. Check
+ // the last input frame has the same allocated size after swapping.
+ EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane),
+ copied_input_frames.back()->allocated_size(kYPlane));
+}
+
+TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[0]);
+ WaitOutputFrame();
+
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+ input_frames_.push_back(CreateI420VideoFrame(1));
+ scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame());
+ AddInputFrame(copied_input_frame.get());
+ WaitOutputFrame();
+
+ webrtc::RefCountImpl<FakeNativeHandle>* handle =
+ new webrtc::RefCountImpl<FakeNativeHandle>();
+ input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+ AddInputFrame(input_frames_[1]);
+ WaitOutputFrame();
+
+ EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+bool EqualFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
+ return EqualTextureFrames(frame1, frame2);
+ return EqualBufferFrames(frame1, frame2);
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.native_handle() == frame2.native_handle()) &&
+ (frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+ const I420VideoFrame& frame2) {
+ return ((frame1.width() == frame2.width()) &&
+ (frame1.height() == frame2.height()) &&
+ (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
+ (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
+ (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
+ (frame1.timestamp() == frame2.timestamp()) &&
+ (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
+ (frame1.render_time_ms() == frame2.render_time_ms()) &&
+ (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+ (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+ (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
+ (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
+ frame1.allocated_size(kYPlane)) == 0) &&
+ (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
+ frame1.allocated_size(kUPlane)) == 0) &&
+ (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
+ frame1.allocated_size(kVPlane)) == 0));
+}
+
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+ const ScopedVector<I420VideoFrame>& frames2) {
+ if (frames1.size() != frames2.size())
+ return false;
+ for (size_t i = 0; i < frames1.size(); ++i) {
+ if (!EqualFrames(*frames1[i], *frames2[i]))
+ return false;
+ }
+ return true;
+}
+
+I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
+ I420VideoFrame* frame = new I420VideoFrame();
+ const int width = 36;
+ const int height = 24;
+ const int kSizeY = width * height * 2;
+ const int kSizeUV = width * height;
+ uint8_t buffer[kSizeY];
+ memset(buffer, data, kSizeY);
+ frame->CreateFrame(
+ kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
+ width / 2, width / 2);
+ frame->set_timestamp(data);
+ frame->set_ntp_time_ms(data);
+ frame->set_render_time_ms(data);
+ return frame;
+}
+
+} // namespace webrtc
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index afb6d0c6..40a61deb 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -487,6 +487,10 @@ void ViEEncoder::DeliverFrame(int id,
}
encoder_paused_and_dropped_frame_ = false;
}
+ if (video_frame->native_handle() != NULL) {
+ // TODO(wuchengli): add texture support. http://crbug.com/362437
+ return;
+ }
// Convert render time, in ms, to RTP timestamp.
const int kMsToRtpTimestamp = 90;
diff --git a/video_engine/vie_remb_unittest.cc b/video_engine/vie_remb_unittest.cc
index cdfe39c7..1f0b70c5 100644
--- a/video_engine/vie_remb_unittest.cc
+++ b/video_engine/vie_remb_unittest.cc
@@ -18,35 +18,26 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/video_engine/vie_remb.h"
using ::testing::_;
using ::testing::AnyNumber;
+using ::testing::NiceMock;
using ::testing::Return;
namespace webrtc {
-class TestProcessThread : public ProcessThread {
- public:
- explicit TestProcessThread() {}
- ~TestProcessThread() {}
- virtual int32_t Start() { return 0; }
- virtual int32_t Stop() { return 0; }
- virtual int32_t RegisterModule(Module* module) { return 0; }
- virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
-
class ViERembTest : public ::testing::Test {
protected:
virtual void SetUp() {
TickTime::UseFakeClock(12345);
- process_thread_.reset(new TestProcessThread);
+ process_thread_.reset(new NiceMock<MockProcessThread>);
vie_remb_.reset(new VieRemb());
}
- scoped_ptr<TestProcessThread> process_thread_;
+ scoped_ptr<MockProcessThread> process_thread_;
scoped_ptr<VieRemb> vie_remb_;
};
diff --git a/webrtc_tests.gypi b/webrtc_tests.gypi
index 31609670..63734586 100644
--- a/webrtc_tests.gypi
+++ b/webrtc_tests.gypi
@@ -49,6 +49,15 @@
'test/test.gyp:test_main',
'webrtc',
],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
},
{
'target_name': 'webrtc_perf_tests',